Add a usecoreai setting and use it if set
Refactor perform_request in question.php so it checks whether usecoreai is enabled
and, if it is, processes the request through the AI subsystem introduced with Moodle 4.4.
marcusgreen committed Oct 27, 2024
1 parent 79b65d9 commit dde1b2a
Showing 3 changed files with 137 additions and 42 deletions.
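
In outline, the refactor picks one of two managers based on the new admin setting. A minimal sketch of the routing logic, simplified from the question.php changes below (the $prompt variable and the 'feedback' purpose argument come from the diff itself):

// Simplified sketch: route the request through the core AI subsystem
// (Moodle 4.4+) when qtype_aitext/usecoreai is enabled, otherwise fall
// back to the local_ai_manager plugin, as the changed code below does.
global $USER;
if (get_config('qtype_aitext', 'usecoreai')) {
    $manager = new \core_ai\manager();
    $action = new \core_ai\aiactions\generate_text(
        contextid: $this->contextid,
        userid: $USER->id,
        prompttext: $prompt
    );
    $data = (object) $manager->process_action($action)->get_response_data();
    $content = $data->generatedcontent;
} else {
    $manager = new \local_ai_manager\manager('feedback');
    $llmresponse = $manager->perform_request($prompt, ['component' => 'qtype_aitext', 'contextid' => $this->contextid]);
    $content = $llmresponse->get_content();
}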
2 changes: 2 additions & 0 deletions lang/en/qtype_aitext.php
@@ -108,6 +108,8 @@
$string['thedefaultmarksscheme'] = 'Deduct a point from the total score for each grammar or spelling mistake.';
$string['thedefaultprompt'] = 'Explain if there is anything wrong with the grammar and spelling in the text.';
$string['untestedquestionbehaviour'] = 'Untested question behaviour';
$string['usecoreai'] = 'Use core AI';
$string['usecoreai_setting'] = 'Use the core AI subsystem for connecting to external AI/LLM systems (introduced with Moodle 4.4)';
$string['wordcount'] = 'Word count: {$a}';
$string['wordcounttoofew'] = 'Word count: {$a->count}, less than the required {$a->limit} words.';
$string['wordcounttoomuch'] = 'Word count: {$a->count}, more than the limit of {$a->limit} words.';
171 changes: 129 additions & 42 deletions question.php
@@ -143,6 +143,53 @@ public function compute_final_grade($responses, $totaltries) {
public function apply_attempt_state(question_attempt_step $step) {
$this->step = $step;
}
/**
* Return the AI manager to use, depending on the usecoreai setting.
*
* @param array $setupdata Constructor arguments for the local_ai_manager manager (e.g. the purpose, such as 'feedback')
* @return \core_ai\manager|\local_ai_manager\manager
*/
public function get_ai_manager(array $setupdata) {
if (get_config('qtype_aitext', 'usecoreai')) {
return new \core_ai\manager();
} else {
return new local_ai_manager\manager($setupdata[0]);
}
}

/**
* Send a prompt to the configured AI manager and return the generated content.
*
* @param \core_ai\manager|\local_ai_manager\manager $manager
* @param string $prompt The full prompt to send
* @return string The generated content
*/
public function perform_request($manager, $prompt) {
if (get_config('qtype_aitext', 'usecoreai')) {
global $USER;
$action = new \core_ai\aiactions\generate_text(
contextid: $this->contextid,
userid: $USER->id,
prompttext: $prompt
);
$result = $manager->process_action($action);
$data = (object) $result->get_response_data();
$content = $data->generatedcontent;
return $content;

} else {
$llmresponse = $manager->perform_request($prompt, ['component' => 'qtype_aitext', 'contextid' => $this->contextid]);
if ($llmresponse->get_code() !== 200) {
throw new moodle_exception(
'err_retrievingfeedback',
'qtype_aitext',
'',
$llmresponse->get_errormessage(),
$llmresponse->get_debuginfo()
);
}
return $llmresponse->get_content();
}
}

/**
* Get the spellchecking response.
@@ -155,67 +202,107 @@ public function apply_attempt_state(question_attempt_step $step) {
*/
private function get_spellchecking(array $response): string {
$fullaiprompt = $this->build_full_ai_spellchecking_prompt($response['answer']);
$ai = new local_ai_manager\manager('feedback');
$llmresponse = $ai->perform_request($fullaiprompt, ['component' => 'qtype_aitext', 'contextid' => $this->contextid]);
if ($llmresponse->get_code() !== 200) {
throw new moodle_exception(
'err_airesponsefailed',
'qtype_aitext',
'',
$llmresponse->get_errormessage(),
$llmresponse->get_debuginfo()
);
}
return $llmresponse->get_content();
$manager = $this->get_ai_manager(['feedback']);
$llmresponse = $this->perform_request($manager, $fullaiprompt);
return $llmresponse;
}

/**
* Grade response by making a call to external
* large language model such as ChatGPT
* Grade a student's response using AI feedback
*
* @param array $response
* @return void
* @param array $response The student's response data
* @return array Grade result with fraction and state
*/
public function grade_response(array $response): array {

if ($this->spellcheck) {
$spellcheckresponse = $this->get_spellchecking($response);
$this->insert_attempt_step_data('-spellcheckresponse', $spellcheckresponse);
$this->handle_spellcheck($response);
}

if (!$this->is_complete_response($response)) {
$grade = [0 => 0, question_state::$needsgrading];
return $grade;
}
$ai = new local_ai_manager\manager('feedback');
if (is_array($response)) {
$fullaiprompt = $this->build_full_ai_prompt($response['answer'], $this->aiprompt,
$this->defaultmark, $this->markscheme);
$llmresponse = $ai->perform_request($fullaiprompt, ['component' => 'qtype_aitext', 'contextid' => $this->contextid]);
if ($llmresponse->get_code() !== 200) {
throw new moodle_exception('err_retrievingfeedback', 'qtype_aitext', '', $llmresponse->get_errormessage(),
$llmresponse->get_debuginfo());
}
$feedback = $llmresponse->get_content();
return $this->get_needs_grading_response();
}

$fullaiprompt = $this->build_full_ai_prompt(
$response['answer'],
$this->aiprompt,
$this->defaultmark,
$this->markscheme
);
$feedback = $this->get_ai_feedback($response);
$contentobject = $this->process_feedback($feedback);
$grade = $this->calculate_grade($contentobject);

// If there are no marks, write the feedback and set to needs grading.
$this->store_attempt_data($contentobject, $feedback, $fullaiprompt);

return $grade;
}

/**
* Process spellchecking for the response
*
* @param array $response The student's response data
*/
private function handle_spellcheck(array $response): void {
$spellcheckresponse = $this->get_spellchecking($response);
$this->insert_attempt_step_data('-spellcheckresponse', $spellcheckresponse);
}

/**
* Get the default response for questions that need manual grading
*
* @return array Array containing grade fraction and needs grading state
*/
private function get_needs_grading_response(): array {
return [0 => 0, question_state::$needsgrading];
}

/**
* Request and retrieve AI feedback for the response
*
* @param array $response The student's response data
* @return string The AI generated feedback content
* @throws moodle_exception When AI feedback retrieval fails
*/
private function get_ai_feedback(array $response): string {

$fullaiprompt = $this->build_full_ai_prompt(
$response['answer'],
$this->aiprompt,
$this->defaultmark,
$this->markscheme
);
$manager = $this->get_ai_manager(setupdata: ['feedback']);
$llmresponse = $this->perform_request($manager, $fullaiprompt);
return $llmresponse;

}

/**
* Calculate the grade based on AI feedback
*
* @param stdClass $contentobject Object containing marks and feedback
* @return array Array containing grade fraction and state
*/
private function calculate_grade(stdClass $contentobject): array {
if (is_null($contentobject->marks)) {
$grade = [0 => 0, question_state::$needsgrading];
} else {
$fraction = $contentobject->marks / $this->defaultmark;
$grade = [$fraction, question_state::graded_state_for_fraction($fraction)];
return $this->get_needs_grading_response();
}
// The -aicontent data is used in question preview. Only needs to happen in preview.

$fraction = $contentobject->marks / $this->defaultmark;
return [$fraction, question_state::graded_state_for_fraction($fraction)];
}

/**
* Store the attempt data in the database
*
* @param stdClass $contentobject Object containing marks and feedback
* @param string $feedback Raw feedback from AI
* @param string $fullaiprompt The full prompt that was sent to the AI
*/
private function store_attempt_data(stdClass $contentobject, string $feedback, string $fullaiprompt): void {
$this->insert_attempt_step_data('-aiprompt', $fullaiprompt);
$this->insert_attempt_step_data('-aicontent', $contentobject->feedback);

$this->insert_attempt_step_data('-comment', $contentobject->feedback);
$this->insert_attempt_step_data('-commentformat', FORMAT_HTML);

return $grade;
}

/**
@@ -306,7 +393,7 @@ protected function llm_translate(string $text): string {
if (current_language() == 'en') {
return $text;
}
$ai = new local_ai_manager\manager('translate');
$ai = $this->get_ai_manager(setupdata: ['translate']);
$cache = cache::make('qtype_aitext', 'stringdata');
if (($translation = $cache->get(current_language().'_'.$text)) === false) {
$prompt = 'translate "'.$text .'" into '.current_language();
6 changes: 6 additions & 0 deletions settings.php
@@ -66,6 +66,12 @@
new lang_string('responseformat_setting', 'qtype_aitext'),
0, ['plain' => 'plain', 'editor' => 'editor', 'monospaced' => 'monospaced']
));
$settings->add(new admin_setting_configcheckbox(
'qtype_aitext/usecoreai',
new lang_string('usecoreai', 'qtype_aitext'),
new lang_string('usecoreai_setting', 'qtype_aitext'),
1
));

}
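
The checkbox above stores a plain plugin config flag, so it can also be toggled from code, for example in a test setup. A hedged sketch using Moodle's standard config API; only the setting name comes from this commit:

// Hypothetical test/setup snippet: enable routing through the core AI subsystem.
set_config('usecoreai', 1, 'qtype_aitext');

// question.php reads the flag back the same way the new code does:
if (get_config('qtype_aitext', 'usecoreai')) {
    // Requests will be processed by \core_ai\manager (Moodle 4.4+).
}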
