diff --git a/lang/en/qtype_aitext.php b/lang/en/qtype_aitext.php
index f05f7b0..691e492 100755
--- a/lang/en/qtype_aitext.php
+++ b/lang/en/qtype_aitext.php
@@ -108,6 +108,8 @@
 $string['thedefaultmarksscheme'] = 'Deduct a point from the total score for each grammar or spelling mistake.';
 $string['thedefaultprompt'] = 'Explain if there is anything wrong with the grammar and spelling in the text.';
 $string['untestedquestionbehaviour'] = 'Untested question behaviour';
+$string['usecoreai'] = 'Use core AI';
+$string['usecoreai_setting'] = 'Use the core AI subsystem for connecting to external AI/LLM systems (introduced with Moodle 4.5)';
 $string['wordcount'] = 'Word count: {$a}';
 $string['wordcounttoofew'] = 'Word count: {$a->count}, less than the required {$a->limit} words.';
 $string['wordcounttoomuch'] = 'Word count: {$a->count}, more than the limit of {$a->limit} words.';
diff --git a/question.php b/question.php
index 27acf13..f21c438 100755
--- a/question.php
+++ b/question.php
@@ -143,6 +143,60 @@ public function compute_final_grade($responses, $totaltries) {
     public function apply_attempt_state(question_attempt_step $step) {
         $this->step = $step;
     }
+    /**
+     * Return the AI manager implementation to use.
+     *
+     * Depending on the plugin setting, either the core AI subsystem
+     * (Moodle 4.5+) or the local_ai_manager plugin is used.
+     *
+     * @param array $setupdata local_ai_manager constructor arguments, e.g. ['feedback']
+     * @return \core_ai\manager|\local_ai_manager\manager
+     */
+    public function get_ai_manager(array $setupdata) {
+        if (get_config('qtype_aitext', 'usecoreai')) {
+            // NOTE(review): \core_ai\manager is normally obtained via
+            // \core\di::get(\core_ai\manager::class) in Moodle 4.5 — confirm
+            // direct instantiation works on the targeted version.
+            return new \core_ai\manager();
+        } else {
+            return new local_ai_manager\manager($setupdata[0]);
+        }
+    }
+
+    /**
+     * Send a prompt to the configured AI backend and return the response text.
+     *
+     * @param \core_ai\manager|\local_ai_manager\manager $manager manager from get_ai_manager()
+     * @param string $prompt the full prompt to send
+     * @return string the generated content
+     * @throws moodle_exception if the local_ai_manager request fails
+     */
+    public function perform_request($manager, $prompt) {
+        if (get_config('qtype_aitext', 'usecoreai')) {
+            global $USER;
+            $action = new \core_ai\aiactions\generate_text(
+                contextid: $this->contextid,
+                userid: $USER->id,
+                prompttext: $prompt
+            );
+            $result = $manager->process_action($action);
+            $data = (object) $result->get_response_data();
+            return $data->generatedcontent;
+        } else {
+            $llmresponse = $manager->perform_request($prompt,
+                ['component' => 'qtype_aitext', 'contextid' => $this->contextid]);
+            if ($llmresponse->get_code() !== 200) {
+                throw new moodle_exception(
+                    'err_retrievingfeedback',
+                    'qtype_aitext',
+                    '',
+                    $llmresponse->get_errormessage(),
+                    $llmresponse->get_debuginfo()
+                );
+            }
+            return $llmresponse->get_content();
+        }
+    }
 
     /**
      * Get the spellchecking response.
@@ -155,67 +209,119 @@
      */
     private function get_spellchecking(array $response): string {
         $fullaiprompt = $this->build_full_ai_spellchecking_prompt($response['answer']);
-        $ai = new local_ai_manager\manager('feedback');
-        $llmresponse = $ai->perform_request($fullaiprompt, ['component' => 'qtype_aitext', 'contextid' => $this->contextid]);
-        if ($llmresponse->get_code() !== 200) {
-            throw new moodle_exception(
-                'err_airesponsefailed',
-                'qtype_aitext',
-                '',
-                $llmresponse->get_errormessage(),
-                $llmresponse->get_debuginfo()
-            );
-        }
-        return $llmresponse->get_content();
+        $manager = $this->get_ai_manager(['feedback']);
+        return $this->perform_request($manager, $fullaiprompt);
     }
 
     /**
-     * Grade response by making a call to external
-     * large language model such as ChatGPT
+     * Grade a student's response using AI feedback.
      *
-     * @param array $response
-     * @return void
+     * @param array $response The student's response data
+     * @return array Grade fraction and question state
      */
     public function grade_response(array $response): array {
-        if ($this->spellcheck) {
-            $spellcheckresponse = $this->get_spellchecking($response);
-            $this->insert_attempt_step_data('-spellcheckresponse', $spellcheckresponse);
-        }
+        $this->handle_spellcheck($response);
 
         if (!$this->is_complete_response($response)) {
-            $grade = [0 => 0, question_state::$needsgrading];
-            return $grade;
+            return $this->get_needs_grading_response();
         }
-        $ai = new local_ai_manager\manager('feedback');
-        if (is_array($response)) {
-            $fullaiprompt = $this->build_full_ai_prompt($response['answer'], $this->aiprompt,
-                 $this->defaultmark, $this->markscheme);
-            $llmresponse = $ai->perform_request($fullaiprompt, ['component' => 'qtype_aitext', 'contextid' => $this->contextid]);
-            if ($llmresponse->get_code() !== 200) {
-                throw new moodle_exception('err_retrievingfeedback', 'qtype_aitext', '', $llmresponse->get_errormessage(),
-                    $llmresponse->get_debuginfo());
-            }
-            $feedback = $llmresponse->get_content();
-        }
+        $fullaiprompt = $this->build_full_ai_prompt(
+            $response['answer'],
+            $this->aiprompt,
+            $this->defaultmark,
+            $this->markscheme
+        );
+        $feedback = $this->get_ai_feedback($fullaiprompt);
         $contentobject = $this->process_feedback($feedback);
+        $grade = $this->calculate_grade($contentobject);
+        $this->store_attempt_data($contentobject, $fullaiprompt);
+        return $grade;
+    }
 
-        // If there are no marks, write the feedback and set to needs grading .
+    /**
+     * Run the AI spellcheck and record the result in the attempt step,
+     * but only when spellchecking is enabled for this question.
+     *
+     * @param array $response The student's response data
+     */
+    private function handle_spellcheck(array $response): void {
+        if (!$this->spellcheck) {
+            return;
+        }
+        $spellcheckresponse = $this->get_spellchecking($response);
+        $this->insert_attempt_step_data('-spellcheckresponse', $spellcheckresponse);
+    }
+
+    /**
+     * Get the default result for responses that need manual grading.
+     *
+     * @return array Zero fraction and the needs-grading state
+     */
+    private function get_needs_grading_response(): array {
+        return [0 => 0, question_state::$needsgrading];
+    }
+
+    /**
+     * Request AI feedback for an already-built prompt.
+     *
+     * @param string $fullaiprompt The complete prompt to send
+     * @return string The AI generated feedback content
+     * @throws moodle_exception When AI feedback retrieval fails
+     */
+    private function get_ai_feedback(string $fullaiprompt): string {
+        $manager = $this->get_ai_manager(['feedback']);
+        return $this->perform_request($manager, $fullaiprompt);
+    }
+
+    /**
+     * Calculate the grade from the parsed AI feedback.
+     *
+     * @param stdClass $contentobject Object containing marks and feedback
+     * @return array Grade fraction and question state
+     */
+    private function calculate_grade(stdClass $contentobject): array {
         if (is_null($contentobject->marks)) {
-            $grade = [0 => 0, question_state::$needsgrading];
-        } else {
-            $fraction = $contentobject->marks / $this->defaultmark;
-            $grade = [$fraction, question_state::graded_state_for_fraction($fraction)];
+            return $this->get_needs_grading_response();
         }
+        $fraction = $contentobject->marks / $this->defaultmark;
+        return [$fraction, question_state::graded_state_for_fraction($fraction)];
+    }
 
-        // The -aicontent data is used in question preview. Only needs to happen in preview.
+    /**
+     * Store the prompt, feedback and comment in the attempt step data.
+     * The -aicontent data is used in question preview.
+     *
+     * @param stdClass $contentobject Object containing feedback
+     * @param string $fullaiprompt The prompt that was sent to the AI
+     */
+    private function store_attempt_data(stdClass $contentobject, string $fullaiprompt): void {
         $this->insert_attempt_step_data('-aiprompt', $fullaiprompt);
         $this->insert_attempt_step_data('-aicontent', $contentobject->feedback);
         $this->insert_attempt_step_data('-comment', $contentobject->feedback);
         $this->insert_attempt_step_data('-commentformat', FORMAT_HTML);
-
-        return $grade;
     }
 
     /**
@@ -306,7 +412,7 @@ protected function llm_translate(string $text): string {
         if (current_language() == 'en') {
             return $text;
         }
-        $ai = new local_ai_manager\manager('translate');
+        $ai = $this->get_ai_manager(['translate']);
         $cache = cache::make('qtype_aitext', 'stringdata');
         if (($translation = $cache->get(current_language().'_'.$text)) === false) {
             $prompt = 'translate "'.$text .'" into '.current_language();
diff --git a/settings.php b/settings.php
index 78a2da9..fbf5872 100644
--- a/settings.php
+++ b/settings.php
@@ -66,6 +66,14 @@
         new lang_string('responseformat_setting', 'qtype_aitext'),
         0,
         ['plain' => 'plain', 'editor' => 'editor', 'monospaced' => 'monospaced']
     ));
+    // Default to the existing local_ai_manager integration so current sites
+    // are unaffected; the core AI subsystem requires Moodle 4.5+.
+    $settings->add(new admin_setting_configcheckbox(
+        'qtype_aitext/usecoreai',
+        new lang_string('usecoreai', 'qtype_aitext'),
+        new lang_string('usecoreai_setting', 'qtype_aitext'),
+        0
+    ));
 
 }