From a2f71c996320b463c68421770bc17099e4854760 Mon Sep 17 00:00:00 2001 From: Marcus Green Date: Fri, 1 Nov 2024 16:22:09 +0000 Subject: [PATCH] Use Moodle 4.5 Core AI subsystem Conditionally use tool_aiconnect or moodle 4.5 coreai subsystem A new function called perform_request checks the usemebisai setting and if it is set then the mebis-lp ai subsystem is used, otherwise the Core AI subsystem is used. The tool_aiconnect approach has been removed --- lang/en/qtype_aitext.php | 2 ++ question.php | 71 ++++++++++++++++++++++------------------ settings.php | 8 ++++- 3 files changed, 49 insertions(+), 32 deletions(-) diff --git a/lang/en/qtype_aitext.php b/lang/en/qtype_aitext.php index 54a1103..1149ac1 100755 --- a/lang/en/qtype_aitext.php +++ b/lang/en/qtype_aitext.php @@ -103,6 +103,8 @@ $string['showprompt'] = 'Show prompt'; $string['thedefaultmarksscheme'] = 'Deduct a point from the total score for each grammar or spelling mistake.'; $string['thedefaultprompt'] = 'Explain if there is anything wrong with the grammar and spelling in the text.'; +$string['usemebis_ai'] = 'Use Mebis AI'; +$string['usemebis_ai_setting'] = 'Use the Local aiconnector plugin from Mebis-lp to process AI related queries (must be installed)'; $string['untestedquestionbehaviour'] = 'Untested question behaviour'; $string['wordcount'] = 'Word count: {$a}'; $string['wordcounttoofew'] = 'Word count: {$a->count}, less than the required {$a->limit} words.'; diff --git a/question.php b/question.php index d7fcfd6..dee9e9d 100755 --- a/question.php +++ b/question.php @@ -143,6 +143,42 @@ public function compute_final_grade($responses, $totaltries) { public function apply_attempt_state(question_attempt_step $step) { $this->step = $step; } + /** + * Call the llm using either the 4.5 core api + * or the mebis ai depending on the usemebisai + * settings checkbox. 
+ * + * @param string $prompt + * @return string $response + */ + public function perform_request(string $prompt, string $purpose): string { + if (get_config('qtype_aitext', 'usemebisai')) { + $manager = new local_ai_manager\manager($purpose); + $llmresponse = (object) $manager->perform_request($prompt, ['component' => 'qtype_aitext', 'contextid' => $this->contextid]); + if ($llmresponse->get_code() !== 200) { + throw new moodle_exception( + 'err_retrievingfeedback', + 'qtype_aitext', + '', + $llmresponse->get_errormessage(), + $llmresponse->get_debuginfo() + ); + } + return $llmresponse->get_content(); + } else { + global $USER; + $manager = new \core_ai\manager(); + $action = new \core_ai\aiactions\generate_text( + contextid: $this->contextid, + userid: $USER->id, + prompttext: $prompt + ); + $llmresponse = $manager->process_action($action); + $responsedata = $llmresponse->get_response_data(); + return $responsedata['generatedcontent']; + + } + } /** * Get the spellchecking response. @@ -155,18 +191,8 @@ public function apply_attempt_state(question_attempt_step $step) { */ private function get_spellchecking(array $response):string { $fullaiprompt = $this->build_full_ai_spellchecking_prompt($response['answer']); - $ai = new local_ai_manager\manager('feedback'); - $llmresponse = $ai->perform_request($fullaiprompt, ['component' => 'qtype_aitext', 'contextid' => $this->contextid]); - if ($llmresponse->get_code() !== 200) { - throw new moodle_exception( - 'err_airesponsefailed', - 'qtype_aitext', - '', - $llmresponse->get_errormessage(), - $llmresponse->get_debuginfo() - ); - } - return $llmresponse->get_content(); + $response = $this->perform_request($fullaiprompt, 'feedback'); + return $response; } /** @@ -187,18 +213,11 @@ public function grade_response(array $response): array { $grade = [0 => 0, question_state::$needsgrading]; return $grade; } - $ai = new local_ai_manager\manager('feedback'); if (is_array($response)) { $fullaiprompt = 
$this->build_full_ai_prompt($response['answer'], $this->aiprompt, $this->defaultmark, $this->markscheme); - $llmresponse = $ai->perform_request($fullaiprompt, ['component' => 'qtype_aitext', 'contextid' => $this->contextid]); - if ($llmresponse->get_code() !== 200) { - throw new moodle_exception('err_retrievingfeedback', 'qtype_aitext', '', $llmresponse->get_errormessage(), - $llmresponse->get_debuginfo()); - } - $feedback = $llmresponse->get_content(); + $feedback = $this->perform_request($fullaiprompt, 'feedback'); } - $contentobject = $this->process_feedback($feedback); // If there are no marks, write the feedback and set to needs grading . @@ -256,7 +275,6 @@ public function build_full_ai_prompt($response, $aiprompt, $defaultmark, $marksc * @throws coding_exception */ public function build_full_ai_spellchecking_prompt(string $response): string { - // $response = strip_tags($response); return get_string('spellcheck_prompt', 'qtype_aitext') . ($response); } @@ -282,10 +300,6 @@ public function process_feedback(string $feedback) { $contentobject->feedback = trim($contentobject->feedback); $contentobject->feedback = preg_replace(['/\[\[/', '/\]\]/'], '"', $contentobject->feedback); $disclaimer = get_config('qtype_aitext', 'disclaimer'); - // TODO Model currently is only used for connecting and at this point I believe. We need to remove all the model - // selection logic or make local_ai_manager support the selection of models. 
- $disclaimer = str_replace("[[model]]", - \local_ai_manager\ai_manager_utils::get_connector_instance_by_purpose('feedback')->get_model(), $disclaimer); $contentobject->feedback .= ' '.$this->llm_translate($disclaimer); } else { $contentobject = (object) [ @@ -307,15 +321,10 @@ protected function llm_translate(string $text): string { if (current_language() == 'en') { return $text; } - $ai = new local_ai_manager\manager('translate'); $cache = cache::make('qtype_aitext', 'stringdata'); if (($translation = $cache->get(current_language().'_'.$text)) === false) { $prompt = 'translate "'.$text .'" into '.current_language(); - $llmresponse = $ai->perform_request($prompt); - if ($llmresponse->get_code() !== 200) { - throw new moodle_exception('Could not retrieve the translation from the AI tool'); - } - $translation = $llmresponse->get_content(); + $translation = $this->perform_request($prompt, 'translate'); $translation = trim($translation, '"'); $cache->set(current_language().'_'.$text, $translation); } diff --git a/settings.php b/settings.php index 78a2da9..ba11fc6 100644 --- a/settings.php +++ b/settings.php @@ -38,7 +38,7 @@ 'qtype_aitext/disclaimer', new lang_string('disclaimer', 'qtype_aitext'), new lang_string('disclaimer_setting', 'qtype_aitext'), - '(Response provided by [[model]])' + '(Response provided by an AI System)' )); $settings->add(new admin_setting_configtextarea( 'qtype_aitext/prompt', @@ -66,6 +66,12 @@ new lang_string('responseformat_setting', 'qtype_aitext'), 0, ['plain' => 'plain', 'editor' => 'editor', 'monospaced' => 'monospaced'] )); + $settings->add(new admin_setting_configcheckbox( + 'qtype_aitext/usemebisai', + new lang_string('usemebis_ai', 'qtype_aitext'), + new lang_string('usemebis_ai_setting', 'qtype_aitext'), + 0 + )); }