diff --git a/lang/en/qtype_aitext.php b/lang/en/qtype_aitext.php
index 54a1103..1149ac1 100755
--- a/lang/en/qtype_aitext.php
+++ b/lang/en/qtype_aitext.php
@@ -103,6 +103,8 @@
 $string['showprompt'] = 'Show prompt';
 $string['thedefaultmarksscheme'] = 'Deduct a point from the total score for each grammar or spelling mistake.';
 $string['thedefaultprompt'] = 'Explain if there is anything wrong with the grammar and spelling in the text.';
 $string['untestedquestionbehaviour'] = 'Untested question behaviour';
+$string['usemebis_ai'] = 'Use Mebis AI';
+$string['usemebis_ai_setting'] = 'Use the local ai_manager plugin from mebis-lp to process AI-related queries (the plugin must be installed).';
 $string['wordcount'] = 'Word count: {$a}';
 $string['wordcounttoofew'] = 'Word count: {$a->count}, less than the required {$a->limit} words.';
diff --git a/question.php b/question.php
index d7fcfd6..9ddeba4 100755
--- a/question.php
+++ b/question.php
@@ -143,6 +143,43 @@ public function compute_final_grade($responses, $totaltries) {
     public function apply_attempt_state(question_attempt_step $step) {
         $this->step = $step;
     }
+    /**
+     * Call the LLM, using either the Moodle 4.5 core AI subsystem
+     * or the mebis-lp local_ai_manager plugin, depending on the
+     * usemebisai admin setting checkbox.
+     *
+     * @param string $prompt the prompt to send to the LLM
+     * @param string $purpose the ai_manager purpose, e.g. 'feedback' or 'translate'
+     * @return string the LLM response content
+     */
+    public function perform_request(string $prompt, string $purpose): string {
+        if (get_config('qtype_aitext', 'usemebisai')) {
+            $manager = new local_ai_manager\manager($purpose);
+            $llmresponse = $manager->perform_request($prompt, ['component' => 'qtype_aitext', 'contextid' => $this->contextid]);
+            if ($llmresponse->get_code() !== 200) {
+                throw new moodle_exception(
+                    'err_retrievingfeedback',
+                    'qtype_aitext',
+                    '',
+                    $llmresponse->get_errormessage(),
+                    $llmresponse->get_debuginfo()
+                );
+            }
+            return $llmresponse->get_content();
+        } else {
+            global $USER;
+            $manager = \core\di::get(\core_ai\manager::class);
+            $action = new \core_ai\aiactions\generate_text(
+                contextid: $this->contextid,
+                userid: $USER->id,
+                prompttext: $prompt
+            );
+            $llmresponse = $manager->process_action($action);
+            $responsedata = $llmresponse->get_response_data();
+            return $responsedata['generatedcontent'];
+
+        }
+    }
 
     /**
      * Get the spellchecking response.
@@ -153,20 +190,10 @@ public function apply_attempt_state(question_attempt_step $step) {
      * @throws dml_exception
      * @throws moodle_exception
      */
-    private function get_spellchecking(array $response):string {
+    private function get_spellchecking(array $response): string {
         $fullaiprompt = $this->build_full_ai_spellchecking_prompt($response['answer']);
-        $ai = new local_ai_manager\manager('feedback');
-        $llmresponse = $ai->perform_request($fullaiprompt, ['component' => 'qtype_aitext', 'contextid' => $this->contextid]);
-        if ($llmresponse->get_code() !== 200) {
-            throw new moodle_exception(
-                'err_airesponsefailed',
-                'qtype_aitext',
-                '',
-                $llmresponse->get_errormessage(),
-                $llmresponse->get_debuginfo()
-            );
-        }
-        return $llmresponse->get_content();
+        $airesponse = $this->perform_request($fullaiprompt, 'feedback');
+        return $airesponse;
     }
 
     /**
@@ -178,7 +205,7 @@ private function get_spellchecking(array $response):string {
      */
     public function grade_response(array $response): array {
 
-        if($this->spellcheck) {
+        if ($this->spellcheck) {
             $spellcheckresponse = $this->get_spellchecking($response);
             $this->insert_attempt_step_data('-spellcheckresponse', $spellcheckresponse);
         }
@@ -187,18 +214,12 @@ public function grade_response(array $response): array {
             $grade = [0 => 0, question_state::$needsgrading];
             return $grade;
         }
-        $ai = new local_ai_manager\manager('feedback');
         if (is_array($response)) {
             $fullaiprompt = $this->build_full_ai_prompt($response['answer'], $this->aiprompt,
                 $this->defaultmark, $this->markscheme);
-            $llmresponse = $ai->perform_request($fullaiprompt, ['component' => 'qtype_aitext', 'contextid' => $this->contextid]);
-            if ($llmresponse->get_code() !== 200) {
-                throw new moodle_exception('err_retrievingfeedback', 'qtype_aitext', '', $llmresponse->get_errormessage(),
-                    $llmresponse->get_debuginfo());
-            }
-            $feedback = $llmresponse->get_content();
+            $feedback = $this->perform_request($fullaiprompt, 'feedback');
         }
 
         $contentobject = $this->process_feedback($feedback);
 
         // If there are no marks, write the feedback and set to needs grading .
@@ -256,7 +277,6 @@ public function build_full_ai_prompt($response, $aiprompt, $defaultmark, $marksc
      * @throws coding_exception
      */
     public function build_full_ai_spellchecking_prompt(string $response): string {
-        // $response = strip_tags($response);
         return get_string('spellcheck_prompt', 'qtype_aitext') . ($response);
     }
 
@@ -282,8 +302,8 @@ public function process_feedback(string $feedback) {
         $contentobject->feedback = trim($contentobject->feedback);
         $contentobject->feedback = preg_replace(['/\[\[/', '/\]\]/'], '"', $contentobject->feedback);
         $disclaimer = get_config('qtype_aitext', 'disclaimer');
-        // TODO Model currently is only used for connecting and at this point I believe. We need to remove all the model
-        // selection logic or make local_ai_manager support the selection of models.
+        // TODO Model currently is only used for connecting and at this point I believe.
+        // We need to remove all the model selection logic or make local_ai_manager support the selection of models.
         $disclaimer = str_replace("[[model]]",
             \local_ai_manager\ai_manager_utils::get_connector_instance_by_purpose('feedback')->get_model(), $disclaimer);
         $contentobject->feedback .= ' '.$this->llm_translate($disclaimer);
@@ -307,15 +327,10 @@ protected function llm_translate(string $text): string {
         if (current_language() == 'en') {
             return $text;
         }
-        $ai = new local_ai_manager\manager('translate');
         $cache = cache::make('qtype_aitext', 'stringdata');
         if (($translation = $cache->get(current_language().'_'.$text)) === false) {
             $prompt = 'translate "'.$text .'" into '.current_language();
-            $llmresponse = $ai->perform_request($prompt);
-            if ($llmresponse->get_code() !== 200) {
-                throw new moodle_exception('Could not retrieve the translation from the AI tool');
-            }
-            $translation = $llmresponse->get_content();
+            $translation = $this->perform_request($prompt, 'translate');
             $translation = trim($translation, '"');
             $cache->set(current_language().'_'.$text, $translation);
         }
diff --git a/settings.php b/settings.php
index 78a2da9..708262a 100644
--- a/settings.php
+++ b/settings.php
@@ -66,6 +66,12 @@
         new lang_string('responseformat_setting', 'qtype_aitext'),
         0,
         ['plain' => 'plain', 'editor' => 'editor', 'monospaced' => 'monospaced']
     ));
+    $settings->add(new admin_setting_configcheckbox(
+        'qtype_aitext/usemebisai',
+        new lang_string('usemebis_ai', 'qtype_aitext'),
+        new lang_string('usemebis_ai_setting', 'qtype_aitext'),
+        0
+    ));
 
 }
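
Note (not part of the patch): a rough sketch of how the new perform_request() routing could be exercised once this change is applied. The question instance construction via test_question_maker and the prompt text are illustrative assumptions; only perform_request() and the qtype_aitext/usemebisai setting come from the patch itself, and the sketch assumes a qtype_aitext test helper exists and that the question has a valid contextid.

    // Obtain a qtype_aitext question instance (assumed test helper).
    $question = \test_question_maker::make_question('aitext');

    // Route AI requests through the mebis-lp local_ai_manager plugin.
    set_config('usemebisai', 1, 'qtype_aitext');
    $feedback = $question->perform_request('Check the grammar in: "Helo world"', 'feedback');

    // Route AI requests through the Moodle 4.5 core AI subsystem instead.
    set_config('usemebisai', 0, 'qtype_aitext');
    $feedback = $question->perform_request('Check the grammar in: "Helo world"', 'feedback');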