Skip to content

Commit

Permalink
Use Moodle 4.5 Core AI subsystem
Browse files Browse the repository at this point in the history
Conditionally use the mebis-lp AI plugin or the Moodle 4.5 core AI subsystem.
A new function called perform_request checks the usemebisai setting:
if it is set, the mebis-lp AI subsystem is used; otherwise the
core AI subsystem is used. The tool_aiconnect approach has been removed.
  • Loading branch information
marcusgreen committed Nov 1, 2024
1 parent 7a87ebe commit c3b60dc
Show file tree
Hide file tree
Showing 3 changed files with 55 additions and 31 deletions.
2 changes: 2 additions & 0 deletions lang/en/qtype_aitext.php
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,8 @@
$string['showprompt'] = 'Show prompt';
$string['thedefaultmarksscheme'] = 'Deduct a point from the total score for each grammar or spelling mistake.';
$string['thedefaultprompt'] = 'Explain if there is anything wrong with the grammar and spelling in the text.';
$string['untestedquestionbehaviour'] = 'Untested question behaviour';
// Moodle coding style requires language strings to be kept in alphabetical order,
// so the usemebis_ai strings sort after untestedquestionbehaviour.
$string['usemebis_ai'] = 'Use Mebis AI';
$string['usemebis_ai_setting'] = 'Use the Local aiconnector plugin from Mebis-lp to process AI related queries (must be installed)';
$string['wordcount'] = 'Word count: {$a}';
$string['wordcounttoofew'] = 'Word count: {$a->count}, less than the required {$a->limit} words.';
Expand Down
78 changes: 47 additions & 31 deletions question.php
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,43 @@ public function compute_final_grade($responses, $totaltries) {
/**
 * Keep a reference to the current attempt step so that later processing
 * (e.g. writing spellcheck response data against the step) can reach it.
 *
 * @param question_attempt_step $step the attempt step being applied
 */
public function apply_attempt_state(question_attempt_step $step) {
    $this->step = $step;
}
/**
 * Send a prompt to an LLM, using either the mebis-lp local_ai_manager
 * plugin or the Moodle 4.5 core AI subsystem, depending on the
 * qtype_aitext/usemebisai admin setting.
 *
 * @param string $prompt the full prompt text to send to the model
 * @param string $purpose the local_ai_manager purpose (e.g. 'feedback' or 'translate');
 *                        not used when the core AI subsystem handles the request
 * @return string the generated text returned by the model
 * @throws moodle_exception if the mebis-lp AI call does not return HTTP code 200
 */
public function perform_request(string $prompt, string $purpose): string {
    // NOTE: the stray xdebug_break() debugging call has been removed — it is a
    // fatal undefined-function error on any server without the Xdebug extension.
    if (get_config('qtype_aitext', 'usemebisai')) {
        $manager = new \local_ai_manager\manager($purpose);
        // perform_request() returns a response object exposing get_code(),
        // get_content(), get_errormessage() and get_debuginfo(); the previous
        // (object) cast was a no-op on an object and would have broken these
        // method calls had the return ever been an array, so it is dropped.
        $llmresponse = $manager->perform_request(
            $prompt,
            ['component' => 'qtype_aitext', 'contextid' => $this->contextid]
        );
        if ($llmresponse->get_code() !== 200) {
            throw new moodle_exception(
                'err_retrievingfeedback',
                'qtype_aitext',
                '',
                $llmresponse->get_errormessage(),
                $llmresponse->get_debuginfo()
            );
        }
        return $llmresponse->get_content();
    }
    // Core AI subsystem path (Moodle 4.5+): build a generate_text action in
    // this question's context on behalf of the current user.
    global $USER;
    $manager = new \core_ai\manager();
    $action = new \core_ai\aiactions\generate_text(
        contextid: $this->contextid,
        userid: $USER->id,
        prompttext: $prompt
    );
    $llmresponse = $manager->process_action($action);
    $responsedata = $llmresponse->get_response_data();
    return $responsedata['generatedcontent'];
}

/**
* Get the spellchecking response.
Expand All @@ -153,20 +190,10 @@ public function apply_attempt_state(question_attempt_step $step) {
* @throws dml_exception
* @throws moodle_exception
*/
private function get_spellchecking(array $response): string {
    // Build the spellcheck prompt from the student's answer text and hand it
    // to perform_request(), which routes to the configured AI backend.
    // A distinct local name is used for the LLM result so it does not clobber
    // the $response parameter (an attempt-response array, not a string).
    $fullaiprompt = $this->build_full_ai_spellchecking_prompt($response['answer']);
    return $this->perform_request($fullaiprompt, 'feedback');
}

/**
Expand All @@ -178,7 +205,7 @@ private function get_spellchecking(array $response):string {
*/
public function grade_response(array $response): array {

if($this->spellcheck) {
if ($this->spellcheck) {
$spellcheckresponse = $this->get_spellchecking($response);
$this->insert_attempt_step_data('-spellcheckresponse', $spellcheckresponse);
}
Expand All @@ -187,18 +214,12 @@ public function grade_response(array $response): array {
$grade = [0 => 0, question_state::$needsgrading];
return $grade;
}
$ai = new local_ai_manager\manager('feedback');
if (is_array($response)) {
$fullaiprompt = $this->build_full_ai_prompt($response['answer'], $this->aiprompt,
$this->defaultmark, $this->markscheme);
$llmresponse = $ai->perform_request($fullaiprompt, ['component' => 'qtype_aitext', 'contextid' => $this->contextid]);
if ($llmresponse->get_code() !== 200) {
throw new moodle_exception('err_retrievingfeedback', 'qtype_aitext', '', $llmresponse->get_errormessage(),
$llmresponse->get_debuginfo());
}
$feedback = $llmresponse->get_content();
$feedback = $this->perform_request($fullaiprompt, 'feedback');
}

xdebug_break();
$contentobject = $this->process_feedback($feedback);

// If there are no marks, write the feedback and set to needs grading .
Expand Down Expand Up @@ -256,7 +277,6 @@ public function build_full_ai_prompt($response, $aiprompt, $defaultmark, $marksc
* @throws coding_exception
*/
public function build_full_ai_spellchecking_prompt(string $response): string {
    // Prepend the admin-configurable spellcheck prompt string to the raw
    // answer text. The commented-out strip_tags() line has been deleted:
    // HTML is deliberately left in the response (dead code should not linger).
    return get_string('spellcheck_prompt', 'qtype_aitext') . $response;
}

Expand All @@ -282,8 +302,8 @@ public function process_feedback(string $feedback) {
$contentobject->feedback = trim($contentobject->feedback);
$contentobject->feedback = preg_replace(['/\[\[/', '/\]\]/'], '"', $contentobject->feedback);
$disclaimer = get_config('qtype_aitext', 'disclaimer');
// TODO Model currently is only used for connecting and at this point I believe. We need to remove all the model
// selection logic or make local_ai_manager support the selection of models.
// TODO Model currently is only used for connecting and at this point I believe.
// We need to remove all the model selection logic or make local_ai_manager support the selection of models.
$disclaimer = str_replace("[[model]]",
\local_ai_manager\ai_manager_utils::get_connector_instance_by_purpose('feedback')->get_model(), $disclaimer);
$contentobject->feedback .= ' '.$this->llm_translate($disclaimer);
Expand All @@ -307,15 +327,11 @@ protected function llm_translate(string $text): string {
if (current_language() == 'en') {
return $text;
}
$ai = new local_ai_manager\manager('translate');
$cache = cache::make('qtype_aitext', 'stringdata');
if (($translation = $cache->get(current_language().'_'.$text)) === false) {
$prompt = 'translate "'.$text .'" into '.current_language();
$llmresponse = $ai->perform_request($prompt);
if ($llmresponse->get_code() !== 200) {
throw new moodle_exception('Could not retrieve the translation from the AI tool');
}
$translation = $llmresponse->get_content();
$llmresponse = $this->perform_request($prompt, 'translate');
$translation = $llmresponse['content'];
$translation = trim($translation, '"');
$cache->set(current_language().'_'.$text, $translation);
}
Expand Down
6 changes: 6 additions & 0 deletions settings.php
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,12 @@
new lang_string('responseformat_setting', 'qtype_aitext'),
0, ['plain' => 'plain', 'editor' => 'editor', 'monospaced' => 'monospaced']
));
// Toggle between the mebis-lp local_ai_manager plugin (checked) and the
// Moodle core AI subsystem (unchecked, the default) for AI queries —
// read via get_config('qtype_aitext', 'usemebisai') in question.php.
$settings->add(new admin_setting_configcheckbox(
    'qtype_aitext/usemebisai',
    new lang_string('usemebis_ai', 'qtype_aitext'),
    new lang_string('usemebis_ai_setting', 'qtype_aitext'),
    0
));

}

0 comments on commit c3b60dc

Please sign in to comment.