diff --git a/lang/en/qtype_aitext.php b/lang/en/qtype_aitext.php index 9f3a069..02f8d6d 100755 --- a/lang/en/qtype_aitext.php +++ b/lang/en/qtype_aitext.php @@ -31,6 +31,8 @@ $string['answerfiles'] = 'Answer files'; $string['answertext'] = 'Answer text'; $string['attachmentsoptional'] = 'Attachments are optional'; +$string['batchmode'] = 'Batch mode'; +$string['batchmode_setting'] = 'Requests to the external LLM will be queued'; $string['cachedef_stringdata'] = 'Cachedef stringdata'; $string['defaultmarksscheme'] = 'Marks scheme'; $string['defaultmarksscheme_setting'] = 'This will be the default marks scheme for new questions. Questions authors should alter this to suit the question.'; @@ -69,12 +71,9 @@ $string['pluginnameadding'] = 'Adding an AI Text question'; $string['pluginnameediting'] = 'Editing an AI Text question'; $string['pluginnamesummary'] = 'Allows a response of a file upload and/or online text. The student response is processed by the configured AI/Large language model which returns feedback and optionally a grade..'; -$string['privacy::responsefieldlines'] = 'Number of lines indicating the size of the input box (textarea).'; -$string['privacy:metadata'] = 'AI Text question type plugin allows question authors to set default options as user preferences.'; $string['privacy:preference:attachments'] = 'Number of allowed attachments.'; $string['privacy:preference:attachmentsrequired'] = 'Number of required attachments.'; $string['privacy:preference:defaultmark'] = 'The default mark set for a given question.'; -$string['privacy:preference:disclaimer'] = 'Text to indicate the feedback and/or marking is from a LLM'; $string['privacy:preference:maxbytes'] = 'Maximum file size.'; $string['privacy:preference:responseformat'] = 'What is the response format (HTML editor, plain text, etc.)?'; $string['prompt'] = 'Prompt'; @@ -83,9 +82,6 @@ $string['responsefieldlines'] = 'Input box size'; $string['responseformat'] = 'Response format'; 
$string['responseformat_setting'] = 'The editor the student uses when responding'; -$string['responseoptions'] = 'Response options'; -$string['responsenotrequired'] = 'Text input is optional'; -$string['responseisrequired'] = 'Require the student to enter text'; $string['responsenotrequired'] = 'Text input is optional'; $string['responseoptions'] = 'Response options'; $string['responsetemplate'] = 'Response template'; @@ -98,7 +94,8 @@ $string['showprompt'] = 'Show prompt'; $string['thedefaultmarksscheme'] = 'Deduct a point from the total score for each grammar or spelling mistake.'; $string['thedefaultprompt'] = 'Explain if there is anything wrong with the grammar and spelling in the text.'; -$string['untestedquestionbehaviour'] = 'Untested question behaviour'; +$string['usecoreai'] = 'Use core ai'; +$string['usecoreai_setting'] = 'If you are using Moodle 4.5 or above you can use the core ai subsystem. Otherwise you will need to have tool_aiconnect installed.'; $string['wordcount'] = 'Word count: {$a}'; $string['wordcounttoofew'] = 'Word count: {$a->count}, less than the required {$a->limit} words.'; $string['wordcounttoomuch'] = 'Word count: {$a->count}, more than the limit of {$a->limit} words.'; diff --git a/question.php b/question.php index 14b3600..ebe27fe 100755 --- a/question.php +++ b/question.php @@ -42,6 +42,14 @@ class qtype_aitext_question extends question_graded_automatically_with_countback */ public $responseformat; + + /** + * LLM Model, will vary between AI systems, e.g. gpt4 or llama3 + + * @var mixed $model Store the llm model used for the question. + */ + public $model; + /** * Count of lines of text * @@ -55,12 +63,6 @@ class qtype_aitext_question extends question_graded_automatically_with_countback /** @var int indicates whether the maximum number of words required */ public $maxwordlimit; - /** - * LLM Model, will vary between AI systems, e.g. 
gpt4 or llama3 - * @var stream_set_blocking - */ - public $model; - /** * used in the question editing interface @@ -145,22 +147,45 @@ public function apply_attempt_state(question_attempt_step $step) { * large language model such as ChatGPT * * @param array $response - * @return void + * @return array An array containing the grade fraction and the question state. + * */ public function grade_response(array $response): array { + global $DB; if (!$this->is_complete_response($response)) { - $grade = [0 => 0, question_state::$needsgrading]; - return $grade; + return [0 => 0, question_state::$needsgrading]; } - $ai = new ai\ai($this->model); - if (is_array($response)) { + if (get_config('qtype_aitext', 'usecoreai')) { $fullaiprompt = $this->build_full_ai_prompt($response['answer'], $this->aiprompt, + - $this->defaultmark, $this->markscheme); + + global $USER; + $contextid = 1; + $action = new \core_ai\aiactions\summarise_text( + contextid: $contextid, + userid: $USER->id, + prompttext: $fullaiprompt, + ); + $manager = new \core_ai\manager(); + $result = $manager->process_action($action); + $data = (object) $result->get_response_data(); + $contentobject = json_decode($data->generatedcontent); + + } else { + $ai = new ai\ai($this->model); + if (get_config('qtype_aitext', 'batchmode')) { + $this->queue_ai_processing($response['answer'], $this->aiprompt, $this->defaultmark, $this->markscheme); + return [0 => 0, question_state::$needsgrading]; + } + if (is_array($response)) { + $fullaiprompt = $this->build_full_ai_prompt($response['answer'], $this->aiprompt, $this->defaultmark, $this->markscheme); - $llmresponse = $ai->prompt_completion($fullaiprompt); - $feedback = $llmresponse['response']['choices'][0]['message']['content']; - } + $llmresponse = $ai->prompt_completion($fullaiprompt); + $feedback = $llmresponse['response']['choices'][0]['message']['content']; + } - $contentobject = $this->process_feedback($feedback); + $contentobject = $this->process_feedback($feedback); + 
} // If there are no marks, write the feedback and set to needs grading . if (is_null($contentobject->marks)) { @@ -173,6 +198,31 @@ public function grade_response(array $response): array { return $grade; } + /** + * Queues the AI processing in batch mode. + * + * @param string $answer The student's answer. + * @param string $aiprompt The AI prompt. + * @param float $defaultmark The default mark. + * @param string $markscheme The mark scheme. + * @throws dml_exception If the queue record cannot be inserted. + */ + private function queue_ai_processing(string $answer, string $aiprompt, float $defaultmark, string $markscheme): void { + global $DB; + $data = [ + 'activity' => 'qtype_aitext', + 'status' => 0, + 'tries' => 0, + 'prompttext' => $this->build_full_ai_prompt($answer, $aiprompt, $defaultmark, $markscheme), + 'actiondata' => $this->step->get_id(), + 'timecreated' => time(), + 'timemodified' => time(), + + ]; + + $DB->insert_record('tool_aiconnect_queue', $data); + } + /** + * Inserts the AI feedback and prompt into the attempt step data. 
* @@ -238,7 +288,8 @@ public function process_feedback(string $feedback) { $contentobject = json_decode($feedback); if (json_last_error() === JSON_ERROR_NONE) { $contentobject->feedback = trim($contentobject->feedback); - $contentobject->feedback = preg_replace(['/\[\[/', '/\]\]/'], '"', $contentobject->feedback); + $contentobject->feedback = preg_replace(['/\[\[/', '/\]\]/'], '"', + $contentobject->feedback); $disclaimer = get_config('qtype_aitext', 'disclaimer'); $disclaimer = str_replace("[[model]]", $this->model, $disclaimer); $contentobject->feedback .= ' '.$this->llm_translate($disclaimer); diff --git a/settings.php b/settings.php index 78a2da9..1b8a927 100644 --- a/settings.php +++ b/settings.php @@ -67,5 +67,17 @@ 0, ['plain' => 'plain', 'editor' => 'editor', 'monospaced' => 'monospaced'] )); + $settings->add(new admin_setting_configcheckbox( + 'qtype_aitext/batchmode', + new lang_string('batchmode', 'qtype_aitext'), + new lang_string('batchmode_setting', 'qtype_aitext'), + 0 + )); + $settings->add(new admin_setting_configcheckbox( + 'qtype_aitext/usecoreai', + new lang_string('usecoreai', 'qtype_aitext'), + new lang_string('usecoreai_setting', 'qtype_aitext'), + 0)); + }