');
$mform->addElement('editor', 'generalfeedback', get_string('generalfeedback', 'question')
, ['rows' => 10], $this->editoroptions);
@@ -67,12 +76,12 @@ protected function definition_inner($mform) {
$mform->addElement('textarea', 'sampleanswer', get_string('sampleanswer', 'qtype_aitext'),
['maxlen' => 50, 'rows' => 6, 'size' => 30]);
$mform->setType('sampleanswer', PARAM_RAW);
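+ // New questions start with an empty sample answer.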
+ $mform->setDefault('sampleanswer', '');
$mform->addHelpButton('sampleanswer', 'sampleanswer', 'qtype_aitext');
$mform->addElement('static', 'sampleanswereval', '',
    get_string('sampleanswerevaluate', 'qtype_aitext'));
-
$mform->addElement('header', 'responseoptions', get_string('responseoptions', 'qtype_aitext'));
$mform->setExpanded('responseoptions');
diff --git a/question.php b/question.php
index e47e474..8fb8a06 100755
--- a/question.php
+++ b/question.php
@@ -55,6 +55,12 @@ class qtype_aitext_question extends question_graded_automatically_with_countback
/** @var int indicates whether the maximum number of words required */
public $maxwordlimit;
+ /**
+ * The LLM model name; this varies between AI systems, e.g. gpt4 or llama3.
+ * @var string
+ */
+ public $model;
+
/**
* used in the question editing interface
@@ -146,7 +152,7 @@ public function grade_response(array $response) : array {
$grade = [0 => 0, question_state::$needsgrading];
return $grade;
}
- $ai = new ai\ai();
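+ // Pass the question's configured model so grading uses the intended LLM.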
+ $ai = new ai\ai($this->model);
if (is_array($response)) {
$fullaiprompt = $this->build_full_ai_prompt($response['answer'], $this->aiprompt,
$this->defaultmark, $this->markscheme);
@@ -173,8 +179,16 @@ public function grade_response(array $response) : array {
return $grade;
}
+ /**
+ * Builds the full prompt sent to the AI. Also used by the prompt tester in the editing form.
+ *
+ * @param string $response
+ * @param string $aiprompt
+ * @param float $defaultmark
+ * @param string $markscheme
+ * @return string the complete prompt to send to the AI
+ */
public function build_full_ai_prompt($response, $aiprompt, $defaultmark, $markscheme) {
-
$responsetext = strip_tags($response);
$responsetext = '[['.$responsetext.']]';
$prompt = get_config('qtype_aitext', 'prompt');
diff --git a/questiontype.php b/questiontype.php
index d8d3b1f..c42835b 100755
--- a/questiontype.php
+++ b/questiontype.php
@@ -78,6 +78,8 @@ public function save_defaults_for_new_questions(stdClass $fromform): void {
$this->set_default_value('responseformat', $fromform->responseformat);
$this->set_default_value('responsefieldlines', $fromform->responsefieldlines);
$this->set_default_value('markscheme', $fromform->markscheme);
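+ // Also remember the sample answer as a default for the next new question.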
+ $this->set_default_value('sampleanswer', $fromform->sampleanswer);
}
/**
* Write the question data from the editing form to the database
@@ -97,6 +99,8 @@ public function save_question_options($formdata) {
$options->aiprompt = $formdata->aiprompt;
$options->markscheme = $formdata->markscheme;
$options->sampleanswer = $formdata->sampleanswer;
+
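+ // Trim whitespace so a clean model name is stored in the question options.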
+ $options->model = trim($formdata->model);
$options->responseformat = $formdata->responseformat;
$options->responsefieldlines = $formdata->responsefieldlines;
$options->minwordlimit = isset($formdata->minwordenabled) ? $formdata->minwordlimit : null;
@@ -144,6 +148,7 @@ protected function initialise_question_instance(question_definition $question, $
$question->aiprompt = $questiondata->options->aiprompt;
$question->markscheme = $questiondata->options->markscheme;
$question->sampleanswer = $questiondata->options->sampleanswer;
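+ // Copy the stored model onto the question instance so grade_response() can pass it to ai\ai().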
+ $question->model = $questiondata->options->model;
}
/**
* Delete a question from the database
@@ -249,7 +254,8 @@ public function extra_question_fields() {
'responsetemplateformat',
'aiprompt',
'markscheme',
- 'sampleanswer'
+ 'sampleanswer',
+ 'model',
];
}
/**
diff --git a/renderer.php b/renderer.php
index 427f073..dbcaaf8 100755
--- a/renderer.php
+++ b/renderer.php
@@ -123,6 +123,7 @@ public function feedback(question_attempt $qa, question_display_options $options
// Get data written in the question.php grade_response method.
// This probably should be retrieved by an api call.
$comment = $qa->get_current_manual_comment();
if ($this->page->pagetype == 'question-bank-previewquestion-preview') {
$this->page->requires->js_call_amd('qtype_aitext/showprompt', 'init', []);
if ($comment[0] > '') {
diff --git a/tests/behat/add.feature b/tests/behat/add.feature
index cbd70e1..c25ea64 100755
--- a/tests/behat/add.feature
+++ b/tests/behat/add.feature
@@ -37,7 +37,6 @@ Feature: Test creating an AIText question
| Mark scheme | Give one mark if correct |
Then I should see "aitext-002"
-
@javascript
Scenario: Create an AI Text question for testing some default options
When I am on the "Course 1" "core_question > course question bank" page logged in as teacher
@@ -46,6 +45,7 @@ Feature: Test creating an AIText question
| Question text | Write an aitext with 500 words. |
| General feedback | This is general feedback |
| id_responsefieldlines | 15 |
+
Then I should see "aitext-003"
# Checking that the next new question form displays user preferences settings.
And I press "Create a new question ..."
diff --git a/tests/helper.php b/tests/helper.php
index 30b20ee..7e64bde 100755
--- a/tests/helper.php
+++ b/tests/helper.php
@@ -48,6 +48,8 @@ public static function make_aitext_question(array $options) {
'questiontext' => $options['questiontext'] ?? '',
'aiprompt' => $options['aiprompt'] ?? 0,
'markscheme' => $options['markscheme'] ?? 0,
+ 'sampleanswer' => $options['sampleanswer'] ?? 0,
+ 'model' => $options['model'] ?? 'gpt-4',
];
@@ -73,6 +75,8 @@ protected function initialise_aitext_question() {
$q->responsefieldlines = 10;
$q->minwordlimit = null;
$q->maxwordlimit = null;
+ $q->sampleanswer = '';
+ $q->model = 'gpt-4';
$q->graderinfo = '';
$q->graderinfoformat = FORMAT_HTML;
$q->qtype = question_bank::get_qtype('aitext');
@@ -111,6 +115,8 @@ public function get_aitext_question_form_data_editor() {
$fromform->status = \core_question\local\bank\question_version_status::QUESTION_STATUS_READY;
$fromform->aiprompt = 'A prompt for the LLM';
$fromform->markscheme = 'Give one mark if the answer is correct';
+ $fromform->sampleanswer = 'my sample answer';
+ $fromform->model = 'gpt-4';
return $fromform;
}
@@ -144,6 +150,7 @@ public function get_aitext_question_form_data_plain() {
$fromform->aiprompt = 'Evaluate this';
$fromform->markscheme = 'One mark if correct';
$fromform->maxbytes = 0;
+ $fromform->model = 'gpt-4';
$fromform->graderinfo = array('text' => '', 'format' => FORMAT_HTML);
$fromform->responsetemplate = array('text' => '', 'format' => FORMAT_HTML);
$fromform->status = \core_question\local\bank\question_version_status::QUESTION_STATUS_READY;
diff --git a/version.php b/version.php
index 70e91e2..a3cfc97 100755
--- a/version.php
+++ b/version.php
@@ -25,7 +25,7 @@
defined('MOODLE_INTERNAL') || die();
$plugin->component = 'qtype_aitext';
-$plugin->version = 2024050300;
+$plugin->version = 2024051100;
$plugin->requires = 2020110900;
$plugin->release = '0.01';
$plugin->maturity = MATURITY_BETA;