diff --git a/.github/workflows/moodle-ci.yml b/.github/workflows/moodle-ci.yml index f82fde5..a11f0ce 100644 --- a/.github/workflows/moodle-ci.yml +++ b/.github/workflows/moodle-ci.yml @@ -31,7 +31,7 @@ jobs: matrix: include: - php: '8.1' - moodle-branch: 'MOODLE_403_STABLE' + moodle-branch: 'MOODLE_404_STABLE' database: 'mariadb' # - php: '8.0' # moodle-branch: 'master' @@ -59,10 +59,16 @@ jobs: echo $(cd ci/vendor/bin; pwd) >> $GITHUB_PATH # PHPUnit depends on en_AU.UTF-8 locale sudo locale-gen en_AU.UTF-8 + env: + DB: ${{ matrix.database }} + MOODLE_BRANCH: ${{ matrix.moodle-branch }} - name: Install Moodle # Need explicit IP to stop mysql client fail on attempt to use unix socket. - run: moodle-plugin-ci install --plugin ./plugin --db-host=127.0.0.1 + run: | + moodle-plugin-ci add-plugin --branch main mebis-lp/moodle-local_ai_manager + moodle-plugin-ci install --plugin ./plugin --db-host=127.0.0.1 + env: DB: ${{ matrix.database }} MOODLE_BRANCH: ${{ matrix.moodle-branch }} diff --git a/question.php b/question.php index a8dd898..64d08fa 100755 --- a/question.php +++ b/question.php @@ -57,7 +57,7 @@ class qtype_aitext_question extends question_graded_automatically_with_countback /** * LLM Model, will vary between AI systems, e.g. gpt4 or llama3 - * @var stream_set_blocking + * @var string */ public $model; @@ -166,7 +166,6 @@ public function perform_request(string $prompt, string $purpose): string { } return $llmresponse->get_content(); } else { - xdebug_break(); global $USER; $manager = new \core_ai\manager(); $action = new \core_ai\aiactions\generate_text( @@ -301,10 +300,6 @@ public function process_feedback(string $feedback) { $contentobject->feedback = trim($contentobject->feedback); $contentobject->feedback = preg_replace(['/\[\[/', '/\]\]/'], '"', $contentobject->feedback); $disclaimer = get_config('qtype_aitext', 'disclaimer'); - // TODO Model currently is only used for connecting and at this point I believe. 
- // We need to remove all the model selection logic or make local_ai_manager support the selection of models. - $disclaimer = str_replace("[[model]]", - \local_ai_manager\ai_manager_utils::get_connector_instance_by_purpose('feedback')->get_model(), $disclaimer); $contentobject->feedback .= ' '.$this->llm_translate($disclaimer); } else { $contentobject = (object) [ diff --git a/settings.php b/settings.php index 708262a..ba11fc6 100644 --- a/settings.php +++ b/settings.php @@ -38,7 +38,7 @@ 'qtype_aitext/disclaimer', new lang_string('disclaimer', 'qtype_aitext'), new lang_string('disclaimer_setting', 'qtype_aitext'), - '(Response provided by [[model]])' + '(Response provided by an AI System)' )); $settings->add(new admin_setting_configtextarea( 'qtype_aitext/prompt',