diff --git a/.github/workflows/moodle-ci.yml b/.github/workflows/moodle-ci.yml
index f82fde5..a217eed 100644
--- a/.github/workflows/moodle-ci.yml
+++ b/.github/workflows/moodle-ci.yml
@@ -50,6 +50,13 @@ jobs:
          ini-values: max_input_vars=5000
          # none to use phpdbg fallback. Specify pcov (Moodle 3.10 and up) or xdebug to use them instead.
          coverage: none
+      - name: Install NVM and Node
+        run: |
+          curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.0/install.sh | bash
+          export NVM_DIR="$HOME/.nvm"
+          [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
+          nvm install 20
+          nvm use 20

       - name: Deploy moodle-plugin-ci
         run: |
diff --git a/classes/ai/ai.php b/classes/ai/ai.php
index 3b8ceaf..eba607f 100644
--- a/classes/ai/ai.php
+++ b/classes/ai/ai.php
@@ -132,7 +132,7 @@ private function make_request(array $data, string $apikey, $multipart = null): a
      * @return string|array The generated completion or null if the model is empty.
      * @throws moodle_exception If the model is empty.
      */
-    public function prompt_completion($prompttext) {
+    public function prompt_completion($prompttext) : array {
         $data = $this->get_prompt_data($prompttext);
         $result = $this->make_request($data, $this->apikey);

diff --git a/tests/test_aiconnect.php b/tests/test_aiconnect.php
index 08c5918..c3daacf 100644
--- a/tests/test_aiconnect.php
+++ b/tests/test_aiconnect.php
@@ -30,13 +30,6 @@
  */
 class test_aiconnect extends \advanced_testcase {

-    /**
-     * Where most of the functionality lives
-     *
-     * @var ai $ai
-     *
-     */
-
     /**
      * The class with most of the functionality
      * @var $ai
@@ -53,29 +46,10 @@ public function setUp(): void {
         if (defined('TEST_LLM_APIKEY')) {
             set_config('apikey', TEST_LLM_APIKEY, 'tool_aiconnect');
             $this->ai = new ai\ai();
+        } else {
+            exit('Test will only run if TEST_LLM_APIKEY is defined in config.php');
         }
     }
-    /**
-     * Work around the get_prompt_data method
-     * being private
-     *
-     * @return void
-     */
-    public function test_get_prompt_data(): void {
-        $this->assertTrue(true);
-        $mockai = $this->getMockBuilder(ai\ai::class)->getMock();
-        $getpromptdata = new \ReflectionMethod(
-            ai\ai::class,
-            'get_prompt_data'
-        );
-        $getpromptdata->setAccessible(true);
-
-        $result = $getpromptdata->invokeArgs(
-            $mockai,
-            ['myprompt']
-        );
-        $this->assertStringContainsString("You: myprompt", $result['messages'][0]['content']);
-    }

     /**
      * Ask the LLM to do some maths
@@ -93,7 +67,7 @@ public function test_prompt_completion(): void {
     }
     /**
      * Confirm that an array of models are returned.
-     *
+     * This may not work as expected with ollama
      * @return void
      */
     public function test_get_models(): void {
@@ -101,6 +75,7 @@ public function test_get_models(): void {
         if (!$this->ai) {
             $this->markTestSkipped();
         }
+
         $result = $this->ai->get_models();
         $this->assertIsArray($result->models);
     }