diff --git a/model-providers/ollama/deployment/src/main/java/io/quarkiverse/langchain4j/ollama/deployment/OllamaProcessor.java b/model-providers/ollama/deployment/src/main/java/io/quarkiverse/langchain4j/ollama/deployment/OllamaProcessor.java
index a74e0adb9..8395f0c59 100644
--- a/model-providers/ollama/deployment/src/main/java/io/quarkiverse/langchain4j/ollama/deployment/OllamaProcessor.java
+++ b/model-providers/ollama/deployment/src/main/java/io/quarkiverse/langchain4j/ollama/deployment/OllamaProcessor.java
@@ -108,7 +108,6 @@ void generateBeans(OllamaRecorder recorder,
             var builder = SyntheticBeanBuildItem
                     .configure(CHAT_MODEL)
                     .setRuntimeInit()
-                    .setRuntimeInit()
                     .defaultBean()
                     .scope(ApplicationScoped.class)
                     .supplier(recorder.chatModel(config, fixedRuntimeConfig, configName));
diff --git a/model-providers/ollama/deployment/src/test/java/io/quarkiverse/langchain4j/ollama/deployment/ToolsTest.java b/model-providers/ollama/deployment/src/test/java/io/quarkiverse/langchain4j/ollama/deployment/ToolsTest.java
index 09c910b65..3d6847205 100644
--- a/model-providers/ollama/deployment/src/test/java/io/quarkiverse/langchain4j/ollama/deployment/ToolsTest.java
+++ b/model-providers/ollama/deployment/src/test/java/io/quarkiverse/langchain4j/ollama/deployment/ToolsTest.java
@@ -108,10 +108,10 @@ public interface MathAssistant {
     @Test
     @ActivateRequestContext
     void test_multiple_tools() {
-        String msg = "What is the square root of the sum of the numbers of letters in the words " +
+        String msg = "What is the square root with maximal precision of the sum of the numbers of letters in the words " +
                 "\"hello\" and \"world\"";
         String response = mathAssistant.chat(msg);
-        assertThat(response).contains("approximately 3.16");
+        assertThat(response).contains("3.162278");
     }
diff --git a/model-providers/ollama/runtime/src/main/java/io/quarkiverse/langchain4j/ollama/OllamaDefaultToolsHandler.java b/model-providers/ollama/runtime/src/main/java/io/quarkiverse/langchain4j/ollama/OllamaDefaultToolsHandler.java
index 2ddbbd750..490c52cb0 100644
--- a/model-providers/ollama/runtime/src/main/java/io/quarkiverse/langchain4j/ollama/OllamaDefaultToolsHandler.java
+++ b/model-providers/ollama/runtime/src/main/java/io/quarkiverse/langchain4j/ollama/OllamaDefaultToolsHandler.java
@@ -25,14 +25,14 @@ public class OllamaDefaultToolsHandler implements ToolsHandler {
                     You must always select one of the above tools and respond with a JSON object matching the following schema,
                     and only this json object:
-                    {{
+                    {
                       "tool": <name of the selected tool>,
                       "tool_input": <parameters for the selected tool, matching the tool's JSON schema>
-                    }}
-                    Do not use new tools, just use the ones from the list. Do not forget the parameters in tool_input field.
+                    }
+                    Do not use other tools than the ones from the list above. Always provide the "tool_input" field.
                     If several tools are necessary, answer them sequentially.
-                    When the user provides sufficient information , answer with the __conversational_response tool.
+                    When the user provides sufficient information, answer with the __conversational_response tool.
"""); static final ToolSpecification DEFAULT_RESPONSE_TOOL = ToolSpecification.builder() @@ -93,8 +93,7 @@ public AiMessage getAiMessageFromResponse(ChatResponse response, List availableTools = toolSpecifications.stream().map(ToolSpecification::name).toList(); if (!availableTools.contains(toolResponse.tool)) { return AiMessage.from(String.format( - "The LLM wants to call a tool %s that is not part of the available tools %s", + "Ollama server wants to call a tool '%s' that is not part of the available tools %s", toolResponse.tool, availableTools)); } // Extract tools request from response