From 2709c71b85f4647d4385f69544c6e097a6a65e13 Mon Sep 17 00:00:00 2001
From: Patrick Wiltrout
Date: Fri, 8 Nov 2024 14:29:20 -0500
Subject: [PATCH 1/3] added ollama model param configs to config.toml for temp
 and num_ctx

---
 src/config.ts               |  8 ++++++++
 src/lib/providers/ollama.ts | 13 +++++++++----
 2 files changed, 17 insertions(+), 4 deletions(-)

diff --git a/src/config.ts b/src/config.ts
index bb693359..c3f0dfbc 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -18,6 +18,10 @@ interface Config {
     SEARXNG: string;
     OLLAMA: string;
   };
+  OLLAMA_PARAMS: {
+    TEMPERATURE: number;
+    NUM_CTX: number;
+  }
 }
 
 type RecursivePartial<T> = {
@@ -45,6 +49,10 @@ export const getSearxngApiEndpoint = () =>
 export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
 
+export const getModelTemperature = () => loadConfig().OLLAMA_PARAMS.TEMPERATURE;
+
+export const getModelNumCtx = () => loadConfig().OLLAMA_PARAMS.NUM_CTX;
+
 export const updateConfig = (config: RecursivePartial<Config>) => {
   const currentConfig = loadConfig();
 
diff --git a/src/lib/providers/ollama.ts b/src/lib/providers/ollama.ts
index ed68bfaf..b3753eb3 100644
--- a/src/lib/providers/ollama.ts
+++ b/src/lib/providers/ollama.ts
@@ -1,10 +1,11 @@
 import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
-import { getOllamaApiEndpoint } from '../../config';
+import { getModelNumCtx, getModelTemperature, getOllamaApiEndpoint } from '../../config';
 import logger from '../../utils/logger';
 import { ChatOllama } from '@langchain/community/chat_models/ollama';
 
 export const loadOllamaChatModels = async () => {
   const ollamaEndpoint = getOllamaApiEndpoint();
+
   if (!ollamaEndpoint) return {};
 
@@ -16,20 +17,24 @@ export const loadOllamaChatModels = async () => {
     });
 
     const { models: ollamaModels } = (await response.json()) as any;
-
+
     const chatModels = ollamaModels.reduce((acc, model) => {
+      const modelTemperature = getModelTemperature();
+      const modelNumCtx = getModelNumCtx();
       acc[model.model] = {
         displayName: model.name,
         model: new ChatOllama({
           baseUrl: ollamaEndpoint,
           model: model.model,
-          temperature: 0.7,
+          temperature: modelTemperature,
+          numCtx: modelNumCtx,
         }),
       };
       return acc;
     }, {});
-
+
+    return chatModels;
   } catch (err) {
     logger.error(`Error loading Ollama models: ${err}`);
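
For context on PATCH 1/3: the two new getters are thin projections over the
parsed config, so a value set under [OLLAMA_PARAMS] in config.toml flows
straight through to the ChatOllama constructor. Below is a minimal sketch of
that flow, assuming the config is parsed with @iarna/toml from a config.toml
in the working directory — both the file name and the parser are illustrative
assumptions, not shown in this patch:

    import fs from 'fs';
    import toml from '@iarna/toml';

    // Abridged Config shape after this patch.
    interface Config {
      OLLAMA_PARAMS: {
        TEMPERATURE: number;
        NUM_CTX: number;
      };
    }

    // Hypothetical stand-in for the repo's loadConfig():
    // parse config.toml into the Config shape on every call.
    const loadConfig = (): Config =>
      toml.parse(fs.readFileSync('config.toml', 'utf-8')) as unknown as Config;

    // The getters added by the patch simply project fields out of the TOML.
    export const getModelTemperature = () => loadConfig().OLLAMA_PARAMS.TEMPERATURE;
    export const getModelNumCtx = () => loadConfig().OLLAMA_PARAMS.NUM_CTX;

One note on the reduce callback above: getModelTemperature() and
getModelNumCtx() return the same values on every iteration, so the two calls
could be hoisted above the reduce to avoid re-reading the config once per
installed model.
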
From d6551d0db21bebddeb6a30d2199a6d6bdc4ca2f5 Mon Sep 17 00:00:00 2001
From: Patrick Wiltrout
Date: Fri, 8 Nov 2024 14:31:19 -0500
Subject: [PATCH 2/3] added sorting to model dropdowns

---
 ui/components/SettingsDialog.tsx | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ui/components/SettingsDialog.tsx b/ui/components/SettingsDialog.tsx
index 716dd7d4..abe7c76f 100644
--- a/ui/components/SettingsDialog.tsx
+++ b/ui/components/SettingsDialog.tsx
@@ -281,7 +281,7 @@ const SettingsDialog = ({
                   ? chatModelProvider.map((model) => ({
                       value: model.name,
                       label: model.displayName,
-                    }))
+                    })).sort((a, b) => a.label.localeCompare(b.label))
                   : [
                       {
                         value: '',
@@ -392,7 +392,7 @@
                   ? embeddingModelProvider.map((model) => ({
                       label: model.displayName,
                       value: model.name,
-                    }))
+                    })).sort((a, b) => a.label.localeCompare(b.label))
                   : [
                       {
                         label: 'No embedding models available',

From 62411d3ea6aa6a2135659103f1b398fc9066d0fa Mon Sep 17 00:00:00 2001
From: Patrick Wiltrout
Date: Fri, 8 Nov 2024 14:38:20 -0500
Subject: [PATCH 3/3] updated sample.config.toml with new ollama configs

---
 sample.config.toml | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/sample.config.toml b/sample.config.toml
index f6c69436..895edee1 100644
--- a/sample.config.toml
+++ b/sample.config.toml
@@ -9,4 +9,8 @@ ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef
 
 [API_ENDPOINTS]
 SEARXNG = "http://localhost:32768" # SearxNG API URL
-OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434
\ No newline at end of file
+OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434
+
+[OLLAMA_PARAMS]
+TEMPERATURE = 0.7 # Ollama's default temperature is 0.8
+NUM_CTX = 2048 # Ollama's default num_ctx is 2048
\ No newline at end of file
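
As a standalone illustration of the sort added in PATCH 2/3: options are
built from the provider's model list first, then ordered by display label.
The model names below are invented for the example:

    type Option = { value: string; label: string };

    const models = [
      { name: 'mistral:7b', displayName: 'Mistral 7B' },
      { name: 'llama3.1:8b', displayName: 'Llama 3.1 8B' },
      { name: 'gemma2:9b', displayName: 'Gemma 2 9B' },
    ];

    // Same shape as the dropdown mapping in SettingsDialog.tsx:
    // map models to select options, then sort by label.
    const options: Option[] = models
      .map((model) => ({ value: model.name, label: model.displayName }))
      .sort((a, b) => a.label.localeCompare(b.label));

    console.log(options.map((o) => o.label));
    // => [ 'Gemma 2 9B', 'Llama 3.1 8B', 'Mistral 7B' ]

localeCompare orders labels by locale collation rather than raw code units,
so 'apple' sorts next to 'Apple' instead of after every uppercase label, which
is what a plain a.label < b.label comparison would produce.
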