From 367669f05b1b1b9e1ecc7f7e1862ea1eec6d989c Mon Sep 17 00:00:00 2001
From: guinmoon
Date: Sat, 4 May 2024 17:38:22 +0300
Subject: [PATCH] Gemma added to downloadable models

---
 LLMFarm/Chats/LLMTextInput.swift          |  4 +--
 LLMFarm/Settings/downloadable_models.json | 43 +++++++++++++++++++----
 2 files changed, 38 insertions(+), 9 deletions(-)

diff --git a/LLMFarm/Chats/LLMTextInput.swift b/LLMFarm/Chats/LLMTextInput.swift
index ff75718..3b64bbd 100644
--- a/LLMFarm/Chats/LLMTextInput.swift
+++ b/LLMFarm/Chats/LLMTextInput.swift
@@ -105,11 +105,11 @@ public struct LLMTextInput: View {
                         .resizable()
                         .aspectRatio(contentMode: .fit)
                         .frame(maxWidth: 30,maxHeight: 40)
-                        .clipShape(Circle())
+//                        .clipShape(Circle())
 #endif
 //                image!
             }
-            .cornerRadius(20) /// make the background rounded
+            .cornerRadius(5) /// make the background rounded
             .overlay( /// apply a rounded border
                 RoundedRectangle(cornerRadius: 5)
                     .stroke(.gray, lineWidth: 1)
diff --git a/LLMFarm/Settings/downloadable_models.json b/LLMFarm/Settings/downloadable_models.json
index 36c8945..09c047f 100644
--- a/LLMFarm/Settings/downloadable_models.json
+++ b/LLMFarm/Settings/downloadable_models.json
@@ -18,19 +18,31 @@
         ]
     },
     {
-        "name": "Phi 2 2.7B",
+        "name": "Gemma 1.1 2B it",
         "models": [
             {
-                "file_name": "phi-2.Q4_K_M.gguf",
-                "url": "https://huggingface.co/TheBloke/phi-2-GGUF/resolve/main/phi-2.Q4_K_M.gguf?download=true",
-                "size": "1.48",
-                "Q": "Q4_K_M"
+                "url": "https://huggingface.co/guinmoon/LLMFarm_Models/resolve/main/gemma-2b-it-Q5_K_M.gguf?download=true",
+                "file_name": "gemma-2b-it-Q5_K_M.gguf",
+                "size": "",
+                "Q": "Q5_K_M"
             },
             {
-                "url": "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q8_0.gguf?download=true",
-                "file_name": "phi-2-q8_0.gguf",
+                "url": "https://huggingface.co/guinmoon/LLMFarm_Models/resolve/main/gemma-2b-it.Q8_0.gguf?download=true",
+                "file_name": "gemma-2b-it.Q8_0.gguf",
                 "size": "",
                 "Q": "Q8_0"
+            },
+            {
+                "url": "https://huggingface.co/guinmoon/LLMFarm_Models/resolve/main/gemma-2b-it-IQ4_NL.gguf?download=true",
+                "file_name": "gemma-2b-it-IQ4_NL.gguf",
+                "size": "",
+                "Q": "IQ4_NL"
+            },
+            {
+                "url": "https://huggingface.co/guinmoon/LLMFarm_Models/resolve/main/gemma-2b-it-IQ3_S.gguf?download=true",
+                "file_name": "gemma-2b-it-IQ3_S.gguf",
+                "size": "",
+                "Q": "IQ3_S"
             }
         ]
     },
@@ -57,6 +69,23 @@
             }
         ]
     },
+    {
+        "name": "Phi 2 2.7B",
+        "models": [
+            {
+                "file_name": "phi-2.Q4_K_M.gguf",
+                "url": "https://huggingface.co/TheBloke/phi-2-GGUF/resolve/main/phi-2.Q4_K_M.gguf?download=true",
+                "size": "1.48",
+                "Q": "Q4_K_M"
+            },
+            {
+                "url": "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q8_0.gguf?download=true",
+                "file_name": "phi-2-q8_0.gguf",
+                "size": "",
+                "Q": "Q8_0"
+            }
+        ]
+    },
     {
         "name": "MobileVLM 1.7B",
         "models": [
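
Note on the entries added above: each family in downloadable_models.json carries a "name" plus a "models" array whose items use the keys "file_name", "url", "size", and "Q" (quantization). The sketch below shows how such entries could be decoded in Swift; the struct names and the assumption that the file's top level is an array of families are illustrative only, since LLMFarm's actual parsing code is not part of this patch.

    import Foundation

    // Sketch only: a Codable mirror of the entries added in this patch.
    // Key names ("name", "models", "file_name", "url", "size", "Q") come from
    // downloadable_models.json above; the type names and the top-level-array
    // assumption are hypothetical, not LLMFarm source.
    struct ModelFamily: Codable {
        struct Variant: Codable {
            let file_name: String
            let url: String
            let size: String
            let Q: String
        }
        let name: String
        let models: [Variant]
    }

    // Example: list every downloadable variant, e.g. "Gemma 1.1 2B it: gemma-2b-it-Q5_K_M.gguf [Q5_K_M]".
    func listVariants(from jsonData: Data) throws -> [String] {
        let families = try JSONDecoder().decode([ModelFamily].self, from: jsonData)
        return families.flatMap { family in
            family.models.map { "\(family.name): \($0.file_name) [\($0.Q)]" }
        }
    }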