Using Gemma on GTX 1060
sachinsshetty committed Mar 18, 2024
1 parent 074b019 commit 4c45ef1
Showing 3 changed files with 56 additions and 4 deletions.
8 changes: 7 additions & 1 deletion src/continue-dev/config.json
@@ -5,6 +5,12 @@
      "provider": "ollama",
      "model": "mistral:latest",
      "apiBase": "http://localhost:11434/"
+    },
+    {
+      "title": "Gemma2B",
+      "provider": "ollama",
+      "model": "gemma:2b",
+      "apiBase": "http://localhost:11434/"
    }
  ],
  "slashCommands": [
@@ -63,7 +69,7 @@
    "provider": "ollama",
    "model": "mistral:latest"
  },
-  "allowAnonymousTelemetry": true,
+  "allowAnonymousTelemetry": false,
  "embeddingsProvider": {
    "provider": "transformers.js"
  }
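Note: the new Gemma2B entry only works if the Ollama daemon behind http://localhost:11434/ actually has the gemma:2b weights available. A minimal sanity check before restarting Continue, assuming the requests package is installed (the /api/tags endpoint is Ollama's standard local-model listing):

# Check that the local Ollama server lists gemma:2b before Continue tries to use it.
# Assumes `requests` is installed and the model was fetched with `ollama pull gemma:2b`.
import requests

API_BASE = "http://localhost:11434"  # same apiBase as in config.json

tags = requests.get(f"{API_BASE}/api/tags", timeout=5).json()
local_models = [m["name"] for m in tags.get("models", [])]

if "gemma:2b" in local_models:
    print("gemma:2b is available:", local_models)
else:
    print("gemma:2b not found; run `ollama pull gemma:2b`. Local models:", local_models)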
41 changes: 41 additions & 0 deletions src/ollama/bhoomi/json_output.py
@@ -0,0 +1,41 @@
import json

from langchain_community.chat_models import ChatOllama
from langchain_core.messages import HumanMessage
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate

json_schema = {
    "title": "Person",
    "description": "Identifying information about a person.",
    "type": "object",
    "properties": {
        "name": {"title": "Name", "description": "The person's name", "type": "string"},
        "age": {"title": "Age", "description": "The person's age", "type": "integer"},
        "fav_food": {
            "title": "Fav Food",
            "description": "The person's favorite food",
            "type": "string",
        },
    },
    "required": ["name", "age"],
}

llm = ChatOllama(model="gemma:2b")

messages = [
    HumanMessage(
        content="Please tell me about a person using the following JSON schema:"
    ),
    # Use a template tuple so "{dumps}" is filled in at invoke time; a literal
    # HumanMessage passed to from_messages is kept verbatim and never substituted.
    ("human", "{dumps}"),
    HumanMessage(
        content="Now, considering the schema, tell me about a person named Sachin who is 32 years old and loves Biryani."
    ),
]

prompt = ChatPromptTemplate.from_messages(messages)
dumps = json.dumps(json_schema, indent=2)

chain = prompt | llm | StrOutputParser()

print(chain.invoke({"dumps": dumps}))
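A possible follow-up (not part of this commit): Ollama can also be asked to return well-formed JSON via its format option, which pairs naturally with the schema above. A sketch that could be appended to json_output.py, assuming ChatOllama forwards format="json" to the Ollama API and reusing the json_schema/dumps values defined earlier in the file:

# Sketch: request JSON-formatted output and parse it straight into a dict.
# ChatOllama, ChatPromptTemplate, and `dumps` come from the code above.
from langchain_core.output_parsers import JsonOutputParser

json_llm = ChatOllama(model="gemma:2b", format="json")

json_prompt = ChatPromptTemplate.from_messages(
    [
        ("human", "Answer strictly as JSON matching this schema:\n{dumps}"),
        ("human", "Describe a person named Sachin who is 32 years old and loves Biryani."),
    ]
)

json_chain = json_prompt | json_llm | JsonOutputParser()
print(json_chain.invoke({"dumps": dumps}))  # e.g. {"name": "Sachin", "age": 32, ...}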
11 changes: 8 additions & 3 deletions src/ollama/bhoomi/main.py
@@ -2,14 +2,19 @@

from langchain_community.llms import Ollama

-llm = Ollama(model="mistral")
+llm = Ollama(model="gemma:2b")

# test 1

# llm.invoke("Tell me a joke")

# test 2
query = "Tell me a joke"
query = "What are the top 3 important facilities required for a Robot to survive on Mars?"

-for chunks in llm.stream(query):
+query2 = "What top 5 Sensors are necessary for Robots on Mars?"
+
+query3 = "Write a python program to connect the 5 sensors"
+
+
+for chunks in llm.stream(query2):
    print(chunks)
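Since main.py now defines three test prompts but only streams query2, one hypothetical way to exercise all of them without editing the stream() call between runs (same Ollama/gemma:2b setup as above):

# Hypothetical extension of main.py: stream every test prompt in sequence.
from langchain_community.llms import Ollama

llm = Ollama(model="gemma:2b")

queries = [
    "What are the top 3 important facilities required for a Robot to survive on Mars?",
    "What top 5 Sensors are necessary for Robots on Mars?",
    "Write a python program to connect the 5 sensors",
]

for query in queries:
    print(f"\n### {query}\n")
    for chunk in llm.stream(query):
        print(chunk, end="", flush=True)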
