Merge branch 'cookbook/ss-demo' of https://github.com/phidatahq/phidata into cookbook/ss-demo
ashpreetbedi committed Apr 11, 2024
2 parents ceb75d5 + 6636a37 commit 11bb1f8
Showing 2 changed files with 21 additions and 15 deletions.
10 changes: 4 additions & 6 deletions cookbook/integrations/singlestore/auto_rag/app.py
@@ -21,6 +21,8 @@
)
st.title("Local RAG with Web Scraping")
st.markdown("##### :orange_heart: Built using [phidata](https://github.com/phidatahq/phidata)")


def restart_assistant():
st.session_state["web_assistant"] = None
st.session_state["web_assistant_run_id"] = None
@@ -38,7 +40,7 @@ def main() -> None:
        return

    # Get model
    local_rag_model = st.sidebar.selectbox("Select Model", options=["openhermes", "codellama"])
    local_rag_model = st.sidebar.selectbox("Select Model", options=["GPT-4", "Hermes2", "Claude"])
    # Set assistant_type in session state
    if "local_rag_model" not in st.session_state:
        st.session_state["local_rag_model"] = local_rag_model
@@ -111,11 +113,7 @@ def main() -> None:
    if "url_scrape_key" not in st.session_state:
        st.session_state["url_scrape_key"] = 0

    scraped_url = st.sidebar.text_input(
        "Input URL",
        type="default",
        key=st.session_state["url_scrape_key"]
    )
    scraped_url = st.sidebar.text_input("Input URL", type="default", key=st.session_state["url_scrape_key"])
    append_button = st.sidebar.button("Search URL")
    if append_button:
        if scraped_url is not None:
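The app.py changes above replace the local Ollama options with three hosted/local model names and keep the selection in Streamlit session state so the assistant can be rebuilt when the choice changes. A minimal sketch of that selectbox-plus-session-state pattern, standalone and with illustrative key names (not lifted verbatim from this commit):

import streamlit as st

def restart_assistant() -> None:
    # Drop the cached assistant so it gets rebuilt with the newly selected model
    st.session_state["web_assistant"] = None
    st.session_state["web_assistant_run_id"] = None

# Sidebar model picker; the options mirror the ones added in this commit
local_rag_model = st.sidebar.selectbox("Select Model", options=["GPT-4", "Hermes2", "Claude"])

# Remember the choice, and restart the assistant whenever it changes
if "local_rag_model" not in st.session_state:
    st.session_state["local_rag_model"] = local_rag_model
elif st.session_state["local_rag_model"] != local_rag_model:
    st.session_state["local_rag_model"] = local_rag_model
    restart_assistant()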
26 changes: 17 additions & 9 deletions cookbook/integrations/singlestore/auto_rag/assistant.py
@@ -1,29 +1,29 @@
from typing import Optional
from os import getenv

from phi.assistant import Assistant
from phi.knowledge import AssistantKnowledge
from phi.llm.ollama import Ollama
from phi.llm.openai import OpenAIChat
from phi.llm.anthropic import Claude
from phi.llm.ollama import Hermes
from phi.embedder.ollama import OllamaEmbedder
from phi.storage.assistant.singlestore import S2AssistantStorage
from phi.vectordb.singlestore import S2VectorDb

from resources import config # type: ignore

# Setup SingleStore connection
db_url = (
    f"mysql+pymysql://{config['username']}:{config['password']}@{config['host']}:{config['port']}/{config['database']}?charset=utf8mb4"
)
db_url = f"mysql+pymysql://{getenv("SINGLESTORE_USERNAME")}:{getenv("SINGLESTORE_PASSWORD")}@{getenv("SINGLESTORE_HOST")}:{getenv("SINGLESTORE_PORT")}/{getenv("SINGLESTORE_DATABASE")}?ssl_ca={getenv("SINGLESTORE_SSL_CERT")}&ssl_verify_cert=true"

local_assistant_storage = S2AssistantStorage(
    table_name="local_rag_assistant",
    schema=config["database"],
    schema=getenv("SINGLESTORE_DATABASE"),
    db_url=db_url,
)

local_assistant_knowledge = AssistantKnowledge(
    vector_db=S2VectorDb(
        collection="web_documents_singlestore",
        schema=config["database"],
        schema=getenv("SINGLESTORE_DATABASE"),
        db_url=db_url,
        # Assuming OllamaEmbedder or a compatible embedder is used for SingleStore
        embedder=OllamaEmbedder(model="nomic-embed-text", dimensions=768),
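With the connection settings now read from SINGLESTORE_* environment variables instead of resources.config, an unset variable silently produces a URL containing the string "None". A fail-fast variant is sketched below; the helper name is illustrative and not part of this commit, only the environment variable names are taken from the diff:

from os import getenv

def build_singlestore_db_url() -> str:
    # Collect the required connection settings from the environment
    required = [
        "SINGLESTORE_USERNAME",
        "SINGLESTORE_PASSWORD",
        "SINGLESTORE_HOST",
        "SINGLESTORE_PORT",
        "SINGLESTORE_DATABASE",
        "SINGLESTORE_SSL_CERT",
    ]
    values = {name: getenv(name) for name in required}
    missing = [name for name, value in values.items() if not value]
    if missing:
        raise RuntimeError(f"Missing environment variables: {', '.join(missing)}")
    return (
        f"mysql+pymysql://{values['SINGLESTORE_USERNAME']}:{values['SINGLESTORE_PASSWORD']}"
        f"@{values['SINGLESTORE_HOST']}:{values['SINGLESTORE_PORT']}/{values['SINGLESTORE_DATABASE']}"
        f"?ssl_ca={values['SINGLESTORE_SSL_CERT']}&ssl_verify_cert=true"
    )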
@@ -33,17 +33,25 @@


def get_local_rag_assistant(
    model: str = "openhermes",
    model: str = "GPT-4",
    user_id: Optional[str] = None,
    run_id: Optional[str] = None,
    debug_mode: bool = False,
) -> Assistant:
    """Get a Local URL RAG Assistant with SingleStore backend."""

    if model == "GPT-4":
        llm = OpenAIChat(model="gpt-4-turbo-preview")
    elif model == "Hermes2":
        llm = Hermes(model="adrienbrault/nous-hermes2pro:Q8_0")
    elif model == "Claude":
        llm = Claude(model="claude-3-opus-20240229")

    return Assistant(
        name="local_rag_assistant",
        run_id=run_id,
        user_id=user_id,
        llm=Ollama(model=model),
        llm=llm,
        storage=local_assistant_storage,
        knowledge_base=local_assistant_knowledge,
        add_chat_history_to_messages=False,
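The new if/elif block routes get_local_rag_assistant to OpenAI, a local Hermes 2 Pro model via Ollama, or Anthropic based on the model string, while the SingleStore-backed storage and knowledge base stay the same. A usage sketch, assuming the matching API key (or a running Ollama server for "Hermes2") and the SINGLESTORE_* variables are exported, and that Assistant.print_response behaves as in other phidata cookbook examples:

from assistant import get_local_rag_assistant

# Pick one of the three models wired up in this commit
rag_assistant = get_local_rag_assistant(model="GPT-4", user_id="demo-user", debug_mode=True)

# Documents are normally added through the Streamlit app's "Input URL" scraper;
# once the knowledge base has content, the assistant answers over it.
rag_assistant.print_response("Summarize the scraped documents.", markdown=True)

Any other model string would leave llm unassigned and raise an UnboundLocalError inside get_local_rag_assistant, but the Streamlit selectbox only offers these three values.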
