Commit

v2.4.12
ashpreetbedi committed May 20, 2024
1 parent 3ebe959 commit fce6838
Showing 10 changed files with 550 additions and 160 deletions.
14 changes: 8 additions & 6 deletions cookbook/examples/personalization/README.md
@@ -1,8 +1,9 @@
# Personalized Memory & Autonomous RAG
# ChatGPT-like Memory for Agents

This cookbook implements Personalized Memory & Autonomous retrieval-augmented generation.
This cookbook implements personalization, i.e. ChatGPT-like Memory for an Assistant.

i.e. the Assistant will remember details about the user across runs. Similar to how [ChatGPT implements Memory](https://openai.com/index/memory-and-new-controls-for-chatgpt/).
Share details about yourself with the Assistant, and it will remember them across runs and personalize the responses to your preferences.
This is similar to how [ChatGPT implements Memory](https://openai.com/index/memory-and-new-controls-for-chatgpt/).

> Note: Fork and clone this repository if needed
@@ -56,9 +57,10 @@ streamlit run cookbook/examples/personalization/app.py
```

- Open [localhost:8501](http://localhost:8501) to view the streamlit app.
- Add to memory: "call me 'your highness'"
- Add to memory: "always respond with a nice greeting and salutation"
- Add to memory: "i like cats so add a cat pun in the response"
- Enter a username to associate with the memory.
- Add to memory: "Call me bestie"
- Add to memory: "Always respond with a nice greeting and salutation"
- Add to memory: "I like docs so add a dog pun in the response"
- Add a website to the knowledge base: https://techcrunch.com/2024/04/18/meta-releases-llama-3-claims-its-among-the-best-open-models-available/
- Ask questions like:
- What did Meta release?
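For context, the sketch below shows how the same assistant could be driven from plain Python, assuming the `get_personalized_assistant` factory and the streaming `run()` call used in the `app.py` diff below; the `user_id`, tool flags, and prompt are illustrative.

```python
# Minimal sketch: build the assistant with the same factory and flags that
# app.py passes, then stream a response. The user_id, flags, and prompt used
# here are illustrative, not part of the cookbook.
from assistant import get_personalized_assistant  # type: ignore

assistant = get_personalized_assistant(
    llm_id="gpt-4o",
    user_id="bestie",  # memories are stored per user_id
    calculator=True,
    ddg_search=True,
    file_tools=True,
    shell_tools=False,
    python_assistant=False,
    research_assistant=False,
    investment_assistant=False,
)

# Stream the response; the assistant extracts and stores memories as it runs.
response = ""
for delta in assistant.run("Call me bestie and add a dog pun to your answer"):
    response += delta  # type: ignore
print(response)
```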
238 changes: 183 additions & 55 deletions cookbook/examples/personalization/app.py
@@ -9,26 +9,21 @@
from phi.tools.streamlit.components import get_username_sidebar
from phi.utils.log import logger

from assistant import get_personalized_auto_rag_assistant # type: ignore
from assistant import get_personalized_assistant # type: ignore

nest_asyncio.apply()
st.set_page_config(
page_title="Personalized Memory & Auto RAG",
page_title="Assistant with Memory",
page_icon=":orange_heart:",
)
st.title("Personalized Memory & Auto RAG")
st.title("Assistant with Personalized Memory")
st.markdown("##### :orange_heart: built using [phidata](https://github.com/phidatahq/phidata)")


def restart_assistant():
logger.debug("---*--- Restarting Assistant ---*---")
st.session_state["personalized_auto_rag_assistant"] = None
st.session_state["personalized_auto_rag_assistant_run_id"] = None
if "url_scrape_key" in st.session_state:
st.session_state["url_scrape_key"] += 1
if "file_uploader_key" in st.session_state:
st.session_state["file_uploader_key"] += 1
st.rerun()
with st.expander(":rainbow[:point_down: How to use]"):
st.markdown("Tell the Assistant about your preferences and they will remember them across conversations.")
st.markdown("- Call me bestie")
st.markdown("- Always respond with a nice greeting and salutation")
st.markdown("- I like dogs so add a dog pun in the response")


def main() -> None:
@@ -40,37 +35,146 @@ def main() -> None:
st.write(":technologist: Please enter a username")
return

# Get LLM model
llm_model = st.sidebar.selectbox("Select LLM", options=["gpt-4o", "gpt-4-turbo"])
# Get the LLM to use
llm_id = st.sidebar.selectbox("Select LLM", options=["gpt-4o", "gpt-4-turbo"])
# Set llm_id in session state
if "llm_model" not in st.session_state:
st.session_state["llm_model"] = llm_model
if "llm_id" not in st.session_state:
st.session_state["llm_id"] = llm_id
# Restart the assistant if llm_id has changed
elif st.session_state["llm_model"] != llm_model:
st.session_state["llm_model"] = llm_model
elif st.session_state["llm_id"] != llm_id:
st.session_state["llm_id"] = llm_id
restart_assistant()

# Sidebar checkboxes for selecting tools
st.sidebar.markdown("### Select Tools")

# Enable Calculator
if "calculator_enabled" not in st.session_state:
st.session_state["calculator_enabled"] = True
# Get calculator_enabled from session state if set
calculator_enabled = st.session_state["calculator_enabled"]
# Checkbox for enabling calculator
calculator = st.sidebar.checkbox("Calculator", value=calculator_enabled, help="Enable calculator.")
if calculator_enabled != calculator:
st.session_state["calculator_enabled"] = calculator
calculator_enabled = calculator
restart_assistant()

# Enable file tools
if "file_tools_enabled" not in st.session_state:
st.session_state["file_tools_enabled"] = True
# Get file_tools_enabled from session state if set
file_tools_enabled = st.session_state["file_tools_enabled"]
# Checkbox for enabling file tools
file_tools = st.sidebar.checkbox("File Tools", value=file_tools_enabled, help="Enable file tools.")
if file_tools_enabled != file_tools:
st.session_state["file_tools_enabled"] = file_tools
file_tools_enabled = file_tools
restart_assistant()

# Enable Web Search via DuckDuckGo
if "ddg_search_enabled" not in st.session_state:
st.session_state["ddg_search_enabled"] = True
# Get ddg_search_enabled from session state if set
ddg_search_enabled = st.session_state["ddg_search_enabled"]
# Checkbox for enabling web search
ddg_search = st.sidebar.checkbox("Web Search", value=ddg_search_enabled, help="Enable web search using DuckDuckGo.")
if ddg_search_enabled != ddg_search:
st.session_state["ddg_search_enabled"] = ddg_search
ddg_search_enabled = ddg_search
restart_assistant()

# Enable shell tools
if "shell_tools_enabled" not in st.session_state:
st.session_state["shell_tools_enabled"] = False
# Get shell_tools_enabled from session state if set
shell_tools_enabled = st.session_state["shell_tools_enabled"]
# Checkbox for enabling shell tools
shell_tools = st.sidebar.checkbox("Shell Tools", value=shell_tools_enabled, help="Enable shell tools.")
if shell_tools_enabled != shell_tools:
st.session_state["shell_tools_enabled"] = shell_tools
shell_tools_enabled = shell_tools
restart_assistant()

# Sidebar checkboxes for selecting team members
st.sidebar.markdown("### Select Team Members")

# Enable Python Assistant
if "python_assistant_enabled" not in st.session_state:
st.session_state["python_assistant_enabled"] = False
# Get python_assistant_enabled from session state if set
python_assistant_enabled = st.session_state["python_assistant_enabled"]
# Checkbox for enabling the Python Assistant
python_assistant = st.sidebar.checkbox(
"Python Assistant",
value=python_assistant_enabled,
help="Enable the Python Assistant for writing and running python code.",
)
if python_assistant_enabled != python_assistant:
st.session_state["python_assistant_enabled"] = python_assistant
python_assistant_enabled = python_assistant
restart_assistant()

# Enable Research Assistant
if "research_assistant_enabled" not in st.session_state:
st.session_state["research_assistant_enabled"] = False
# Get research_assistant_enabled from session state if set
research_assistant_enabled = st.session_state["research_assistant_enabled"]
# Checkbox for enabling the Research Assistant
research_assistant = st.sidebar.checkbox(
"Research Assistant",
value=research_assistant_enabled,
help="Enable the research assistant (uses Exa).",
)
if research_assistant_enabled != research_assistant:
st.session_state["research_assistant_enabled"] = research_assistant
research_assistant_enabled = research_assistant
restart_assistant()

# Enable Investment Assistant
if "investment_assistant_enabled" not in st.session_state:
st.session_state["investment_assistant_enabled"] = False
# Get investment_assistant_enabled from session state if set
investment_assistant_enabled = st.session_state["investment_assistant_enabled"]
# Checkbox for enabling the Investment Assistant
investment_assistant = st.sidebar.checkbox(
"Investment Assistant",
value=investment_assistant_enabled,
help="Enable the investment assistant. NOTE: This is not financial advice.",
)
if investment_assistant_enabled != investment_assistant:
st.session_state["investment_assistant_enabled"] = investment_assistant
investment_assistant_enabled = investment_assistant
restart_assistant()

# Get the assistant
personalized_auto_rag_assistant: Assistant
if (
"personalized_auto_rag_assistant" not in st.session_state
or st.session_state["personalized_auto_rag_assistant"] is None
):
logger.info(f"---*--- Creating {llm_model} Assistant ---*---")
personalized_auto_rag_assistant = get_personalized_auto_rag_assistant(llm_model=llm_model, user_id=user_id)
st.session_state["personalized_auto_rag_assistant"] = personalized_auto_rag_assistant
personalized_assistant: Assistant
if "personalized_assistant" not in st.session_state or st.session_state["personalized_assistant"] is None:
logger.info(f"---*--- Creating {llm_id} Assistant ---*---")
personalized_assistant = get_personalized_assistant(
llm_id=llm_id,
user_id=user_id,
calculator=calculator_enabled,
ddg_search=ddg_search_enabled,
file_tools=file_tools_enabled,
shell_tools=shell_tools_enabled,
python_assistant=python_assistant_enabled,
research_assistant=research_assistant_enabled,
investment_assistant=investment_assistant_enabled,
)
st.session_state["personalized_assistant"] = personalized_assistant
else:
personalized_auto_rag_assistant = st.session_state["personalized_auto_rag_assistant"]
personalized_assistant = st.session_state["personalized_assistant"]

# Create assistant run (i.e. log to database) and save run_id in session state
try:
st.session_state["personalized_auto_rag_assistant_run_id"] = personalized_auto_rag_assistant.create_run()
st.session_state["assistant_run_id"] = personalized_assistant.create_run()
except Exception:
st.warning("Could not create assistant, is the database running?")
return

# Load existing messages
assistant_chat_history = personalized_auto_rag_assistant.memory.get_chat_history()
assistant_chat_history = personalized_assistant.memory.get_chat_history()
if len(assistant_chat_history) > 0:
logger.debug("Loading chat history")
st.session_state["messages"] = assistant_chat_history
@@ -96,13 +200,13 @@ def main() -> None:
with st.chat_message("assistant"):
resp_container = st.empty()
response = ""
for delta in personalized_auto_rag_assistant.run(question):
for delta in personalized_assistant.run(question):
response += delta # type: ignore
resp_container.markdown(response)
st.session_state["messages"].append({"role": "assistant", "content": response})

# Load knowledge base
if personalized_auto_rag_assistant.knowledge_base:
if personalized_assistant.knowledge_base:
# -*- Add websites to knowledge base
if "url_scrape_key" not in st.session_state:
st.session_state["url_scrape_key"] = 0
@@ -118,7 +222,7 @@ def main() -> None:
scraper = WebsiteReader(max_links=2, max_depth=1)
web_documents: List[Document] = scraper.read(input_url)
if web_documents:
personalized_auto_rag_assistant.knowledge_base.load_documents(web_documents, upsert=True)
personalized_assistant.knowledge_base.load_documents(web_documents, upsert=True)
else:
st.sidebar.error("Could not read website")
st.session_state[f"{input_url}_uploaded"] = True
@@ -138,43 +242,67 @@ def main() -> None:
reader = PDFReader()
auto_rag_documents: List[Document] = reader.read(uploaded_file)
if auto_rag_documents:
personalized_auto_rag_assistant.knowledge_base.load_documents(auto_rag_documents, upsert=True)
personalized_assistant.knowledge_base.load_documents(auto_rag_documents, upsert=True)
else:
st.sidebar.error("Could not read PDF")
st.session_state[f"{auto_rag_name}_uploaded"] = True
alert.empty()

if personalized_auto_rag_assistant.knowledge_base and personalized_auto_rag_assistant.knowledge_base.vector_db:
if personalized_assistant.knowledge_base and personalized_assistant.knowledge_base.vector_db:
if st.sidebar.button("Clear Knowledge Base"):
personalized_auto_rag_assistant.knowledge_base.vector_db.clear()
personalized_assistant.knowledge_base.vector_db.clear()
st.sidebar.success("Knowledge base cleared")

if personalized_auto_rag_assistant.storage:
personalized_auto_rag_assistant_run_ids: List[str] = personalized_auto_rag_assistant.storage.get_all_run_ids(
user_id=user_id
)
new_personalized_auto_rag_assistant_run_id = st.sidebar.selectbox(
"Run ID", options=personalized_auto_rag_assistant_run_ids
)
if st.session_state["personalized_auto_rag_assistant_run_id"] != new_personalized_auto_rag_assistant_run_id:
logger.info(f"---*--- Loading {llm_model} run: {new_personalized_auto_rag_assistant_run_id} ---*---")
st.session_state["personalized_auto_rag_assistant"] = get_personalized_auto_rag_assistant(
llm_model=llm_model, user_id=user_id, run_id=new_personalized_auto_rag_assistant_run_id
if personalized_assistant.storage:
assistant_run_ids: List[str] = personalized_assistant.storage.get_all_run_ids(user_id=user_id)
new_assistant_run_id = st.sidebar.selectbox("Run ID", options=assistant_run_ids)
if st.session_state["assistant_run_id"] != new_assistant_run_id:
logger.info(f"---*--- Loading {llm_id} run: {new_assistant_run_id} ---*---")
st.session_state["personalized_assistant"] = get_personalized_assistant(
llm_id=llm_id,
user_id=user_id,
run_id=new_assistant_run_id,
calculator=calculator_enabled,
ddg_search=ddg_search_enabled,
file_tools=file_tools_enabled,
shell_tools=shell_tools_enabled,
python_assistant=python_assistant_enabled,
research_assistant=research_assistant_enabled,
investment_assistant=investment_assistant_enabled,
)
st.rerun()

# Show Assistant memory
if personalized_auto_rag_assistant.memory.memories and len(personalized_auto_rag_assistant.memory.memories) > 0:
logger.info("Loading assistant memory")
with st.status("Assistant Memory", expanded=False, state="complete"):
with st.container():
memory_container = st.empty()
memory_container.markdown(
"\n".join([f"- {m.memory}" for m in personalized_auto_rag_assistant.memory.memories])
)
with st.status("Assistant Memory", expanded=False, state="complete"):
with st.container():
memory_container = st.empty()
if personalized_assistant.memory.memories and len(personalized_assistant.memory.memories) > 0:
memory_container.markdown("\n".join([f"- {m.memory}" for m in personalized_assistant.memory.memories]))
else:
memory_container.warning("No memories yet.")

# Show team member memory
if personalized_assistant.team and len(personalized_assistant.team) > 0:
for team_member in personalized_assistant.team:
if len(team_member.memory.chat_history) > 0:
with st.status(f"{team_member.name} Memory", expanded=False, state="complete"):
with st.container():
_team_member_memory_container = st.empty()
_team_member_memory_container.json(team_member.memory.get_llm_messages())

if st.sidebar.button("New Run"):
restart_assistant()


def restart_assistant():
logger.debug("---*--- Restarting Assistant ---*---")
st.session_state["personalized_assistant"] = None
st.session_state["assistant_run_id"] = None
if "url_scrape_key" in st.session_state:
st.session_state["url_scrape_key"] += 1
if "file_uploader_key" in st.session_state:
st.session_state["file_uploader_key"] += 1
st.rerun()


main()
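For completeness, a minimal sketch of exercising the same objects outside Streamlit, under the same assumptions as the earlier example: `assistant` is the instance returned by `get_personalized_assistant`, the reader settings mirror the sidebar flow in `app.py`, and the import paths are assumed from the phidata cookbook layout.

```python
from typing import List

from phi.document import Document
from phi.document.reader.website import WebsiteReader  # import paths assumed

# Load a website into the knowledge base, mirroring the sidebar flow in app.py.
if assistant.knowledge_base:
    scraper = WebsiteReader(max_links=2, max_depth=1)
    web_documents: List[Document] = scraper.read(
        "https://techcrunch.com/2024/04/18/meta-releases-llama-3-claims-its-among-the-best-open-models-available/"
    )
    if web_documents:
        assistant.knowledge_base.load_documents(web_documents, upsert=True)

# Print what the assistant has remembered, as rendered in the "Assistant Memory" widget.
for m in assistant.memory.memories or []:
    print(f"- {m.memory}")
```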