diff --git a/cookbook/claude/README.md b/cookbook/claude/README.md
index 3c9d39cdb..bc79d11b7 100644
--- a/cookbook/claude/README.md
+++ b/cookbook/claude/README.md
@@ -20,35 +20,51 @@ export ANTHROPIC_API_KEY=xxx
 ### 3. Install libraries
 
 ```shell
-pip install -U anthropic phidata duckduckgo-search duckdb yfinance exa_py
+pip install -U anthropic duckduckgo-search duckdb yfinance exa_py phidata
 ```
 
-### 4. Web search function calling
+### 4. Run Assistant
+
+- stream on
 
 ```shell
-python cookbook/claude/web_search.py
+python cookbook/claude/assistant.py
 ```
 
-### 5. YFinance function calling
+- stream off
 
 ```shell
-python cookbook/claude/finance.py
+python cookbook/claude/assistant_stream_off.py
 ```
 
-### 6. Structured output
+### 5. Run Assistant with Tools
+
+- Web search
 
 ```shell
-python cookbook/claude/structured_output.py
+python cookbook/claude/web_search.py
+```
+
+- YFinance
+
+```shell
+python cookbook/claude/finance.py
 ```
 
-### 7. Data Analyst
+- Data Analyst
 
 ```shell
 python cookbook/claude/data_analyst.py
 ```
 
-### 8. Exa Search
+- Exa Search
 
 ```shell
 python cookbook/claude/exa_search.py
 ```
+
+### 6. Run Assistant with Structured Output
+
+```shell
+python cookbook/claude/structured_output.py
+```
diff --git a/cookbook/claude/exa_search.py b/cookbook/claude/exa_search.py
index 07d019352..a55655cd2 100644
--- a/cookbook/claude/exa_search.py
+++ b/cookbook/claude/exa_search.py
@@ -1,6 +1,12 @@
 from phi.assistant import Assistant
-from phi.llm.anthropic import Claude
 from phi.tools.exa import ExaTools
+from phi.tools.website import WebsiteTools
+from phi.llm.anthropic import Claude
 
-assistant = Assistant(llm=Claude(), tools=[ExaTools()], show_tool_calls=True)
-assistant.cli_app(markdown=True)
+assistant = Assistant(llm=Claude(), tools=[ExaTools(), WebsiteTools()], show_tool_calls=True)
+assistant.print_response(
+    "Produce this table: research chromatic homotopy theory. "
+ "Access each link in the result outputting the summary for that article, its link, and keywords; " + "After the table output make conceptual ascii art of the overarching themes and constructions", + markdown=True, +) diff --git a/cookbook/claude/finance.py b/cookbook/claude/finance.py index 9d0fe0013..f94064f9a 100644 --- a/cookbook/claude/finance.py +++ b/cookbook/claude/finance.py @@ -3,9 +3,13 @@ from phi.llm.anthropic import Claude assistant = Assistant( - llm=Claude(model="claude-3-opus-20240229"), + name="Finance Assistant", + llm=Claude(model="claude-3-haiku-20240307"), tools=[YFinanceTools(stock_price=True, analyst_recommendations=True, stock_fundamentals=True)], show_tool_calls=True, + description="You are an investment analyst that researches stock prices, analyst recommendations, and stock fundamentals.", + instructions=["Format your response using markdown and use tables to display data where possible."], + debug_mode=True, ) -assistant.print_response("Share the NVDA stock price and some analyst recommendations", markdown=True) -assistant.print_response("Summarize fundamentals for TSLA", markdown=True) +assistant.print_response("Share the NVDA stock price and analyst recommendations", markdown=True) +# assistant.print_response("Summarize fundamentals for TSLA", markdown=True) diff --git a/cookbook/cohere/README.md b/cookbook/cohere/README.md index 67eecf4d9..29892422f 100644 --- a/cookbook/cohere/README.md +++ b/cookbook/cohere/README.md @@ -1,4 +1,4 @@ -# Cohere function calling +# CohereChat function calling Currently "command-r" model supports function calling @@ -11,7 +11,7 @@ python3 -m venv ~/.venvs/aienv source ~/.venvs/aienv/bin/activate ``` -### 2. Export your Cohere API Key +### 2. Export your CohereChat API Key ```shell export CO_API_KEY=xxx @@ -20,7 +20,7 @@ export CO_API_KEY=xxx ### 3. Install libraries ```shell -pip install -U cohere phidata duckduckgo-search yfinance exa_py +pip install -U cohere duckduckgo-search yfinance exa_py phidata ``` ### 4. 
diff --git a/cookbook/cohere/assistant.py b/cookbook/cohere/assistant.py
index 724e9ac55..177a8ace4 100644
--- a/cookbook/cohere/assistant.py
+++ b/cookbook/cohere/assistant.py
@@ -1,7 +1,9 @@
 from phi.assistant import Assistant
-from phi.llm.cohere import Cohere
+from phi.llm.cohere import CohereChat
 
 assistant = Assistant(
-    llm=Cohere(model="command-r"), description="You help people with their health and fitness goals.", debug_mode=True
+    llm=CohereChat(model="command-r"),
+    description="You help people with their health and fitness goals.",
+    debug_mode=True,
 )
 assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True)
diff --git a/cookbook/cohere/assistant_stream_off.py b/cookbook/cohere/assistant_stream_off.py
index 7cd13ad93..b5ee1e9ef 100644
--- a/cookbook/cohere/assistant_stream_off.py
+++ b/cookbook/cohere/assistant_stream_off.py
@@ -1,7 +1,9 @@
 from phi.assistant import Assistant
-from phi.llm.cohere import Cohere
+from phi.llm.cohere import CohereChat
 
 assistant = Assistant(
-    llm=Cohere(model="command-r"), description="You help people with their health and fitness goals.", debug_mode=True
+    llm=CohereChat(model="command-r"),
+    description="You help people with their health and fitness goals.",
+    debug_mode=True,
 )
 assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True, stream=False)
diff --git a/cookbook/cohere/exa_search.py b/cookbook/cohere/exa_search.py
index c496880b0..57b46f660 100644
--- a/cookbook/cohere/exa_search.py
+++ b/cookbook/cohere/exa_search.py
@@ -1,6 +1,6 @@
 from phi.assistant import Assistant
-from phi.llm.cohere import Cohere
+from phi.llm.cohere import CohereChat
 from phi.tools.exa import ExaTools
 
-assistant = Assistant(llm=Cohere(model="command-r"), tools=[ExaTools()], show_tool_calls=True)
+assistant = Assistant(llm=CohereChat(model="command-r"), tools=[ExaTools()], show_tool_calls=True)
 assistant.cli_app(markdown=True)
diff --git a/cookbook/cohere/finance.py b/cookbook/cohere/finance.py
index a6d40bd69..2e922cab9 100644
--- a/cookbook/cohere/finance.py
+++ b/cookbook/cohere/finance.py
@@ -1,11 +1,11 @@
 from phi.assistant import Assistant
 from phi.tools.yfinance import YFinanceTools
-from phi.llm.cohere import Cohere
+from phi.llm.cohere import CohereChat
 
 assistant = Assistant(
-    llm=Cohere(model="command-r"),
+    llm=CohereChat(model="command-r"),
     tools=[YFinanceTools(stock_price=True, analyst_recommendations=True, stock_fundamentals=True)],
     show_tool_calls=True,
 )
-assistant.print_response("Share the NVDA stock price and some analyst recommendations", markdown=True)
+assistant.print_response("Share the NVDA stock price and analyst recommendations", markdown=True)
 assistant.print_response("Summarize fundamentals for TSLA", markdown=True)
diff --git a/cookbook/cohere/structured_output.py b/cookbook/cohere/structured_output.py
index 221bbaca6..bed281123 100644
--- a/cookbook/cohere/structured_output.py
+++ b/cookbook/cohere/structured_output.py
@@ -2,7 +2,7 @@
 from pydantic import BaseModel, Field
 from rich.pretty import pprint
 from phi.assistant import Assistant
-from phi.llm.cohere import Cohere
+from phi.llm.cohere import CohereChat
 
 
 class MovieScript(BaseModel):
@@ -17,7 +17,7 @@ class MovieScript(BaseModel):
 
 
 movie_assistant = Assistant(
-    llm=Cohere(model="command-r"),
+    llm=CohereChat(model="command-r"),
     description="You help people write movie scripts.",
     output_model=MovieScript,
     # debug_mode=True,
diff --git a/cookbook/cohere/web_search.py b/cookbook/cohere/web_search.py
index e9a2a249f..e48b8e8ef 100644
--- a/cookbook/cohere/web_search.py
+++ b/cookbook/cohere/web_search.py
@@ -1,6 +1,6 @@
 from phi.assistant import Assistant
 from phi.tools.duckduckgo import DuckDuckGo
-from phi.llm.cohere import Cohere
+from phi.llm.cohere import CohereChat
 
-assistant = Assistant(llm=Cohere(model="command-r"), tools=[DuckDuckGo()], show_tool_calls=True)
+assistant = Assistant(llm=CohereChat(model="command-r"), tools=[DuckDuckGo()], show_tool_calls=True)
 assistant.print_response("Share 1 story from france and 1 from germany?", markdown=True)
diff --git a/cookbook/groq/exa_search.py b/cookbook/groq/exa_search.py
deleted file mode 100644
index 6c6c89d35..000000000
--- a/cookbook/groq/exa_search.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from phi.assistant import Assistant
-from phi.tools.exa import ExaTools
-from phi.tools.website import WebsiteTools
-from phi.llm.groq import Groq
-
-assistant = Assistant(llm=Groq(model="mixtral-8x7b-32768"), tools=[ExaTools(), WebsiteTools()], show_tool_calls=True)
-assistant.cli_app(markdown=True, stream=False)
diff --git a/cookbook/groq/finance.py b/cookbook/groq/finance.py
index c45a8cb2d..ce0bbe5e3 100644
--- a/cookbook/groq/finance.py
+++ b/cookbook/groq/finance.py
@@ -6,6 +6,7 @@
     llm=Groq(model="mixtral-8x7b-32768"),
     tools=[YFinanceTools(stock_price=True, analyst_recommendations=True, stock_fundamentals=True)],
     show_tool_calls=True,
+    # debug_mode=True,
 )
-assistant.print_response("Share the NVDA stock price and some analyst recommendations", markdown=True, stream=False)
-assistant.print_response("Summarize fundamentals for TSLA", markdown=True, stream=False)
+assistant.print_response("Share the NVDA stock price and analyst recommendations", markdown=True, stream=False)
+# assistant.print_response("Summarize fundamentals for TSLA", markdown=True, stream=False)
diff --git a/cookbook/groq/web_search.py b/cookbook/groq/web_search.py
index 448002c25..f3d748e77 100644
--- a/cookbook/groq/web_search.py
+++ b/cookbook/groq/web_search.py
@@ -2,5 +2,10 @@
 from phi.tools.duckduckgo import DuckDuckGo
 from phi.llm.groq import Groq
 
-assistant = Assistant(llm=Groq(model="mixtral-8x7b-32768"), tools=[DuckDuckGo()], show_tool_calls=True, debug_mode=True)
+assistant = Assistant(
+    llm=Groq(model="mixtral-8x7b-32768"),
+    tools=[DuckDuckGo()],
+    show_tool_calls=True,
+    # debug_mode=True
+)
 assistant.print_response("Tell me about OpenAI Sora", markdown=True, stream=False)
diff --git a/cookbook/hermes2/finance.py b/cookbook/hermes2/finance.py
index 4fd32620d..e86f7ead2 100644
--- a/cookbook/hermes2/finance.py
+++ b/cookbook/hermes2/finance.py
@@ -7,5 +7,5 @@
     tools=[YFinanceTools(stock_price=True, analyst_recommendations=True, stock_fundamentals=True)],
     show_tool_calls=True,
 )
-assistant.print_response("Share the NVDA stock price and some analyst recommendations", markdown=True)
+assistant.print_response("Share the NVDA stock price and analyst recommendations", markdown=True)
 assistant.print_response("Summarize fundamentals for TSLA", markdown=True)
diff --git a/phi/assistant/assistant.py b/phi/assistant/assistant.py
index d089ccbf5..2831d85ec 100644
--- a/phi/assistant/assistant.py
+++ b/phi/assistant/assistant.py
@@ -575,11 +575,11 @@ def _run(
         # -*- Update run output
         self.output = run_output
 
+        logger.debug(f"*********** Run End: {self.run_id} ***********")
         # -*- Yield final response if not streaming
         if not stream:
             yield run_output
-        logger.debug(f"*********** Run End: {self.run_id} ***********")
 
     def run(
         self, message: Optional[Union[List, Dict, str]] = None, stream: bool = True, **kwargs: Any
diff --git a/phi/llm/base.py b/phi/llm/base.py
index 389d819dd..91ed87f33 100644
--- a/phi/llm/base.py
+++ b/phi/llm/base.py
@@ -46,6 +46,9 @@ class LLM(BaseModel):
     system_prompt: Optional[str] = None
     instructions: Optional[List[str]] = None
 
+    # State from the run
+    run_id: Optional[str] = None
+
     model_config = ConfigDict(arbitrary_types_allowed=True)
 
     @property
diff --git a/phi/llm/cohere/__init__.py b/phi/llm/cohere/__init__.py
index 29fb74719..b3b0e328d 100644
--- a/phi/llm/cohere/__init__.py
+++ b/phi/llm/cohere/__init__.py
@@ -1 +1 @@
-from phi.llm.cohere.cohere import Cohere
+from phi.llm.cohere.chat import CohereChat
diff --git a/phi/llm/cohere/cohere.py b/phi/llm/cohere/chat.py
similarity index 92%
rename from phi/llm/cohere/cohere.py
rename to phi/llm/cohere/chat.py
index 38a33f27a..58eddeee3 100644
--- a/phi/llm/cohere/cohere.py
+++ b/phi/llm/cohere/chat.py
@@ -2,15 +2,12 @@
 from textwrap import dedent
 from typing import Optional, List, Dict, Any
-
 from phi.llm.base import LLM
 from phi.llm.message import Message
 from phi.tools.function import FunctionCall
 from phi.utils.log import logger
 from phi.utils.timer import Timer
-from phi.utils.tools import (
-    get_function_call_for_tool_call,
-)
+from phi.utils.tools import get_function_call_for_tool_call
 
 try:
     from cohere import Client as CohereClient
@@ -27,10 +24,10 @@
     raise
 
 
-class Cohere(LLM):
+class CohereChat(LLM):
     name: str = "cohere"
     model: str = "command-r"
-    # # -*- Request parameters
+    # -*- Request parameters
     temperature: Optional[float] = None
     max_tokens: Optional[int] = None
     top_k: Optional[int] = None
@@ -38,10 +35,12 @@
     frequency_penalty: Optional[float] = None
     presence_penalty: Optional[float] = None
     request_params: Optional[Dict[str, Any]] = None
+    # Use cohere conversation_id to create a persistent conversation
+    use_conversation_id: bool = True
    # -*- Client parameters
     api_key: Optional[str] = None
     client_params: Optional[Dict[str, Any]] = None
-    # -*- Provide the client manually
+    # -*- Provide the Cohere client manually
     cohere_client: Optional[CohereClient] = None
 
     @property
@@ -57,6 +56,8 @@ def client(self) -> CohereClient:
     @property
     def api_kwargs(self) -> Dict[str, Any]:
         _request_params: Dict[str, Any] = {}
+        if self.use_conversation_id and self.run_id is not None:
+            _request_params["conversation_id"] = self.run_id
         if self.temperature:
             _request_params["temperature"] = self.temperature
         if self.max_tokens:
@@ -96,7 +97,28 @@ def get_tools(self) -> Optional[List[Dict[str, Any]]]:
 
     def invoke(self, messages: List[Message], tool_results: Optional[List[ChatRequestToolResultsItem]] = None) -> Chat:
         api_kwargs: Dict[str, Any] = self.api_kwargs
-        api_kwargs["chat_history"] = []
+        chat_message = None
+
+        if not self.use_conversation_id or self.run_id is None:
+            logger.debug("Providing chat_history to cohere.")
+            chat_history = []
+            # Track the last user message so it is not added to chat_history
+            last_user_message = None
+            for m in messages:
+                if m.role == "system":
+                    api_kwargs["preamble"] = m.content
+                elif m.role == "user":
+                    if last_user_message is not None:
+                        # Append the previously tracked user message to chat_history before updating it
+                        chat_history.append({"role": "USER", "message": last_user_message})
+                    # Update the last user message
+                    last_user_message = m.content
+                else:
+                    chat_history.append({"role": "CHATBOT", "message": m.content or ""})
+
+            api_kwargs["chat_history"] = chat_history
+
+        user_message: List = []
         # Track the last user message to prevent adding it to chat_history
         last_user_message = None
 
diff --git a/phi/llm/openai/chat.py b/phi/llm/openai/chat.py
index 743cccca8..adf80c9a2 100644
--- a/phi/llm/openai/chat.py
+++ b/phi/llm/openai/chat.py
@@ -57,7 +57,7 @@ class OpenAIChat(LLM):
     default_headers: Optional[Any] = None
     default_query: Optional[Any] = None
     client_params: Optional[Dict[str, Any]] = None
-    # -*- Provide the OpenAIClient manually
+    # -*- Provide the OpenAI client manually
     openai_client: Optional[OpenAIClient] = None
 
     @property
diff --git a/phi/task/llm/llm_task.py b/phi/task/llm/llm_task.py
index 189d5448a..db5c13496 100644
--- a/phi/task/llm/llm_task.py
+++ b/phi/task/llm/llm_task.py
@@ -171,19 +171,21 @@ def set_default_llm(self) -> None:
             self.llm = OpenAIChat()
 
-    def add_response_format_to_llm(self) -> None:
-        if self.output_model is not None and self.llm is not None:
-            self.llm.response_format = {"type": "json_object"}
-
-    def add_tools_to_llm(self) -> None:
+    def update_llm(self) -> None:
         if self.llm is None:
             logger.error(f"Task LLM is None: {self.__class__.__name__}")
             return
 
+        # Set response_format if it is not set on the llm
+        if self.output_model is not None and self.llm.response_format is None:
+            self.llm.response_format = {"type": "json_object"}
+
+        # Add tools to the LLM
         if self.tools is not None:
             for tool in self.tools:
                 self.llm.add_tool(tool)
 
+        # Add default tools to the LLM
         if self.use_tools:
             if self.read_chat_history_tool and self.memory is not None:
                 self.llm.add_tool(self.get_chat_history)
@@ -207,11 +209,13 @@ def add_tools_to_llm(self) -> None:
         if self.tool_call_limit is not None and self.tool_call_limit < self.llm.function_call_limit:
             self.llm.function_call_limit = self.tool_call_limit
 
+        if self.run_id is not None:
+            self.llm.run_id = self.run_id
+
     def prepare_task(self) -> None:
         self.set_task_id()
         self.set_default_llm()
-        self.add_response_format_to_llm()
-        self.add_tools_to_llm()
+        self.update_llm()
 
     def get_json_output_prompt(self) -> str:
         json_output_prompt = "\nProvide your output as a JSON containing the following fields:"
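
Usage note (not part of the patch): the thread tying these changes together is that `Task.update_llm()` now copies the task's `run_id` onto the LLM, and `CohereChat.api_kwargs` forwards it as Cohere's `conversation_id` whenever `use_conversation_id` is enabled (the default), so Cohere persists the chat history server-side and `invoke()` skips rebuilding `chat_history`. Below is a minimal sketch of how the new flag would be exercised, assuming this patch is applied; the follow-up prompt is a hypothetical check that the server-side conversation state carries over between calls:

```python
from phi.assistant import Assistant
from phi.llm.cohere import CohereChat

# With use_conversation_id=True (the default), CohereChat sends the run_id as
# Cohere's conversation_id, so the chat history lives on Cohere's side.
# Set it to False to fall back to sending a rebuilt chat_history per request.
assistant = Assistant(
    llm=CohereChat(model="command-r", use_conversation_id=True),
    description="You help people with their health and fitness goals.",
)
assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True)
# Same Assistant, same run_id: the model should still see the first exchange.
assistant.print_response("Roughly how many calories is that?", markdown=True)
```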