diff --git a/cookbook/llms/claude/assistant_stream_off.py b/cookbook/llms/claude/assistant_stream_off.py
index d2565830e9..72e3a2cd4f 100644
--- a/cookbook/llms/claude/assistant_stream_off.py
+++ b/cookbook/llms/claude/assistant_stream_off.py
@@ -6,6 +6,5 @@
     llm=Claude(model="claude-3-5-sonnet-20240620"),
     tools=[DuckDuckGo()],
     show_tool_calls=True,
-    debug_mode=True,
 )
 assistant.print_response("Whats happening in France?", markdown=True, stream=False)
diff --git a/cookbook/providers/openai/agent.py b/cookbook/providers/openai/agent.py
index ecaec79eaf..63822d1c92 100644
--- a/cookbook/providers/openai/agent.py
+++ b/cookbook/providers/openai/agent.py
@@ -1,11 +1,17 @@
-from phi.agent import Agent
+from phi.agent import Agent, RunResponse  # noqa
 from phi.model.openai import OpenAIChat
-from phi.tools.duckduckgo import DuckDuckGo
+from phi.tools.yfinance import YFinanceTools
 
 agent = Agent(
-    model=OpenAIChat(model="gpt-4o"),
-    tools=[DuckDuckGo()],
+    model=OpenAIChat(id="gpt-4o"),
+    tools=[YFinanceTools(stock_price=True)],
     show_tool_calls=True,
-    # debug_mode=True,
+    markdown=True,
 )
-agent.print_response("Whats happening in France?", markdown=True)
+
+# Get the response in a variable
+# run: RunResponse = agent.run("What is the stock price of NVDA and TSLA")
+# print(run.content)
+
+# Print the response in the terminal
+agent.print_response("What is the stock price of NVDA and TSLA")
diff --git a/cookbook/providers/openai/agent_stream.py b/cookbook/providers/openai/agent_stream.py
index 5830b6324a..6e0154ce35 100644
--- a/cookbook/providers/openai/agent_stream.py
+++ b/cookbook/providers/openai/agent_stream.py
@@ -1,11 +1,19 @@
-from phi.agent import Agent
+from typing import Iterator  # noqa
+from phi.agent import Agent, RunResponse  # noqa
 from phi.model.openai import OpenAIChat
-from phi.tools.duckduckgo import DuckDuckGo
+from phi.tools.yfinance import YFinanceTools
 
 agent = Agent(
-    model=OpenAIChat(model="gpt-4o"),
-    tools=[DuckDuckGo()],
+    model=OpenAIChat(id="gpt-4o"),
+    tools=[YFinanceTools(stock_price=True)],
     show_tool_calls=True,
-    # debug_mode=True,
+    markdown=True,
 )
-agent.print_response("Whats happening in France?", markdown=True, stream=True)
+
+# Get the response in a variable
+# run_response: Iterator[RunResponse] = agent.run("What is the stock price of NVDA and TSLA", stream=True)
+# for chunk in run_response:
+#     print(chunk.content)
+
+# Print the response in the terminal
+agent.print_response("What is the stock price of NVDA and TSLA", stream=True)
diff --git a/cookbook/providers/openai/basic.py b/cookbook/providers/openai/basic.py
index 0cd846d5f4..01d1ace843 100644
--- a/cookbook/providers/openai/basic.py
+++ b/cookbook/providers/openai/basic.py
@@ -1,11 +1,11 @@
-from phi.agent import Agent, RunResponse
+from phi.agent import Agent, RunResponse  # noqa
 from phi.model.openai import OpenAIChat
 
-agent = Agent(
-    model=OpenAIChat(model="gpt-4o"),
-    description="You help people with their health and fitness goals.",
-)
+agent = Agent(model=OpenAIChat(id="gpt-4o"), instructions=["Respond in a southern tone"], markdown=True)
 
-run: RunResponse = agent.run("Share a healthy breakfast recipe")  # type: ignore
+# Get the response in a variable
+# run: RunResponse = agent.run("Explain simulation theory")
+# print(run.content)
 
-print(run.content)
+# Print the response in the terminal
+agent.print_response("Explain simulation theory")
diff --git a/cookbook/providers/openai/basic_stream.py b/cookbook/providers/openai/basic_stream.py
index cd5f2df308..d865e98ea1 100644
--- a/cookbook/providers/openai/basic_stream.py
+++ b/cookbook/providers/openai/basic_stream.py
@@ -1,9 +1,13 @@
-from phi.agent import Agent
+from typing import Iterator  # noqa
+from phi.agent import Agent, RunResponse  # noqa
 from phi.model.openai import OpenAIChat
 
-agent = Agent(
-    model=OpenAIChat(model="gpt-4o"),
-    description="You help people with their health and fitness goals.",
-)
+agent = Agent(model=OpenAIChat(id="gpt-4o"), instructions=["Respond in a southern tone"], markdown=True)
 
-agent.print_response("Share a healthy breakfast recipe", stream=True)
+# Get the response in a variable
+# run_response: Iterator[RunResponse] = agent.run("Explain simulation theory", stream=True)
+# for chunk in run_response:
+#     print(chunk.content)
+
+# Print the response in the terminal
+agent.print_response("Explain simulation theory", stream=True)