Skip to content

Commit

Permalink
Add model_copy to make ollama chat deep copy work with a set client (#1603)
Browse files Browse the repository at this point in the history

## Description
If you set the client manually for Ollama, deep-copying the agent's model fails
because certain client properties cannot be pickled. This change overrides
`model_copy` to exclude the client from the copy and re-attach it, resolving the issue.
  • Loading branch information
dirkbrnd authored Dec 20, 2024
1 parent 49dd0e3 commit 30c962e
Show file tree
Hide file tree
Showing 3 changed files with 24 additions and 6 deletions.
8 changes: 2 additions & 6 deletions cookbook/agents/14_generate_image.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,5 @@
images = image_agent.get_images()
if images and isinstance(images, list):
for image_response in images:
image_data = image_response.get("data") # type: ignore
if image_data:
for image in image_data:
image_url = image.get("url") # type: ignore
if image_url:
print(image_url)
image_url = image_response.url
print(image_url)
18 changes: 18 additions & 0 deletions cookbook/providers/ollama/agent_set_client.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
"""Run `pip install yfinance` to install dependencies."""

from ollama import Client as OllamaClient
from phi.agent import Agent, RunResponse # noqa
from phi.model.ollama import Ollama
from phi.playground import Playground, serve_playground_app
from phi.tools.yfinance import YFinanceTools

agent = Agent(
model=Ollama(id="llama3.1:8b", client=OllamaClient()),
tools=[YFinanceTools(stock_price=True)],
markdown=True,
)

app = Playground(agents=[agent]).get_app()

if __name__ == "__main__":
serve_playground_app("agent_set_client:app", reload=True)
4 changes: 4 additions & 0 deletions phi/model/ollama/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -722,3 +722,7 @@ async def aresponse_stream(self, messages: List[Message]) -> Any:
async for post_tool_call_response in self.ahandle_post_tool_call_messages_stream(messages=messages):
yield post_tool_call_response
logger.debug("---------- Ollama Async Response End ----------")

def model_copy(self, *, update: Optional[dict[str, Any]] = None, deep: bool = False) -> "Ollama":
    """Return a copy of this model, re-attaching the manually set client.

    Overrides pydantic's ``BaseModel.model_copy`` because a manually set
    ``client`` (an ``ollama.Client``) cannot be pickled, which breaks the
    default copy path. The client is excluded from the field dump and the
    same instance is shared with the copy.

    Args:
        update: Field values to change on the copy (pydantic semantics).
        deep: If True, deep-copy the dumped field data so nested
            containers are not shared with the original.

    Returns:
        A new ``Ollama`` instance mirroring this one.
    """
    from copy import deepcopy

    # Exclude the client: it holds resources that cannot be pickled.
    data = self.model_dump(exclude={"client"})
    if update:
        # Honor `update` — the previous implementation silently dropped it.
        data.update(update)
    if deep:
        # Honor `deep` so mutable nested values are not shared with the copy.
        data = deepcopy(data)
    # `update` may supply a replacement client; otherwise share the existing one.
    client = data.pop("client", self.client)
    return Ollama(**data, client=client)

0 comments on commit 30c962e

Please sign in to comment.