From 53f636d1a9a3782d4f1a6637a6b88cef6fd2a136 Mon Sep 17 00:00:00 2001
From: ysolanky
Date: Tue, 22 Oct 2024 17:52:30 -0400
Subject: [PATCH] update-gemini-add_tool-fn-phi-1733

---
 cookbook/providers/google/agent_stream.py |  1 -
 cookbook/providers/google/basic.py        |  2 +-
 phi/memory/agent.py                       |  7 +-
 phi/model/google/gemini.py                | 99 +++++++++++------------
 4 files changed, 55 insertions(+), 54 deletions(-)

diff --git a/cookbook/providers/google/agent_stream.py b/cookbook/providers/google/agent_stream.py
index d38224899..041777b31 100644
--- a/cookbook/providers/google/agent_stream.py
+++ b/cookbook/providers/google/agent_stream.py
@@ -11,7 +11,6 @@
     instructions=["Use tables where possible."],
     markdown=True,
     show_tool_calls=True,
-    debug_mode=True,
 )
 
 # Get the response in a variable
diff --git a/cookbook/providers/google/basic.py b/cookbook/providers/google/basic.py
index 90e5ef2f2..63f2d98df 100644
--- a/cookbook/providers/google/basic.py
+++ b/cookbook/providers/google/basic.py
@@ -1,7 +1,7 @@
 from phi.agent import Agent, RunResponse  # noqa
 from phi.model.google import Gemini
 
-agent = Agent(model=Gemini(id="gemini-1.5-flash"), markdown=True, debug_mode=True)
+agent = Agent(model=Gemini(id="gemini-1.5-flash"), markdown=True)
 
 # Get the response in a variable
 # run: RunResponse = agent.run("Share a 2 sentence horror story")
diff --git a/phi/memory/agent.py b/phi/memory/agent.py
index ee86aebb7..bce7a068e 100644
--- a/phi/memory/agent.py
+++ b/phi/memory/agent.py
@@ -164,10 +164,13 @@ def get_messages_from_last_n_chats(
         return messages_from_last_n_history
 
     def get_message_pairs(
-        self, user_role: str = "user", assistant_role: str = "assistant"
+        self, user_role: str = "user", assistant_role: Optional[List[str]] = None
     ) -> List[Tuple[Message, Message]]:
         """Returns a list of tuples of (user message, assistant response)."""
 
+        if assistant_role is None:
+            assistant_role = ["assistant", "model", "CHATBOT"]
+
         chats_as_message_pairs: List[Tuple[Message, Message]] = []
         for chat in self.chats:
             if chat.response and chat.response.messages:
@@ -182,7 +185,7 @@ def get_message_pairs(
 
             # Start from the end to look for the assistant response
             for message in chat.response.messages[::-1]:
-                if message.role == assistant_role:
+                if message.role in assistant_role:
                     assistant_messages_from_chat = message
                     break
 
diff --git a/phi/model/google/gemini.py b/phi/model/google/gemini.py
index 01c3ee0f3..0c9f83fb4 100644
--- a/phi/model/google/gemini.py
+++ b/phi/model/google/gemini.py
@@ -189,57 +189,58 @@ def add_tool(self, tool: Union["Tool", "Toolkit", Callable, dict, "Function"]) -
         Args:
             tool: The tool to add. Can be a Tool, Toolkit, Callable, dict, or Function.
         """
-        # Initialize function declarations if necessary
         if self.function_declarations is None:
             self.function_declarations = []
 
-        # Initialize functions if necessary
-        if self.functions is None:
-            self.functions = {}
-
-        if isinstance(tool, Toolkit):
-            # Add all functions from the toolkit
-            self.functions.update(tool.functions)
-            for func in tool.functions.values():
-                function_declaration = FunctionDeclaration(
-                    name=func.name,
-                    description=func.description,
-                    parameters=self._format_functions(func.parameters),
-                )
-                self.function_declarations.append(function_declaration)
-            logger.debug(f"Functions from toolkit '{tool.name}' added to LLM.")
-
-        elif isinstance(tool, Function):
-            # Add the single Function instance
-            self.functions[tool.name] = tool
-            function_declaration = FunctionDeclaration(
-                name=tool.name,
-                description=tool.description,
-                parameters=self._format_functions(tool.parameters),
-            )
-            self.function_declarations.append(function_declaration)
-            logger.debug(f"Function '{tool.name}' added to LLM.")
-
-        elif callable(tool):
-            # Convert the callable to a Function instance and add it
-            func = Function.from_callable(tool)
-            self.functions[func.name] = func
-            function_declaration = FunctionDeclaration(
-                name=func.name,
-                description=func.description,
-                parameters=self._format_functions(func.parameters),
-            )
-            self.function_declarations.append(function_declaration)
-            logger.debug(f"Function '{func.name}' added to LLM.")
-
-        elif isinstance(tool, Tool):
-            logger.warning(f"Tool of type '{type(tool).__name__}' is not yet supported by Gemini.")
-
-        elif isinstance(tool, dict):
-            logger.warning("Tool of type 'dict' is not yet supported by Gemini.")
-
-        else:
-            logger.warning(f"Unsupported tool type: {type(tool).__name__}")
+        # If the tool is a Tool or Dict, log a warning.
+        if isinstance(tool, Tool) or isinstance(tool, Dict):
+            logger.warning("Tool of type 'Tool' or 'dict' is not yet supported by Gemini.")
+
+        # If the tool is a Callable, Toolkit, or Function, add its functions to the Model
+        elif callable(tool) or isinstance(tool, Toolkit) or isinstance(tool, Function):
+            if self.functions is None:
+                self.functions = {}
+
+            if isinstance(tool, Toolkit):
+                # For each function in the toolkit
+                for name, func in tool.functions.items():
+                    # If the function does not exist in self.functions, add it to self.functions
+                    if name not in self.functions:
+                        self.functions[name] = func
+                        function_declaration = FunctionDeclaration(
+                            name=func.name,
+                            description=func.description,
+                            parameters=self._format_functions(func.parameters),
+                        )
+                        self.function_declarations.append(function_declaration)
+                        logger.debug(f"Function {name} from {tool.name} added to model.")
+
+            elif isinstance(tool, Function):
+                if tool.name not in self.functions:
+                    self.functions[tool.name] = tool
+                    function_declaration = FunctionDeclaration(
+                        name=tool.name,
+                        description=tool.description,
+                        parameters=self._format_functions(tool.parameters),
+                    )
+                    self.function_declarations.append(function_declaration)
+                    logger.debug(f"Function {tool.name} added to model.")
+
+            elif callable(tool):
+                try:
+                    function_name = tool.__name__
+                    if function_name not in self.functions:
+                        func = Function.from_callable(tool)
+                        self.functions[func.name] = func
+                        function_declaration = FunctionDeclaration(
+                            name=func.name,
+                            description=func.description,
+                            parameters=self._format_functions(func.parameters),
+                        )
+                        self.function_declarations.append(function_declaration)
+                        logger.debug(f"Function '{func.name}' added to model.")
+                except Exception as e:
+                    logger.warning(f"Could not add function {tool}: {e}")
 
     def invoke(self, messages: List[Message]):
         """
@@ -477,8 +478,6 @@ def response(self, messages: List[Message]) -> ModelResponse:
         response: GenerateContentResponse = self.invoke(messages=messages)
         response_timer.stop()
         logger.debug(f"Time to generate response: {response_timer.elapsed:.4f}s")
-        logger.debug(f"Gemini response type: {type(response)}")
-        logger.debug(f"Gemini response: {response}")
 
         # Create assistant message
         assistant_message = self._create_assistant_message(response=response, response_timer=response_timer)
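
Note on the phi/memory/agent.py change: Gemini stores its replies with role "model" (and Cohere with "CHATBOT"), so the old single-string default of assistant_role="assistant" never matched them and get_message_pairs returned no pairs for those providers. A minimal sketch of the new default, assuming the memory class defined in phi/memory/agent.py is AgentMemory and that it can be constructed with its defaults:

    from phi.memory.agent import AgentMemory

    memory = AgentMemory()
    # ... after an agent run, memory.chats holds the user prompts and the
    # provider's replies, which Gemini stores with role="model" ...

    # The new default matches "assistant", "model" (Gemini) and "CHATBOT" (Cohere).
    pairs = memory.get_message_pairs()

    # The previous behaviour is still available by passing an explicit list.
    assistant_only = memory.get_message_pairs(assistant_role=["assistant"])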
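
Note on the phi/model/google/gemini.py change: add_tool now skips any function whose name is already registered, wraps plain callables with Function.from_callable inside a try/except, and collapses the unsupported Tool and dict cases into a single warning. A minimal sketch of the expected behaviour; the get_weather helper is hypothetical, and the sketch assumes google-generativeai is installed and that constructing Gemini does not itself call the API:

    from phi.model.google import Gemini


    def get_weather(city: str) -> str:
        """Hypothetical tool: return a short weather summary for a city."""
        return f"It is sunny in {city}."


    model = Gemini(id="gemini-1.5-flash")

    # A plain callable is wrapped via Function.from_callable and registered once.
    model.add_tool(get_weather)

    # Re-adding the same callable is a no-op: its name is already in model.functions.
    model.add_tool(get_weather)

    # A raw dict is not yet supported by the Gemini wrapper; this only logs a warning.
    model.add_tool({"name": "noop"})

    print(list(model.functions))             # ['get_weather']
    print(len(model.function_declarations))  # 1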