Commit 577807d: assistant delegation complete
ashpreetbedi committed Jan 9, 2024
1 parent 953e25b commit 577807d
Showing 11 changed files with 136 additions and 80 deletions.
18 changes: 15 additions & 3 deletions phi/assistant/assistant.py
@@ -1,4 +1,4 @@
from typing import Optional
from typing import Optional, Dict, List

from phi.task.task import Task
from phi.task.llm import LLMTask
@@ -9,12 +9,17 @@ class Assistant(LLMTask):
name: str = "assistant"
description: Optional[str] = None

def get_delegation_function(self, task: Task) -> Function:
def get_delegation_function(
self, task: Task, assistant_responses: Optional[Dict[str, List[str]]] = None
) -> Function:
# Update assistant task
self.conversation_id = task.conversation_id
self.conversation_memory = task.conversation_memory
self.conversation_message = task.conversation_message
self.conversation_tasks = task.conversation_tasks
self.conversation_responses = task.conversation_responses
self.conversation_response_iterator = task.conversation_response_iterator
self.parse_output = False

# Prepare the delegation function
f_name = f"run_{self.name}"
@@ -34,7 +39,14 @@ def get_delegation_function(self, task: Task) -> Function:
"""

def delegation_function(task_description: str):
return self.run(message=task_description, stream=False)
assistant_response = self.run(message=task_description, stream=False)

if self.show_output and assistant_responses is not None:
if self.__class__.__name__ not in assistant_responses:
assistant_responses[self.__class__.__name__] = []
assistant_responses[self.__class__.__name__].append(assistant_response) # type: ignore

return assistant_response

_f = Function.from_callable(delegation_function)
_f.name = f_name
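For context, here is a minimal standalone sketch of the response-collection pattern this file change introduces: the delegation function appends each delegated response to a shared dict keyed by the assistant's class name, so the caller can surface delegated output after the parent task finishes. Everything below is illustrative Python, not phi's actual API.

from typing import Dict, List, Optional


class DemoAssistant:
    """Illustrative stand-in for phi's Assistant; not the real class."""

    show_output: bool = True

    def run(self, message: str) -> str:
        # Placeholder for the real LLM call.
        return f"handled: {message}"

    def get_delegation_function(self, assistant_responses: Optional[Dict[str, List[str]]] = None):
        def delegation_function(task_description: str) -> str:
            response = self.run(task_description)
            # Mirrors the new behaviour above: record the response under the
            # class name so delegated output can be shown later.
            if self.show_output and assistant_responses is not None:
                assistant_responses.setdefault(self.__class__.__name__, []).append(response)
            return response

        return delegation_function


responses: Dict[str, List[str]] = {}
delegate = DemoAssistant().get_delegation_function(assistant_responses=responses)
delegate("summarize the report")
print(responses)  # {'DemoAssistant': ['handled: summarize the report']}
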
2 changes: 1 addition & 1 deletion phi/assistant/openai/__init__.py
@@ -1 +1 @@
from phi.assistant.openai.assistant import OpenAiAssistant
from phi.assistant.openai.assistant import OpenAIAssistant
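Because phi/assistant/openai/__init__.py re-exports the class, downstream code needs the matching import rename. A minimal before/after sketch (the module path comes from this file; everything else is illustrative):

# before this commit
# from phi.assistant.openai import OpenAiAssistant

# after this commit
from phi.assistant.openai import OpenAIAssistant
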
46 changes: 23 additions & 23 deletions phi/assistant/openai/assistant.py
@@ -18,13 +18,13 @@
raise


class OpenAiAssistant(BaseModel):
class OpenAIAssistant(BaseModel):
# -*- LLM settings
model: str = "gpt-4-1106-preview"
openai: Optional[OpenAI] = None

# -*- OpenAiAssistant settings
# OpenAiAssistant id which can be referenced in API endpoints.
# -*- OpenAIAssistant settings
# OpenAIAssistant id which can be referenced in API endpoints.
id: Optional[str] = None
# The object type, populated by the API. Always assistant.
object: Optional[str] = None
@@ -35,30 +35,30 @@ class OpenAiAssistant(BaseModel):
# The system instructions that the assistant uses. The maximum length is 32768 characters.
instructions: Optional[str] = None

# -*- OpenAiAssistant Tools
# -*- OpenAIAssistant Tools
# A list of tools provided to the assistant. There can be a maximum of 128 tools per assistant.
# Tools can be of types code_interpreter, retrieval, or function.
tools: Optional[List[Union[Tool, ToolRegistry, Callable, Dict]]] = None
# -*- Functions available to the OpenAiAssistant to call
# -*- Functions available to the OpenAIAssistant to call
# Functions extracted from the tools which can be executed locally by the assistant.
functions: Optional[Dict[str, Function]] = None

# -*- OpenAiAssistant Files
# -*- OpenAIAssistant Files
# A list of file IDs attached to this assistant.
# There can be a maximum of 20 files attached to the assistant.
# Files are ordered by their creation date in ascending order.
file_ids: Optional[List[str]] = None
# Files attached to this assistant.
files: Optional[List[File]] = None

# -*- OpenAiAssistant Storage
# -*- OpenAIAssistant Storage
# storage: Optional[AssistantStorage] = None
# Create table if it doesn't exist
# create_storage: bool = True
# AssistantRow from the database: DO NOT SET THIS MANUALLY
# database_row: Optional[AssistantRow] = None

# -*- OpenAiAssistant Knowledge Base
# -*- OpenAIAssistant Knowledge Base
# knowledge_base: Optional[KnowledgeBase] = None

# Set of 16 key-value pairs that can be attached to an object.
@@ -92,18 +92,18 @@ def client(self) -> OpenAI:
return self.openai or OpenAI()

@model_validator(mode="after")
def extract_functions_from_tools(self) -> "OpenAiAssistant":
def extract_functions_from_tools(self) -> "OpenAIAssistant":
if self.tools is not None:
for tool in self.tools:
if self.functions is None:
self.functions = {}
if isinstance(tool, ToolRegistry):
self.functions.update(tool.functions)
logger.debug(f"Functions from {tool.name} added to OpenAiAssistant.")
logger.debug(f"Functions from {tool.name} added to OpenAIAssistant.")
elif callable(tool):
f = Function.from_callable(tool)
self.functions[f.name] = f
logger.debug(f"Function {f.name} added to OpenAiAssistant")
logger.debug(f"Function {f.name} added to OpenAIAssistant")
return self

def __enter__(self):
@@ -137,7 +137,7 @@ def get_tools_for_api(self) -> Optional[List[Dict[str, Any]]]:
tools_for_api.append({"type": "function", "function": _f.to_dict()})
return tools_for_api

def create(self) -> "OpenAiAssistant":
def create(self) -> "OpenAIAssistant":
request_body: Dict[str, Any] = {}
if self.name is not None:
request_body["name"] = self.name
@@ -163,7 +163,7 @@ def create(self) -> "OpenAiAssistant":
**request_body,
)
self.load_from_openai(self.openai_assistant)
logger.debug(f"OpenAiAssistant created: {self.id}")
logger.debug(f"OpenAIAssistant created: {self.id}")
return self

def get_id(self) -> Optional[str]:
@@ -172,28 +172,28 @@ def create(self) -> "OpenAiAssistant":
def get_from_openai(self) -> OpenAIAssistantType:
_assistant_id = self.get_id()
if _assistant_id is None:
raise AssistantIdNotSet("OpenAiAssistant.id not set")
raise AssistantIdNotSet("OpenAIAssistant.id not set")

self.openai_assistant = self.client.beta.assistants.retrieve(
assistant_id=_assistant_id,
)
self.load_from_openai(self.openai_assistant)
return self.openai_assistant

def get(self, use_cache: bool = True) -> "OpenAiAssistant":
def get(self, use_cache: bool = True) -> "OpenAIAssistant":
if self.openai_assistant is not None and use_cache:
return self

self.get_from_openai()
return self

def get_or_create(self, use_cache: bool = True) -> "OpenAiAssistant":
def get_or_create(self, use_cache: bool = True) -> "OpenAIAssistant":
try:
return self.get(use_cache=use_cache)
except AssistantIdNotSet:
return self.create()

def update(self) -> "OpenAiAssistant":
def update(self) -> "OpenAIAssistant":
try:
assistant_to_update = self.get_from_openai()
if assistant_to_update is not None:
@@ -227,11 +227,11 @@ def update(self) -> "OpenAiAssistant":
**request_body,
)
self.load_from_openai(self.openai_assistant)
logger.debug(f"OpenAiAssistant updated: {self.id}")
logger.debug(f"OpenAIAssistant updated: {self.id}")
return self
raise ValueError("OpenAiAssistant not available")
raise ValueError("OpenAIAssistant not available")
except AssistantIdNotSet:
logger.warning("OpenAiAssistant not available")
logger.warning("OpenAIAssistant not available")
raise

def delete(self) -> OpenAIAssistantDeleted:
@@ -241,10 +241,10 @@ def delete(self) -> OpenAIAssistantDeleted:
deletion_status = self.client.beta.assistants.delete(
assistant_id=assistant_to_delete.id,
)
logger.debug(f"OpenAiAssistant deleted: {deletion_status.id}")
logger.debug(f"OpenAIAssistant deleted: {deletion_status.id}")
return deletion_status
except AssistantIdNotSet:
logger.warning("OpenAiAssistant not available")
logger.warning("OpenAIAssistant not available")
raise

def to_dict(self) -> Dict[str, Any]:
@@ -275,7 +275,7 @@ def __str__(self) -> str:
return json.dumps(self.to_dict(), indent=4)

def __repr__(self) -> str:
return f"<OpenAiAssistant name={self.name} id={self.id}>"
return f"<OpenAIAssistant name={self.name} id={self.id}>"

#
# def run(self, thread: Optional["Thread"]) -> "Thread":
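The changes in this file are a mechanical OpenAiAssistant -> OpenAIAssistant rename, but for orientation, here is a hedged sketch of the lifecycle methods visible in the hunks above (create, get_or_create, update, delete). It assumes phi is installed and OPENAI_API_KEY is set; the name, instructions, and model values are placeholders.

from phi.assistant.openai import OpenAIAssistant

assistant = OpenAIAssistant(
    name="data-helper",                          # placeholder name
    instructions="Answer questions concisely.",  # placeholder instructions
    model="gpt-4-1106-preview",                  # default model per this diff
)

assistant.get_or_create()   # get() raises AssistantIdNotSet when no id is set, so this creates one
print(assistant.get_id())   # id assigned by the OpenAI Assistants API

assistant.instructions = "Answer in one sentence."
assistant.update()          # rebuilds the request body and reloads the API response

assistant.delete()          # returns the API's deletion status
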
8 changes: 4 additions & 4 deletions phi/assistant/openai/row.py
@@ -4,9 +4,9 @@


class AssistantRow(BaseModel):
"""Interface between OpenAiAssistant class and the database"""
"""Interface between OpenAIAssistant class and the database"""

# OpenAiAssistant id which can be referenced in API endpoints.
# OpenAIAssistant id which can be referenced in API endpoints.
id: str
# The object type, which is always assistant.
object: str
@@ -18,13 +18,13 @@ class AssistantRow(BaseModel):
instructions: Optional[str] = None
# LLM data (name, model, etc.)
llm: Optional[Dict[str, Any]] = None
# OpenAiAssistant Tools
# OpenAIAssistant Tools
tools: Optional[List[Dict[str, Any]]] = None
# Files attached to this assistant.
files: Optional[List[Dict[str, Any]]] = None
# Metadata attached to this assistant.
metadata: Optional[Dict[str, Any]] = None
# OpenAiAssistant Memory
# OpenAIAssistant Memory
memory: Optional[Dict[str, Any]] = None
# True if this assistant is active
is_active: Optional[bool] = None
20 changes: 10 additions & 10 deletions phi/assistant/openai/run.py
@@ -3,7 +3,7 @@

from pydantic import BaseModel, ConfigDict, model_validator

from phi.assistant.openai.assistant import OpenAiAssistant
from phi.assistant.openai.assistant import OpenAIAssistant
from phi.assistant.openai.exceptions import ThreadIdNotSet, AssistantIdNotSet, RunIdNotSet
from phi.tools import Tool, ToolRegistry
from phi.tools.function import Function
@@ -33,8 +33,8 @@ class Run(BaseModel):

# The ID of the thread that was executed on as a part of this run.
thread_id: Optional[str] = None
# OpenAiAssistant used for this run
assistant: Optional[OpenAiAssistant] = None
# OpenAIAssistant used for this run
assistant: Optional[OpenAIAssistant] = None
# The ID of the assistant used for execution of this run.
assistant_id: Optional[str] = None

@@ -106,11 +106,11 @@ def extract_functions_from_tools(self) -> "Run":
self.functions = {}
if isinstance(tool, ToolRegistry):
self.functions.update(tool.functions)
logger.debug(f"Functions from {tool.name} added to OpenAiAssistant.")
logger.debug(f"Functions from {tool.name} added to OpenAIAssistant.")
elif callable(tool):
f = Function.from_callable(tool)
self.functions[f.name] = f
logger.debug(f"Function {f.name} added to OpenAiAssistant")
logger.debug(f"Function {f.name} added to OpenAIAssistant")
return self

def load_from_openai(self, openai_run: OpenAIRun):
@@ -149,7 +149,7 @@ def get_tools_for_api(self) -> Optional[List[Dict[str, Any]]]:
def create(
self,
thread_id: Optional[str] = None,
assistant: Optional[OpenAiAssistant] = None,
assistant: Optional[OpenAIAssistant] = None,
assistant_id: Optional[str] = None,
) -> "Run":
_thread_id = thread_id or self.thread_id
@@ -160,7 +160,7 @@ def create(
if _assistant_id is None:
_assistant_id = self.assistant.get_id() if self.assistant is not None else self.assistant_id
if _assistant_id is None:
raise AssistantIdNotSet("OpenAiAssistant.id not set")
raise AssistantIdNotSet("OpenAIAssistant.id not set")

request_body: Dict[str, Any] = {}
if self.model is not None:
@@ -209,7 +209,7 @@ def get_or_create(
self,
use_cache: bool = True,
thread_id: Optional[str] = None,
assistant: Optional[OpenAiAssistant] = None,
assistant: Optional[OpenAIAssistant] = None,
assistant_id: Optional[str] = None,
) -> "Run":
try:
@@ -267,7 +267,7 @@ def wait(
def run(
self,
thread_id: Optional[str] = None,
assistant: Optional[OpenAiAssistant] = None,
assistant: Optional[OpenAIAssistant] = None,
assistant_id: Optional[str] = None,
wait: bool = True,
callback: Optional[Callable[[OpenAIRun], None]] = None,
@@ -287,7 +287,7 @@ def run(
# -*- Check if run requires action
if self.status == "requires_action":
if self.assistant is None:
logger.warning("OpenAiAssistant not available to complete required_action")
logger.warning("OpenAIAssistant not available to complete required_action")
return self
if self.required_action is not None:
if self.required_action.type == "submit_tool_outputs":
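A hedged sketch of driving a Run directly, limited to the signatures visible in the hunks above; the thread id is a placeholder for an existing thread, and the exact polling behaviour behind wait=True is not shown in this diff.

from phi.assistant.openai import OpenAIAssistant
from phi.assistant.openai.run import Run

assistant = OpenAIAssistant(name="data-helper").get_or_create()

run = Run()
run.run(
    thread_id="thread_abc123",  # placeholder: id of an existing thread
    assistant=assistant,        # used to resolve assistant_id; assistant_id=... also works
    wait=True,
)
print(run.status)  # a "requires_action" status is handled above by submitting tool outputs
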
18 changes: 9 additions & 9 deletions phi/assistant/openai/thread.py
@@ -4,13 +4,13 @@

from phi.assistant.openai.run import Run
from phi.assistant.openai.message import Message
from phi.assistant.openai.assistant import OpenAiAssistant
from phi.assistant.openai.assistant import OpenAIAssistant
from phi.assistant.openai.exceptions import ThreadIdNotSet
from phi.utils.log import logger

try:
from openai import OpenAI
from openai.types.beta.assistant import Assistant as OpenAIAssistant
from openai.types.beta.assistant import Assistant as OpenAIAssistantType
from openai.types.beta.thread import Thread as OpenAIThread
from openai.types.beta.thread_deleted import ThreadDeleted as OpenAIThreadDeleted
except ImportError:
@@ -25,8 +25,8 @@ class Thread(BaseModel):
# The object type, populated by the API. Always thread.
object: Optional[str] = None

# OpenAiAssistant used for this thread
assistant: Optional[OpenAiAssistant] = None
# OpenAIAssistant used for this thread
assistant: Optional[OpenAIAssistant] = None
# The ID of the assistant for this thread.
assistant_id: Optional[str] = None

@@ -42,7 +42,7 @@

openai: Optional[OpenAI] = None
openai_thread: Optional[OpenAIThread] = None
openai_assistant: Optional[OpenAIAssistant] = None
openai_assistant: Optional[OpenAIAssistantType] = None

model_config = ConfigDict(arbitrary_types_allowed=True)

@@ -162,7 +162,7 @@ def add(self, messages: List[Union[Message, Dict]]) -> None:
def run(
self,
message: Optional[Union[str, Message]] = None,
assistant: Optional[OpenAiAssistant] = None,
assistant: Optional[OpenAIAssistant] = None,
assistant_id: Optional[str] = None,
run: Optional[Run] = None,
wait: bool = True,
@@ -232,15 +232,15 @@ def print_messages(self) -> None:
table.add_column("User")
table.add_column(m.get_content_with_files())
elif m.role == "assistant":
table.add_row("OpenAiAssistant", Markdown(m.get_content_with_files()))
table.add_row("OpenAIAssistant", Markdown(m.get_content_with_files()))
table.add_section()
else:
table.add_row(m.role, Markdown(m.get_content_with_files()))
table.add_section()
console.print(table)

def print_response(
self, message: str, assistant: OpenAiAssistant, current_message_only: bool = False, markdown: bool = False
self, message: str, assistant: OpenAIAssistant, current_message_only: bool = False, markdown: bool = False
) -> None:
from rich.progress import Progress, SpinnerColumn, TextColumn

@@ -263,7 +263,7 @@ def print_response(
total_messages = len(response_messages)
for idx, response_message in enumerate(response_messages[::-1], start=1):
response_message.pprint(
title=f"[bold] :robot: OpenAiAssistant ({idx}/{total_messages}) [/bold]", markdown=markdown
title=f"[bold] :robot: OpenAIAssistant ({idx}/{total_messages}) [/bold]", markdown=markdown
)
else:
for m in self.messages[::-1]:
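And a hedged sketch at the thread level, again limited to methods visible in the hunks above (run, print_messages, print_response). Whether Thread.run creates the underlying OpenAI thread on first use is not shown in this diff, so treat this as illustrative rather than definitive.

from phi.assistant.openai import OpenAIAssistant
from phi.assistant.openai.thread import Thread

assistant = OpenAIAssistant(
    name="data-helper",
    instructions="Answer questions about the attached files.",
).get_or_create()

thread = Thread(assistant=assistant)
thread.run(message="What files are attached?", assistant=assistant, wait=True)
thread.print_messages()  # assistant turns now render with the "OpenAIAssistant" label

# Or, in one call with rich output:
thread.print_response("Summarize them.", assistant, markdown=True)
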