Commit: pre-ws-proto
ashpreetbedi committed Sep 26, 2023
1 parent 63859b6 commit d254514
Showing 7 changed files with 121 additions and 38 deletions.
10 changes: 6 additions & 4 deletions phi/ai/operator.py
@@ -1,19 +1,21 @@
from phi.api.schemas.ai import ConversationType
from phi.cli.config import PhiCliConfig
from phi.workspace.config import WorkspaceConfig


def phi_ai_conversation(
phi_config: PhiCliConfig,
start_new_conversation: bool = False,
show_previous_messages: bool = False,
autonomous_conversation: bool = True,
print_conversation_history: bool = False,
stream: bool = False,
) -> None:
"""Start a conversation with Phi AI."""

from phi.ai.phi_ai import PhiAI

ai = PhiAI(new_conversation=start_new_conversation, phi_config=phi_config)
if show_previous_messages:
conversation_type = ConversationType.AUTO if autonomous_conversation else ConversationType.RAG
ai = PhiAI(new_conversation=start_new_conversation, conversation_type=conversation_type, phi_config=phi_config)
if print_conversation_history:
ai.print_conversation_history()

ai.start_conversation(stream=stream)
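
A minimal sketch (not part of the commit) of how the updated helper is meant to be invoked; the bare PhiCliConfig() construction is an assumption — the CLI entrypoint loads the saved config instead:

from phi.ai.operator import phi_ai_conversation
from phi.cli.config import PhiCliConfig

phi_config = PhiCliConfig()  # assumed constructor; the real CLI loads the saved config

phi_ai_conversation(
    phi_config=phi_config,
    start_new_conversation=True,       # ignore any cached conversation
    autonomous_conversation=True,      # maps to ConversationType.AUTO
    print_conversation_history=False,  # replaces the old show_previous_messages flag
    stream=True,                       # stream the response as it arrives
)
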
58 changes: 40 additions & 18 deletions phi/ai/phi_ai.py
@@ -1,9 +1,12 @@
from typing import Optional, Dict, List, Any, Iterator

from phi.api.schemas.user import UserSchema
from phi.api.schemas.ai import ConversationType
from phi.cli.config import PhiCliConfig
from phi.cli.console import console
from phi.cli.settings import phi_cli_settings
from phi.llm.schemas import Function
from phi.llm.function.shell import ShellScriptsRegistry
from phi.workspace.config import WorkspaceConfig
from phi.utils.log import logger
from phi.utils.json_io import write_json_file, read_json_file
@@ -13,6 +16,7 @@ class PhiAI:
def __init__(
self,
new_conversation: bool = False,
conversation_type: ConversationType = ConversationType.RAG,
phi_config: Optional[PhiCliConfig] = None,
):
logger.debug("--**-- Starting Phi AI --**--")
@@ -23,20 +27,41 @@ def __init__(
_user = _phi_config.user
if _user is None:
raise ValueError("User not found. Please run `phi auth`")
_active_workspace = _phi_config.get_active_ws_config()

self.conversation_db: Optional[List[Dict[str, Any]]] = None
self.functions: Dict[str, Function] = {
"run_shell_command": Function.from_callable(ShellScriptsRegistry.run_shell_command)
}
logger.debug(f"Functions: {self.functions.keys()}")

_conversation_id = None
_conversation_history = None
self.conversation_db: Optional[List[Dict[str, Any]]] = None
if not new_conversation:
latest_conversation = self.get_latest_conversation()
if latest_conversation is not None:
_conversation_id = latest_conversation.get("conversation_id")
_conversation_history = latest_conversation.get("conversation_history")
# Check if the latest conversation is of the same type and user
if latest_conversation.get("conversation_type") == conversation_type and latest_conversation.get(
"user"
) == _user.model_dump(include={"id_user", "email"}):
logger.debug("Found conversation for the same user and type")
# Check if the latest conversation is for the same workspace
if _active_workspace is not None and latest_conversation.get(
"workspace"
) == _active_workspace.model_dump(include={"ws_dir_name"}):
logger.debug("Found conversation for the same workspace")
# Check if the latest conversation has the same functions
if latest_conversation.get("functions") == list(self.functions.keys()):
logger.debug("Found conversation with the same functions")
_conversation_id = latest_conversation.get("conversation_id")
_conversation_history = latest_conversation.get("conversation_history")

if _conversation_id is None:
from phi.api.ai import conversation_create, ConversationCreateResponse

_conversation: Optional[ConversationCreateResponse] = conversation_create(user=_user)
_conversation: Optional[ConversationCreateResponse] = conversation_create(
user=_user, conversation_type=conversation_type, functions=self.functions
)
if _conversation is None:
logger.error("Could not create conversation, please authenticate using `phi auth`")
exit(0)
@@ -46,13 +71,15 @@ def __init__(

self.phi_config: PhiCliConfig = _phi_config
self.user: UserSchema = _user
self.active_workspace: Optional[WorkspaceConfig] = _active_workspace
self.conversation_id: int = _conversation_id
self.conversation_history: List[Dict[str, Any]] = _conversation_history or []
self.conversation_type: ConversationType = conversation_type

self.save_conversation()
logger.debug(f"--**-- Conversation: {self.conversation_id} --**--")

def start_conversation(self, stream: bool = False):
async def start_conversation(self, stream: bool = False):
from rich import box
from rich.prompt import Prompt
from rich.live import Live
@@ -76,6 +103,8 @@ def start_conversation(self, stream: bool = False):
user=self.user,
conversation_id=self.conversation_id,
message=user_message,
conversation_type=self.conversation_type,
functions=self.functions,
stream=stream,
)
if api_response is None:
@@ -119,24 +148,17 @@ def print_conversation_history(self):
else:
table.add_row(message["role"], Markdown(message["content"]))
console.print(table)
# for message in self.conversation_history:
# if message["role"] == "system":
# continue
# elif message["role"] == "assistant":
# padding = " " * (self.column_width - len("Phi"))
# print_info(f":sunglasses: Phi{padding}: {message['content']}")
# elif message["role"] == "user":
# username = self.user.username or "You"
# padding = " " * (self.column_width - len(username))
# print_info(f":sunglasses: {username}{padding}: {message['content']}")
# else:
# padding = " " * (self.column_width - len(message["role"]))
# print_info(f":sunglasses: {message['role']}:{padding}: {message['content']}")

def to_dict(self) -> Dict[str, Any]:
return {
"conversation_id": self.conversation_id,
"conversation_history": self.conversation_history,
"conversation_type": self.conversation_type,
"user": self.user.model_dump(include={"id_user", "email"}),
"workspace": self.active_workspace.model_dump(include={"ws_dir_name"})
if self.active_workspace is not None
else None,
"functions": list(self.functions.keys()),
}

def save_conversation(self):
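
The conversation-reuse check added to __init__ above is the core of this file's change: a cached conversation is picked up again only when its type, user, workspace, and registered functions all match the current session; otherwise a new one is created via conversation_create. A standalone sketch of that check (field names mirror to_dict(); the dictionaries passed in are hypothetical):

from typing import Any, Dict, List, Optional

def can_reuse_conversation(
    latest: Dict[str, Any],
    conversation_type: str,
    user: Dict[str, Any],
    workspace: Optional[Dict[str, Any]],
    function_names: List[str],
) -> bool:
    # Mirrors PhiAI.__init__: every field saved by to_dict() must match,
    # otherwise a new conversation is created on the API.
    return (
        latest.get("conversation_type") == conversation_type
        and latest.get("user") == user
        and workspace is not None
        and latest.get("workspace") == workspace
        and latest.get("functions") == function_names
    )

Note that start_conversation is now declared async, so callers will eventually need to await it (for example via asyncio.run); the operator helper above still calls it synchronously in this commit.
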
29 changes: 25 additions & 4 deletions phi/api/ai.py
@@ -10,10 +10,15 @@
ConversationCreateResponse,
)
from phi.api.schemas.user import UserSchema
from phi.llm.schemas import Function
from phi.utils.log import logger


def conversation_create(user: UserSchema) -> Optional[ConversationCreateResponse]:
def conversation_create(
user: UserSchema,
conversation_type: ConversationType = ConversationType.RAG,
functions: Optional[Dict[str, Function]] = None,
) -> Optional[ConversationCreateResponse]:
logger.debug("--o-o-- Creating Conversation")
with api.AuthenticatedClient() as api_client:
try:
@@ -22,8 +27,13 @@ def conversation_create(user: UserSchema) -> Optional[ConversationCreateResponse
json={
"user": user.model_dump(include={"id_user", "email"}),
"conversation": {
"type": ConversationType.RAG,
"type": conversation_type,
"client": ConversationClient.CLI,
"functions": {
k: v.model_dump(include={"name", "description", "parameters"}) for k, v in functions.items()
}
if functions is not None
else None,
},
},
)
@@ -43,7 +53,12 @@ def conversation_create(user: UserSchema) -> Optional[ConversationCreateResponse


def conversation_chat(
user: UserSchema, conversation_id: int, message: str, stream: bool = True
user: UserSchema,
conversation_id: int,
message: str,
conversation_type: ConversationType = ConversationType.RAG,
functions: Optional[Dict[str, Function]] = None,
stream: bool = True,
) -> Optional[Iterator[str]]:
with api.AuthenticatedClient() as api_client:
if stream:
@@ -57,8 +72,14 @@ def conversation_chat(
"conversation": {
"id": conversation_id,
"message": message,
"type": ConversationType.RAG,
"type": conversation_type,
"client": ConversationClient.CLI,
"functions": {
k: v.model_dump(include={"name", "description", "parameters"})
for k, v in functions.items()
}
if functions is not None
else None,
"stream": stream,
},
},
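
Both conversation_create and conversation_chat now ship the registered functions with the request. A hedged reconstruction of the "conversation" object that conversation_create posts (the function set matches what PhiAI registers; the string "CLI" stands in for ConversationClient.CLI):

from phi.api.schemas.ai import ConversationType
from phi.llm.function.shell import ShellScriptsRegistry
from phi.llm.schemas import Function

functions = {"run_shell_command": Function.from_callable(ShellScriptsRegistry.run_shell_command)}

conversation_body = {
    "type": ConversationType.AUTO,  # chosen by the caller, defaults to RAG
    "client": "CLI",                # ConversationClient.CLI in the real payload
    "functions": {
        name: fn.model_dump(include={"name", "description", "parameters"})
        for name, fn in functions.items()
    },
}
print(conversation_body)
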
2 changes: 1 addition & 1 deletion phi/cli/config.py
@@ -48,7 +48,7 @@ def user(self, user: Optional[UserSchema]) -> None:
clear_user_cache = (
self._user is not None # previous user is not None
and self._user.email != "anon" # previous user is not anon
and (user.email != self._user.email or user.id_user != self._user.id_user) # new user is different
and (user.email != self._user.email or user.id_user != self._user.id_user) # new user is different
)
self._user = user
if clear_user_cache:
27 changes: 17 additions & 10 deletions phi/cli/entrypoint.py
@@ -209,24 +209,30 @@ def set(

@phi_cli.command(short_help="Chat with Phi AI", options_metavar="")
def ai(
batch: bool = typer.Option(
False,
"-b",
"--batch",
help="Return the response as a batch i.e do not stream the response.",
),
start_new_conversation: bool = typer.Option(
False,
"-n",
"--new",
help="Start a new conversation.",
),
show_previous_messages: bool = typer.Option(
False,
autonomous_conversation: bool = typer.Option(
True,
"-a",
"--all",
"--autonomous",
help="Start an autonomous conversation that can call functions and take actions.",
),
print_conversation_history: bool = typer.Option(
False,
"-h",
"--history",
help="Show all previous messages.",
),
batch: bool = typer.Option(
False,
"-b",
"--batch",
help="Return the response as a batch i.e do not stream the response.",
),
print_debug_log: bool = typer.Option(
False,
"-d",
@@ -257,7 +263,8 @@ def ai(
phi_ai_conversation(
phi_config=phi_config,
start_new_conversation=start_new_conversation,
show_previous_messages=show_previous_messages,
autonomous_conversation=autonomous_conversation,
print_conversation_history=print_conversation_history,
stream=(not batch),
)

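
With these options, `phi ai` defaults to an autonomous, streaming conversation. A sketch (not from the commit) of exercising the new flags through Typer's test runner; running it still assumes a prior `phi auth`:

from typer.testing import CliRunner

from phi.cli.entrypoint import phi_cli

runner = CliRunner()

# --new starts a fresh conversation, --autonomous selects ConversationType.AUTO,
# --history replays prior messages, --batch disables streaming.
result = runner.invoke(phi_cli, ["ai", "--new", "--autonomous", "--batch"])
print(result.output)
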
30 changes: 30 additions & 0 deletions phi/llm/function/shell.py
@@ -0,0 +1,30 @@
from typing import List

from phi.llm.function.registry import FunctionRegistry
from phi.utils.log import logger


class ShellScriptsRegistry(FunctionRegistry):
def __init__(self):
super().__init__(name="shell_script_registry")
self.register(self.run_shell_command)

def run_shell_command(self, args: List[str]) -> str:
"""Runs a shell command and returns the output or error.
:param args: The command to run as a list of strings.
:return: The output of the command.
"""
logger.info(f"Running shell command: {args}")

import subprocess

result = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

logger.debug("Return code:", result.returncode)
logger.debug("Have {} bytes in stdout:\n{}".format(len(result.stdout), result.stdout.decode()))
logger.debug("Have {} bytes in stderr:\n{}".format(len(result.stderr), result.stderr.decode()))

if result.returncode != 0:
return f"error: {result.stderr.decode()}"
return result.stdout.decode()
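
A short usage sketch for the new registry (not part of the commit); it relies only on what this diff already defines — the run_shell_command method and the Function.from_callable / model_dump pattern used in phi_ai.py and api/ai.py:

from phi.llm.function.shell import ShellScriptsRegistry
from phi.llm.schemas import Function

registry = ShellScriptsRegistry()

# Run a command directly: returns stdout, or an "error: ..." string on a non-zero exit code.
print(registry.run_shell_command(["echo", "hello"]))

# Build the schema that PhiAI sends to the API for this function.
fn = Function.from_callable(ShellScriptsRegistry.run_shell_command)
print(fn.model_dump(include={"name", "description", "parameters"}))
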
3 changes: 2 additions & 1 deletion pyproject.toml
@@ -20,7 +20,8 @@ dependencies = [
"rich",
"tomli",
"typer",
"typing-extensions"
"typing-extensions",
"wsproto",
]

[project.optional-dependencies]
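
The new wsproto dependency is not imported anywhere in this commit — hence the "pre-ws-proto" name. For orientation only, a minimal wsproto client handshake looks roughly like the sketch below; the host and target are placeholders, not endpoints defined by phidata:

from wsproto import ConnectionType, WSConnection
from wsproto.events import Request

ws = WSConnection(ConnectionType.CLIENT)

# Bytes of the HTTP Upgrade request; they would be written to a TCP/TLS socket,
# with incoming bytes fed back through ws.receive_data(...) and read via ws.events().
handshake = ws.send(Request(host="example.com", target="/ws"))
print(len(handshake), "handshake bytes")
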
