Example / Game Generator (PHI-2194) #1579

Open · wants to merge 9 commits into base: main
5 changes: 5 additions & 0 deletions cookbook/run_qdrant.sh
@@ -0,0 +1,5 @@
docker run -p 6333:6333 \
-d \
-v qdrant-volume:/qdrant/storage \
--name qdrant \
qdrant/qdrant
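
To sanity-check that the container is reachable before pointing phidata at it, something like the following works (a minimal sketch using the `qdrant-client` package directly; the URL matches the port mapping above):

from qdrant_client import QdrantClient

# Assumes the container started by run_qdrant.sh is listening on localhost:6333.
client = QdrantClient(url="http://localhost:6333")
print(client.get_collections())  # empty collection list on a fresh volume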
@@ -1,20 +1,20 @@
from phi.agent import Agent
from phi.model.openai import OpenAIChat
from phi.tools.fal_tools import FalTools

fal_agent = Agent(
name="Fal Video Generator Agent",
model=OpenAIChat(id="gpt-4o"),
tools=[FalTools("fal-ai/hunyuan-video")],
description="You are an AI agent that can generate videos using the Fal API.",
instructions=[
"When the user asks you to create a video, use the `generate_media` tool to create the video.",
"Return the URL as raw to the user.",
"Don't convert video URL to markdown or anything else.",
],
markdown=True,
debug_mode=True,
show_tool_calls=True,
)

fal_agent.print_response("Generate video of balloon in the ocean")
1 change: 1 addition & 0 deletions cookbook/workflows/.gitignore
@@ -1 +1,2 @@
reports
games
9 changes: 9 additions & 0 deletions cookbook/workflows/05_playground.py
@@ -3,6 +3,7 @@
2. Run the script using: `python cookbook/workflows/05_playground.py`
"""

from cookbook.workflows.game_generator import GameGenerator
from phi.playground import Playground, serve_playground_app
from phi.storage.workflow.sqlite import SqlWorkflowStorage

@@ -35,6 +36,14 @@
),
)

game_generator = GameGenerator(
workflow_id="game-generator",
storage=SqlWorkflowStorage(
table_name="game_generator_workflows",
db_file="tmp/workflows.db",
),
)

# Initialize the Playground with the workflows
app = Playground(workflows=[blog_post_generator, news_report_generator, investment_report_generator, game_generator]).get_app()

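For reference, the playground script is served the same way as the other workflow playground examples; the tail of the file is not shown in this hunk, so the exact call below is an assumption based on that pattern:

if __name__ == "__main__":
    # Serve the FastAPI app defined above; reload=True restarts on code changes.
    serve_playground_app("05_playground:app", reload=True)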
136 changes: 136 additions & 0 deletions cookbook/workflows/game_generator.py
@@ -0,0 +1,136 @@
"""
1. Install dependencies using: `pip install openai phidata`
2. Run the script using: `python cookbook/workflows/game_generator.py`
"""

import json
from pathlib import Path
from typing import Iterator

from pydantic import BaseModel, Field

from phi.agent import Agent, RunResponse
from phi.model.openai import OpenAIChat
from phi.run.response import RunEvent
from phi.storage.workflow.sqlite import SqlWorkflowStorage
from phi.utils.log import logger
from phi.utils.pprint import pprint_run_response
from phi.utils.string import hash_string_sha256
from phi.utils.web import open_html_file
from phi.workflow import Workflow


games_dir = Path(__file__).parent.joinpath("games")
games_dir.mkdir(parents=True, exist_ok=True)
game_output_path = games_dir / "game_output_file.html"
game_output_path.unlink(missing_ok=True)


class GameOutput(BaseModel):
reasoning: str = Field(..., description="Explain your reasoning")
code: str = Field(..., description="The HTML5 code for the game")
instructions: str = Field(..., description="Instructions on how to play the game")


class QAOutput(BaseModel):
reasoning: str = Field(..., description="Explain your reasoning")
correct: bool = Field(False, description="Does the game pass your criteria?")


class GameGenerator(Workflow):
# This description is only used in the workflow UI
description: str = "Generator for single-page HTML5 games"

game_developer: Agent = Agent(
name="Game Developer Agent",
description="You are a game developer that produces working HTML5 code.",
model=OpenAIChat(id="gpt-4o"),
instructions=[
"Create a game based on the user's prompt. "
"The game should be HTML5, completely self-contained and must be runnable simply by opening on a browser",
"Ensure the game has a alert that pops up if the user dies and then allows the user to restart or exit the game.",
"Ensure instructions for the game are displayed on the HTML page."
"Use user-friendly colours and make the game canvas large enough for the game to be playable on a larger screen.",
],
response_model=GameOutput,
)

qa_agent: Agent = Agent(
name="QA Agent",
model=OpenAIChat(id="gpt-4o"),
description="You are a game QA and you evaluate html5 code for correctness.",
instructions=[
"You will be given some HTML5 code."
"Your task is to read the code and evaluate it for correctness, but also that it matches the original task description.",
],
response_model=QAOutput,
)

def run(self, game_description: str) -> Iterator[RunResponse]:
logger.info(f"Game description: {game_description}")

game_output = self.game_developer.run(game_description)

if game_output and game_output.content and isinstance(game_output.content, GameOutput):
game_code = game_output.content.code
logger.info(f"Game code: {game_code}")
else:
yield RunResponse(
run_id=self.run_id, event=RunEvent.workflow_completed, content="Sorry, could not generate a game."
)
return

logger.info("QA'ing the game code")
qa_input = {
"game_description": game_description,
"game_code": game_code,
}
qa_output = self.qa_agent.run(json.dumps(qa_input, indent=2))

if qa_output and qa_output.content and isinstance(qa_output.content, QAOutput):
logger.info(qa_output.content)
if not qa_output.content.correct:
raise Exception(f"QA failed for code: {game_code}")

# Store the resulting code
game_output_path.write_text(game_code)

yield RunResponse(
run_id=self.run_id, event=RunEvent.workflow_completed, content=game_output.content.instructions
)
else:
yield RunResponse(
run_id=self.run_id, event=RunEvent.workflow_completed, content="Sorry, could not QA the game."
)
return


# Run the workflow if the script is executed directly
if __name__ == "__main__":
from rich.prompt import Prompt

game_description = Prompt.ask(
"[bold]Describe the game you want to make (keep it simple)[/bold]\n✨",
# default="An asteroids game."
default="An asteroids game. Make sure the asteroids move randomly and are random sizes. They should continually spawn more and become more difficult over time. Keep score. Make my spaceship's movement realistic.",
)

hash_of_description = hash_string_sha256(game_description)

# Initialize the game generator workflow
game_generator = GameGenerator(
session_id=f"game-gen-{hash_of_description}",
storage=SqlWorkflowStorage(
table_name="game_generator_workflows",
db_file="tmp/workflows.db",
),
)

# Execute the workflow
result: Iterator[RunResponse] = game_generator.run(game_description=game_description)

# Print the workflow response
pprint_run_response(result)

if game_output_path.exists():
open_html_file(game_output_path)
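
Because both agents set `response_model`, their `run()` calls return typed Pydantic objects rather than raw text, which is what the `isinstance(...)` checks above rely on. A minimal sketch of that pattern on its own (the prompt is only an example):

from pydantic import BaseModel, Field

from phi.agent import Agent
from phi.model.openai import OpenAIChat


class QAOutput(BaseModel):
    reasoning: str = Field(..., description="Explain your reasoning")
    correct: bool = Field(False, description="Does the game pass your criteria?")


# The agent parses the model's reply into QAOutput instead of returning a string.
qa = Agent(model=OpenAIChat(id="gpt-4o"), response_model=QAOutput)
response = qa.run("Evaluate this HTML5 snippet for correctness: <html><body>...</body></html>")
if isinstance(response.content, QAOutput):
    print(response.content.correct, response.content.reasoning)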
2 changes: 1 addition & 1 deletion phi/agent/agent.py
@@ -497,7 +497,7 @@ def update_model(self) -> None:
"Please provide a `model` or install `openai`."
)
exit(1)
self.model = OpenAIChat()
self.model = OpenAIChat() # We default to OpenAIChat as a base model

# Set response_format if it is not set on the Model
if self.response_model is not None and self.model.response_format is None:
5 changes: 2 additions & 3 deletions phi/embedder/ollama.py
@@ -5,9 +5,8 @@

try:
from ollama import Client as OllamaClient
except ImportError:
logger.error("`ollama` not installed")
raise
except (ModuleNotFoundError, ImportError):
raise ImportError("`ollama` not installed. Please install using `pip install ollama`")


class OllamaEmbedder(Embedder):
2 changes: 1 addition & 1 deletion phi/tools/baidusearch.py
@@ -22,7 +22,7 @@ class BaiduSearch(Toolkit):
Args:
fixed_max_results (Optional[int]): A fixed number of maximum results.
fixed_language (Optional[str]): A fixed language for the search results.
headers (Optional[Any]): Headers to be used in the search request.
headers (Optional[Any]):
proxy (Optional[str]): Proxy to be used in the search request.
debug (Optional[bool]): Enable debug output.
"""
12 changes: 12 additions & 0 deletions phi/tools/duckduckgo.py
@@ -11,6 +11,18 @@


class DuckDuckGo(Toolkit):
"""
DuckDuckGo is a toolkit for searching DuckDuckGo easily.

Args:
search (bool): Enable DuckDuckGo search function.
news (bool): Enable DuckDuckGo news function.
fixed_max_results (Optional[int]): A fixed number of maximum results.
headers (Optional[Any]): Headers to be used in the search request.
proxy (Optional[str]): Proxy to be used in the search request.
proxies (Optional[Any]): A list of proxies to be used in the search request.
timeout (Optional[int]): The maximum number of seconds to wait for a response.
"""
def __init__(
self,
search: bool = True,
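As with the other search toolkits, the usual way to use it is as an agent tool; a minimal sketch (the query and result cap are only examples):

from phi.agent import Agent
from phi.tools.duckduckgo import DuckDuckGo

# fixed_max_results caps how many results each search call returns.
agent = Agent(tools=[DuckDuckGo(fixed_max_results=5)], show_tool_calls=True, markdown=True)
agent.print_response("What happened in AI research this week?")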
3 changes: 1 addition & 2 deletions phi/tools/linear_tools.py
@@ -22,7 +22,6 @@ def __init__(
if not self.api_token:
api_error_message = "API token 'LINEAR_API_KEY' is missing. Please set it as an environment variable."
logger.error(api_error_message)
raise ValueError(api_error_message)

self.endpoint = "https://api.linear.app/graphql"
self.headers = {"Authorization": f"{self.api_token}"}
@@ -359,7 +358,7 @@ def get_high_priority_issues(self) -> Optional[str]:

query = """
query HighPriorityIssues {
issues(filter: {
issues(filter: {
priority: { lte: 2 }
}) {
nodes {
4 changes: 3 additions & 1 deletion phi/tools/replicate.py
@@ -1,5 +1,6 @@
import os
from os import getenv
from typing import Optional
from urllib.parse import urlparse
from uuid import uuid4

@@ -18,10 +19,11 @@
class ReplicateTools(Toolkit):
def __init__(
self,
api_key: Optional[str] = None,
model: str = "minimax/video-01",
):
super().__init__(name="replicate_toolkit")
self.api_key = getenv("REPLICATE_API_TOKEN")
self.api_key = api_key or getenv("REPLICATE_API_TOKEN")
if not self.api_key:
logger.error("REPLICATE_API_TOKEN not set. Please set the REPLICATE_API_TOKEN environment variable.")
self.model = model
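With the new `api_key` parameter, the token can be passed explicitly instead of relying solely on the `REPLICATE_API_TOKEN` environment variable; a short sketch (the key is a placeholder and the model id mirrors the default above):

from phi.agent import Agent
from phi.tools.replicate import ReplicateTools

# An explicit api_key takes precedence; getenv("REPLICATE_API_TOKEN") remains the fallback.
agent = Agent(
    tools=[ReplicateTools(api_key="r8_xxx", model="minimax/video-01")],
    show_tool_calls=True,
)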
17 changes: 17 additions & 0 deletions phi/utils/string.py
@@ -0,0 +1,17 @@
import hashlib


def hash_string_sha256(input_string: str) -> str:
# Encode the input string to bytes
encoded_string = input_string.encode("utf-8")

# Create a SHA-256 hash object
sha256_hash = hashlib.sha256()

# Update the hash object with the encoded string
sha256_hash.update(encoded_string)

# Get the hexadecimal digest of the hash
hex_digest = sha256_hash.hexdigest()

return hex_digest
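
Usage mirrors the session-id derivation in `game_generator.py` above (a short sketch):

from phi.utils.string import hash_string_sha256

# Identical descriptions produce the same session id.
session_id = f"game-gen-{hash_string_sha256('An asteroids game.')}"
print(session_id)  # game-gen-<64-character hex digest>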
24 changes: 24 additions & 0 deletions phi/utils/web.py
@@ -0,0 +1,24 @@
import webbrowser
from pathlib import Path

from phi.utils.log import logger


def open_html_file(file_path: Path):
"""
Opens the specified HTML file in the default web browser.

:param file_path: Path to the HTML file.
"""
# Resolve the absolute path
absolute_path = file_path.resolve()

if not absolute_path.is_file():
logger.error(f"The file '{absolute_path}' does not exist.")
raise FileNotFoundError(f"The file '{absolute_path}' does not exist.")

# Convert the file path to a file URI
file_url = absolute_path.as_uri()

# Open the file in the default web browser
webbrowser.open(file_url)
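
A quick usage sketch (the path is only an example; it matches where the game workflow writes its output):

from pathlib import Path

from phi.utils.web import open_html_file

# Raises FileNotFoundError if the file is missing, otherwise opens the default browser.
open_html_file(Path("cookbook/workflows/games/game_output_file.html"))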
2 changes: 1 addition & 1 deletion phi/vectordb/qdrant/qdrant.py
@@ -83,7 +83,7 @@ def client(self) -> QdrantClient:
https=self.https,
api_key=self.api_key,
prefix=self.prefix,
timeout=self.timeout,
timeout=int(self.timeout) if self.timeout is not None else None,
host=self.host,
path=self.path,
**self.kwargs,