-
-
Notifications
You must be signed in to change notification settings - Fork 5
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Additional autogen examples for Agents
- Loading branch information
1 parent
bbb9208
commit 59bef64
Showing
3 changed files
with
197 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,78 @@ | ||
import autogen | ||
from typing import Literal | ||
from typing_extensions import Annotated | ||
|
||
# LLM configuration for a local Ollama endpoint (OpenAI-compatible API).
# autogen's ``llm_config`` parameter expects a dict with a "config_list"
# key — not a bare list — so wrap the endpoint entries accordingly (the
# other examples in this commit pass ``{"config_list": ...}`` explicitly).
local_llm_config = {
    "config_list": [
        {
            # Choose your model name.
            "model": "gemma:2b",
            "base_url": "http://localhost:11434/v1",
            # You need to provide your API key here.
            "api_key": "ollama",
        }
    ]
}
|
||
|
||
# System prompt for the currency chatbot.  Small local models are weak at
# tool calling, so the prompt embeds two worked examples of the expected
# function-call JSON to steer the output format.
CHATBOT_SYSTEM_MESSAGE = """For currency exchange tasks,
only use the functions you have been provided with.
Output 'TERMINATE' when an answer has been provided.
Do not include the function name or result in the JSON.
Example of the return JSON is:
{
"parameter_1_name": 100.00,
"parameter_2_name": "ABC",
"parameter_3_name": "DEF",
}.
Another example of the return JSON is:
{
"parameter_1_name": "GHI",
"parameter_2_name": "ABC",
"parameter_3_name": "DEF",
"parameter_4_name": 123.00,
}. """

# Assistant agent backed by the local model; it proposes the tool calls.
chatbot = autogen.AssistantAgent(
    name="chatbot",
    system_message=CHATBOT_SYSTEM_MESSAGE,
    llm_config=local_llm_config,
)
|
||
# Proxy acting for the user: it executes registered tools and ends the
# exchange once a message containing the TERMINATE sentinel arrives.
user_proxy = autogen.UserProxyAgent(
    name="user_proxy",
    human_input_mode="NEVER",
    max_consecutive_auto_reply=1,
    is_termination_msg=lambda msg: bool(msg.get("content", "")) and "TERMINATE" in msg.get("content", ""),
)
|
||
|
||
# The only two currencies this toy example understands.
CurrencySymbol = Literal["USD", "EUR"]

# Hard-coded demo conversion table, keyed by (base, quote).
_RATES = {
    ("USD", "EUR"): 1 / 1.1,
    ("EUR", "USD"): 1.1,
}


# Define our function that we expect to call
def exchange_rate(base_currency: CurrencySymbol, quote_currency: CurrencySymbol) -> float:
    """Return the rate converting one unit of base_currency into quote_currency.

    Raises:
        ValueError: if the currency pair is not in the demo rate table.
    """
    if base_currency == quote_currency:
        return 1.0
    rate = _RATES.get((base_currency, quote_currency))
    if rate is None:
        raise ValueError(f"Unknown currencies {base_currency}, {quote_currency}")
    return rate
|
||
# Register the tool with both agents: the chatbot may propose calls to it,
# and the user proxy actually executes them.
@user_proxy.register_for_execution()
@chatbot.register_for_llm(description="Currency exchange calculator.")
def currency_calculator(
    base_amount: Annotated[float, "Amount of currency in base_currency"],
    base_currency: Annotated[CurrencySymbol, "Base currency"] = "USD",
    quote_currency: Annotated[CurrencySymbol, "Quote currency"] = "EUR",
) -> str:
    """Convert base_amount of base_currency into quote_currency, as '<amount> <CCY>'."""
    rate = exchange_rate(base_currency, quote_currency)
    return f"{rate * base_amount:.2f} {quote_currency}"
|
||
# start the conversation
# The user proxy opens the chat with the question; the chatbot is expected
# to answer via the registered currency_calculator tool.
# NOTE(review): "reflection_with_llm" presumably produces an LLM-written
# summary of the chat — confirm against the autogen documentation.
res = user_proxy.initiate_chat(
    chatbot,
    message="How much is 123.45 EUR in USD?",
    summary_method="reflection_with_llm",
)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,76 @@ | ||
from pathlib import Path | ||
from autogen import AssistantAgent, UserProxyAgent | ||
from autogen.coding import DockerCommandLineCodeExecutor | ||
import os | ||
|
||
|
||
# Connection details for the local Ollama server (OpenAI-compatible endpoint).
config_list = [
    dict(
        # Choose your model name.
        model="gemma:2b",
        base_url="http://localhost:11434/v1",
        # You need to provide your API key here.
        api_key="ollama",
    )
]
|
||
|
||
# Directory shared with the container; generated code files land here.
work_dir = Path("groupchat")
work_dir.mkdir(exist_ok=True)

# Executor that runs the agents' generated code inside a Docker container
# instead of on the host.
code_executor = DockerCommandLineCodeExecutor(
    work_dir=work_dir,  # Where the generated code files are stored.
    image="python:3.12-slim",  # Docker image used to execute the code.
    timeout=10,  # Per-execution timeout in seconds.
)
|
||
|
||
def _is_termination_msg(msg) -> bool:
    """True when the message content contains the TERMINATE sentinel.

    ``content`` can be None or absent on some messages; the original inline
    lambda (``"TERMINATE" in x.get("content")``) raised TypeError in that
    case.  The sibling example in this commit guards the same way.
    """
    content = msg.get("content")
    return content is not None and "TERMINATE" in content


# User Proxy will execute code and finish the chat upon typing 'exit'
user_proxy = UserProxyAgent(
    name="UserProxy",
    system_message="A human admin",
    code_execution_config={
        "last_n_messages": 2,
        "executor": code_executor,
    },
    human_input_mode="TERMINATE",
    is_termination_msg=_is_termination_msg,
)
|
||
# Agent that writes the Python implementation and revises it whenever the
# reviewer pushes back.
coder = AssistantAgent(
    name="softwareCoder",
    llm_config={"config_list": config_list},
    system_message="You are a senior Python developer, a specialist in writing succinct Python functions.",
    description="Software Coder, writes Python code as required and reiterates with feedback from the Code Reviewer.",
)
|
||
# Code Reviewer agent: critiques the coder's output and ends the loop by
# emitting TERMINATE when satisfied.
reviewer = AssistantAgent(
    name="codeReviewer",
    description="Code Reviewer, reviews written code for correctness, efficiency, and security. Asks the Software Coder to address issues.",
    system_message="You are a Code Reviewer, experienced in checking code for correctness, efficiency, and security. Review and provide feedback to the Software Coder until you are satisfied, then return the word TERMINATE",
    # None-safe termination check: ``content`` can be None on some messages,
    # and the original ``"TERMINATE" in x.get("content")`` raised TypeError then.
    is_termination_msg=lambda x: x.get("content") is not None and "TERMINATE" in x["content"],
    llm_config={"config_list": config_list},
)
|
||
from autogen import GroupChat, GroupChatManager

# Establish the Group Chat and disallow a speaker being selected consecutively
groupchat = GroupChat(
    agents=[user_proxy, coder, reviewer],
    messages=[],
    max_round=12,
    allow_repeat_speaker=False,
)

# Manager that coordinates the multi-agent group chat.
manager = GroupChatManager(groupchat=groupchat, llm_config={"config_list": config_list})
|
||
from autogen.cache import Cache

# Cache LLM responses on disk for the duration of the chat.
with Cache.disk() as cache:
    # Kick off the group chat with the coding request.
    user_proxy.initiate_chat(
        manager,
        message="Write a Python function for the Fibonacci sequence, the function will have one parameter for the number in the sequence, which the function will return the Fibonacci number for.",
        cache=cache,
    )
    # type exit to terminate the chat
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,43 @@ | ||
from pathlib import Path | ||
from autogen import AssistantAgent, UserProxyAgent | ||
from autogen.coding import DockerCommandLineCodeExecutor | ||
import os | ||
|
||
|
||
# Endpoint description for the local Ollama server (OpenAI-compatible API).
config_list = [
    dict(
        # Choose your model name.
        model="gemma:2b",
        base_url="http://localhost:11434/v1",
        # You need to provide your API key here.
        api_key="ollama",
    )
]
|
||
# Setting up the code executor.
workdir = Path("coding")
workdir.mkdir(exist_ok=True)

# Run the agents' generated code inside a Docker container, not on the host.
code_executor = DockerCommandLineCodeExecutor(
    work_dir=workdir,  # Where the generated code files are stored.
    image="python:3.12-slim",  # Docker image used to execute the code.
    timeout=10,  # Per-execution timeout in seconds.
)
|
||
|
||
# Setting up the agents.
def _is_termination_msg(msg) -> bool:
    """True when the message content contains the TERMINATE sentinel.

    ``content`` can be None or absent on some messages; the original inline
    lambda (``"TERMINATE" in msg.get("content")``) raised TypeError in that
    case.  The function-calling example in this commit guards the same way.
    """
    content = msg.get("content")
    return content is not None and "TERMINATE" in content


# Proxy for the human user; executes any code the assistant writes via Docker.
user_proxy_agent = UserProxyAgent(
    name="User",
    code_execution_config={"executor": code_executor},
    is_termination_msg=_is_termination_msg,
)
|
||
# LLM-backed assistant that writes the code/answer.
# NOTE(review): the display name says "Mistral" while the configured model is
# gemma:2b, and it contains a space — confirm both are intended.
assistant_agent = AssistantAgent(
    llm_config={"config_list": config_list},
    name="Mistral Assistant",
)
|
||
# Start the two-agent chat: the proxy sends the task and will execute any
# code the assistant produces in its replies.
chat_result = user_proxy_agent.initiate_chat(
    assistant_agent,
    message="Count how many prime numbers from 1 to 10000.",
)