From 840566953ca635f84ab3a3e1febcaf9680d9bdc3 Mon Sep 17 00:00:00 2001
From: Braelyn Boynton
Date: Mon, 11 Dec 2023 12:22:51 -0600
Subject: [PATCH 1/4] Adds example agent monitoring with AgentOps

---
 README.md                              | 22 ++++++++++++++++++++++
 autogen/agentchat/assistant_agent.py   |  3 +++
 autogen/agentchat/conversable_agent.py |  5 +++++
 setup.py                               |  1 +
 4 files changed, 31 insertions(+)

diff --git a/README.md b/README.md
index 461a93fd86b..e9a794dae1a 100644
--- a/README.md
+++ b/README.md
@@ -144,6 +144,28 @@
 response = autogen.Completion.create(context=test_instance, **config)
 Please find more [code examples](https://microsoft.github.io/autogen/docs/Examples#tune-gpt-models) for this feature. -->
 
+## Monitoring
+Basic monitoring is demonstrated in `test/twoagent-monitored.py`.
+
+Monitoring can be implemented using the [AgentOps](https://agentops.ai) library.
+
+1. `pip install agentops`
+2. Create an account at https://agentops.ai and generate an API key
+3. `export AGENTOPS_API_KEY=<your API key>`
+4. When creating a `ConversableAgent` or `AssistantAgent`, first create an AgentOps client instance
+   ```python
+   import os
+   from agentops import Client
+   ao_client = Client(api_key=os.environ.get('AGENTOPS_API_KEY'),
+                      tags=['describe your session here'])
+   ```
+5. Pass this client instance to your agent's constructor
+   ```python
+   assistant = AssistantAgent("assistant", llm_config={"config_list": config_list}, ao_client=ao_client)
+   ```
+6. Run your agent(s) and view the results of your session on the [AgentOps Dashboard](https://app.agentops.ai/dashboard)
+
+
 ## Documentation
 
 You can find detailed documentation about AutoGen [here](https://microsoft.github.io/autogen/).

diff --git a/autogen/agentchat/assistant_agent.py b/autogen/agentchat/assistant_agent.py
index 55230708f67..4352a30a9c6 100644
--- a/autogen/agentchat/assistant_agent.py
+++ b/autogen/agentchat/assistant_agent.py
@@ -1,4 +1,5 @@
 from typing import Callable, Dict, Literal, Optional, Union
+from agentops import Client as AOClient
 
 from .conversable_agent import ConversableAgent
 
@@ -36,6 +37,7 @@ def __init__(
         max_consecutive_auto_reply: Optional[int] = None,
         human_input_mode: Optional[str] = "NEVER",
         code_execution_config: Optional[Union[Dict, Literal[False]]] = False,
+        ao_client: Optional[AOClient] = None,
         **kwargs,
     ):
         """
@@ -63,5 +65,6 @@ def __init__(
             human_input_mode,
             code_execution_config=code_execution_config,
             llm_config=llm_config,
+            ao_client=ao_client,
             **kwargs,
         )

diff --git a/autogen/agentchat/conversable_agent.py b/autogen/agentchat/conversable_agent.py
index 7a268823a78..769155b558d 100644
--- a/autogen/agentchat/conversable_agent.py
+++ b/autogen/agentchat/conversable_agent.py
@@ -9,6 +9,7 @@
 from autogen.code_utils import DEFAULT_MODEL, UNKNOWN, content_str, execute_code, extract_code, infer_lang
 
 from .agent import Agent
+from agentops import Client as AOClient
 
 try:
     from termcolor import colored
@@ -54,6 +55,7 @@ def __init__(
         code_execution_config: Optional[Union[Dict, Literal[False]]] = None,
         llm_config: Optional[Union[Dict, Literal[False]]] = None,
         default_auto_reply: Optional[Union[str, Dict, None]] = "",
+        ao_client: Optional[AOClient] = None,
     ):
         """
         Args:
@@ -134,6 +136,7 @@ def __init__(
         self.register_reply([Agent, None], ConversableAgent.generate_async_function_call_reply)
         self.register_reply([Agent, None], ConversableAgent.check_termination_and_human_reply)
         self.register_reply([Agent, None], ConversableAgent.a_check_termination_and_human_reply)
+        self.ao_client = ao_client
 
     def register_reply(
         self,
@@ -343,6 +346,7 @@ def send(
         """
         # When the agent composes and sends the message, the role of the message is "assistant"
         # unless it's "function".
+        self.ao_client.record_action("send_message_to_another_agent", tags=['conversable_agent', str(self.name)])
         valid = self._append_oai_message(message, "assistant", recipient)
         if valid:
             recipient.receive(message, self, request_reply, silent)
@@ -475,6 +479,7 @@ def receive(
         Raises:
             ValueError: if the message can't be converted into a valid ChatCompletion message.
         """
+        self.ao_client.record_action("received_message_from_another_agent", tags=['conversable_agent', str(self.name)])
         self._process_received_message(message, sender, silent)
         if request_reply is False or request_reply is None and self.reply_at_receive[sender] is False:
             return

diff --git a/setup.py b/setup.py
index 9c1e1e3bd34..b683e4cc623 100644
--- a/setup.py
+++ b/setup.py
@@ -20,6 +20,7 @@
     "flaml",
     "python-dotenv",
     "tiktoken",
+    "agentops",
 ]
 
 setuptools.setup(

From 416e51f3fcc80b24f58e00d7110c1ef66546eb92 Mon Sep 17 00:00:00 2001
From: Braelyn Boynton
Date: Mon, 11 Dec 2023 12:31:36 -0600
Subject: [PATCH 2/4] Monitored Example

---
 autogen/agentchat/conversable_agent.py |  1 +
 test/twoagent-monitored.py             | 16 ++++++++++++++++
 2 files changed, 17 insertions(+)
 create mode 100644 test/twoagent-monitored.py

diff --git a/autogen/agentchat/conversable_agent.py b/autogen/agentchat/conversable_agent.py
index 7f7d21cb42b..2a8526626e0 100644
--- a/autogen/agentchat/conversable_agent.py
+++ b/autogen/agentchat/conversable_agent.py
@@ -101,6 +101,7 @@ def __init__(
             default_auto_reply (str or dict or None): default auto reply when no code execution or llm-based reply is generated.
             description (str): a short description of the agent. This description is used by other agents
                 (e.g. the GroupChatManager) to decide when to call upon this agent. (Default: system_message)
+            ao_client (AgentOps Client instance): an instance of an AgentOps client. All agents should share one instance. (Default: None)
         """
         super().__init__(name)
         # a dictionary of conversations, default value is list

diff --git a/test/twoagent-monitored.py b/test/twoagent-monitored.py
new file mode 100644
index 00000000000..a31fed523f1
--- /dev/null
+++ b/test/twoagent-monitored.py
@@ -0,0 +1,16 @@
+import os
+from autogen import AssistantAgent, UserProxyAgent, config_list_from_json
+from agentops import Client
+
+ao_client = Client(api_key=os.environ.get('AGENTOPS_API_KEY'),
+                   tags=['autogen', 'Autogen Example'])
+
+# Load LLM inference endpoints from an env variable or a file
+# See https://microsoft.github.io/autogen/docs/FAQ#set-your-api-endpoints
+# and OAI_CONFIG_LIST_sample
+config_list = config_list_from_json(env_or_file="OAI_CONFIG_LIST")
+
+# pass in the AgentOps client instance for agent monitoring and visibility
+assistant = AssistantAgent("assistant", llm_config={"config_list": config_list}, ao_client=ao_client)
+user_proxy = UserProxyAgent("user_proxy", code_execution_config={"work_dir": "coding"})
+user_proxy.initiate_chat(assistant, message="Plot a chart of NVDA and TESLA stock price change YTD.")

From 746ba591afccecb53925c0f34b95fa500213b5e8 Mon Sep 17 00:00:00 2001
From: Braelyn Boynton
Date: Mon, 11 Dec 2023 12:40:06 -0600
Subject: [PATCH 3/4] only use AOClient if it exists

---
 autogen/agentchat/conversable_agent.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/autogen/agentchat/conversable_agent.py b/autogen/agentchat/conversable_agent.py
index 2a8526626e0..1e3b96f2eed 100644
--- a/autogen/agentchat/conversable_agent.py
+++ b/autogen/agentchat/conversable_agent.py
@@ -353,7 +353,8 @@ def send(
         """
         # When the agent composes and sends the message, the role of the message is "assistant"
         # unless it's "function".
-        self.ao_client.record_action("send_message_to_another_agent", tags=['conversable_agent', str(self.name)])
+        if self.ao_client:
+            self.ao_client.record_action("send_message_to_another_agent", tags=['conversable_agent', str(self.name)])
         valid = self._append_oai_message(message, "assistant", recipient)
         if valid:
             recipient.receive(message, self, request_reply, silent)
@@ -486,7 +487,8 @@ def receive(
         Raises:
             ValueError: if the message can't be converted into a valid ChatCompletion message.
         """
-        self.ao_client.record_action("received_message_from_another_agent", tags=['conversable_agent', str(self.name)])
+        if self.ao_client:
+            self.ao_client.record_action("received_message_from_another_agent", tags=['conversable_agent', str(self.name)])
         self._process_received_message(message, sender, silent)
         if request_reply is False or request_reply is None and self.reply_at_receive[sender] is False:
             return

From 7b2d8164cbab1fe2d5ce38a2ce3bb353c1bf661c Mon Sep 17 00:00:00 2001
From: Braelyn Boynton
Date: Mon, 11 Dec 2023 13:14:26 -0600
Subject: [PATCH 4/4] add AGENTOPS_API_KEY placeholder to test_utils

---
 test/oai/test_utils.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/test/oai/test_utils.py b/test/oai/test_utils.py
index 579fc6f9d8a..ebd113cb625 100644
--- a/test/oai/test_utils.py
+++ b/test/oai/test_utils.py
@@ -17,6 +17,7 @@
     "OPENAI_API_KEY": "sk-********************",
     "HUGGING_FACE_API_KEY": "**************************",
     "ANOTHER_API_KEY": "1234567890234567890",
+    "AGENTOPS_API_KEY": "********-****-****-****-************",
 }
 
 # Example model to API key mappings
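
The guard added in [PATCH 3/4] makes monitoring strictly opt-in: `record_action` runs only when an `ao_client` was passed to the constructor, so agents created without one behave exactly as before. Below is a minimal, self-contained sketch of that optional-client pattern, assuming only the `Client` constructor and `record_action` calls shown in the patches; the `MonitoredSender` class, its `send` method, and the tag strings are illustrative stand-ins, not AutoGen or AgentOps APIs.

```python
import os
from typing import Optional

from agentops import Client as AOClient


class MonitoredSender:
    """Illustrative stand-in for ConversableAgent's optional monitoring hook."""

    def __init__(self, name: str, ao_client: Optional[AOClient] = None):
        self.name = name
        # The AgentOps client is optional; without it, no monitoring calls are made.
        self.ao_client = ao_client

    def send(self, message: str) -> None:
        # Record the action only if a client was supplied (the guard from PATCH 3/4).
        if self.ao_client:
            self.ao_client.record_action(
                "send_message_to_another_agent",
                tags=["conversable_agent", str(self.name)],
            )
        print(f"{self.name} sends: {message}")


# One shared client for every monitored object, as the docstring in PATCH 2/4 advises.
shared_client = (
    AOClient(api_key=os.environ["AGENTOPS_API_KEY"], tags=["guard pattern sketch"])
    if os.environ.get("AGENTOPS_API_KEY")
    else None
)
monitored = MonitoredSender("assistant", ao_client=shared_client)
unmonitored = MonitoredSender("user_proxy")  # works the same, just records nothing
monitored.send("hello")
unmonitored.send("hello")
```

Because all monitored objects share one client instance, every recorded action is grouped into a single session on the AgentOps dashboard.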