From 0fe284b21f8eaf55cfccf5c8bb5608e50a554128 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?No=C3=ABl=20Barron?= Date: Tue, 6 Aug 2024 10:29:37 -0500 Subject: [PATCH] Doc and comment typos improvements (#1319) * typographical corrections in the descriptions, comment improvements, general formatting for consistency * consistent indentation for better readability, improved comments, typographical corrections * updated docstrings for better clarity, added type hint for **kwargs, typographical corrections (no functionality changes) * Fix format --------- Co-authored-by: Li Jiang --- flaml/autogen/agentchat/agent.py | 4 +- flaml/autogen/agentchat/assistant_agent.py | 36 +++++----- website/src/components/HomepageFeatures.js | 11 +-- website/src/css/custom.css | 82 ++++++++++------------ 4 files changed, 65 insertions(+), 68 deletions(-) diff --git a/flaml/autogen/agentchat/agent.py b/flaml/autogen/agentchat/agent.py index 9302124998..b83709dc30 100644 --- a/flaml/autogen/agentchat/agent.py +++ b/flaml/autogen/agentchat/agent.py @@ -25,10 +25,10 @@ def name(self): return self._name def send(self, message: Union[Dict, str], recipient: "Agent", request_reply: Optional[bool] = None): - """(Aabstract method) Send a message to another agent.""" + """(Abstract method) Send a message to another agent.""" async def a_send(self, message: Union[Dict, str], recipient: "Agent", request_reply: Optional[bool] = None): - """(Aabstract async method) Send a message to another agent.""" + """(Abstract async method) Send a message to another agent.""" def receive(self, message: Union[Dict, str], sender: "Agent", request_reply: Optional[bool] = None): """(Abstract method) Receive a message from another agent.""" diff --git a/flaml/autogen/agentchat/assistant_agent.py b/flaml/autogen/agentchat/assistant_agent.py index 71cd12cc84..8d0c21ad4a 100644 --- a/flaml/autogen/agentchat/assistant_agent.py +++ b/flaml/autogen/agentchat/assistant_agent.py @@ -4,24 +4,24 @@ class AssistantAgent(ConversableAgent): - """(In preview) Assistant agent, designed to solve a task with LLM. + """(In preview) Assistant agent, designed to solve tasks with LLM. AssistantAgent is a subclass of ConversableAgent configured with a default system message. - The default system message is designed to solve a task with LLM, - including suggesting python code blocks and debugging. - `human_input_mode` is default to "NEVER" - and `code_execution_config` is default to False. - This agent doesn't execute code by default, and expects the user to execute the code. + The default system message is designed to solve tasks with LLM, + including suggesting Python code blocks and debugging. + `human_input_mode` defaults to "NEVER" + and `code_execution_config` defaults to False. + This agent doesn't execute code by default and expects the user to execute the code. """ DEFAULT_SYSTEM_MESSAGE = """You are a helpful AI assistant. Solve tasks using your coding and language skills. -In the following cases, suggest python code (in a python coding block) or shell script (in a sh coding block) for the user to execute. +In the following cases, suggest Python code (in a Python coding block) or shell script (in an sh coding block) for the user to execute. 1. When you need to collect info, use the code to output the info you need, for example, browse or search the web, download/read a file, print the content of a webpage or a file, get the current date/time. 
After sufficient info is printed and the task is ready to be solved based on your language skill, you can solve the task by yourself. 2. When you need to perform some task with code, use the code to perform the task and output the result. Finish the task smartly. Solve the task step by step if you need to. If a plan is not provided, explain your plan first. Be clear which step uses code, and which step uses your language skill. When using code, you must indicate the script type in the code block. The user cannot provide any other feedback or perform any other action beyond executing the code you suggest. The user can't modify your code. So do not suggest incomplete code which requires users to modify. Don't use a code block if it's not intended to be executed by the user. -If you want the user to save the code in a file before executing it, put # filename: inside the code block as the first line. Don't include multiple code blocks in one response. Do not ask users to copy and paste the result. Instead, use 'print' function for the output when relevant. Check the execution result returned by the user. +If you want the user to save the code in a file before executing it, put # filename: inside the code block as the first line. Don't include multiple code blocks in one response. Do not ask users to copy and paste the result. Instead, use the 'print' function for the output when relevant. Check the execution result returned by the user. If the result indicates there is an error, fix the error and output the code again. Suggest the full code instead of partial code or code changes. If the error can't be fixed or if the task is not solved even after the code is executed successfully, analyze the problem, revisit your assumption, collect additional info you need, and think of a different approach to try. When you find an answer, verify the answer carefully. Include verifiable evidence in your response if possible. Reply "TERMINATE" in the end when everything is done. @@ -36,23 +36,23 @@ def __init__( max_consecutive_auto_reply: Optional[int] = None, human_input_mode: Optional[str] = "NEVER", code_execution_config: Optional[Union[Dict, bool]] = False, - **kwargs, + **kwargs: Dict, ): """ Args: - name (str): agent name. - system_message (str): system message for the ChatCompletion inference. - Please override this attribute if you want to reprogram the agent. - llm_config (dict): llm inference configuration. - Please refer to [autogen.Completion.create](/docs/reference/autogen/oai/completion#create) + name (str): Agent name. + system_message (Optional[str]): System message for the ChatCompletion inference. + Override this attribute if you want to reprogram the agent. + llm_config (Optional[Union[Dict, bool]]): LLM inference configuration. + Refer to [autogen.Completion.create](/docs/reference/autogen/oai/completion#create) for available options. - is_termination_msg (function): a function that takes a message in the form of a dictionary + is_termination_msg (Optional[Callable[[Dict], bool]]): A function that takes a message in the form of a dictionary and returns a boolean value indicating if this received message is a termination message. The dict can contain the following keys: "content", "role", "name", "function_call". - max_consecutive_auto_reply (int): the maximum number of consecutive auto replies. - default to None (no limit provided, class attribute MAX_CONSECUTIVE_AUTO_REPLY will be used as the limit in this case). 
+ max_consecutive_auto_reply (Optional[int]): The maximum number of consecutive auto replies. + Defaults to None (no limit provided, class attribute MAX_CONSECUTIVE_AUTO_REPLY will be used as the limit in this case). The limit only plays a role when human_input_mode is not "ALWAYS". - **kwargs (dict): Please refer to other kwargs in + **kwargs (Dict): Additional keyword arguments. Refer to other kwargs in [ConversableAgent](conversable_agent#__init__). """ super().__init__( diff --git a/website/src/components/HomepageFeatures.js b/website/src/components/HomepageFeatures.js index 56f7cd4619..105e2692c7 100644 --- a/website/src/components/HomepageFeatures.js +++ b/website/src/components/HomepageFeatures.js @@ -21,19 +21,20 @@ const FeatureList = [ description: ( <> By automatically adapting LLMs to applications, FLAML - maximizes the benefits of expensive LLMs and reduce monetary cost. + maximizes the benefits of expensive LLMs and reduces monetary costs. FLAML enables users to build and use intelligent adaptive AI agents with minimal effort. ), }, +// Uncomment if needed // { // title: 'Easy to Customize or Extend', // Svg: require('../../static/img/extend.svg').default, // description: ( // <> -// FLAML is designed easy to extend, such as adding custom learners or metrics. +// FLAML is designed to be easy to extend, such as adding custom learners or metrics. // The customization level ranges smoothly from minimal -// (training data and task type as only input) to full (tuning a user-defined function). +// (training data and task type as the only input) to full (tuning a user-defined function). // // ), // }, @@ -42,8 +43,8 @@ const FeatureList = [ Svg: require('../../static/img/fast.svg').default, description: ( <> - FLAML offers a fast auto tuning tool powered by a novel cost-effective tuning approach. - It is capable of handling large search space with heterogeneous evaluation cost + FLAML offers a fast auto-tuning tool powered by a novel cost-effective tuning approach. + It is capable of handling large search spaces with heterogeneous evaluation costs and complex constraints/guidance/early stopping. 
), diff --git a/website/src/css/custom.css b/website/src/css/custom.css index 0f08b07492..328257bc6c 100644 --- a/website/src/css/custom.css +++ b/website/src/css/custom.css @@ -16,73 +16,69 @@ } .docusaurus-highlight-code-line { -background-color: rgba(0, 0, 0, 0.1); -display: block; -margin: 0 calc(-1 * var(--ifm-pre-padding)); -padding: 0 var(--ifm-pre-padding); + background-color: rgba(0, 0, 0, 0.1); + display: block; + margin: 0 calc(-1 * var(--ifm-pre-padding)); + padding: 0 var(--ifm-pre-padding); } + html[data-theme='dark'] .docusaurus-highlight-code-line { -background-color: rgb(0, 0, 0, 0.3); + background-color: rgba(0, 0, 0, 0.3); } .admonition-content a { -text-decoration: underline; -font-weight: 600; -color: inherit; + text-decoration: underline; + font-weight: 600; + color: inherit; } a { -font-weight: 600; + font-weight: 600; } blockquote { - /* samsung blue with lots of transparency */ + /* Samsung blue with lots of transparency */ background-color: #0c4da224; } -@media (prefers-color-scheme: dark) { -:root { - --ifm-hero-text-color: white; -} -} -@media (prefers-color-scheme: dark) { -.hero.hero--primary { --ifm-hero-text-color: white;} -} @media (prefers-color-scheme: dark) { -blockquote { - --ifm-color-emphasis-300: var(--ifm-color-primary); - /* border-left: 6px solid var(--ifm-color-emphasis-300); */ -} -} -@media (prefers-color-scheme: dark) { -code { - /* background-color: rgb(41, 45, 62); */ -} -} + :root { + --ifm-hero-text-color: white; + } + .hero.hero--primary { + --ifm-hero-text-color: white; + } -/* Docusaurus still defaults to their green! */ -@media (prefers-color-scheme: dark) { -.react-toggle-thumb { - border-color: var(--ifm-color-primary) !important; -} -} + blockquote { + --ifm-color-emphasis-300: var(--ifm-color-primary); + /* border-left: 6px solid var(--ifm-color-emphasis-300); */ + } + code { + /* background-color: rgb(41, 45, 62); */ + } + + /* Docusaurus still defaults to their green! 
*/ + .react-toggle-thumb { + border-color: var(--ifm-color-primary) !important; + } +} .header-github-link:hover { -opacity: 0.6; + opacity: 0.6; } .header-github-link:before { -content: ''; -width: 24px; -height: 24px; -display: flex; -background: url("data:image/svg+xml,%3Csvg viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M12 .297c-6.63 0-12 5.373-12 12 0 5.303 3.438 9.8 8.205 11.385.6.113.82-.258.82-.577 0-.285-.01-1.04-.015-2.04-3.338.724-4.042-1.61-4.042-1.61C4.422 18.07 3.633 17.7 3.633 17.7c-1.087-.744.084-.729.084-.729 1.205.084 1.838 1.236 1.838 1.236 1.07 1.835 2.809 1.305 3.495.998.108-.776.417-1.305.76-1.605-2.665-.3-5.466-1.332-5.466-5.93 0-1.31.465-2.38 1.235-3.22-.135-.303-.54-1.523.105-3.176 0 0 1.005-.322 3.3 1.23.96-.267 1.98-.399 3-.405 1.02.006 2.04.138 3 .405 2.28-1.552 3.285-1.23 3.285-1.23.645 1.653.24 2.873.12 3.176.765.84 1.23 1.91 1.23 3.22 0 4.61-2.805 5.625-5.475 5.92.42.36.81 1.096.81 2.22 0 1.606-.015 2.896-.015 3.286 0 .315.21.69.825.57C20.565 22.092 24 17.592 24 12.297c0-6.627-5.373-12-12-12'/%3E%3C/svg%3E") - no-repeat; + content: ''; + width: 24px; + height: 24px; + display: flex; + background: url("data:image/svg+xml,%3Csvg viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M12 .297c-6.63 0-12 5.373-12 12 0 5.303 3.438 9.8 8.205 11.385.6.113.82-.258.82-.577 0-.285-.01-1.04-.015-2.04-3.338.724-4.042-1.61-4.042-1.61C4.422 18.07 3.633 17.7 3.633 17.7c-1.087-.744.084-.729.084-.729 1.205.084 1.838 1.236 1.838 1.236 1.07 1.835 2.809 1.305 3.495.998.108-.776.417-1.305.76-1.605-2.665-.3-5.466-1.332-5.466-5.93 0-1.31.465-2.38 1.235-3.22-.135-.303-.54-1.523.105-3.176 0 0 1.005-.322 3.3 1.23.96-.267 1.98-.399 3-.405 1.02.006 2.04.138 3 .405 2.28-1.552 3.285-1.23 3.285-1.23.645 1.653.24 2.873.12 3.176.765.84 1.23 1.91 1.23 3.22 0 4.61-2.805 5.625-5.475 5.92.42.36.81 1.096.81 2.22 0 1.606-.015 2.896-.015 3.286 0 .315.21.69.825.57C20.565 22.092 24 17.592 24 12.297c0-6.627-5.373-12-12-12'/%3E%3C/svg%3E") + no-repeat; } html[data-theme='dark'] .header-github-link:before { -background: url("data:image/svg+xml,%3Csvg viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath fill='white' d='M12 .297c-6.63 0-12 5.373-12 12 0 5.303 3.438 9.8 8.205 11.385.6.113.82-.258.82-.577 0-.285-.01-1.04-.015-2.04-3.338.724-4.042-1.61-4.042-1.61C4.422 18.07 3.633 17.7 3.633 17.7c-1.087-.744.084-.729.084-.729 1.205.084 1.838 1.236 1.838 1.236 1.07 1.835 2.809 1.305 3.495.998.108-.776.417-1.305.76-1.605-2.665-.3-5.466-1.332-5.466-5.93 0-1.31.465-2.38 1.235-3.22-.135-.303-.54-1.523.105-3.176 0 0 1.005-.322 3.3 1.23.96-.267 1.98-.399 3-.405 1.02.006 2.04.138 3 .405 2.28-1.552 3.285-1.23 3.285-1.23.645 1.653.24 2.873.12 3.176.765.84 1.23 1.91 1.23 3.22 0 4.61-2.805 5.625-5.475 5.92.42.36.81 1.096.81 2.22 0 1.606-.015 2.896-.015 3.286 0 .315.21.69.825.57C20.565 22.092 24 17.592 24 12.297c0-6.627-5.373-12-12-12'/%3E%3C/svg%3E") - no-repeat; + background: url("data:image/svg+xml,%3Csvg viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath fill='white' d='M12 .297c-6.63 0-12 5.373-12 12 0 5.303 3.438 9.8 8.205 11.385.6.113.82-.258.82-.577 0-.285-.01-1.04-.015-2.04-3.338.724-4.042-1.61-4.042-1.61C4.422 18.07 3.633 17.7 3.633 17.7c-1.087-.744.084-.729.084-.729 1.205.084 1.838 1.236 1.838 1.236 1.07 1.835 2.809 1.305 3.495.998.108-.776.417-1.305.76-1.605-2.665-.3-5.466-1.332-5.466-5.93 0-1.31.465-2.38 1.235-3.22-.135-.303-.54-1.523.105-3.176 0 0 1.005-.322 3.3 1.23.96-.267 1.98-.399 3-.405 1.02.006 2.04.138 3 .405 2.28-1.552 3.285-1.23 
3.285-1.23.645 1.653.24 2.873.12 3.176.765.84 1.23 1.91 1.23 3.22 0 4.61-2.805 5.625-5.475 5.92.42.36.81 1.096.81 2.22 0 1.606-.015 2.896-.015 3.286 0 .315.21.69.825.57C20.565 22.092 24 17.592 24 12.297c0-6.627-5.373-12-12-12'/%3E%3C/svg%3E") + no-repeat; }
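
For reference, a minimal usage sketch of the AssistantAgent constructor documented in the patch above. This is illustrative only and not part of the patch: the import path follows the file layout shown in the diff (flaml/autogen/agentchat/assistant_agent.py), while the agent name and the llm_config contents are placeholder assumptions rather than values taken from the source.

# Illustrative sketch only; not part of the patch.
from flaml.autogen.agentchat.assistant_agent import AssistantAgent

# Placeholder LLM configuration; see autogen.Completion.create (referenced in the
# docstring above) for the options actually accepted by llm_config.
llm_config = {"config_list": [{"model": "gpt-4"}]}

assistant = AssistantAgent(
    name="assistant",              # agent name
    llm_config=llm_config,         # LLM inference configuration
    max_consecutive_auto_reply=5,  # None would fall back to the class attribute MAX_CONSECUTIVE_AUTO_REPLY
    human_input_mode="NEVER",      # default: never ask for human input
    code_execution_config=False,   # default: this agent does not execute code itself
)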