From 01eac58c450259620114248b61efdff6c258fa49 Mon Sep 17 00:00:00 2001
From: Felix Ocker
Date: Fri, 8 Nov 2024 00:49:49 +0100
Subject: [PATCH 1/4] fix versions for dependencies with breaking changes

---
 requirements.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 56f866f..d33ab18 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -21,10 +21,10 @@ peft==0.3.0
 langchain==0.0.229
 deepspeed==0.9.2
 sentence_transformers==2.2.2
+openai==0.28.0
+huggingface-hub==0.25.2
 tensorboard
-openai
 scipy
 termcolor
 flask
 flask_cors
-sentence_transformers
\ No newline at end of file

From b7390b3804dc82bcbaa69b35efe7ea6c89538561 Mon Sep 17 00:00:00 2001
From: Felix Ocker
Date: Fri, 8 Nov 2024 00:50:52 +0100
Subject: [PATCH 2/4] ignore venv and ide

---
 .gitignore | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/.gitignore b/.gitignore
index 5d75d17..2a9b36c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,6 +2,10 @@ data/
 data.zip
 *.DS_store
 
+.idea/
+.venv/
+.python-version
+
 __MACOSX/
 
 run.bash

From eb117bb4081fc916c0d86edc231a1bb79b522535 Mon Sep 17 00:00:00 2001
From: Felix Ocker
Date: Fri, 8 Nov 2024 00:52:36 +0100
Subject: [PATCH 3/4] update openai model to a supported one

---
 toolbench/inference/LLM/chatgpt_function_model.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/toolbench/inference/LLM/chatgpt_function_model.py b/toolbench/inference/LLM/chatgpt_function_model.py
index 280759e..1d55558 100644
--- a/toolbench/inference/LLM/chatgpt_function_model.py
+++ b/toolbench/inference/LLM/chatgpt_function_model.py
@@ -6,8 +6,11 @@
 import random
 
 
+OAI_MODEL = "gpt-3.5-turbo-0125"
+
+
 @retry(wait=wait_random_exponential(min=1, max=40), stop=stop_after_attempt(3))
-def chat_completion_request(key, messages, functions=None,function_call=None,key_pos=None, model="gpt-3.5-turbo-16k-0613",stop=None,process_id=0, **args):
+def chat_completion_request(key, messages, functions=None,function_call=None,key_pos=None, model=OAI_MODEL,stop=None,process_id=0, **args):
     use_messages = []
     for message in messages:
         if not("valid" in message.keys() and message["valid"] == False):
@@ -29,7 +32,7 @@ def chat_completion_request(key, messages, functions=None,function_call=None,key
         json_data.update({"function_call": function_call})
 
     try:
-        if model == "gpt-3.5-turbo-16k-0613":
+        if model == OAI_MODEL:
             openai.api_key = key
         else:
             raise NotImplementedError
@@ -45,7 +48,7 @@ def chat_completion_request(key, messages, functions=None,function_call=None,key
         return e
 
 class ChatGPTFunction:
-    def __init__(self, model="gpt-3.5-turbo-16k-0613", openai_key=""):
+    def __init__(self, model=OAI_MODEL, openai_key=""):
         self.model = model
         self.conversation_history = []
         self.openai_key = openai_key

From 763af95863b627a23ff6549b6d1f37603555d7a3 Mon Sep 17 00:00:00 2001
From: Felix Ocker
Date: Fri, 8 Nov 2024 20:22:19 +0100
Subject: [PATCH 4/4] support arrays as inputs for tools

---
 toolbench/inference/Downstream_tasks/rapidapi.py | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/toolbench/inference/Downstream_tasks/rapidapi.py b/toolbench/inference/Downstream_tasks/rapidapi.py
index e4e26b9..5fbc8a1 100644
--- a/toolbench/inference/Downstream_tasks/rapidapi.py
+++ b/toolbench/inference/Downstream_tasks/rapidapi.py
@@ -199,7 +199,11 @@ def api_json_to_openai_json(self, api_json,standard_tool_name):
         map_type = {
             "NUMBER": "integer",
             "STRING": "string",
-            "BOOLEAN": "boolean"
+            "BOOLEAN": "boolean",
+            "ARRAY": "array",
+            "integer": "integer",
+            "number": "number",
+            "array": "array",
         }
 
         pure_api_name = change_name(standardize(api_json["api_name"]))
@@ -219,10 +223,6 @@ def api_json_to_openai_json(self, api_json,standard_tool_name):
                 param_type = map_type[para["type"]]
             else:
                 param_type = "string"
-            prompt = {
-                "type":param_type,
-                "description":para["description"][:description_max_length],
-            }
 
             default_value = para['default']
             if len(str(default_value)) != 0:
@@ -237,6 +237,9 @@ def api_json_to_openai_json(self, api_json,standard_tool_name):
                     "description":para["description"][:description_max_length]
                 }
 
+            if "items" in para:
+                prompt["items"] = para["items"]
+
             templete["parameters"]["properties"][name] = prompt
             templete["parameters"]["required"].append(name)
         for para in api_json["optional_parameters"]: