From c5b86a520b03d442ea76c671bb9f95a80b3d1484 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophilus=20Homawoo?=
Date: Fri, 16 Feb 2024 14:51:31 +0100
Subject: [PATCH] Threading

---
 .../llama_index/agent/openai/step.py | 18 +++++++++++++-----
 1 file changed, 13 insertions(+), 5 deletions(-)

diff --git a/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/step.py b/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/step.py
index 935d2db19f44c..30085f8984ce1 100644
--- a/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/step.py
+++ b/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/step.py
@@ -464,6 +464,7 @@ def _run_step(
             # TODO: return response
         else:
             is_done = False
+            threads: List[Thread] = []
             for tool_call in latest_tool_calls:
                 # Some validation
                 if not isinstance(tool_call, get_args(OpenAIToolCall)):
@@ -472,17 +473,24 @@
                 if tool_call.type != "function":
                     raise ValueError("Invalid tool type. Unsupported by OpenAI")
                 # TODO: maybe execute this with multi-threading
-                self._call_function(
-                    tools,
-                    tool_call,
-                    task.extra_state["new_memory"],
-                    task.extra_state["sources"],
+                thread = Thread(
+                    target=self._call_function,
+                    args=(
+                        tools,
+                        tool_call,
+                        task.extra_state["new_memory"],
+                        task.extra_state["sources"],
+                    ),
                 )
+                thread.start()
+                threads.append(thread)
                 # change function call to the default value, if a custom function was given
                 # as an argument (none and auto are predefined by OpenAI)
                 if tool_choice not in ("auto", "none"):
                     tool_choice = "auto"
                 task.extra_state["n_function_calls"] += 1
+            for thread in threads:
+                thread.join()
             new_steps = [
                 step.get_next_step(
                     step_id=str(uuid.uuid4()),
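
Note (not part of the patch): the hunks above start one Thread per tool call and join them only after the loop has started all of them, which is what lets the tool calls overlap. The hunks do not add imports, so the change presumably relies on `from threading import Thread` and `from typing import List` already being available in step.py; that is an assumption, not something shown in the diff. The standalone Python sketch below illustrates the same fan-out/join pattern with hypothetical names (`worker`, `run_tool_calls`) instead of llama_index internals.

# Standalone sketch (not llama_index code): the fan-out/join pattern this
# patch applies to the agent's tool calls. `worker` and `run_tool_calls`
# are hypothetical names used only for this illustration.
from threading import Thread
from typing import Any, Callable, List, Sequence


def worker(fn: Callable[..., Any], arg: Any, results: List[Any], i: int) -> None:
    # Stand-in for _call_function: run one tool call and store its result.
    # Each thread writes to its own slot, so no lock is needed here.
    results[i] = fn(arg)


def run_tool_calls(fns: Sequence[Callable[..., Any]], args: Sequence[Any]) -> List[Any]:
    results: List[Any] = [None] * len(fns)
    threads: List[Thread] = []
    for i, (fn, arg) in enumerate(zip(fns, args)):
        # One thread per call, started immediately, mirroring the patched loop.
        thread = Thread(target=worker, args=(fn, arg, results, i))
        thread.start()
        threads.append(thread)
    # Join after the loop so the calls overlap instead of running one by one.
    for thread in threads:
        thread.join()
    return results


if __name__ == "__main__":
    print(run_tool_calls([abs, len], [-3, "hello"]))  # -> [3, 5]

One caveat worth checking during review: with this change, concurrent `_call_function` invocations all write into the shared `task.extra_state["new_memory"]` and `task.extra_state["sources"]` objects; whether those writes are safe to interleave depends on those objects, which this patch does not modify.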