From f6164db176f547bb3ba94962f0c8cb7cc8b0a103 Mon Sep 17 00:00:00 2001
From: "andrii.batutin"
Date: Sun, 22 Sep 2024 16:01:08 +0200
Subject: [PATCH] feat: running basic tests

---
 .../examples/simple_planing.py                  | 8 +++-----
 .../llama_index/packs/ersatz_o1/base.py         | 9 ++++++---
 2 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/llama-index-packs/llama-index-packs-ersatz-o1/examples/simple_planing.py b/llama-index-packs/llama-index-packs-ersatz-o1/examples/simple_planing.py
index 3df9dc38eccb6..dde0413e32836 100644
--- a/llama-index-packs/llama-index-packs-ersatz-o1/examples/simple_planing.py
+++ b/llama-index-packs/llama-index-packs-ersatz-o1/examples/simple_planing.py
@@ -8,19 +8,17 @@
 llm = OpenAI(model="gpt-4-turbo")
 
 task_context = """
-Every day, Wendi feeds each of her chickens three cups of mixed chicken feed, containing seeds, mealworms and vegetables
-to help keep them healthy. She gives the chickens their feed in three separate meals. In the morning, she gives her flock of
-chickens 15 cups of feed. In the afternoon, she gives her chickens another 25 cups of feed.
+Tim gets a promotion that offers him a 5% raise on his $20000 a month salary. It also gives him a bonus worth half a month’s salary.
 """
 
 query_engine = ErsatzO1QueryEngine(
     context=task_context,
     llm=llm,
-    reasoning_paths=5,
+    reasoning_paths=20,
     verbose=True,
 )
 
 if __name__ == "__main__":
-    res = query_engine.query("How many cups of feed does she need to give her chickens in the final meal of the day if the size of Wendi’s flock is 20 chickens?")
+    res = query_engine.query("How much money will he make in a year?")
     print(res)
 
diff --git a/llama-index-packs/llama-index-packs-ersatz-o1/llama_index/packs/ersatz_o1/base.py b/llama-index-packs/llama-index-packs-ersatz-o1/llama_index/packs/ersatz_o1/base.py
index ca628c36f31bb..d4b7fec4a2841 100644
--- a/llama-index-packs/llama-index-packs-ersatz-o1/llama_index/packs/ersatz_o1/base.py
+++ b/llama-index-packs/llama-index-packs-ersatz-o1/llama_index/packs/ersatz_o1/base.py
@@ -79,7 +79,7 @@ async def async_textual_reasoning(
     llm: LLM,
     num_paths: int = 5,
     verbose: bool = False,
-    temperature: float = 0.01,
+    temperature: float = 1.99,
 ) -> List[Tuple[str, int]]:
     """
     Perform asynchronous textual reasoning using the ErsatzO1 approach.
@@ -96,8 +96,11 @@ async def async_textual_reasoning(
             question=query_str,
             context=context,
         )
-        answer, confidence = parse_response(str(response))
-        results.append((answer, confidence))
+        try:
+            answer, confidence = parse_response(str(response))
+            results.append((answer, confidence))
+        except:
+            print(f"Error parsing response: {response}")
     return results
 
 
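Note on the new error handling in async_textual_reasoning: the second base.py hunk wraps parse_response in a bare `except:` and reports failures with print. Below is a minimal sketch of a narrower alternative; collect_parsed_results, the (ValueError, IndexError) exception set, and the logging setup are illustrative assumptions for this sketch, not part of the pack's API or of this patch.

import logging
from typing import Callable, List, Tuple

logger = logging.getLogger(__name__)


def collect_parsed_results(
    responses: List[str],
    parse_response: Callable[[str], Tuple[str, int]],
) -> List[Tuple[str, int]]:
    """Parse raw LLM responses, keeping the well-formed ones and logging the rest."""
    results: List[Tuple[str, int]] = []
    for response in responses:
        try:
            answer, confidence = parse_response(response)
            results.append((answer, confidence))
        # Catch only expected parsing failures; a bare `except:` would also
        # swallow KeyboardInterrupt, SystemExit, and genuine bugs.
        except (ValueError, IndexError) as exc:
            logger.warning("Error parsing response %r: %s", response, exc)
    return results


# Example usage with a hypothetical parser that expects "answer|confidence":
def toy_parse(text: str) -> Tuple[str, int]:
    answer, confidence = text.split("|")
    return answer, int(confidence)


print(collect_parsed_results(["42|3", "garbled"], toy_parse))  # [('42', 3)]

Catching a specific exception set keeps malformed reasoning paths out of the results without hiding interrupts or unrelated errors, and logger.warning records the failure without writing to stdout.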