From fffcd155505921ba72962ec8cd49e5b67c8d2ef9 Mon Sep 17 00:00:00 2001
From: Logan
Date: Fri, 29 Dec 2023 11:36:02 -0600
Subject: [PATCH] fix token counter (#9744)

---
 llama_index/callbacks/token_counting.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama_index/callbacks/token_counting.py b/llama_index/callbacks/token_counting.py
index b1e70ba1953a4..d13a5e283cf87 100644
--- a/llama_index/callbacks/token_counting.py
+++ b/llama_index/callbacks/token_counting.py
@@ -50,7 +50,7 @@ def get_llm_token_counts(
         response_tokens = 0
 
         if response is not None and response.raw is not None:
-            usage = response.raw.get("usage")  # type: ignore
+            usage = dict(response.raw.get("usage"))  # type: ignore
 
             if usage is not None:
                 messages_tokens = usage.get("prompt_tokens", 0)
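
For context, a minimal sketch of the failure this one-liner likely guards against: with the OpenAI v1 SDK, `response.raw["usage"]` is a pydantic `CompletionUsage` model rather than a plain dict, so the later `usage.get("prompt_tokens", 0)` call raises `AttributeError`; wrapping it in `dict()` normalizes it to a plain dict. The `CompletionUsage` stand-in below is illustrative, not taken from the patch.

```python
# Sketch only: a pydantic model standing in for the SDK's usage object.
from pydantic import BaseModel


class CompletionUsage(BaseModel):  # assumed shape, mirrors the OpenAI SDK
    prompt_tokens: int = 0
    completion_tokens: int = 0
    total_tokens: int = 0


raw = {"usage": CompletionUsage(prompt_tokens=12, completion_tokens=5, total_tokens=17)}

usage = raw.get("usage")
# usage.get("prompt_tokens", 0)  # AttributeError: pydantic models have no .get()

usage = dict(raw.get("usage"))  # dict() iterates the model's (field, value) pairs
print(usage.get("prompt_tokens", 0))  # -> 12
```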