Skip to content

Commit

Permalink
Change the number of tokens to 0 on error, as is standard for OpenAI & Ollama
Browse files Browse the repository at this point in the history
  • Loading branch information
alexjoham committed Oct 23, 2024
1 parent 9fe9e0a commit 13c5db1
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 4 deletions.
4 changes: 2 additions & 2 deletions app/llm/external/ollama.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,8 +119,8 @@ def chat(
)
return convert_to_iris_message(
response.get("message"),
response.get("prompt_eval_count", -1),
response.get("eval_count", -1),
response.get("prompt_eval_count", 0),
response.get("eval_count", 0),
response.get("model", self.model),
)

Expand Down
4 changes: 2 additions & 2 deletions app/llm/external/openai_chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,8 +75,8 @@ def convert_to_iris_message(
"""
Convert a ChatCompletionMessage to a PyrisMessage
"""
num_input_tokens = getattr(usage, "prompt_tokens", -1)
num_output_tokens = getattr(usage, "completion_tokens", -1)
num_input_tokens = getattr(usage, "prompt_tokens", 0)
num_output_tokens = getattr(usage, "completion_tokens", 0)

tokens = TokenUsageDTO(
model=model,
Expand Down

0 comments on commit 13c5db1

Please sign in to comment.