Skip to content

Commit

Permalink
Fix the token-calculation bug in the Gemini API client.
Browse files Browse the repository at this point in the history
  • Loading branch information
yym68686 committed Aug 28, 2024
1 parent 11ad0b9 commit 8c6c1ba
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 3 deletions.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

setup(
name="modelmerge",
version="0.11.18",
version="0.11.19",
description="modelmerge is a multi-large language model API aggregator.",
long_description=Path.open(Path("README.md"), encoding="utf-8").read(),
long_description_content_type="text/markdown",
Expand Down
5 changes: 3 additions & 2 deletions src/ModelMerge/models/genimi.py
Original file line number Diff line number Diff line change
Expand Up @@ -305,6 +305,7 @@ async def ask_stream_async(
function_full_response: str = "{"
need_function_call = False
revicing_function_call = False
total_tokens = 0
try:
async with self.aclient.stream(
"post",
Expand All @@ -317,7 +318,7 @@ async def ask_stream_async(
async for line in response.aiter_lines():
if not line:
continue
print(line)
# print(line)
if line and '\"text\": \"' in line:
content = line.split('\"text\": \"')[1][:-1]
content = "\n".join(content.split("\\n"))
Expand Down Expand Up @@ -368,4 +369,4 @@ async def ask_stream_async(
async for chunk in self.ask_stream_async(function_response, response_role, convo_id=convo_id, function_name=function_call_name, total_tokens=total_tokens, model=model, function_arguments=function_call, api_key=kwargs.get('api_key', self.api_key), plugins=kwargs.get("plugins", PLUGINS)):
yield chunk
else:
self.add_to_conversation([{"text": full_response}], response_role, convo_id=convo_id, pass_history=pass_history)
self.add_to_conversation([{"text": full_response}], response_role, convo_id=convo_id, total_tokens=total_tokens, pass_history=pass_history)

0 comments on commit 8c6c1ba

Please sign in to comment.