Skip to content

Commit

Permalink
✨ Add support for reasoning content and thinking state in response streaming
Browse files Browse the repository at this point in the history
  • Loading branch information
yym68686 committed Feb 10, 2025
1 parent 5b8cea5 commit 25956c6
Show file tree
Hide file tree
Showing 2 changed files with 29 additions and 2 deletions.
22 changes: 22 additions & 0 deletions response.py
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,8 @@ async def fetch_gpt_response_stream(client, url, headers, payload):
timestamp = int(datetime.timestamp(datetime.now()))
random.seed(timestamp)
random_str = ''.join(random.choices(string.ascii_letters + string.digits, k=29))
is_thinking = False
has_send_thinking = False
async with client.stream('POST', url, headers=headers, json=payload) as response:
error_message = await check_response(response, "fetch_gpt_response_stream")
if error_message:
Expand All @@ -158,6 +160,26 @@ async def fetch_gpt_response_stream(client, url, headers, payload):
return
line = json.loads(result)
line['id'] = f"chatcmpl-{random_str}"

content = safe_get(line, "choices", 0, "delta", "content", default="")
if "<think>" in content:
is_thinking = True
if "</think>" in content:
is_thinking = False

content = content.replace("<think>", "").replace("</think>", "")
if not has_send_thinking:
content = content.replace("\n\n", "")
reasoning_content = safe_get(line, "choices", 0, "delta", "reasoning_content", default="")
if not content and not reasoning_content:
continue

if is_thinking and content:
sse_string = await generate_sse_response(timestamp, payload["model"], reasoning_content=content)
yield sse_string
has_send_thinking = True
continue

no_stream_content = safe_get(line, "choices", 0, "message", "content", default=None)
if no_stream_content:
sse_string = await generate_sse_response(safe_get(line, "created", default=None), safe_get(line, "model", default=None), content=no_stream_content)
Expand Down
9 changes: 7 additions & 2 deletions utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -703,9 +703,14 @@ def safe_get(data, *keys, default=None):

import random
import string
async def generate_sse_response(timestamp, model, content=None, tools_id=None, function_call_name=None, function_call_content=None, role=None, total_tokens=0, prompt_tokens=0, completion_tokens=0):
async def generate_sse_response(timestamp, model, content=None, tools_id=None, function_call_name=None, function_call_content=None, role=None, total_tokens=0, prompt_tokens=0, completion_tokens=0, reasoning_content=None):
random.seed(timestamp)
random_str = ''.join(random.choices(string.ascii_letters + string.digits, k=29))

delta_content = {"role": "assistant", "content": content} if content else {}
if reasoning_content:
delta_content = {"role": "assistant", "content": "", "reasoning_content": reasoning_content}

sample_data = {
"id": f"chatcmpl-{random_str}",
"object": "chat.completion.chunk",
Expand All @@ -714,7 +719,7 @@ async def generate_sse_response(timestamp, model, content=None, tools_id=None, f
"choices": [
{
"index": 0,
"delta": {"content": content} if content else {},
"delta": delta_content,
"logprobs": None,
"finish_reason": None if content else "stop"
}
Expand Down

0 comments on commit 25956c6

Please sign in to comment.