Commit ab9d8a3

fix:return conv_uid and update wechat
Aries-ckt committed Mar 21, 2024
1 parent 0cf08f3 commit ab9d8a3
Showing 3 changed files with 2 additions and 4 deletions.
Binary file modified assets/wechat.jpg
4 changes: 1 addition & 3 deletions dbgpt/app/openapi/api_v1/api_v1.py
@@ -439,9 +439,7 @@ async def stream_generator(chat, incremental: bool, model_name: str):
         _type_: streaming responses
     """
     span = root_tracer.start_span("stream_generator")
     msg = "[LLM_ERROR]: llm server has no output, maybe your prompt template is wrong."
-
-    stream_id = f"chatcmpl-{str(uuid.uuid1())}"
     previous_response = ""
     async for chunk in chat.stream_call():
         if chunk:
@@ -453,7 +451,7 @@ async def stream_generator(chat, incremental: bool, model_name: str):
                     delta=DeltaMessage(role="assistant", content=incremental_output),
                 )
                 chunk = ChatCompletionStreamResponse(
-                    id=stream_id, choices=[choice_data], model=model_name
+                    id=chat.chat_session_id, choices=[choice_data], model=model_name
                 )
                 yield f"data: {chunk.json(exclude_unset=True, ensure_ascii=False)}\n\n"
             else:
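
With this change the streamed ChatCompletionStreamResponse carries the conversation id (chat.chat_session_id, i.e. the conv_uid) in its id field instead of a freshly generated chatcmpl-<uuid>, so a caller can recover the conversation id from any chunk of the stream. Below is a minimal sketch of such a caller, assuming an httpx client and an SSE endpoint at /api/v1/chat/completions; the endpoint path, payload shape, and helper name are illustrative assumptions, not part of this commit.

# Illustrative sketch, not part of the commit: endpoint path, payload fields,
# and the helper name are assumptions.
import json

import httpx


async def stream_and_capture_conv_uid(base_url: str, payload: dict) -> str:
    """Stream a chat completion and return the conversation id seen in the chunks."""
    conv_uid = ""
    async with httpx.AsyncClient(base_url=base_url, timeout=None) as client:
        async with client.stream(
            "POST", "/api/v1/chat/completions", json=payload
        ) as response:
            async for line in response.aiter_lines():
                if line.strip() == "data: [DONE]":
                    break
                if line.startswith("data:"):
                    chunk = json.loads(line[len("data: ") :])
                    # After this commit, `id` is chat.chat_session_id (the conv_uid),
                    # not a new chatcmpl-<uuid> generated per request.
                    conv_uid = chunk.get("id", conv_uid)
    return conv_uid
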
2 changes: 1 addition & 1 deletion dbgpt/client/client.py
@@ -247,7 +247,7 @@ async def chat_stream(
         if response.status_code == 200:
             async for line in response.aiter_lines():
                 try:
-                    if line == "data: [DONE]\n":
+                    if line.strip() == "data: [DONE]":
                         break
                     if line.startswith("data:"):
                         json_data = json.loads(line[len("data: ") :])
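
This change tightens the client's end-of-stream check: httpx's Response.aiter_lines() yields lines without their trailing newline, so the old exact comparison against "data: [DONE]\n" would not match; stripping the line and comparing the bare sentinel handles both forms. A small sketch of the check in isolation (the helper name is illustrative, not part of the client API):

# Illustrative sketch, not part of the commit: the helper name is an assumption.
def is_done(line: str) -> bool:
    """Return True when an SSE line is the OpenAI-style [DONE] sentinel."""
    return line.strip() == "data: [DONE]"


assert is_done("data: [DONE]")            # aiter_lines() style, no trailing newline
assert is_done("data: [DONE]\n")          # raw SSE frame with trailing newline
assert not is_done('data: {"id": "abc"}')
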
