Skip to content

Commit

Permalink
Bug fix: cannot chat with deepseek (#1879)
Browse files Browse the repository at this point in the history
  • Loading branch information
Menghuan1918 committed Jul 4, 2024
1 parent 0c6c357 commit 114192e
Showing 1 changed file with 8 additions and 1 deletion.
9 changes: 8 additions & 1 deletion request_llms/oai_std_model_template.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def decode_chunk(chunk):
try:
chunk = json.loads(chunk[6:])
except:
respose = "API_ERROR"
respose = ""
finish_reason = chunk
# 错误处理部分
if "error" in chunk:
Expand Down Expand Up @@ -200,10 +200,13 @@ def predict_no_ui_long_connection(

stream_response = response.iter_lines()
result = ""
finish_reason = ""
while True:
try:
chunk = next(stream_response)
except StopIteration:
if result == "":
raise RuntimeError(f"获得空的回复,可能原因:{finish_reason}")
break
except requests.exceptions.ConnectionError:
chunk = next(stream_response) # 失败了,重试一次?再失败就没办法了。
Expand Down Expand Up @@ -351,6 +354,10 @@ def predict(
response_text, finish_reason = decode_chunk(chunk)
# 返回的数据流第一次为空,继续等待
if response_text == "" and finish_reason != "False":
status_text = f"finish_reason: {finish_reason}"
yield from update_ui(
chatbot=chatbot, history=history, msg=status_text
)
continue
if chunk:
try:
Expand Down

0 comments on commit 114192e

Please sign in to comment.