revise qwen
binary-husky committed Jul 2, 2024
1 parent 6cd2d80 commit 0c6c357
Showing 2 changed files with 8 additions and 3 deletions.
request_llms/bridge_qwen.py (3 changes: 2 additions & 1 deletion)
@@ -1,7 +1,7 @@
 import time
 import os
 from toolbox import update_ui, get_conf, update_ui_lastest_msg
-from toolbox import check_packages, report_exception
+from toolbox import check_packages, report_exception, log_chat
 
 model_name = 'Qwen'
 
@@ -59,6 +59,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
         chatbot[-1] = (inputs, response)
         yield from update_ui(chatbot=chatbot, history=history)
 
+    log_chat(llm_model=llm_kwargs["llm_model"], input_str=inputs, output_str=response)
     # Summarize the output
     if response == f"[Local Message] 等待{model_name}响应中 ...":
         response = f"[Local Message] {model_name}响应异常 ..."
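
For context, a hypothetical sketch of a helper matching log_chat's call signature, assuming it simply records one finished exchange through the standard logging module; the project's real implementation lives in its toolbox module. The [raw_input]/[response] tags mirror the two logging.info calls removed from com_qwenapi.py below, which suggests this commit centralizes per-chat logging behind the helper.

    import logging

    # Hypothetical stand-in for toolbox.log_chat, for illustration only:
    # record one completed request/response pair against the model name.
    def log_chat(llm_model: str, input_str: str, output_str: str) -> None:
        logging.info('[model] %s', llm_model)
        logging.info('[raw_input] %s', input_str)
        logging.info('[response] %s', output_str)
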
request_llms/com_qwenapi.py (8 changes: 6 additions & 2 deletions)
@@ -65,8 +65,12 @@ def generate(self, inputs, llm_kwargs, history, system_prompt):
                 self.result_buf += f"[Local Message] 请求错误:状态码:{response.status_code},错误码:{response.code},消息:{response.message}"
                 yield self.result_buf
                 break
-        logging.info(f'[raw_input] {inputs}')
-        logging.info(f'[response] {self.result_buf}')
+
+        # Exhaust the generator to avoid errors
+        while True:
+            try: next(responses)
+            except: break
+
         return self.result_buf
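
The new while/try block drains whatever remains in the responses generator once the streaming loop exits early, so the Qwen API stream is fully consumed rather than abandoned mid-iteration (per the commit's own comment, this avoids errors). A standalone sketch of the same drain pattern, assuming gen is any generator; note that the commit's bare except also swallows errors other than StopIteration:

    def drain(gen):
        # Consume and discard any items remaining in the generator.
        while True:
            try:
                next(gen)
            except StopIteration:
                # Normal exhaustion: nothing left to pull.
                break
            except Exception:
                # Mirrors the commit's bare `except`: late stream errors are dropped too.
                break

    # When only StopIteration is expected, the idiomatic one-liner is:
    #     for _ in gen: pass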

