From 0c6c357e9c19976badcf05611bdcc36eced73287 Mon Sep 17 00:00:00 2001
From: binary-husky
Date: Tue, 2 Jul 2024 14:22:45 +0000
Subject: [PATCH] revise qwen

---
 request_llms/bridge_qwen.py | 3 ++-
 request_llms/com_qwenapi.py | 8 ++++++--
 2 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/request_llms/bridge_qwen.py b/request_llms/bridge_qwen.py
index 2b1eeed27..0a06545bb 100644
--- a/request_llms/bridge_qwen.py
+++ b/request_llms/bridge_qwen.py
@@ -1,7 +1,7 @@
 import time
 import os
 from toolbox import update_ui, get_conf, update_ui_lastest_msg
-from toolbox import check_packages, report_exception
+from toolbox import check_packages, report_exception, log_chat
 
 model_name = 'Qwen'
 
@@ -59,6 +59,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
         chatbot[-1] = (inputs, response)
         yield from update_ui(chatbot=chatbot, history=history)
 
+    log_chat(llm_model=llm_kwargs["llm_model"], input_str=inputs, output_str=response)
     # Summarize the output
     if response == f"[Local Message] 等待{model_name}响应中 ...":
         response = f"[Local Message] {model_name}响应异常 ..."
diff --git a/request_llms/com_qwenapi.py b/request_llms/com_qwenapi.py
index 2cde52c1b..a3adad0b3 100644
--- a/request_llms/com_qwenapi.py
+++ b/request_llms/com_qwenapi.py
@@ -65,8 +65,12 @@ def generate(self, inputs, llm_kwargs, history, system_prompt):
                 self.result_buf += f"[Local Message] 请求错误:状态码:{response.status_code},错误码:{response.code},消息:{response.message}"
                 yield self.result_buf
                 break
-        logging.info(f'[raw_input] {inputs}')
-        logging.info(f'[response] {self.result_buf}')
+
+        # Exhaust the generator to avoid errors
+        while True:
+            try: next(responses)
+            except: break
+
         return self.result_buf
 
 
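
Note on the second hunk: the loop added to com_qwenapi.py exhausts the streaming generator so that an early `break` out of the response loop does not leave it half-consumed. Below is a minimal sketch of the same pattern with a narrowed except clause, assuming `responses` is the streaming generator that `generate` iterates over (for DashScope, the result of `Generation.call(..., stream=True)`); the `drain_stream` helper name is hypothetical, not part of the patch.

    def drain_stream(responses):
        """Exhaust a response generator after an early `break`.

        `responses` is assumed to be the streaming generator that
        `generate` iterates over; this helper is a sketch of the
        pattern the patch inlines, not the patch's own code.
        """
        while True:
            try:
                next(responses)
            except StopIteration:
                break  # generator fully consumed: the normal exit
            except Exception:
                break  # ignore transport/API errors while draining

The patch itself inlines this loop with a bare `except:`; narrowing it to `StopIteration` plus `Exception`, as above, keeps `KeyboardInterrupt` and `SystemExit` deliverable while still guaranteeing the generator is consumed.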