From 89930935001b43d6a87315ac04a6eb50d2718dbe Mon Sep 17 00:00:00 2001 From: sastpg <3200105875@zju.edu.cn> Date: Sat, 6 May 2023 14:37:22 +0800 Subject: [PATCH 1/6] =?UTF-8?q?=E9=98=B2=E6=AD=A2=E6=B6=88=E6=81=AF?= =?UTF-8?q?=E6=92=A4=E5=9B=9E=EF=BC=8C=E5=A2=9E=E5=8A=A0=E5=AF=B9=E8=AF=9D?= =?UTF-8?q?=E9=A3=8E=E6=A0=BC=E5=88=87=E6=8D=A2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- model/bing/new_bing_model.py | 285 ++++++++++++++++++++--------------- 1 file changed, 162 insertions(+), 123 deletions(-) diff --git a/model/bing/new_bing_model.py b/model/bing/new_bing_model.py index 933866e2..fa0498c5 100644 --- a/model/bing/new_bing_model.py +++ b/model/bing/new_bing_model.py @@ -6,106 +6,133 @@ from EdgeGPT import Chatbot, ConversationStyle from ImageGen import ImageGen from common import functions -from model.bing.jailbroken_sydney import SydneyBot - -user_session = dict() +import random +import json +# from model.bing.jailbroken_sydney import SydneyBot +user_chathistory = dict() suggestion_session = dict() + # newBing对话模型逆向网页gitAPI class BingModel(Model): - style = ConversationStyle.creative + style = 'creative' bot: Chatbot = None - cookies: list = None + cookies = None + def __init__(self): try: self.cookies = model_conf_val("bing", "cookies") self.jailbreak = model_conf_val("bing", "jailbreak") - self.bot = SydneyBot(cookies=self.cookies, options={}) if ( - self.jailbreak) else Chatbot(cookies=self.cookies) except Exception as e: log.warn(e) - async def reply_text_stream(self, query: str, context=None) -> dict: - async def handle_answer(final, answer): - if final: - try: - reply = self.build_source_attributions(answer, context) - log.info("[NewBing] reply:{}", reply) - yield True, reply - except Exception as e: - log.warn(answer) - log.warn(e) - await user_session.get(context['from_user_id'], None).reset() - yield True, answer - else: - try: - yield False, answer - except Exception as e: - log.warn(answer) - log.warn(e) - await user_session.get(context['from_user_id'], None).reset() - yield True, answer + async def reply_text_stream(self, query: str, context=None): + pass + def reply(self, query: str, context=None): if not context or not context.get('type') or context.get('type') == 'TEXT': - clear_memory_commands = common_conf_val( - 'clear_memory_commands', ['#清除记忆']) - if query in clear_memory_commands: - user_session[context['from_user_id']] = None - yield True, '记忆已清除' - bot = user_session.get(context['from_user_id'], None) - if not bot: - bot = self.bot - else: - query = self.get_quick_ask_query(query, context) - user_session[context['from_user_id']] = bot - log.info("[NewBing] query={}".format(query)) - if self.jailbreak: - async for final, answer in bot.ask_stream(query, conversation_style=self.style, message_id=bot.user_message_id): - async for result in handle_answer(final, answer): - yield result - else: - async for final, answer in bot.ask_stream(query, conversation_style=self.style): - async for result in handle_answer(final, answer): - yield result + return asyncio.run(self.__reply(query, context)) + elif context.get('type', None) == 'IMAGE_CREATE': + return self.create_img(query) - def reply(self, query: str, context=None) -> tuple[str, dict]: - if not context or not context.get('type') or context.get('type') == 'TEXT': - clear_memory_commands = common_conf_val( + async def __reply(self, query: str, context): + clear_memory_commands = common_conf_val( 'clear_memory_commands', ['#清除记忆']) - if query in clear_memory_commands: 
- user_session[context['from_user_id']] = None - return '记忆已清除' - bot = user_session.get(context['from_user_id'], None) - if (bot == None): - bot = self.bot - else: - query = self.get_quick_ask_query(query, context) - - user_session[context['from_user_id']] = bot - log.info("[NewBing] query={}".format(query)) + if query in clear_memory_commands: + user_chathistory[context['from_user_id']] = None + return '记忆已清除' + + # deal chat_history + chat_style = "" + chat_history = "" + if user_chathistory.get(context['from_user_id'], None) == None: if (self.jailbreak): - task = bot.ask(query, conversation_style=self.style, - message_id=bot.user_message_id) - else: - task = bot.ask(query, conversation_style=self.style) - - answer = asyncio.run(task) - if isinstance(answer, str): - return answer - try: - reply = answer["item"]["messages"][-1] - except Exception as e: - user_session.get(context['from_user_id'], None).reset() - log.warn(answer) - return "本轮对话已超时,已开启新的一轮对话,请重新提问。" - return self.build_source_attributions(answer, context) - elif context.get('type', None) == 'IMAGE_CREATE': - if functions.contain_chinese(query): - return "ImageGen目前仅支持使用英文关键词生成图片" - return self.create_img(query) + chars = model_conf_val("bing", "jailbreak_prompt") + chars = chars + "\n\n" + chat_history = ''.join(chars) + user_chathistory[context['from_user_id']] = ['creative', chat_history] + else: + if not chat_history.endswith("\n\n"): + if chat_history.endswith("\n"): + chat_history += "\n" + else: + chat_history += "\n\n" + chat_style = user_chathistory[context['from_user_id']][0] + chat_history = user_chathistory[context['from_user_id']][1] + + query = self.get_quick_ask_query(query, context) + if query == "输入的序号不在建议列表范围中": + return "对不起,您输入的序号不在建议列表范围中(数字1-9均会被认为是建议列表),请重新输入。" + if "[style]已切换至" in query: + return query + + print(user_chathistory) + + log.info("[NewBing] query={}".format(query)) + bot = await Chatbot.create(cookies=self.cookies) + reply_text = "" + reference = "" + suggestion = "" + async def stream_output(): + nonlocal chat_history + nonlocal chat_style + nonlocal reply_text + nonlocal reference + nonlocal suggestion + chat_history += f"[user](#message)\n{query}\n\n" + wrote = 0 + async for final, response in bot.ask_stream( + prompt=query, + raw=True, + webpage_context=chat_history, + conversation_style=chat_style, + search_result=True + ): + if not final and response["type"] == 1 and "messages" in response["arguments"][0]: + message = response["arguments"][0]["messages"][0] + # match message.get("messageType"): + if message.get("messageType") == "InternalSearchQuery": + pass + #chat_history += f"[assistant](#search_query)\n{message['hiddenText']}\n\n" + elif message.get("messageType") == "InternalSearchResult": + #chat_history += f"[assistant](#search_results)\n{message['hiddenText']}\n\n" + reference += f"[assistant](#search_results)\n{message['hiddenText']}" + elif message.get("messageType") == None: + if "cursor" in response["arguments"][0]: + chat_history += "[assistant](#message)\n" + wrote = 0 + if message.get("contentOrigin") == "Apology": + log.info("检测到AI生成内容被撤回...已阻止") + break + else: + chat_history += message["text"][wrote:] + reply_text += message["text"][wrote:] + wrote = len(message["text"]) + if "suggestedResponses" in message: + suggestion = list(map(lambda x: x["text"], message["suggestedResponses"])) + chat_history += f"""\n[assistant](#suggestions) +```json +{{"suggestedUserResponses": {suggestion}}} +```\n\n""" + break + if final and not 
response["item"]["messages"][-1].get("text"): + raise Exception("发送的消息被过滤或者对话超时") + + + try: + await stream_output() + except Exception as e: + log.info(e) + + # 更新历史对话 + user_chathistory[context['from_user_id']][1] = chat_history + await bot.close() + return self.build_source_attributions(reply_text, reference, suggestion, context) + + def create_img(self, query): try: @@ -123,51 +150,63 @@ def get_quick_ask_query(self, query, context): if (len(query) == 1 and query.isdigit() and query != "0"): suggestion_dict = suggestion_session[context['from_user_id']] if (suggestion_dict != None): - query = suggestion_dict[int(query)-1] - if (query == None): + try: + query = suggestion_dict[int(query)-1] + if (query == None): + return "输入的序号不在建议列表范围中" + else: + query = "在上面的基础上,"+query + except: return "输入的序号不在建议列表范围中" - else: - query = "在上面的基础上,"+query + elif(query == "/creative"): + user_chathistory[context['from_user_id']][0] = query[1:] + return "[style]已切换至创造模式" + elif(query == "/balanced"): + user_chathistory[context['from_user_id']][0] = query[1:] + return "[style]已切换至平衡模式" + elif(query == "/precise"): + user_chathistory[context['from_user_id']][0] = query[1:] + return "[style]已切换至精确模式" return query - def build_source_attributions(self, answer, context): - reference = "" - reply = answer["item"]["messages"][-1] - reply_text = reply["text"] - if "sourceAttributions" in reply: - for i, attribution in enumerate(reply["sourceAttributions"]): - display_name = attribution["providerDisplayName"] - url = attribution["seeMoreUrl"] - reference += f"{i+1}、[{display_name}]({url})\n\n" - - if len(reference) > 0: - reference = "***\n"+reference - - suggestion = "" - if "suggestedResponses" in reply: - suggestion_dict = dict() - for i, attribution in enumerate(reply["suggestedResponses"]): - suggestion_dict[i] = attribution["text"] - suggestion += f">{i+1}、{attribution['text']}\n\n" - suggestion_session[context['from_user_id'] - ] = suggestion_dict - - if len(suggestion) > 0: - suggestion = "***\n你可以通过输入序号快速追问我以下建议问题:\n\n"+suggestion - - throttling = answer["item"]["throttling"] - throttling_str = "" - - if throttling["numUserMessagesInConversation"] == throttling["maxNumUserMessagesInConversation"]: - user_session.get(context['from_user_id'], None).reset() - throttling_str = "(对话轮次已达上限,本次聊天已结束,将开启新的对话)" + def build_source_attributions(self, reply_text, reference, suggestion, context): + if not reply_text.endswith("\n\n"): + if reply_text.endswith("\n"): + reply_text += "\n" else: - throttling_str = f"对话轮次: {throttling['numUserMessagesInConversation']}/{throttling['maxNumUserMessagesInConversation']}\n" + reply_text += "\n\n" + + references = "" + print(reference[36:-3]) + if 'json' in reference: + reference_dict = json.loads(reference[36:-3]) + for i in range(len(reference_dict['web_search_results'])): + r = reference_dict['web_search_results'][i] + title = r['title'] + url = r['url'] + references += f"{i+1}、[{title}]({url})\n\n" + + + suggestions = "" + suggestion_dict = dict() + if len(suggestion) > 0: + for i in range(len(suggestion)): + suggestion_dict[i] = suggestion[i] + suggestions += f">{i+1}、{suggestion[i]}\n\n" + suggestions = "=====\n💡你可能想问(输入序号):\n\n" + suggestions + suggestion_session[context['from_user_id']] = suggestion_dict + + # throttling = answer["item"]["throttling"] + # throttling_str = "" + + # if not self.jailbreak: + # if throttling["numUserMessagesInConversation"] == throttling["maxNumUserMessagesInConversation"]: + # user_session.get(context['from_user_id'], None).reset() + # 
throttling_str = "(对话轮次已达上限,本次聊天已结束,将开启新的对话)" + # else: + # throttling_str = f"对话轮次: {throttling['numUserMessagesInConversation']}/{throttling['maxNumUserMessagesInConversation']}\n" + + response = f"{reply_text}******\n{references}{suggestions}******\n" + log.info("[NewBing] reply={}", response) + return response - response = f"{reply_text}\n{reference}\n{suggestion}\n***\n{throttling_str}" - log.info("[NewBing] reply={}", response) - return response - else: - user_session.get(context['from_user_id'], None).reset() - log.warn("[NewBing] reply={}", answer) - return "对话被接口拒绝,已开启新的一轮对话。" From 64b0781e38ad10f4d65be63c052b60a8ffc385ba Mon Sep 17 00:00:00 2001 From: sastpg <3200105875@zju.edu.cn> Date: Sat, 6 May 2023 16:13:39 +0800 Subject: [PATCH 2/6] =?UTF-8?q?=E9=80=82=E9=85=8DEdgeGPT=200.3.5;=20?= =?UTF-8?q?=E5=A2=9E=E5=8A=A0=E9=98=B2=E6=AD=A2=E5=BE=AE=E8=BD=AF=E6=92=A4?= =?UTF-8?q?=E5=9B=9EAI=E6=B6=88=E6=81=AF;=20=E5=A2=9E=E5=8A=A0=E5=AF=B9?= =?UTF-8?q?=E8=AF=9D=E9=A3=8E=E6=A0=BC=E5=88=87=E6=8D=A2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- model/bing/new_bing_model.py | 129 +++++++++++++++++++++++++++-------- 1 file changed, 101 insertions(+), 28 deletions(-) diff --git a/model/bing/new_bing_model.py b/model/bing/new_bing_model.py index fa0498c5..026d5e04 100644 --- a/model/bing/new_bing_model.py +++ b/model/bing/new_bing_model.py @@ -8,20 +8,17 @@ from common import functions import random import json -# from model.bing.jailbroken_sydney import SydneyBot + user_chathistory = dict() suggestion_session = dict() - # newBing对话模型逆向网页gitAPI class BingModel(Model): - style = 'creative' bot: Chatbot = None cookies = None - def __init__(self): try: self.cookies = model_conf_val("bing", "cookies") @@ -29,8 +26,60 @@ def __init__(self): except Exception as e: log.warn(e) - async def reply_text_stream(self, query: str, context=None): - pass + async def reply_text_stream(self, query: str, context=None) -> dict: + async def handle_answer(final, answer): + if final: + try: + reply = self.build_source_attributions(answer, context) + log.info("[NewBing] reply:{}", reply) + await bot.close() + yield True, reply + except Exception as e: + log.warn(answer) + log.warn(e) + await user_chathistory.get(context['from_user_id'], None).reset() + yield True, answer + else: + try: + yield False, answer + except Exception as e: + log.warn(answer) + log.warn(e) + await user_chathistory.get(context['from_user_id'], None).reset() + yield True, answer + + if not context or not context.get('type') or context.get('type') == 'TEXT': + clear_memory_commands = common_conf_val( + 'clear_memory_commands', ['#清除记忆']) + if query in clear_memory_commands: + user_chathistory[context['from_user_id']] = None + yield True, '记忆已清除' + + chat_style = "" + chat_history = "" + if user_chathistory.get(context['from_user_id'], None) == None: + if (self.jailbreak): + chars = model_conf_val("bing", "jailbreak_prompt") + chars = chars + "\n\n" + chat_history = ''.join(chars) + user_chathistory[context['from_user_id']] = ['creative', chat_history] + else: + if not chat_history.endswith("\n\n"): + if chat_history.endswith("\n"): + chat_history += "\n" + else: + chat_history += "\n\n" + chat_style = user_chathistory[context['from_user_id']][0] + chat_history = user_chathistory[context['from_user_id']][1] + + query = self.get_quick_ask_query(query, context) + bot = await Chatbot.create(cookies=self.cookies) + user_chathistory[context['from_user_id']][1] += f"[user](#message)\n{query}\n\n" + 
log.info("[NewBing] query={}".format(query)) + + async for final, answer in bot.ask_stream(prompt=query, raw=True, webpage_context=chat_history, conversation_style=chat_style, search_result=True): + async for result in handle_answer(final, answer): + yield result def reply(self, query: str, context=None): if not context or not context.get('type') or context.get('type') == 'TEXT': @@ -69,8 +118,6 @@ async def __reply(self, query: str, context): if "[style]已切换至" in query: return query - print(user_chathistory) - log.info("[NewBing] query={}".format(query)) bot = await Chatbot.create(cookies=self.cookies) reply_text = "" @@ -93,10 +140,9 @@ async def stream_output(): ): if not final and response["type"] == 1 and "messages" in response["arguments"][0]: message = response["arguments"][0]["messages"][0] - # match message.get("messageType"): if message.get("messageType") == "InternalSearchQuery": - pass #chat_history += f"[assistant](#search_query)\n{message['hiddenText']}\n\n" + pass elif message.get("messageType") == "InternalSearchResult": #chat_history += f"[assistant](#search_results)\n{message['hiddenText']}\n\n" reference += f"[assistant](#search_results)\n{message['hiddenText']}" @@ -120,7 +166,6 @@ async def stream_output(): break if final and not response["item"]["messages"][-1].get("text"): raise Exception("发送的消息被过滤或者对话超时") - try: await stream_output() @@ -130,9 +175,7 @@ async def stream_output(): # 更新历史对话 user_chathistory[context['from_user_id']][1] = chat_history await bot.close() - return self.build_source_attributions(reply_text, reference, suggestion, context) - - + return self.build_source_text(reply_text, reference, suggestion, context) def create_img(self, query): try: @@ -169,7 +212,50 @@ def get_quick_ask_query(self, query, context): return "[style]已切换至精确模式" return query - def build_source_attributions(self, reply_text, reference, suggestion, context): + def build_source_attributions(self, answer, context): + reference = "" + reply = answer["item"]["messages"][-1] + reply_text = reply["text"] + user_chathistory[context['from_user_id']][1] += f"[assistant](#message)\n{reply_text}\n" + if "sourceAttributions" in reply: + for i, attribution in enumerate(reply["sourceAttributions"]): + display_name = attribution["providerDisplayName"] + url = attribution["seeMoreUrl"] + reference += f"{i+1}、[{display_name}]({url})\n\n" + + if len(reference) > 0: + reference = "***\n"+reference + + suggestion = "" + if "suggestedResponses" in reply: + suggestion_dict = dict() + for i, attribution in enumerate(reply["suggestedResponses"]): + suggestion_dict[i] = attribution["text"] + suggestion += f">{i+1}、{attribution['text']}\n\n" + suggestion_session[context['from_user_id'] + ] = suggestion_dict + + if len(suggestion) > 0: + suggestion = "***\n你可以通过输入序号快速追问我以下建议问题:\n\n"+suggestion + + throttling = answer["item"]["throttling"] + throttling_str = "" + + if throttling["numUserMessagesInConversation"] == throttling["maxNumUserMessagesInConversation"]: + user_chathistory.get(context['from_user_id'], None).reset() + throttling_str = "(对话轮次已达上限,本次聊天已结束,将开启新的对话)" + else: + throttling_str = f"对话轮次: {throttling['numUserMessagesInConversation']}/{throttling['maxNumUserMessagesInConversation']}\n" + + response = f"{reply_text}\n{reference}\n{suggestion}\n***\n{throttling_str}" + log.info("[NewBing] reply={}", response) + return response + else: + user_chathistory.get(context['from_user_id'], None).reset() + log.warn("[NewBing] reply={}", answer) + return "对话被接口拒绝,已开启新的一轮对话。" + + def build_source_text(self, 
reply_text, reference, suggestion, context): if not reply_text.endswith("\n\n"): if reply_text.endswith("\n"): reply_text += "\n" @@ -177,7 +263,6 @@ def build_source_attributions(self, reply_text, reference, suggestion, context): reply_text += "\n\n" references = "" - print(reference[36:-3]) if 'json' in reference: reference_dict = json.loads(reference[36:-3]) for i in range(len(reference_dict['web_search_results'])): @@ -186,7 +271,6 @@ def build_source_attributions(self, reply_text, reference, suggestion, context): url = r['url'] references += f"{i+1}、[{title}]({url})\n\n" - suggestions = "" suggestion_dict = dict() if len(suggestion) > 0: @@ -196,17 +280,6 @@ def build_source_attributions(self, reply_text, reference, suggestion, context): suggestions = "=====\n💡你可能想问(输入序号):\n\n" + suggestions suggestion_session[context['from_user_id']] = suggestion_dict - # throttling = answer["item"]["throttling"] - # throttling_str = "" - - # if not self.jailbreak: - # if throttling["numUserMessagesInConversation"] == throttling["maxNumUserMessagesInConversation"]: - # user_session.get(context['from_user_id'], None).reset() - # throttling_str = "(对话轮次已达上限,本次聊天已结束,将开启新的对话)" - # else: - # throttling_str = f"对话轮次: {throttling['numUserMessagesInConversation']}/{throttling['maxNumUserMessagesInConversation']}\n" - response = f"{reply_text}******\n{references}{suggestions}******\n" log.info("[NewBing] reply={}", response) return response - From 6f6a9504f83dfb329cd8d84dc1d1578225346051 Mon Sep 17 00:00:00 2001 From: sastpg <3200105875@zju.edu.cn> Date: Sat, 6 May 2023 16:37:25 +0800 Subject: [PATCH 3/6] update requirements --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index ac663cbe..7f357e20 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ flask flask_socketio itchat-uos==1.5.0.dev0 openai -EdgeGPT +EdgeGPT==0.3.5 requests discord.py>=2.0.0 wechatpy From c591df2fd795b801a8f068692405df38ce2b4245 Mon Sep 17 00:00:00 2001 From: sastpg <3200105875@zju.edu.cn> Date: Mon, 8 May 2023 21:45:12 +0800 Subject: [PATCH 4/6] Update reference, fix bug --- model/bing/new_bing_model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model/bing/new_bing_model.py b/model/bing/new_bing_model.py index 026d5e04..1526cb0d 100644 --- a/model/bing/new_bing_model.py +++ b/model/bing/new_bing_model.py @@ -264,7 +264,7 @@ def build_source_text(self, reply_text, reference, suggestion, context): references = "" if 'json' in reference: - reference_dict = json.loads(reference[36:-3]) + reference_dict = json.loads(reference[37:-4]) for i in range(len(reference_dict['web_search_results'])): r = reference_dict['web_search_results'][i] title = r['title'] From a79e134655f0b6948e5033f929f3be4496dcd93a Mon Sep 17 00:00:00 2001 From: sastpg <3200105875@zju.edu.cn> Date: Mon, 8 May 2023 22:49:42 +0800 Subject: [PATCH 5/6] refactor variable name --- model/bing/new_bing_model.py | 44 ++++++++++++++++++------------------ 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/model/bing/new_bing_model.py b/model/bing/new_bing_model.py index 1526cb0d..39b75587 100644 --- a/model/bing/new_bing_model.py +++ b/model/bing/new_bing_model.py @@ -9,7 +9,7 @@ import random import json -user_chathistory = dict() +user_chat_history = dict() suggestion_session = dict() # newBing对话模型逆向网页gitAPI @@ -17,7 +17,7 @@ class BingModel(Model): bot: Chatbot = None - cookies = None + cookies_list = None def __init__(self): try: @@ -37,7 +37,7 @@ async 
def handle_answer(final, answer): except Exception as e: log.warn(answer) log.warn(e) - await user_chathistory.get(context['from_user_id'], None).reset() + await user_chat_history.get(context['from_user_id'], None).reset() yield True, answer else: try: @@ -45,36 +45,36 @@ async def handle_answer(final, answer): except Exception as e: log.warn(answer) log.warn(e) - await user_chathistory.get(context['from_user_id'], None).reset() + await user_chat_history.get(context['from_user_id'], None).reset() yield True, answer if not context or not context.get('type') or context.get('type') == 'TEXT': clear_memory_commands = common_conf_val( 'clear_memory_commands', ['#清除记忆']) if query in clear_memory_commands: - user_chathistory[context['from_user_id']] = None + user_chat_history[context['from_user_id']] = None yield True, '记忆已清除' chat_style = "" chat_history = "" - if user_chathistory.get(context['from_user_id'], None) == None: + if user_chat_history.get(context['from_user_id'], None) == None: if (self.jailbreak): chars = model_conf_val("bing", "jailbreak_prompt") chars = chars + "\n\n" chat_history = ''.join(chars) - user_chathistory[context['from_user_id']] = ['creative', chat_history] + user_chat_history[context['from_user_id']] = ['creative', chat_history] else: if not chat_history.endswith("\n\n"): if chat_history.endswith("\n"): chat_history += "\n" else: chat_history += "\n\n" - chat_style = user_chathistory[context['from_user_id']][0] - chat_history = user_chathistory[context['from_user_id']][1] + chat_style = user_chat_history[context['from_user_id']][0] + chat_history = user_chat_history[context['from_user_id']][1] query = self.get_quick_ask_query(query, context) bot = await Chatbot.create(cookies=self.cookies) - user_chathistory[context['from_user_id']][1] += f"[user](#message)\n{query}\n\n" + user_chat_history[context['from_user_id']][1] += f"[user](#message)\n{query}\n\n" log.info("[NewBing] query={}".format(query)) async for final, answer in bot.ask_stream(prompt=query, raw=True, webpage_context=chat_history, conversation_style=chat_style, search_result=True): @@ -91,26 +91,26 @@ async def __reply(self, query: str, context): clear_memory_commands = common_conf_val( 'clear_memory_commands', ['#清除记忆']) if query in clear_memory_commands: - user_chathistory[context['from_user_id']] = None + user_chat_history[context['from_user_id']] = None return '记忆已清除' # deal chat_history chat_style = "" chat_history = "" - if user_chathistory.get(context['from_user_id'], None) == None: + if user_chat_history.get(context['from_user_id'], None) == None: if (self.jailbreak): chars = model_conf_val("bing", "jailbreak_prompt") chars = chars + "\n\n" chat_history = ''.join(chars) - user_chathistory[context['from_user_id']] = ['creative', chat_history] + user_chat_history[context['from_user_id']] = ['creative', chat_history] else: if not chat_history.endswith("\n\n"): if chat_history.endswith("\n"): chat_history += "\n" else: chat_history += "\n\n" - chat_style = user_chathistory[context['from_user_id']][0] - chat_history = user_chathistory[context['from_user_id']][1] + chat_style = user_chat_history[context['from_user_id']][0] + chat_history = user_chat_history[context['from_user_id']][1] query = self.get_quick_ask_query(query, context) if query == "输入的序号不在建议列表范围中": @@ -173,7 +173,7 @@ async def stream_output(): log.info(e) # 更新历史对话 - user_chathistory[context['from_user_id']][1] = chat_history + user_chat_history[context['from_user_id']][1] = chat_history await bot.close() return 
self.build_source_text(reply_text, reference, suggestion, context) @@ -202,13 +202,13 @@ def get_quick_ask_query(self, query, context): except: return "输入的序号不在建议列表范围中" elif(query == "/creative"): - user_chathistory[context['from_user_id']][0] = query[1:] + user_chat_history[context['from_user_id']][0] = query[1:] return "[style]已切换至创造模式" elif(query == "/balanced"): - user_chathistory[context['from_user_id']][0] = query[1:] + user_chat_history[context['from_user_id']][0] = query[1:] return "[style]已切换至平衡模式" elif(query == "/precise"): - user_chathistory[context['from_user_id']][0] = query[1:] + user_chat_history[context['from_user_id']][0] = query[1:] return "[style]已切换至精确模式" return query @@ -216,7 +216,7 @@ def build_source_attributions(self, answer, context): reference = "" reply = answer["item"]["messages"][-1] reply_text = reply["text"] - user_chathistory[context['from_user_id']][1] += f"[assistant](#message)\n{reply_text}\n" + user_chat_history[context['from_user_id']][1] += f"[assistant](#message)\n{reply_text}\n" if "sourceAttributions" in reply: for i, attribution in enumerate(reply["sourceAttributions"]): display_name = attribution["providerDisplayName"] @@ -242,7 +242,7 @@ def build_source_attributions(self, answer, context): throttling_str = "" if throttling["numUserMessagesInConversation"] == throttling["maxNumUserMessagesInConversation"]: - user_chathistory.get(context['from_user_id'], None).reset() + user_chat_history.get(context['from_user_id'], None).reset() throttling_str = "(对话轮次已达上限,本次聊天已结束,将开启新的对话)" else: throttling_str = f"对话轮次: {throttling['numUserMessagesInConversation']}/{throttling['maxNumUserMessagesInConversation']}\n" @@ -251,7 +251,7 @@ def build_source_attributions(self, answer, context): log.info("[NewBing] reply={}", response) return response else: - user_chathistory.get(context['from_user_id'], None).reset() + user_chat_history.get(context['from_user_id'], None).reset() log.warn("[NewBing] reply={}", answer) return "对话被接口拒绝,已开启新的一轮对话。" From 5f62d13d9c7bcef170890b16102498bc7e7fef3f Mon Sep 17 00:00:00 2001 From: sastpg <3200105875@zju.edu.cn> Date: Wed, 10 May 2023 11:18:50 +0800 Subject: [PATCH 6/6] fix reference bug; add server disconnected response --- model/bing/new_bing_model.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/model/bing/new_bing_model.py b/model/bing/new_bing_model.py index 39b75587..fc66920f 100644 --- a/model/bing/new_bing_model.py +++ b/model/bing/new_bing_model.py @@ -119,7 +119,11 @@ async def __reply(self, query: str, context): return query log.info("[NewBing] query={}".format(query)) - bot = await Chatbot.create(cookies=self.cookies) + try: + bot = await Chatbot.create(cookies=self.cookies) + except Exception as e: + log.info(e) + return "RemoteProtocolError: Bing Server disconnected without sending a response." reply_text = "" reference = "" suggestion = "" @@ -263,7 +267,7 @@ def build_source_text(self, reply_text, reference, suggestion, context): reply_text += "\n\n" references = "" - if 'json' in reference: + if 'json' in reference and reference[29] != 'W': reference_dict = json.loads(reference[37:-4]) for i in range(len(reference_dict['web_search_results'])): r = reference_dict['web_search_results'][i]
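
The commits above guard against Bing retracting an already-streamed answer (contentOrigin == "Apology"), add /creative, /balanced and /precise style switching, and pass the per-user jailbreak history to EdgeGPT 0.3.5 via webpage_context. Below is a minimal, self-contained sketch of that retraction-guard streaming loop, separate from the patch itself; the cookie file name and the standalone helper are assumptions made purely for illustration, and only calls already shown in the diff (Chatbot.create, ask_stream with raw/webpage_context/conversation_style/search_result, close) are used.

# retraction_guard_sketch.py -- illustration only, not part of the patch series
import asyncio
import json

from EdgeGPT import Chatbot  # EdgeGPT==0.3.5, as pinned in requirements.txt


async def ask_without_retraction(query: str, chat_history: str,
                                 style: str = "creative",
                                 cookie_path: str = "cookies.json") -> str:
    """Stream one answer and keep the partial text even if Bing retracts it.

    Mirrors the stream_output() loop added in PATCH 1/6: raw=True streaming
    with the accumulated history passed as webpage_context, stopping as soon
    as a frame arrives with contentOrigin == "Apology".
    """
    # cookie_path / json loading is an assumption for this sketch; the real
    # code reads cookies from model_conf_val("bing", "cookies").
    with open(cookie_path, encoding="utf-8") as f:
        cookies = json.load(f)

    bot = await Chatbot.create(cookies=cookies)
    reply_text = ""
    wrote = 0
    try:
        async for final, response in bot.ask_stream(
            prompt=query,
            raw=True,
            webpage_context=chat_history,
            conversation_style=style,
            search_result=True,
        ):
            if not final and response["type"] == 1 and "messages" in response["arguments"][0]:
                message = response["arguments"][0]["messages"][0]
                if message.get("messageType") is not None:
                    # InternalSearchQuery / InternalSearchResult frames carry no answer text
                    continue
                if message.get("contentOrigin") == "Apology":
                    # Bing swapped the answer for an apology: stop here and keep
                    # whatever text has already been streamed.
                    break
                reply_text += message["text"][wrote:]
                wrote = len(message["text"])
            elif final and not response["item"]["messages"][-1].get("text"):
                raise Exception("message filtered or conversation timed out")
    finally:
        await bot.close()
    return reply_text


if __name__ == "__main__":
    # Usage sketch: the history string follows the [user](#message) format
    # that the patch prepends before every query.
    history = "[user](#message)\nhello\n\n"
    print(asyncio.run(ask_without_retraction("hello", history)))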