From 4216c5196ef963a41f64eba8f30fa1a9922b1a20 Mon Sep 17 00:00:00 2001 From: 505030475 Date: Thu, 27 Jul 2023 22:30:55 +0800 Subject: [PATCH 1/3] verify ignore history practice --- core_functional.py | 19 +++++++++++++++++-- request_llm/bridge_chatglm.py | 7 ++----- request_llm/bridge_chatglmft.py | 7 ++----- request_llm/bridge_chatgpt.py | 7 ++----- request_llm/bridge_claude.py | 7 ++----- request_llm/bridge_internlm.py | 7 ++----- request_llm/bridge_jittorllms_llama.py | 7 ++----- request_llm/bridge_jittorllms_pangualpha.py | 7 ++----- request_llm/bridge_jittorllms_rwkv.py | 7 ++----- request_llm/bridge_moss.py | 7 ++----- request_llm/bridge_newbingfree.py | 7 ++----- request_llm/bridge_stackclaude.py | 10 ++-------- request_llm/bridge_tgui.py | 7 ++----- 13 files changed, 41 insertions(+), 65 deletions(-) diff --git a/core_functional.py b/core_functional.py index 7bc35827c..0d773ca08 100644 --- a/core_functional.py +++ b/core_functional.py @@ -1,7 +1,7 @@ # 'primary' 颜色对应 theme.py 中的 primary_hue # 'secondary' 颜色对应 theme.py 中的 neutral_hue # 'stop' 颜色对应 theme.py 中的 color_er -# 默认按钮颜色是 secondary +import importlib from toolbox import clear_line_break @@ -14,7 +14,12 @@ def get_core_functions(): r"Furthermore, list all modification and explain the reasons to do so in markdown table." + "\n\n", # 后语 "Suffix": r"", - "Color": r"secondary", # 按钮颜色 + # 按钮颜色 (默认 secondary) + "Color": r"secondary", + # 按钮是否可见 (默认 True,即可见) + "Visible": True, + # 是否在触发时清除历史 (默认 False,即不处理之前的对话历史) + "AutoClearHistory": True }, "中文学术润色": { "Prefix": r"作为一名中文学术论文写作改进助理,你的任务是改进所提供文本的拼写、语法、清晰、简洁和整体可读性," + @@ -76,3 +81,13 @@ def get_core_functions(): "Suffix": r"", } } + + +def handle_core_functionality(additional_fn, inputs, history): + import core_functional + importlib.reload(core_functional) # 热更新prompt + core_functional = core_functional.get_core_functions() + if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话) + inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] + history = [] if core_functional[additional_fn].get("AutoClearHistory", False) else history + return inputs, history diff --git a/request_llm/bridge_chatglm.py b/request_llm/bridge_chatglm.py index d761c65b9..0fe557c22 100644 --- a/request_llm/bridge_chatglm.py +++ b/request_llm/bridge_chatglm.py @@ -144,11 +144,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp return if additional_fn is not None: - import core_functional - importlib.reload(core_functional) # 热更新prompt - core_functional = core_functional.get_core_functions() - if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话) - inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] + from core_functional import handle_core_functionality + inputs, history = handle_core_functionality(additional_fn, inputs, history) # 处理历史信息 history_feedin = [] diff --git a/request_llm/bridge_chatglmft.py b/request_llm/bridge_chatglmft.py index cd0ccb99f..dd5d1e91d 100644 --- a/request_llm/bridge_chatglmft.py +++ b/request_llm/bridge_chatglmft.py @@ -185,11 +185,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp return if additional_fn is not None: - import core_functional - importlib.reload(core_functional) # 热更新prompt - core_functional = core_functional.get_core_functions() - if "PreProcess" in 
core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话) - inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] + from core_functional import handle_core_functionality + inputs, history = handle_core_functionality(additional_fn, inputs, history) # 处理历史信息 history_feedin = [] diff --git a/request_llm/bridge_chatgpt.py b/request_llm/bridge_chatgpt.py index 11457c94d..d2a524004 100644 --- a/request_llm/bridge_chatgpt.py +++ b/request_llm/bridge_chatgpt.py @@ -129,11 +129,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp return if additional_fn is not None: - import core_functional - importlib.reload(core_functional) # 热更新prompt - core_functional = core_functional.get_core_functions() - if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话) - inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] + from core_functional import handle_core_functionality + inputs, history = handle_core_functionality(additional_fn, inputs, history) raw_input = inputs logging.info(f'[raw_input] {raw_input}') diff --git a/request_llm/bridge_claude.py b/request_llm/bridge_claude.py index af79fc82d..e17b04196 100644 --- a/request_llm/bridge_claude.py +++ b/request_llm/bridge_claude.py @@ -116,11 +116,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp return if additional_fn is not None: - import core_functional - importlib.reload(core_functional) # 热更新prompt - core_functional = core_functional.get_core_functions() - if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话) - inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] + from core_functional import handle_core_functionality + inputs, history = handle_core_functionality(additional_fn, inputs, history) raw_input = inputs logging.info(f'[raw_input] {raw_input}') diff --git a/request_llm/bridge_internlm.py b/request_llm/bridge_internlm.py index 82a940f25..6b8b75267 100644 --- a/request_llm/bridge_internlm.py +++ b/request_llm/bridge_internlm.py @@ -290,11 +290,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp return if additional_fn is not None: - import core_functional - importlib.reload(core_functional) # 热更新prompt - core_functional = core_functional.get_core_functions() - if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话) - inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] + from core_functional import handle_core_functionality + inputs, history = handle_core_functionality(additional_fn, inputs, history) # 处理历史信息 history_feedin = [] diff --git a/request_llm/bridge_jittorllms_llama.py b/request_llm/bridge_jittorllms_llama.py index 6dfac681a..552db0f82 100644 --- a/request_llm/bridge_jittorllms_llama.py +++ b/request_llm/bridge_jittorllms_llama.py @@ -154,11 +154,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp return if additional_fn is not None: - import core_functional - importlib.reload(core_functional) # 热更新prompt - core_functional = core_functional.get_core_functions() - if "PreProcess" in core_functional[additional_fn]: inputs = 
core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话) - inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] + from core_functional import handle_core_functionality + inputs, history = handle_core_functionality(additional_fn, inputs, history) # 处理历史信息 history_feedin = [] diff --git a/request_llm/bridge_jittorllms_pangualpha.py b/request_llm/bridge_jittorllms_pangualpha.py index ad02565ae..4f937d4c4 100644 --- a/request_llm/bridge_jittorllms_pangualpha.py +++ b/request_llm/bridge_jittorllms_pangualpha.py @@ -154,11 +154,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp return if additional_fn is not None: - import core_functional - importlib.reload(core_functional) # 热更新prompt - core_functional = core_functional.get_core_functions() - if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话) - inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] + from core_functional import handle_core_functionality + inputs, history = handle_core_functionality(additional_fn, inputs, history) # 处理历史信息 history_feedin = [] diff --git a/request_llm/bridge_jittorllms_rwkv.py b/request_llm/bridge_jittorllms_rwkv.py index 1252eead8..b0e41afdb 100644 --- a/request_llm/bridge_jittorllms_rwkv.py +++ b/request_llm/bridge_jittorllms_rwkv.py @@ -154,11 +154,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp return if additional_fn is not None: - import core_functional - importlib.reload(core_functional) # 热更新prompt - core_functional = core_functional.get_core_functions() - if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话) - inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] + from core_functional import handle_core_functionality + inputs, history = handle_core_functionality(additional_fn, inputs, history) # 处理历史信息 history_feedin = [] diff --git a/request_llm/bridge_moss.py b/request_llm/bridge_moss.py index 7a1ab56d0..c4c8142f2 100644 --- a/request_llm/bridge_moss.py +++ b/request_llm/bridge_moss.py @@ -224,11 +224,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp yield from update_ui(chatbot=chatbot, history=history) if additional_fn is not None: - import core_functional - importlib.reload(core_functional) # 热更新prompt - core_functional = core_functional.get_core_functions() - if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话) - inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] + from core_functional import handle_core_functionality + inputs, history = handle_core_functionality(additional_fn, inputs, history) # 处理历史信息 history_feedin = [] diff --git a/request_llm/bridge_newbingfree.py b/request_llm/bridge_newbingfree.py index 11c2ea781..e99189c16 100644 --- a/request_llm/bridge_newbingfree.py +++ b/request_llm/bridge_newbingfree.py @@ -224,11 +224,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp return if additional_fn is not None: - import core_functional - importlib.reload(core_functional) # 热更新prompt - core_functional = core_functional.get_core_functions() - if "PreProcess" in core_functional[additional_fn]: inputs = 
core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话) - inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] + from core_functional import handle_core_functionality + inputs, history = handle_core_functionality(additional_fn, inputs, history) history_feedin = [] for i in range(len(history)//2): diff --git a/request_llm/bridge_stackclaude.py b/request_llm/bridge_stackclaude.py index bbc13241c..2a4920c22 100644 --- a/request_llm/bridge_stackclaude.py +++ b/request_llm/bridge_stackclaude.py @@ -248,14 +248,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp return if additional_fn is not None: - import core_functional - importlib.reload(core_functional) # 热更新prompt - core_functional = core_functional.get_core_functions() - if "PreProcess" in core_functional[additional_fn]: - inputs = core_functional[additional_fn]["PreProcess"]( - inputs) # 获取预处理函数(如果有的话) - inputs = core_functional[additional_fn]["Prefix"] + \ - inputs + core_functional[additional_fn]["Suffix"] + from core_functional import handle_core_functionality + inputs, history = handle_core_functionality(additional_fn, inputs, history) history_feedin = [] for i in range(len(history)//2): diff --git a/request_llm/bridge_tgui.py b/request_llm/bridge_tgui.py index fcf852f04..4f9b41cdd 100644 --- a/request_llm/bridge_tgui.py +++ b/request_llm/bridge_tgui.py @@ -96,11 +96,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp additional_fn代表点击的哪个按钮,按钮见functional.py """ if additional_fn is not None: - import core_functional - importlib.reload(core_functional) # 热更新prompt - core_functional = core_functional.get_core_functions() - if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话) - inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] + from core_functional import handle_core_functionality + inputs, history = handle_core_functionality(additional_fn, inputs, history) raw_input = "What I would like to say is the following: " + inputs history.extend([inputs, ""]) From e4ba0e6c85ec4c87e39dd08a838cf32db1fa4d22 Mon Sep 17 00:00:00 2001 From: awwaawwa <8493196+awwaawwa@users.noreply.github.com> Date: Thu, 27 Jul 2023 23:07:59 +0800 Subject: [PATCH 2/3] add clear history tips --- core_functional.py | 6 ++++-- request_llm/bridge_chatglm.py | 2 +- request_llm/bridge_chatglmft.py | 2 +- request_llm/bridge_chatgpt.py | 2 +- request_llm/bridge_claude.py | 2 +- request_llm/bridge_internlm.py | 2 +- request_llm/bridge_jittorllms_llama.py | 2 +- request_llm/bridge_jittorllms_pangualpha.py | 2 +- request_llm/bridge_jittorllms_rwkv.py | 2 +- request_llm/bridge_moss.py | 2 +- request_llm/bridge_newbingfree.py | 2 +- request_llm/bridge_stackclaude.py | 2 +- request_llm/bridge_tgui.py | 2 +- 13 files changed, 16 insertions(+), 14 deletions(-) diff --git a/core_functional.py b/core_functional.py index 0d773ca08..813d1b6c9 100644 --- a/core_functional.py +++ b/core_functional.py @@ -83,11 +83,13 @@ def get_core_functions(): } -def handle_core_functionality(additional_fn, inputs, history): +def handle_core_functionality(additional_fn, inputs, history, chatbot): import core_functional importlib.reload(core_functional) # 热更新prompt core_functional = core_functional.get_core_functions() if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话) 
inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] - history = [] if core_functional[additional_fn].get("AutoClearHistory", False) else history + if core_functional[additional_fn].get("AutoClearHistory", False): + history = [] + chatbot.append((f'[{additional_fn}] 是否已清空历史消息?', "[Local Message] 已清空所有历史消息。")) return inputs, history diff --git a/request_llm/bridge_chatglm.py b/request_llm/bridge_chatglm.py index 0fe557c22..6dac86395 100644 --- a/request_llm/bridge_chatglm.py +++ b/request_llm/bridge_chatglm.py @@ -145,7 +145,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp if additional_fn is not None: from core_functional import handle_core_functionality - inputs, history = handle_core_functionality(additional_fn, inputs, history) + inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot) # 处理历史信息 history_feedin = [] diff --git a/request_llm/bridge_chatglmft.py b/request_llm/bridge_chatglmft.py index dd5d1e91d..4e21c989b 100644 --- a/request_llm/bridge_chatglmft.py +++ b/request_llm/bridge_chatglmft.py @@ -186,7 +186,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp if additional_fn is not None: from core_functional import handle_core_functionality - inputs, history = handle_core_functionality(additional_fn, inputs, history) + inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot) # 处理历史信息 history_feedin = [] diff --git a/request_llm/bridge_chatgpt.py b/request_llm/bridge_chatgpt.py index d2a524004..ea48fbaff 100644 --- a/request_llm/bridge_chatgpt.py +++ b/request_llm/bridge_chatgpt.py @@ -130,7 +130,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp if additional_fn is not None: from core_functional import handle_core_functionality - inputs, history = handle_core_functionality(additional_fn, inputs, history) + inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot) raw_input = inputs logging.info(f'[raw_input] {raw_input}') diff --git a/request_llm/bridge_claude.py b/request_llm/bridge_claude.py index e17b04196..6084b1f15 100644 --- a/request_llm/bridge_claude.py +++ b/request_llm/bridge_claude.py @@ -117,7 +117,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp if additional_fn is not None: from core_functional import handle_core_functionality - inputs, history = handle_core_functionality(additional_fn, inputs, history) + inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot) raw_input = inputs logging.info(f'[raw_input] {raw_input}') diff --git a/request_llm/bridge_internlm.py b/request_llm/bridge_internlm.py index 6b8b75267..a0ba3bab0 100644 --- a/request_llm/bridge_internlm.py +++ b/request_llm/bridge_internlm.py @@ -291,7 +291,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp if additional_fn is not None: from core_functional import handle_core_functionality - inputs, history = handle_core_functionality(additional_fn, inputs, history) + inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot) # 处理历史信息 history_feedin = [] diff --git a/request_llm/bridge_jittorllms_llama.py b/request_llm/bridge_jittorllms_llama.py index 552db0f82..d4853578f 100644 --- a/request_llm/bridge_jittorllms_llama.py +++ b/request_llm/bridge_jittorllms_llama.py @@ -155,7 +155,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, 
history=[], system_promp if additional_fn is not None: from core_functional import handle_core_functionality - inputs, history = handle_core_functionality(additional_fn, inputs, history) + inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot) # 处理历史信息 history_feedin = [] diff --git a/request_llm/bridge_jittorllms_pangualpha.py b/request_llm/bridge_jittorllms_pangualpha.py index 4f937d4c4..20a302130 100644 --- a/request_llm/bridge_jittorllms_pangualpha.py +++ b/request_llm/bridge_jittorllms_pangualpha.py @@ -155,7 +155,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp if additional_fn is not None: from core_functional import handle_core_functionality - inputs, history = handle_core_functionality(additional_fn, inputs, history) + inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot) # 处理历史信息 history_feedin = [] diff --git a/request_llm/bridge_jittorllms_rwkv.py b/request_llm/bridge_jittorllms_rwkv.py index b0e41afdb..ee4f592f5 100644 --- a/request_llm/bridge_jittorllms_rwkv.py +++ b/request_llm/bridge_jittorllms_rwkv.py @@ -155,7 +155,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp if additional_fn is not None: from core_functional import handle_core_functionality - inputs, history = handle_core_functionality(additional_fn, inputs, history) + inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot) # 处理历史信息 history_feedin = [] diff --git a/request_llm/bridge_moss.py b/request_llm/bridge_moss.py index c4c8142f2..3c6217d2b 100644 --- a/request_llm/bridge_moss.py +++ b/request_llm/bridge_moss.py @@ -225,7 +225,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp if additional_fn is not None: from core_functional import handle_core_functionality - inputs, history = handle_core_functionality(additional_fn, inputs, history) + inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot) # 处理历史信息 history_feedin = [] diff --git a/request_llm/bridge_newbingfree.py b/request_llm/bridge_newbingfree.py index e99189c16..cc6e9b733 100644 --- a/request_llm/bridge_newbingfree.py +++ b/request_llm/bridge_newbingfree.py @@ -225,7 +225,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp if additional_fn is not None: from core_functional import handle_core_functionality - inputs, history = handle_core_functionality(additional_fn, inputs, history) + inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot) history_feedin = [] for i in range(len(history)//2): diff --git a/request_llm/bridge_stackclaude.py b/request_llm/bridge_stackclaude.py index 2a4920c22..3f2ee6742 100644 --- a/request_llm/bridge_stackclaude.py +++ b/request_llm/bridge_stackclaude.py @@ -249,7 +249,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp if additional_fn is not None: from core_functional import handle_core_functionality - inputs, history = handle_core_functionality(additional_fn, inputs, history) + inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot) history_feedin = [] for i in range(len(history)//2): diff --git a/request_llm/bridge_tgui.py b/request_llm/bridge_tgui.py index 4f9b41cdd..3e03f7b39 100644 --- a/request_llm/bridge_tgui.py +++ b/request_llm/bridge_tgui.py @@ -97,7 +97,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp """ if additional_fn is not 
None: from core_functional import handle_core_functionality - inputs, history = handle_core_functionality(additional_fn, inputs, history) + inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot) raw_input = "What I would like to say is the following: " + inputs history.extend([inputs, ""]) From 86e33ea99aa34614efc31aeb42d10f39cf6cba98 Mon Sep 17 00:00:00 2001 From: binary-husky <96192199+binary-husky@users.noreply.github.com> Date: Fri, 28 Jul 2023 21:09:51 +0800 Subject: [PATCH 3/3] Update core_functional.py --- core_functional.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/core_functional.py b/core_functional.py index 813d1b6c9..4a476c60f 100644 --- a/core_functional.py +++ b/core_functional.py @@ -19,7 +19,7 @@ def get_core_functions(): # 按钮是否可见 (默认 True,即可见) "Visible": True, # 是否在触发时清除历史 (默认 False,即不处理之前的对话历史) - "AutoClearHistory": True + "AutoClearHistory": False }, "中文学术润色": { "Prefix": r"作为一名中文学术论文写作改进助理,你的任务是改进所提供文本的拼写、语法、清晰、简洁和整体可读性," + @@ -91,5 +91,4 @@ def handle_core_functionality(additional_fn, inputs, history, chatbot): inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] if core_functional[additional_fn].get("AutoClearHistory", False): history = [] - chatbot.append((f'[{additional_fn}] 是否已清空历史消息?', "[Local Message] 已清空所有历史消息。")) return inputs, history
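
Two usage notes follow the series; neither is part of the patches, and every name marked hypothetical below is invented for illustration.

First, the button schema as it stands after PATCH 3/3. This is a sketch of a hypothetical entry in the dict returned by get_core_functions() (the key "一键缩写" and its prompt text are made up; the field defaults are the ones the patches themselves document):

    # Hypothetical entry inside the dict returned by get_core_functions().
    # Only Prefix/Suffix carry the prompt; the remaining fields have defaults.
    "一键缩写": {
        "Prefix": r"把下面的文字压缩成一段摘要:" + "\n\n",  # prepended to the user's input
        "Suffix": r"",                                        # appended after the user's input
        "Color": r"stop",            # 'primary'/'secondary'/'stop' map to theme.py hues
        "Visible": True,             # default True: the button is shown
        "AutoClearHistory": True,    # default False: True drops prior dialogue turns
        # Optional "PreProcess": a callable (e.g. toolbox.clear_line_break)
        # applied to inputs before Prefix/Suffix are attached.
    },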
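Second, how a bridge hands a function-button press to the new helper. A minimal sketch of the call that every predict() now makes (the sample inputs/history values are invented, and it assumes execution inside the repository so core_functional.py and its toolbox dependency import cleanly):

    from core_functional import handle_core_functionality

    history = ["earlier question", "earlier answer"]  # invented sample history
    chatbot = []                                      # gradio-style chatbot list

    # "中文学术润色" is one of the entries defined in get_core_functions().
    inputs, history = handle_core_functionality(
        "中文学术润色",              # additional_fn: which button was pressed
        "这是一段待润色的文字。",    # inputs: the user's raw text
        history,
        chatbot,
    )

    # inputs comes back as Prefix + raw text + Suffix for that button; history is
    # returned unchanged because this button leaves AutoClearHistory at its
    # default of False. A button that sets "AutoClearHistory": True gets [] back.

Because the helper calls importlib.reload(core_functional) on every invocation, edits to the prompts in core_functional.py take effect without restarting the app, which is why each bridge can import and call it lazily inside predict().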