Commit

Merge remote-tracking branch 'origin/enable_clear_history_option'

binary-husky committed Jul 30, 2023
2 parents 19ef6a5 + 155a7e1 commit 109afcf
Showing 13 changed files with 42 additions and 65 deletions.
20 changes: 18 additions & 2 deletions core_functional.py
@@ -1,7 +1,7 @@
# 'primary' corresponds to primary_hue in theme.py
# 'secondary' corresponds to neutral_hue in theme.py
# 'stop' corresponds to color_er in theme.py
# The default button color is secondary
import importlib
from toolbox import clear_line_break


@@ -14,7 +14,12 @@ def get_core_functions():
r"Furthermore, list all modification and explain the reasons to do so in markdown table." + "\n\n",
# 后语
"Suffix": r"",
"Color": r"secondary", # 按钮颜色
# 按钮颜色 (默认 secondary)
"Color": r"secondary",
# 按钮是否可见 (默认 True,即可见)
"Visible": True,
# 是否在触发时清除历史 (默认 False,即不处理之前的对话历史)
"AutoClearHistory": False
},
"中文学术润色": {
"Prefix": r"作为一名中文学术论文写作改进助理,你的任务是改进所提供文本的拼写、语法、清晰、简洁和整体可读性," +
@@ -76,3 +81,14 @@ def get_core_functions():
"Suffix": r"",
}
}


def handle_core_functionality(additional_fn, inputs, history, chatbot):
    import core_functional
    importlib.reload(core_functional)    # hot-reload the prompts
    core_functional = core_functional.get_core_functions()
    if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # fetch the preprocessing function (if any)
    inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
    if core_functional[additional_fn].get("AutoClearHistory", False):
        history = []
    return inputs, history
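For illustration, here is a self-contained sketch of how the new AutoClearHistory flag flows through handle_core_functionality. The "SummarizeAndReset" button and its inline table are hypothetical stand-ins for the real get_core_functions() output, and chatbot is passed as None because the function above accepts it without using it:

# Hypothetical button table; the real one comes from core_functional.get_core_functions().
CORE_FUNCTIONS = {
    "SummarizeAndReset": {
        "Prefix": "Summarize the following text:\n\n",
        "Suffix": "",
        "Visible": True,            # button shown in the UI
        "AutoClearHistory": True,   # drop earlier turns before this request
    }
}

def handle_core_functionality(additional_fn, inputs, history, chatbot):
    entry = CORE_FUNCTIONS[additional_fn]
    if "PreProcess" in entry:
        inputs = entry["PreProcess"](inputs)      # optional preprocessing hook
    inputs = entry["Prefix"] + inputs + entry["Suffix"]
    if entry.get("AutoClearHistory", False):      # absent key defaults to False
        history = []
    return inputs, history

new_inputs, new_history = handle_core_functionality(
    "SummarizeAndReset", "A long article ...", ["old question", "old answer"], None)
print(new_history)  # [] -- cleared because AutoClearHistory is True

Because .get("AutoClearHistory", False) defaults to False, every pre-existing button keeps its old behavior; only entries that explicitly opt in clear the history.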
7 changes: 2 additions & 5 deletions request_llm/bridge_chatglm.py
@@ -144,11 +144,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
        return

    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # hot-reload the prompts
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # fetch the preprocessing function (if any)
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

    # process the conversation history
    history_feedin = []
7 changes: 2 additions & 5 deletions request_llm/bridge_chatglmft.py
@@ -185,11 +185,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
        return

    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # hot-reload the prompts
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # fetch the preprocessing function (if any)
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

    # process the conversation history
    history_feedin = []
7 changes: 2 additions & 5 deletions request_llm/bridge_chatgpt.py
@@ -129,11 +129,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
        return

    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # hot-reload the prompts
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # fetch the preprocessing function (if any)
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

    raw_input = inputs
    logging.info(f'[raw_input] {raw_input}')
7 changes: 2 additions & 5 deletions request_llm/bridge_claude.py
@@ -116,11 +116,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
        return

    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # hot-reload the prompts
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # fetch the preprocessing function (if any)
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

    raw_input = inputs
    logging.info(f'[raw_input] {raw_input}')
7 changes: 2 additions & 5 deletions request_llm/bridge_internlm.py
@@ -290,11 +290,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
        return

    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # hot-reload the prompts
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # fetch the preprocessing function (if any)
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

    # process the conversation history
    history_feedin = []
7 changes: 2 additions & 5 deletions request_llm/bridge_jittorllms_llama.py
@@ -154,11 +154,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
        return

    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # hot-reload the prompts
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # fetch the preprocessing function (if any)
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

    # process the conversation history
    history_feedin = []
7 changes: 2 additions & 5 deletions request_llm/bridge_jittorllms_pangualpha.py
@@ -154,11 +154,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
        return

    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # hot-reload the prompts
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # fetch the preprocessing function (if any)
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

    # process the conversation history
    history_feedin = []
7 changes: 2 additions & 5 deletions request_llm/bridge_jittorllms_rwkv.py
@@ -154,11 +154,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
        return

    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # hot-reload the prompts
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # fetch the preprocessing function (if any)
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

    # process the conversation history
    history_feedin = []
7 changes: 2 additions & 5 deletions request_llm/bridge_moss.py
@@ -224,11 +224,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
        yield from update_ui(chatbot=chatbot, history=history)

    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # hot-reload the prompts
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # fetch the preprocessing function (if any)
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

    # process the conversation history
    history_feedin = []
7 changes: 2 additions & 5 deletions request_llm/bridge_newbingfree.py
@@ -224,11 +224,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
        return

    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # hot-reload the prompts
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # fetch the preprocessing function (if any)
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

    history_feedin = []
    for i in range(len(history)//2):
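The diff folds the body of this pairing loop. As a point of reference, here is a minimal sketch of the even/odd re-pairing convention the bridges appear to use; since the loop body is not shown in this diff, treat the append line as an assumption:

# history is assumed to be a flat list [user_1, reply_1, user_2, reply_2, ...]
history = ["hi", "hello!", "how are you?", "fine, thanks."]

history_feedin = []
for i in range(len(history)//2):
    # assumed body: pair each user turn with the model reply that followed it
    history_feedin.append([history[2*i], history[2*i+1]])

print(history_feedin)  # [['hi', 'hello!'], ['how are you?', 'fine, thanks.']]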
10 changes: 2 additions & 8 deletions request_llm/bridge_stackclaude.py
@@ -248,14 +248,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
        return

    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # hot-reload the prompts
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]:
            inputs = core_functional[additional_fn]["PreProcess"](
                inputs)  # fetch the preprocessing function (if any)
        inputs = core_functional[additional_fn]["Prefix"] + \
            inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

    history_feedin = []
    for i in range(len(history)//2):
7 changes: 2 additions & 5 deletions request_llm/bridge_tgui.py
@@ -96,11 +96,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
    additional_fn indicates which button was clicked; see functional.py for the buttons
    """
    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # hot-reload the prompts
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # fetch the preprocessing function (if any)
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

    raw_input = "What I would like to say is the following: " + inputs
    history.extend([inputs, ""])
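Across all twelve bridge files, the change is identical: predict now delegates button handling to core_functional.handle_core_functionality. A rough, hedged sketch of where additional_fn enters a bridge's predict generator; the llm_kwargs and plugin_kwargs values are illustrative placeholders, not the repo's real schemas:

# Illustrative only: argument contents are placeholders, and running this
# for real requires the repo's configuration (API keys, toolbox, etc.).
from request_llm.bridge_chatgpt import predict

chatbot, history = [], []
for _ in predict(
        inputs="请帮我润色这段文字 ...",
        llm_kwargs={"llm_model": "gpt-3.5-turbo"},  # placeholder dict
        plugin_kwargs={},                           # placeholder dict
        chatbot=chatbot,
        history=history,
        system_prompt="",
        additional_fn="中文学术润色",               # a button key from get_core_functions()
    ):
    pass  # predict streams UI updates; this loop just drains the generator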
