From 7fa205aefc861a99184f1fa3f3549961b70ce515 Mon Sep 17 00:00:00 2001 From: xuyuan23 <643854343@qq.com> Date: Sun, 28 May 2023 17:37:33 +0800 Subject: [PATCH 1/6] add multi language support. --- .env.template | 4 ++ .gitignore | 5 +- README.zh.md | 4 +- pilot/configs/config.py | 3 ++ pilot/language/lang_content_mapping.py | 73 +++++++++++++++++++++++++ pilot/language/translation_handler.py | 12 +++++ pilot/server/webserver.py | 74 +++++++++++++------------- 7 files changed, 135 insertions(+), 40 deletions(-) create mode 100644 pilot/language/lang_content_mapping.py create mode 100644 pilot/language/translation_handler.py diff --git a/.env.template b/.env.template index c71ef4f03..4cc051599 100644 --- a/.env.template +++ b/.env.template @@ -92,3 +92,7 @@ VECTOR_STORE_TYPE=Chroma #MILVUS_USERNAME #MILVUS_PASSWORD #MILVUS_SECURE= + + +LANGUAGE=en +#LANGUAGE=zh diff --git a/.gitignore b/.gitignore index a3821bb5f..2f91f3757 100644 --- a/.gitignore +++ b/.gitignore @@ -140,4 +140,7 @@ dmypy.json logs nltk_data .vectordb -pilot/data/ \ No newline at end of file +pilot/data/ + +logswebserver.log.* +.history/* \ No newline at end of file diff --git a/README.zh.md b/README.zh.md index 2c20b65ba..015d754b2 100644 --- a/README.zh.md +++ b/README.zh.md @@ -246,8 +246,8 @@ Run the Python interpreter and type the commands: ## 贡献者 -|[
csunny](https://github.com/csunny)|[xudafeng](https://github.com/xudafeng)|[明天](https://github.com/yhjun1026)| [Aries-ckt](https://github.com/Aries-ckt)|[thebigbone](https://github.com/thebigbone)|
-| :---: | :---: | :---: | :---: |:---: |
+|[csunny](https://github.com/csunny)|[xudafeng](https://github.com/xudafeng)|[明天](https://github.com/yhjun1026)| [Aries-ckt](https://github.com/Aries-ckt)|[thebigbone](https://github.com/thebigbone)|[Shinexy](https://github.com/xuyuan23)
| +| :---: | :---: | :---: | :---: |:---: |:---: | [git-contributor 说明](https://github.com/xudafeng/git-contributor),自动生成时间:`Fri May 19 2023 00:24:18 GMT+0800`。 diff --git a/pilot/configs/config.py b/pilot/configs/config.py index 518275f34..0ca283883 100644 --- a/pilot/configs/config.py +++ b/pilot/configs/config.py @@ -17,6 +17,9 @@ class Config(metaclass=Singleton): def __init__(self) -> None: """Initialize the Config class""" + # Gradio language version: en, cn + self.LANGUAGE = os.getenv("LANGUAGE", "en") + self.debug_mode = False self.skip_reprompt = False self.temperature = float(os.getenv("TEMPERATURE", 0.7)) diff --git a/pilot/language/lang_content_mapping.py b/pilot/language/lang_content_mapping.py new file mode 100644 index 000000000..b76621df1 --- /dev/null +++ b/pilot/language/lang_content_mapping.py @@ -0,0 +1,73 @@ + +## 短期内在该文件中配置,长期考虑将会存储在默认的数据库中存储,并可以支持多种语言的配置 + +lang_dicts = { + "zh": { + "unique_id": "中文内容", + "db_gpt_introduction": "[DB-GPT](https://github.com/csunny/DB-GPT) 是一个开源的以数据库为基础的GPT实验项目,使用本地化的GPT大模型与您的数据和环境进行交互,无数据泄露风险,100% 私密,100% 安全。", + "learn_more_markdown": "该服务是仅供非商业用途的研究预览。受 Vicuna-13B 模型 [License](https://github.com/facebookresearch/llama/blob/main/MODEL_CARD.md) 的约束", + "model_control_param": "模型参数", + "sql_generate_mode_direct": "直接生成结果", + "sql_generate_mode_none": "不执行结果", + "max_input_token_size": "最大输出Token数", + "please_choose_database": "请选择数据", + "sql_generate_diagnostics": "SQL生成与诊断", + "knowledge_qa_type_llm_native_dialogue": "LLM原生对话", + "knowledge_qa_type_default_knowledge_base_dialogue": "默认知识库对话", + "knowledge_qa_type_add_knowledge_base_dialogue": "新增知识库对话", + "create_knowledge_base": "新建知识库", + "sql_schema_info": "数据库{dbname}的Schema信息如下: {message}\n", + "current_dialogue_mode": "当前对话模式", + "database_smart_assistant": "数据库智能助手", + "sql_vs_setting": "自动执行模式下, DB-GPT可以具备执行SQL、从网络读取知识自动化存储学习的能力", + "knowledge_qa": "知识问答", + "configure_knowledge_base": "配置知识库", + "new_klg_name": "新知识库名称", + "add_as_new_klg": "添加为新知识库", + "add_file_to_klg": "向知识库中添加文件", + "upload_file": "上传文件", + "add_file": "添加文件", + "upload_and_load_to_klg": "上传并加载到知识库", + "upload_folder": "上传文件夹", + "add_folder": "添加文件夹", + "send": "发送", + "regenerate": "重新生成", + "clear_box": "清理" + }, + "en": { + "unique_id": "English Content", + "db_gpt_introduction": "[DB-GPT](https://github.com/csunny/DB-GPT) is an experimental open-source project that uses localized GPT large models to interact with your data and environment. With this solution, you can be assured that there is no risk of data leakage, and your data is 100% private and secure.", + "learn_more_markdown": "The service is a research preview intended for non-commercial use only. 
subject to the model [License](https://github.com/facebookresearch/llama/blob/main/MODEL_CARD.md) of Vicuna-13B", + "model_control_param": "Model Parameters", + "sql_generate_mode_direct": "Execute directly", + "sql_generate_mode_none": "Execute without model", + "max_input_token_size": "Maximum output token size", + "please_choose_database": "Please choose database", + "sql_generate_diagnostics": "SQL Generation & Diagnostics", + "knowledge_qa_type_llm_native_dialogue": "LLM native dialogue", + "knowledge_qa_type_default_knowledge_base_dialogue": "Default Knowledge Base", + "knowledge_qa_type_add_knowledge_base_dialogue": "Added Knowledge Base", + "create_knowledge_base": "Create Knowledge Base", + "sql_schema_info": "the schema information of database {}: {}\n", + "current_dialogue_mode": "Current dialogue mode", + "database_smart_assistant": "Database smart assistant", + "sql_vs_setting": "In the automatic execution mode, DB-GPT can have the ability to execute SQL, read knowledge from the network, automatically store and learn", + "knowledge_qa": "Knowledge QA", + "configure_knowledge_base": "Configure Knowledge Base", + "new_klg_name": "New Knowledge Base name", + "add_as_new_klg": "Add as new knowledge base", + "add_file_to_klg": "Add file to knowledge base", + "upload_file": "Upload file", + "add_file": "Add file", + "upload_and_load_to_klg": "Upload and load to knowledge base", + "upload_folder": "Upload folder", + "add_folder": "Add folder", + "send": "Send", + "regenerate": "Regenerate", + "clear_box": "Clear" + } +} + +def get_lang_content(key, language="zh"): + return lang_dicts.get(language, {}).get(key, "") + diff --git a/pilot/language/translation_handler.py b/pilot/language/translation_handler.py new file mode 100644 index 000000000..28075c680 --- /dev/null +++ b/pilot/language/translation_handler.py @@ -0,0 +1,12 @@ + +from pilot.configs.config import Config +from pilot.language.lang_content_mapping import get_lang_content + +CFG = Config() + + + +def get_lang_text(key): + return get_lang_content(key, CFG.LANGUAGE) + + diff --git a/pilot/server/webserver.py b/pilot/server/webserver.py index 8b33706a7..403852c88 100644 --- a/pilot/server/webserver.py +++ b/pilot/server/webserver.py @@ -56,7 +56,11 @@ from pilot.commands.command import execute_ai_response_json from pilot.scene.base import ChatScene from pilot.scene.chat_factory import ChatFactory +from pilot.language.translation_handler import get_lang_text + +# 加载插件 +CFG = Config() logger = build_logger("webserver", LOGDIR + "webserver.log") headers = {"User-Agent": "dbgpt Client"} @@ -67,15 +71,13 @@ enable_moderation = False models = [] dbs = [] -vs_list = ["新建知识库"] + get_vector_storelist() +vs_list = [get_lang_text("create_knowledge_base")] + get_vector_storelist() autogpt = False vector_store_client = None vector_store_name = {"vs_name": ""} priority = {"vicuna-13b": "aaa"} -# 加载插件 -CFG = Config() CHAT_FACTORY = ChatFactory() DB_SETTINGS = { @@ -86,6 +88,11 @@ } +llm_native_dialogue = get_lang_text("knowledge_qa_type_llm_native_dialogue") +default_knowledge_base_dialogue = get_lang_text("knowledge_qa_type_default_knowledge_base_dialogue") +add_knowledge_base_dialogue = get_lang_text("knowledge_qa_type_add_knowledge_base_dialogue") +knowledge_qa_type_list = [llm_native_dialogue, default_knowledge_base_dialogue, add_knowledge_base_dialogue] + def get_simlar(q): docsearch = knownledge_tovec_st(os.path.join(DATASETS_DIR, "plan.md")) docs = docsearch.similarity_search_with_score(q, k=1) @@ -102,7 +109,7 @@ def 
gen_sqlgen_conversation(dbname): schemas = mo.get_schema(dbname) for s in schemas: message += s["schema_info"] + ";" - return f"数据库{dbname}的Schema信息如下: {message}\n" + return get_lang_text("sql_schema_info").format(dbname, message) def get_database_list(): @@ -491,14 +498,14 @@ def http_bot( def change_sql_mode(sql_mode): - if sql_mode in ["直接执行结果"]: + if sql_mode in [get_lang_text("sql_generate_mode_direct")]: return gr.update(visible=True) else: return gr.update(visible=False) def change_mode(mode): - if mode in ["默认知识库对话", "LLM原生对话"]: + if mode in [default_knowledge_base_dialogue, llm_native_dialogue]: return gr.update(visible=False) else: return gr.update(visible=True) @@ -509,20 +516,13 @@ def change_tab(): def build_single_model_ui(): - notice_markdown = """ - # DB-GPT - - [DB-GPT](https://github.com/csunny/DB-GPT) 是一个开源的以数据库为基础的GPT实验项目,使用本地化的GPT大模型与您的数据和环境进行交互,无数据泄露风险,100% 私密,100% 安全。 - """ - learn_more_markdown = """ - ### Licence - The service is a research preview intended for non-commercial use only. subject to the model [License](https://github.com/facebookresearch/llama/blob/main/MODEL_CARD.md) of Vicuna-13B - """ + notice_markdown = get_lang_text("db_gpt_introduction") + learn_more_markdown = get_lang_text("learn_more_markdown") state = gr.State() gr.Markdown(notice_markdown, elem_id="notice_markdown") - with gr.Accordion("参数", open=False, visible=False) as parameter_row: + with gr.Accordion(get_lang_text("model_control_param"), open=False, visible=False) as parameter_row: temperature = gr.Slider( minimum=0.0, maximum=1.0, @@ -538,56 +538,56 @@ def build_single_model_ui(): value=512, step=64, interactive=True, - label="最大输出Token数", + label=get_lang_text("max_input_token_size"), ) tabs = gr.Tabs() with tabs: - tab_sql = gr.TabItem("SQL生成与诊断", elem_id="SQL") + tab_sql = gr.TabItem(get_lang_text("sql_generate_diagnostics"), elem_id="SQL") with tab_sql: # TODO A selector to choose database with gr.Row(elem_id="db_selector"): db_selector = gr.Dropdown( - label="请选择数据库", + label=get_lang_text("please_choose_database"), choices=dbs, value=dbs[0] if len(models) > 0 else "", interactive=True, show_label=True, ).style(container=False) - sql_mode = gr.Radio(["直接执行结果", "不执行结果"], show_label=False, value="不执行结果") - sql_vs_setting = gr.Markdown("自动执行模式下, DB-GPT可以具备执行SQL、从网络读取知识自动化存储学习的能力") + sql_mode = gr.Radio([get_lang_text("sql_generate_mode_direct"), get_lang_text("sql_generate_mode_none")], show_label=False, value=get_lang_text("sql_generate_mode_none")) + sql_vs_setting = gr.Markdown(get_lang_text("sql_vs_setting")) sql_mode.change(fn=change_sql_mode, inputs=sql_mode, outputs=sql_vs_setting) - tab_qa = gr.TabItem("知识问答", elem_id="QA") + tab_qa = gr.TabItem(get_lang_text("knowledge_qa"), elem_id="QA") with tab_qa: mode = gr.Radio( - ["LLM原生对话", "默认知识库对话", "新增知识库对话"], show_label=False, value="LLM原生对话" + [llm_native_dialogue, default_knowledge_base_dialogue, add_knowledge_base_dialogue], show_label=False, value=llm_native_dialogue ) - vs_setting = gr.Accordion("配置知识库", open=False) + vs_setting = gr.Accordion(get_lang_text("configure_knowledge_base"), open=False) mode.change(fn=change_mode, inputs=mode, outputs=vs_setting) with vs_setting: - vs_name = gr.Textbox(label="新知识库名称", lines=1, interactive=True) - vs_add = gr.Button("添加为新知识库") + vs_name = gr.Textbox(label=get_lang_text("new_klg_name"), lines=1, interactive=True) + vs_add = gr.Button(get_lang_text("add_as_new_klg")) with gr.Column() as doc2vec: - gr.Markdown("向知识库中添加文件") - with gr.Tab("上传文件"): + 
gr.Markdown(get_lang_text("add_file_to_klg")) + with gr.Tab(get_lang_text("upload_file")): files = gr.File( - label="添加文件", + label=get_lang_text("add_file"), file_types=[".txt", ".md", ".docx", ".pdf"], file_count="multiple", allow_flagged_uploads=True, show_label=False, ) - load_file_button = gr.Button("上传并加载到知识库") - with gr.Tab("上传文件夹"): + load_file_button = gr.Button(get_lang_text("upload_and_load_to_klg")) + with gr.Tab(get_lang_text("upload_folder")): folder_files = gr.File( - label="添加文件夹", + label=get_lang_text("add_folder"), accept_multiple_files=True, file_count="directory", show_label=False, ) - load_folder_button = gr.Button("上传并加载到知识库") + load_folder_button = gr.Button(get_lang_text("upload_and_load_to_klg")) with gr.Blocks(): chatbot = grChatbot(elem_id="chatbot", visible=False).style(height=550) @@ -599,11 +599,11 @@ def build_single_model_ui(): visible=False, ).style(container=False) with gr.Column(scale=2, min_width=50): - send_btn = gr.Button(value="发送", visible=False) + send_btn = gr.Button(value=get_lang_text("send"), visible=False) with gr.Row(visible=False) as button_row: - regenerate_btn = gr.Button(value="重新生成", interactive=False) - clear_btn = gr.Button(value="清理", interactive=False) + regenerate_btn = gr.Button(value=get_lang_text("regenerate"), interactive=False) + clear_btn = gr.Button(value=get_lang_text("clear_box"), interactive=False) gr.Markdown(learn_more_markdown) btn_list = [regenerate_btn, clear_btn] @@ -649,7 +649,7 @@ def build_single_model_ui(): def build_webdemo(): with gr.Blocks( - title="数据库智能助手", + title=get_lang_text("database_smart_assistant"), # theme=gr.themes.Base(), theme=gr.themes.Default(), css=block_css, From b9570e395e92ef4ba9d40c39b1eaefc1531976e5 Mon Sep 17 00:00:00 2001 From: xuyuan23 <643854343@qq.com> Date: Sun, 28 May 2023 18:27:20 +0800 Subject: [PATCH 2/6] use tool 'black' to check and reformated files --- pilot/language/lang_content_mapping.py | 9 ++--- pilot/language/translation_handler.py | 4 -- pilot/server/webserver.py | 52 +++++++++++++++++++++----- 3 files changed, 46 insertions(+), 19 deletions(-) diff --git a/pilot/language/lang_content_mapping.py b/pilot/language/lang_content_mapping.py index b76621df1..88cc92ffd 100644 --- a/pilot/language/lang_content_mapping.py +++ b/pilot/language/lang_content_mapping.py @@ -1,4 +1,3 @@ - ## 短期内在该文件中配置,长期考虑将会存储在默认的数据库中存储,并可以支持多种语言的配置 lang_dicts = { @@ -32,7 +31,7 @@ "add_folder": "添加文件夹", "send": "发送", "regenerate": "重新生成", - "clear_box": "清理" + "clear_box": "清理", }, "en": { "unique_id": "English Content", @@ -64,10 +63,10 @@ "add_folder": "Add folder", "send": "Send", "regenerate": "Regenerate", - "clear_box": "Clear" - } + "clear_box": "Clear", + }, } + def get_lang_content(key, language="zh"): return lang_dicts.get(language, {}).get(key, "") - diff --git a/pilot/language/translation_handler.py b/pilot/language/translation_handler.py index 28075c680..0a46d09ab 100644 --- a/pilot/language/translation_handler.py +++ b/pilot/language/translation_handler.py @@ -1,12 +1,8 @@ - from pilot.configs.config import Config from pilot.language.lang_content_mapping import get_lang_content CFG = Config() - def get_lang_text(key): return get_lang_content(key, CFG.LANGUAGE) - - diff --git a/pilot/server/webserver.py b/pilot/server/webserver.py index 403852c88..fde6fd540 100644 --- a/pilot/server/webserver.py +++ b/pilot/server/webserver.py @@ -89,9 +89,18 @@ llm_native_dialogue = get_lang_text("knowledge_qa_type_llm_native_dialogue") -default_knowledge_base_dialogue = 
get_lang_text("knowledge_qa_type_default_knowledge_base_dialogue") -add_knowledge_base_dialogue = get_lang_text("knowledge_qa_type_add_knowledge_base_dialogue") -knowledge_qa_type_list = [llm_native_dialogue, default_knowledge_base_dialogue, add_knowledge_base_dialogue] +default_knowledge_base_dialogue = get_lang_text( + "knowledge_qa_type_default_knowledge_base_dialogue" +) +add_knowledge_base_dialogue = get_lang_text( + "knowledge_qa_type_add_knowledge_base_dialogue" +) +knowledge_qa_type_list = [ + llm_native_dialogue, + default_knowledge_base_dialogue, + add_knowledge_base_dialogue, +] + def get_simlar(q): docsearch = knownledge_tovec_st(os.path.join(DATASETS_DIR, "plan.md")) @@ -522,7 +531,9 @@ def build_single_model_ui(): state = gr.State() gr.Markdown(notice_markdown, elem_id="notice_markdown") - with gr.Accordion(get_lang_text("model_control_param"), open=False, visible=False) as parameter_row: + with gr.Accordion( + get_lang_text("model_control_param"), open=False, visible=False + ) as parameter_row: temperature = gr.Slider( minimum=0.0, maximum=1.0, @@ -554,19 +565,36 @@ def build_single_model_ui(): show_label=True, ).style(container=False) - sql_mode = gr.Radio([get_lang_text("sql_generate_mode_direct"), get_lang_text("sql_generate_mode_none")], show_label=False, value=get_lang_text("sql_generate_mode_none")) + sql_mode = gr.Radio( + [ + get_lang_text("sql_generate_mode_direct"), + get_lang_text("sql_generate_mode_none"), + ], + show_label=False, + value=get_lang_text("sql_generate_mode_none"), + ) sql_vs_setting = gr.Markdown(get_lang_text("sql_vs_setting")) sql_mode.change(fn=change_sql_mode, inputs=sql_mode, outputs=sql_vs_setting) tab_qa = gr.TabItem(get_lang_text("knowledge_qa"), elem_id="QA") with tab_qa: mode = gr.Radio( - [llm_native_dialogue, default_knowledge_base_dialogue, add_knowledge_base_dialogue], show_label=False, value=llm_native_dialogue + [ + llm_native_dialogue, + default_knowledge_base_dialogue, + add_knowledge_base_dialogue, + ], + show_label=False, + value=llm_native_dialogue, + ) + vs_setting = gr.Accordion( + get_lang_text("configure_knowledge_base"), open=False ) - vs_setting = gr.Accordion(get_lang_text("configure_knowledge_base"), open=False) mode.change(fn=change_mode, inputs=mode, outputs=vs_setting) with vs_setting: - vs_name = gr.Textbox(label=get_lang_text("new_klg_name"), lines=1, interactive=True) + vs_name = gr.Textbox( + label=get_lang_text("new_klg_name"), lines=1, interactive=True + ) vs_add = gr.Button(get_lang_text("add_as_new_klg")) with gr.Column() as doc2vec: gr.Markdown(get_lang_text("add_file_to_klg")) @@ -579,7 +607,9 @@ def build_single_model_ui(): show_label=False, ) - load_file_button = gr.Button(get_lang_text("upload_and_load_to_klg")) + load_file_button = gr.Button( + get_lang_text("upload_and_load_to_klg") + ) with gr.Tab(get_lang_text("upload_folder")): folder_files = gr.File( label=get_lang_text("add_folder"), @@ -587,7 +617,9 @@ def build_single_model_ui(): file_count="directory", show_label=False, ) - load_folder_button = gr.Button(get_lang_text("upload_and_load_to_klg")) + load_folder_button = gr.Button( + get_lang_text("upload_and_load_to_klg") + ) with gr.Blocks(): chatbot = grChatbot(elem_id="chatbot", visible=False).style(height=550) From d24c160cd8aa421d6aaf18e9736058511d831c0a Mon Sep 17 00:00:00 2001 From: xuyuan23 <643854343@qq.com> Date: Sun, 28 May 2023 21:06:03 +0800 Subject: [PATCH 3/6] modify file conversation using config language, and fix 'sql_schema_info' format --- pilot/conversation.py | 13 
+++++++------ pilot/language/lang_content_mapping.py | 8 +++++--- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/pilot/conversation.py b/pilot/conversation.py index ba5ab2701..4d2673c1c 100644 --- a/pilot/conversation.py +++ b/pilot/conversation.py @@ -5,6 +5,7 @@ import uuid from enum import auto, Enum from typing import List, Any +from pilot.language.translation_handler import get_lang_text from pilot.configs.config import Config @@ -263,15 +264,15 @@ def gen_sqlgen_conversation(dbname): default_conversation = conv_one_shot conversation_sql_mode = { - "auto_execute_ai_response": "直接执行结果", - "dont_execute_ai_response": "不直接执行结果", + "auto_execute_ai_response": get_lang_text("sql_generate_mode_direct"), + "dont_execute_ai_response": get_lang_text("sql_generate_mode_none"), } conversation_types = { - "native": "LLM原生对话", - "default_knownledge": "默认知识库对话", - "custome": "新增知识库对话", - "auto_execute_plugin": "对话使用插件", + "native": get_lang_text("knowledge_qa_type_llm_native_dialogue"), + "default_knownledge": get_lang_text("knowledge_qa_type_default_knowledge_base_dialogue"), + "custome": get_lang_text("knowledge_qa_type_add_knowledge_base_dialogue"), + "auto_execute_plugin": get_lang_text("dialogue_use_plugin"), } conv_templates = { diff --git a/pilot/language/lang_content_mapping.py b/pilot/language/lang_content_mapping.py index 88cc92ffd..c6126e671 100644 --- a/pilot/language/lang_content_mapping.py +++ b/pilot/language/lang_content_mapping.py @@ -6,16 +6,17 @@ "db_gpt_introduction": "[DB-GPT](https://github.com/csunny/DB-GPT) 是一个开源的以数据库为基础的GPT实验项目,使用本地化的GPT大模型与您的数据和环境进行交互,无数据泄露风险,100% 私密,100% 安全。", "learn_more_markdown": "该服务是仅供非商业用途的研究预览。受 Vicuna-13B 模型 [License](https://github.com/facebookresearch/llama/blob/main/MODEL_CARD.md) 的约束", "model_control_param": "模型参数", - "sql_generate_mode_direct": "直接生成结果", - "sql_generate_mode_none": "不执行结果", + "sql_generate_mode_direct": "直接执行结果", + "sql_generate_mode_none": "不直接执行结果", "max_input_token_size": "最大输出Token数", "please_choose_database": "请选择数据", "sql_generate_diagnostics": "SQL生成与诊断", "knowledge_qa_type_llm_native_dialogue": "LLM原生对话", "knowledge_qa_type_default_knowledge_base_dialogue": "默认知识库对话", "knowledge_qa_type_add_knowledge_base_dialogue": "新增知识库对话", + "dialogue_use_plugin": "对话使用插件", "create_knowledge_base": "新建知识库", - "sql_schema_info": "数据库{dbname}的Schema信息如下: {message}\n", + "sql_schema_info": "数据库{}的Schema信息如下: {}\n", "current_dialogue_mode": "当前对话模式", "database_smart_assistant": "数据库智能助手", "sql_vs_setting": "自动执行模式下, DB-GPT可以具备执行SQL、从网络读取知识自动化存储学习的能力", @@ -46,6 +47,7 @@ "knowledge_qa_type_llm_native_dialogue": "LLM native dialogue", "knowledge_qa_type_default_knowledge_base_dialogue": "Default Knowledge Base", "knowledge_qa_type_add_knowledge_base_dialogue": "Added Knowledge Base", + "dialogue_use_plugin": "Dialogue Extension", "create_knowledge_base": "Create Knowledge Base", "sql_schema_info": "the schema information of database {}: {}\n", "current_dialogue_mode": "Current dialogue mode", From 49d174336a4c3ca329f2a5569583b6ad50f31421 Mon Sep 17 00:00:00 2001 From: xuyuan23 <643854343@qq.com> Date: Sun, 28 May 2023 21:09:01 +0800 Subject: [PATCH 4/6] reformatted file 'conversation' --- pilot/conversation.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pilot/conversation.py b/pilot/conversation.py index 4d2673c1c..d2f6565ca 100644 --- a/pilot/conversation.py +++ b/pilot/conversation.py @@ -270,7 +270,9 @@ def gen_sqlgen_conversation(dbname): conversation_types = { "native": 
get_lang_text("knowledge_qa_type_llm_native_dialogue"), - "default_knownledge": get_lang_text("knowledge_qa_type_default_knowledge_base_dialogue"), + "default_knownledge": get_lang_text( + "knowledge_qa_type_default_knowledge_base_dialogue" + ), "custome": get_lang_text("knowledge_qa_type_add_knowledge_base_dialogue"), "auto_execute_plugin": get_lang_text("dialogue_use_plugin"), } From d67f149052ac68ac83c947ce51a712f801dfb647 Mon Sep 17 00:00:00 2001 From: xuyuan23 <643854343@qq.com> Date: Sun, 28 May 2023 22:00:57 +0800 Subject: [PATCH 5/6] Introduction to the usage method of adding multi-language interface support --- README.md | 9 +++++++-- README.zh.md | 3 +++ pilot/language/lang_content_mapping.py | 6 +++--- 3 files changed, 13 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 7f3d18de1..87ab44781 100644 --- a/README.md +++ b/README.md @@ -190,6 +190,11 @@ We provide a user interface for Gradio, which allows you to use DB-GPT through o To use multiple models, modify the LLM_MODEL parameter in the .env configuration file to switch between the models. +### Multi Language Usage +To use multiple language model, modify the LLM_MODEL parameter in the .env configuration file to switch between the models. + +In the .env configuration file, modify the LANGUAGE parameter to switch between different languages, the default is English (Chinese zh, English en, other languages ​​to be added) + ### Create your own knowledge repository: 1.Place personal knowledge files or folders in the pilot/datasets directory. @@ -239,8 +244,8 @@ This project is standing on the shoulders of giants and is not going to work wit ## Contributors -|[
csunny](https://github.com/csunny)|[xudafeng](https://github.com/xudafeng)|[明天](https://github.com/yhjun1026)| [Aries-ckt](https://github.com/Aries-ckt)|[thebigbone](https://github.com/thebigbone)|
-| :---: | :---: | :---: | :---: |:---: |
+|[csunny](https://github.com/csunny)|[xudafeng](https://github.com/xudafeng)|[明天](https://github.com/yhjun1026)| [Aries-ckt](https://github.com/Aries-ckt)|[thebigbone](https://github.com/thebigbone)|[Shinexy](https://github.com/xuyuan23)
| +| :---: | :---: | :---: | :---: |:---: |:---: | This project follows the git-contributor [spec](https://github.com/xudafeng/git-contributor), auto updated at `Fri May 19 2023 00:24:18 GMT+0800`. diff --git a/README.zh.md b/README.zh.md index 015d754b2..3db20702c 100644 --- a/README.zh.md +++ b/README.zh.md @@ -188,6 +188,9 @@ $ python webserver.py ### 多模型使用 在.env 配置文件当中, 修改LLM_MODEL参数来切换使用的模型。 +### 多语言用户界面模式 +在.env 配置文件当中,修改LANGUAGE参数来切换使用不同的语言,默认是英文(中文zh, 英文en, 其他语言待补充) + ### 打造属于你的知识库: 1.将个人知识文件或者文件夹放入pilot/datasets目录中 diff --git a/pilot/language/lang_content_mapping.py b/pilot/language/lang_content_mapping.py index c6126e671..f293b8510 100644 --- a/pilot/language/lang_content_mapping.py +++ b/pilot/language/lang_content_mapping.py @@ -45,15 +45,15 @@ "please_choose_database": "Please choose database", "sql_generate_diagnostics": "SQL Generation & Diagnostics", "knowledge_qa_type_llm_native_dialogue": "LLM native dialogue", - "knowledge_qa_type_default_knowledge_base_dialogue": "Default Knowledge Base", - "knowledge_qa_type_add_knowledge_base_dialogue": "Added Knowledge Base", + "knowledge_qa_type_default_knowledge_base_dialogue": "Default documents", + "knowledge_qa_type_add_knowledge_base_dialogue": "Added documents", "dialogue_use_plugin": "Dialogue Extension", "create_knowledge_base": "Create Knowledge Base", "sql_schema_info": "the schema information of database {}: {}\n", "current_dialogue_mode": "Current dialogue mode", "database_smart_assistant": "Database smart assistant", "sql_vs_setting": "In the automatic execution mode, DB-GPT can have the ability to execute SQL, read knowledge from the network, automatically store and learn", - "knowledge_qa": "Knowledge QA", + "knowledge_qa": "Documents QA", "configure_knowledge_base": "Configure Knowledge Base", "new_klg_name": "New Knowledge Base name", "add_as_new_klg": "Add as new knowledge base", From 6182c89ac0a37ea2311de6a2fe8148214a11266d Mon Sep 17 00:00:00 2001 From: xuyuan23 <643854343@qq.com> Date: Sun, 28 May 2023 22:09:41 +0800 Subject: [PATCH 6/6] Modify the English label expression --- pilot/language/lang_content_mapping.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/pilot/language/lang_content_mapping.py b/pilot/language/lang_content_mapping.py index f293b8510..5d165b51c 100644 --- a/pilot/language/lang_content_mapping.py +++ b/pilot/language/lang_content_mapping.py @@ -52,15 +52,15 @@ "sql_schema_info": "the schema information of database {}: {}\n", "current_dialogue_mode": "Current dialogue mode", "database_smart_assistant": "Database smart assistant", - "sql_vs_setting": "In the automatic execution mode, DB-GPT can have the ability to execute SQL, read knowledge from the network, automatically store and learn", + "sql_vs_setting": "In the automatic execution mode, DB-GPT can have the ability to execute SQL, read data from the network, automatically store and learn", "knowledge_qa": "Documents QA", - "configure_knowledge_base": "Configure Knowledge Base", - "new_klg_name": "New Knowledge Base name", - "add_as_new_klg": "Add as new knowledge base", - "add_file_to_klg": "Add file to knowledge base", + "configure_knowledge_base": "Configure Documents", + "new_klg_name": "New document name", + "add_as_new_klg": "Add as new documents", + "add_file_to_klg": "Add file to documents", "upload_file": "Upload file", "add_file": "Add file", - "upload_and_load_to_klg": "Upload and load to knowledge base", + "upload_and_load_to_klg": "Upload and load to documents", "upload_folder": "Upload 
folder", "add_folder": "Add folder", "send": "Send",