diff --git a/README.zh.md b/README.zh.md index 14a36a86e..eea2c58c0 100644 --- a/README.zh.md +++ b/README.zh.md @@ -127,6 +127,7 @@ DB-GPT是一个开源的数据库领域大模型框架。目的是构建大模 - [internlm-chat-20b](https://huggingface.co/internlm/internlm-chat-20b) - [qwen-7b-chat](https://huggingface.co/Qwen/Qwen-7B-Chat) - [qwen-14b-chat](https://huggingface.co/Qwen/Qwen-14B-Chat) + - [qwen-72b-chat](https://huggingface.co/Qwen/Qwen-72B-Chat) - [wizardlm-13b](https://huggingface.co/WizardLM/WizardLM-13B-V1.2) - [orca-2-7b](https://huggingface.co/microsoft/Orca-2-7b) - [orca-2-13b](https://huggingface.co/microsoft/Orca-2-13b) diff --git a/pilot/configs/model_config.py b/pilot/configs/model_config.py index b66102e24..d4175c1ef 100644 --- a/pilot/configs/model_config.py +++ b/pilot/configs/model_config.py @@ -88,6 +88,18 @@ def get_device() -> str: "qwen-14b-chat-int8": os.path.join(MODEL_PATH, "Qwen-14B-Chat-Int8"), # https://huggingface.co/Qwen/Qwen-14B-Chat-Int4 "qwen-14b-chat-int4": os.path.join(MODEL_PATH, "Qwen-14B-Chat-Int4"), + # https://huggingface.co/Qwen/Qwen-72B-Chat + "qwen-72b-chat": os.path.join(MODEL_PATH, "Qwen-72B-Chat"), + # https://huggingface.co/Qwen/Qwen-72B-Chat-Int8 + "qwen-72b-chat-int8": os.path.join(MODEL_PATH, "Qwen-72B-Chat-Int8"), + # https://huggingface.co/Qwen/Qwen-72B-Chat-Int4 + "qwen-72b-chat-int4": os.path.join(MODEL_PATH, "Qwen-72B-Chat-Int4"), + # https://huggingface.co/Qwen/Qwen-1_8B-Chat + "qwen-1.8b-chat": os.path.join(MODEL_PATH, "Qwen-1_8B-Chat"), + # https://huggingface.co/Qwen/Qwen-1_8B-Chat-Int8 + "qwen-1.8b-chat-int8": os.path.join(MODEL_PATH, "Qwen-1_8B-Chat-Int8"), + # https://huggingface.co/Qwen/Qwen-1_8B-Chat-Int4 + "qwen-1.8b-chat-int4": os.path.join(MODEL_PATH, "Qwen-1_8B-Chat-Int4"), # (Llama2 based) We only support WizardLM-13B-V1.2 for now, which is trained from Llama-2 13b, see https://huggingface.co/WizardLM/WizardLM-13B-V1.2 "wizardlm-13b": os.path.join(MODEL_PATH, "WizardLM-13B-V1.2"), # wget 
https://huggingface.co/TheBloke/vicuna-13B-v1.5-GGUF/resolve/main/vicuna-13b-v1.5.Q4_K_M.gguf -O models/ggml-model-q4_0.gguf diff --git a/pilot/openapi/api_v1/feedback/feed_back_db.py b/pilot/openapi/api_v1/feedback/feed_back_db.py index 02afb3215..f38d041b0 100644 --- a/pilot/openapi/api_v1/feedback/feed_back_db.py +++ b/pilot/openapi/api_v1/feedback/feed_back_db.py @@ -49,7 +49,6 @@ def __init__(self): def create_or_update_chat_feed_back(self, feed_back: FeedBackBody): # Todo: We need to have user information first. - def_user_name = "" session = self.get_session() chat_feed_back = ChatFeedBackEntity( @@ -60,7 +59,7 @@ def create_or_update_chat_feed_back(self, feed_back: FeedBackBody): question=feed_back.question, knowledge_space=feed_back.knowledge_space, messages=feed_back.messages, - user_name=def_user_name, + user_name=feed_back.user_name, gmt_created=datetime.now(), gmt_modified=datetime.now(), ) @@ -76,7 +75,7 @@ def create_or_update_chat_feed_back(self, feed_back: FeedBackBody): result.question = feed_back.question result.knowledge_space = feed_back.knowledge_space result.messages = feed_back.messages - result.user_name = def_user_name + result.user_name = feed_back.user_name result.gmt_created = datetime.now() result.gmt_modified = datetime.now() else: diff --git a/pilot/openapi/api_v1/feedback/feed_back_model.py b/pilot/openapi/api_v1/feedback/feed_back_model.py index fabc30c09..8da94ebab 100644 --- a/pilot/openapi/api_v1/feedback/feed_back_model.py +++ b/pilot/openapi/api_v1/feedback/feed_back_model.py @@ -1,4 +1,5 @@ from pydantic.main import BaseModel +from typing import Optional class FeedBackBody(BaseModel): @@ -12,14 +13,16 @@ class FeedBackBody(BaseModel): """question: human question""" question: str - """knowledge_space: knowledge space""" - knowledge_space: str - """score: rating of the llm's answer""" score: int """ques_type: question type""" ques_type: str + user_name: Optional[str] = None + """messages: rating detail""" - messages: str + 
messages: Optional[str] = None + + """knowledge_space: knowledge space""" + knowledge_space: Optional[str] = None diff --git a/pilot/server/prompt/prompt_manage_db.py b/pilot/server/prompt/prompt_manage_db.py index eff75c270..36168131e 100644 --- a/pilot/server/prompt/prompt_manage_db.py +++ b/pilot/server/prompt/prompt_manage_db.py @@ -55,6 +55,7 @@ def create_prompt(self, prompt: PromptManageRequest): prompt_name=prompt.prompt_name, content=prompt.content, user_name=prompt.user_name, + sys_code=prompt.sys_code, gmt_created=datetime.now(), gmt_modified=datetime.now(), ) @@ -83,6 +84,8 @@ def get_prompts(self, query: PromptManageEntity): prompts = prompts.filter( PromptManageEntity.prompt_name == query.prompt_name ) + if query.sys_code is not None: + prompts = prompts.filter(PromptManageEntity.sys_code == query.sys_code) prompts = prompts.order_by(PromptManageEntity.gmt_created.desc()) result = prompts.all() diff --git a/pilot/server/prompt/request/request.py b/pilot/server/prompt/request/request.py index c1b0683ec..9bf83897f 100644 --- a/pilot/server/prompt/request/request.py +++ b/pilot/server/prompt/request/request.py @@ -1,24 +1,44 @@ from typing import List from pydantic import BaseModel +from typing import Optional +from pydantic import BaseModel class PromptManageRequest(BaseModel): - """chat_scene: for example: chat_with_db_execute, chat_excel, chat_with_db_qa""" - - chat_scene: str = None - - """sub_chat_scene: sub chat scene""" - sub_chat_scene: str = None - - """prompt_type: common or private""" - prompt_type: str = None - - """content: prompt content""" - content: str = None - - """user_name: user name""" - user_name: str = None - - """prompt_name: prompt name""" - prompt_name: str = None + """Model for managing prompts.""" + + chat_scene: Optional[str] = None + """ + The chat scene, e.g. chat_with_db_execute, chat_excel, chat_with_db_qa. + """ + + sub_chat_scene: Optional[str] = None + """ + The sub chat scene. 
+    """ + + prompt_type: Optional[str] = None + """ + The prompt type, either common or private. + """ + + content: Optional[str] = None + """ + The prompt content. + """ + + user_name: Optional[str] = None + """ + The user name. + """ + + sys_code: Optional[str] = None + """ + System code + """ + + prompt_name: Optional[str] = None + """ + The prompt name. + """ diff --git a/pilot/server/prompt/service.py b/pilot/server/prompt/service.py index c108d8b88..b8f71aeac 100644 --- a/pilot/server/prompt/service.py +++ b/pilot/server/prompt/service.py @@ -17,9 +17,15 @@ def create_prompt(self, request: PromptManageRequest): query = PromptManageRequest( prompt_name=request.prompt_name, ) + err_sys_str = "" + if request.sys_code: + query.sys_code = request.sys_code + err_sys_str = f" and sys_code: {request.sys_code}" prompt_name = prompt_manage_dao.get_prompts(query) if len(prompt_name) > 0: - raise Exception(f"prompt name:{request.prompt_name} have already named") + raise Exception( + f"prompt name: {request.prompt_name}{err_sys_str} already exists" + ) prompt_manage_dao.create_prompt(request) return True @@ -32,6 +38,7 @@ def get_prompts(self, request: PromptManageRequest): prompt_type=request.prompt_type, prompt_name=request.prompt_name, user_name=request.user_name, + sys_code=request.sys_code, ) responses = [] prompts = prompt_manage_dao.get_prompts(query)