feat: Support i18n
fangyinc committed Mar 25, 2024
1 parent fa06be6 commit f00f0c7
Showing 175 changed files with 11,948 additions and 69,505 deletions.
1 change: 1 addition & 0 deletions .env.template
@@ -166,6 +166,7 @@ VECTOR_STORE_TYPE=Chroma
#*******************************************************************#
#** WebServer Language Support **#
#*******************************************************************#
# en, zh, fr, ja, ko, ru
LANGUAGE=en
#LANGUAGE=zh

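The new comment lists the language codes the web server accepts. As a rough sketch of how such a setting might be consumed (the real lookup goes through DB-GPT's Config class; the helper below is hypothetical):

import os

# Hypothetical helper: read the LANGUAGE setting from the environment,
# falling back to English when unset or unsupported.
SUPPORTED_LANGUAGES = {"en", "zh", "fr", "ja", "ko", "ru"}

def resolve_language(default: str = "en") -> str:
    lang = os.getenv("LANGUAGE", default).strip().lower()
    return lang if lang in SUPPORTED_LANGUAGES else default

print(resolve_language())  # "en" unless LANGUAGE is set to another supported code
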
6 changes: 4 additions & 2 deletions .gitignore
@@ -63,7 +63,7 @@ coverage.xml
.pytest_cache/

# Translations
*.mo
# *.mo
*.pot

# Django stuff:
@@ -182,4 +182,6 @@ thirdparty

# Ignore awel DAG visualization files
/examples/**/*.gv
/examples/**/*.gv.pdf
/examples/**/*.gv.pdf
/i18n/locales/**/**/*_ai_translated.po
/i18n/locales/**/**/*~
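These patterns suggest a workflow in which machine-translated drafts (*_ai_translated.po) stay untracked while reviewed catalogs are compiled and committed (un-ignoring *.mo). A minimal sketch of the compile step, assuming GNU gettext's msgfmt is on PATH and using illustrative paths rather than the repository's actual locale layout:

import subprocess
from pathlib import Path

# Illustrative paths, not the repository's real locale layout.
po_file = Path("i18n/locales/zh_CN/LC_MESSAGES/dbgpt.po")
mo_file = po_file.with_suffix(".mo")

# msgfmt compiles a .po source catalog into the binary .mo form
# that Python's gettext module loads at runtime.
subprocess.run(["msgfmt", "-o", str(mo_file), str(po_file)], check=True)
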
26 changes: 6 additions & 20 deletions dbgpt/app/dbgpt_server.py
@@ -22,14 +22,10 @@
# initialize_components import time cost about 0.1s
from dbgpt.app.component_configs import initialize_components
from dbgpt.component import SystemApp
from dbgpt.configs.model_config import (
EMBEDDING_MODEL_CONFIG,
LLM_MODEL_CONFIG,
LOGDIR,
ROOT_PATH,
)
from dbgpt.configs.model_config import EMBEDDING_MODEL_CONFIG, LLM_MODEL_CONFIG, LOGDIR
from dbgpt.serve.core import add_exception_handler
from dbgpt.util.fastapi import PriorityAPIRouter
from dbgpt.util.i18n_utils import _, set_default_language
from dbgpt.util.parameter_utils import _get_dict_from_obj
from dbgpt.util.system_utils import get_system_info
from dbgpt.util.tracer import SpanType, SpanTypeRunName, initialize_tracer, root_tracer
@@ -47,10 +43,12 @@
static_file_path = os.path.join(ROOT_PATH, "dbgpt", "app/static")

CFG = Config()
set_default_language(CFG.LANGUAGE)


app = FastAPI(
title="DBGPT OPEN API",
description="This is dbgpt, with auto docs for the API and everything",
title=_("DB-GPT Open API"),
description=_("DB-GPT Open API"),
version=version,
openapi_tags=[],
)
@@ -65,18 +63,6 @@
)


@app.get("/doc", include_in_schema=False)
async def custom_swagger_ui_html():
return get_swagger_ui_html(
openapi_url=app.openapi_url,
title="Custom Swagger UI",
swagger_js_url="/swagger_static/swagger-ui-bundle.js",
swagger_css_url="/swagger_static/swagger-ui.css",
)


# applications.get_swagger_ui_html = swagger_monkey_patch

system_app = SystemApp(app)


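The server now wraps user-facing strings in _() and selects the catalog from CFG.LANGUAGE via set_default_language. A minimal sketch of how a gettext-backed helper in the spirit of dbgpt.util.i18n_utils could work; the module's real internals, translation domain, and locale directory are assumptions here:

import gettext
from pathlib import Path

# Hedged sketch of a gettext-backed i18n helper; the real
# dbgpt.util.i18n_utils module may be organized differently.
_DOMAIN = "dbgpt"                       # assumed translation domain
_LOCALE_DIR = Path("i18n") / "locales"  # assumed catalog location
_translator = gettext.NullTranslations()

def set_default_language(language: str) -> None:
    """Load the compiled .mo catalog for `language`, falling back to the source strings."""
    global _translator
    _translator = gettext.translation(
        _DOMAIN, localedir=str(_LOCALE_DIR), languages=[language], fallback=True
    )

def _(message: str) -> str:
    """Translate `message` using the currently selected catalog."""
    return _translator.gettext(message)

# Usage mirroring the diff: pick the language from config, then wrap user-facing strings.
set_default_language("zh")
print(_("DB-GPT Open API"))
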
20 changes: 12 additions & 8 deletions dbgpt/app/openapi/api_v1/api_v1.py
@@ -21,7 +21,7 @@
from dbgpt.app.scene import BaseChat, ChatFactory, ChatScene
from dbgpt.component import ComponentType
from dbgpt.configs.model_config import KNOWLEDGE_UPLOAD_ROOT_PATH
from dbgpt.core.awel import CommonLLMHttpRequestBody, CommonLLMHTTPRequestContext
from dbgpt.core.awel import CommonLLMHttpRequestBody
from dbgpt.core.schema.api import (
ChatCompletionResponseStreamChoice,
ChatCompletionStreamResponse,
@@ -355,17 +355,21 @@ async def chat_completions(
media_type="text/event-stream",
)
elif dialogue.chat_mode == ChatScene.ChatFlow.value():
flow_ctx = CommonLLMHTTPRequestContext(
conv_uid=dialogue.conv_uid,
chat_mode=dialogue.chat_mode,
user_name=dialogue.user_name,
sys_code=dialogue.sys_code,
)
flow_req = CommonLLMHttpRequestBody(
model=dialogue.model_name,
messages=dialogue.user_input,
stream=True,
context=flow_ctx,
# context=flow_ctx,
# temperature=
# max_new_tokens=
# enable_vis=
conv_uid=dialogue.conv_uid,
span_id=root_tracer.get_current_span_id(),
chat_mode=dialogue.chat_mode,
chat_param=dialogue.select_param,
user_name=dialogue.user_name,
sys_code=dialogue.sys_code,
incremental=dialogue.incremental,
)
return StreamingResponse(
flow_service.chat_flow(dialogue.select_param, flow_req),
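The flow branch no longer builds a separate CommonLLMHTTPRequestContext; the context fields are passed straight into CommonLLMHttpRequestBody. A hedged Pydantic sketch of what such a flattened request model might look like, with the field set inferred from this call site (the real model in dbgpt.core.awel may define more fields and different defaults):

from typing import Any, Dict, List, Optional, Union
from pydantic import BaseModel

# Hedged sketch of a flattened request body, inferred from the call site above;
# the real CommonLLMHttpRequestBody in dbgpt.core.awel may differ.
class FlatLLMHttpRequestBody(BaseModel):
    model: str
    messages: Union[str, List[Dict[str, Any]]]
    stream: bool = True
    # Fields that previously lived on a separate request-context object:
    conv_uid: Optional[str] = None
    span_id: Optional[str] = None
    chat_mode: Optional[str] = None
    chat_param: Optional[str] = None
    user_name: Optional[str] = None
    sys_code: Optional[str] = None
    incremental: bool = False

req = FlatLLMHttpRequestBody(
    model="example-model",
    messages="hello",
    conv_uid="conv-123",
    chat_mode="chat_flow",
)
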
15 changes: 2 additions & 13 deletions dbgpt/app/openapi/api_v2.py
@@ -19,7 +19,7 @@
from dbgpt.app.scene import BaseChat, ChatScene
from dbgpt.client.schema import ChatCompletionRequestBody, ChatMode
from dbgpt.component import logger
from dbgpt.core.awel import CommonLLMHttpRequestBody, CommonLLMHTTPRequestContext
from dbgpt.core.awel import CommonLLMHttpRequestBody
from dbgpt.core.schema.api import (
ChatCompletionResponse,
ChatCompletionResponseChoice,
@@ -253,18 +253,7 @@ async def chat_flow_stream_wrapper(
token (APIToken): token
"""
flow_service = get_chat_flow()
flow_ctx = CommonLLMHTTPRequestContext(
conv_uid=request.conv_uid,
chat_mode=request.chat_mode,
user_name=request.user_name,
sys_code=request.sys_code,
)
flow_req = CommonLLMHttpRequestBody(
model=request.model,
messages=request.chat_param,
stream=True,
context=flow_ctx,
)
flow_req = CommonLLMHttpRequestBody(**request.dict())
async for output in flow_service.chat_flow(request.chat_param, flow_req):
if output.startswith("data: [DONE]"):
yield output
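Here the field-by-field construction collapses into CommonLLMHttpRequestBody(**request.dict()), which relies on the incoming schema's field names matching the flattened body. A small, self-contained illustration of that pattern with generic models (not the project's actual classes):

from pydantic import BaseModel

# Illustration of the **request.dict() pattern used in the diff:
# unpacking one model's fields into another works when the source's
# field names and types cover what the target expects.
class IncomingRequest(BaseModel):
    model: str
    messages: str
    conv_uid: str = ""
    user_name: str = ""

class FlowRequest(BaseModel):
    model: str
    messages: str
    conv_uid: str = ""
    user_name: str = ""
    stream: bool = True  # fields absent from the source keep their defaults

incoming = IncomingRequest(model="example-model", messages="hi", conv_uid="c-1")
# .dict() matches the pydantic v1-style API used in the diff (model_dump() in pydantic v2).
flow_req = FlowRequest(**incoming.dict())
assert flow_req.stream is True
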
2 changes: 1 addition & 1 deletion dbgpt/app/static/404.html

2 changes: 1 addition & 1 deletion dbgpt/app/static/404/index.html

(The remaining changed files, including large, generated, and deleted files, are not rendered here.)
