From 2d39db3013ea607a82e136b18dadacfd806c65aa Mon Sep 17 00:00:00 2001 From: csunny Date: Wed, 31 Jan 2024 16:01:21 +0800 Subject: [PATCH] fix: fix code format error --- dbgpt/model/utils/chatgpt_utils.py | 12 +- .../advanced_usage/More_proxyllms.md | 164 ++++++++++++++++++ docs/sidebars.js | 6 +- 3 files changed, 177 insertions(+), 5 deletions(-) create mode 100644 docs/docs/installation/advanced_usage/More_proxyllms.md diff --git a/dbgpt/model/utils/chatgpt_utils.py b/dbgpt/model/utils/chatgpt_utils.py index f0cc300d9..c232a0de2 100644 --- a/dbgpt/model/utils/chatgpt_utils.py +++ b/dbgpt/model/utils/chatgpt_utils.py @@ -71,7 +71,9 @@ def _initialize_openai_v1(init_params: OpenAIParameters): os.getenv("AZURE_OPENAI_KEY") if api_type == "azure" else None, ) api_version = api_version or os.getenv("OPENAI_API_VERSION") - api_azure_deployment = init_params.api_azure_deployment or os.getenv("API_AZURE_DEPLOYMENT") + api_azure_deployment = init_params.api_azure_deployment or os.getenv( + "API_AZURE_DEPLOYMENT" + ) if not base_url and full_url: base_url = full_url.split("/chat/completions")[0] @@ -85,7 +87,7 @@ def _initialize_openai_v1(init_params: OpenAIParameters): openai_params = { "api_key": api_key, "base_url": base_url, - "api_azure_deployment": api_azure_deployment + "api_azure_deployment": api_azure_deployment, } return openai_params, api_type, api_version @@ -111,7 +113,9 @@ def _initialize_openai(params: OpenAIParameters): ) api_version = params.api_version or os.getenv("OPENAI_API_VERSION") - api_azure_deployment = params.api_azure_deployment or os.getenv("API_AZURE_DEPLOYMENT") + api_azure_deployment = params.api_azure_deployment or os.getenv( + "API_AZURE_DEPLOYMENT" + ) if not api_base and params.full_url: # Adapt previous proxy_server_url configuration @@ -127,7 +131,7 @@ def _initialize_openai(params: OpenAIParameters): if params.proxies: openai.proxy = params.proxies if params.api_azure_deployment: - openai.api_azure_deployment =api_azure_deployment + 
openai.api_azure_deployment = api_azure_deployment def _build_openai_client(init_params: OpenAIParameters) -> Tuple[str, ClientType]: diff --git a/docs/docs/installation/advanced_usage/More_proxyllms.md b/docs/docs/installation/advanced_usage/More_proxyllms.md new file mode 100644 index 000000000..c50f6f9b0 --- /dev/null +++ b/docs/docs/installation/advanced_usage/More_proxyllms.md @@ -0,0 +1,164 @@ +# ProxyLLMs +DB-GPT can be deployed on servers with lower hardware through proxy LLMs, and now DB-GPT supports many proxy LLMs, such as OpenAI, Azure, Wenxin, Tongyi, Zhipu and so on. + +### Proxy model + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + + + + Install dependencies + +```python +pip install -e ".[openai]" +``` + +Download embedding model + +```python +cd DB-GPT +mkdir models && cd models +git clone https://huggingface.co/GanymedeNil/text2vec-large-chinese +``` + +Configure the proxy and modify LLM_MODEL, PROXY_API_URL and API_KEY in the `.env` file + +```python +# .env +LLM_MODEL=chatgpt_proxyllm +PROXY_API_KEY={your-openai-sk} +PROXY_SERVER_URL=https://api.openai.com/v1/chat/completions +# If you use gpt-4 +# PROXYLLM_BACKEND=gpt-4 +``` + + + + Install dependencies + +```python +pip install -e ".[openai]" +``` + +Download embedding model + +```python +cd DB-GPT +mkdir models && cd models +git clone https://huggingface.co/GanymedeNil/text2vec-large-chinese # change this to other embedding model if needed. 
+``` + +Configure the proxy and modify LLM_MODEL, PROXY_API_URL and API_KEY in the `.env` file + +```python +# .env +LLM_MODEL=proxyllm +PROXY_API_KEY=xxxx +PROXY_API_BASE=https://xxxxxx.openai.azure.com/ +PROXY_API_TYPE=azure +PROXY_SERVER_URL=xxxx +PROXY_API_VERSION=2023-05-15 +PROXYLLM_BACKEND=gpt-35-turbo +API_AZURE_DEPLOYMENT=xxxx[deployment_name] +``` + + + +Install dependencies + +```python +pip install dashscope +``` + +Download embedding model + +```python +cd DB-GPT +mkdir models && cd models + +# embedding model +git clone https://huggingface.co/GanymedeNil/text2vec-large-chinese +or +git clone https://huggingface.co/moka-ai/m3e-large +``` + +Configure the proxy and modify LLM_MODEL, PROXY_API_URL and API_KEY in the `.env` file + +```python +# .env +# Aliyun tongyiqianwen +LLM_MODEL=tongyi_proxyllm +TONGYI_PROXY_API_KEY={your-tongyi-sk} +PROXY_SERVER_URL={your_service_url} +``` + + +Install dependencies + +```python +pip install zhipuai +``` + +Download embedding model + +```python +cd DB-GPT +mkdir models && cd models + +# embedding model +git clone https://huggingface.co/GanymedeNil/text2vec-large-chinese +or +git clone https://huggingface.co/moka-ai/m3e-large +``` + +Configure the proxy and modify LLM_MODEL, PROXY_API_URL and API_KEY in the `.env` file + +```python +# .env +LLM_MODEL=zhipu_proxyllm +PROXY_SERVER_URL={your_service_url} +ZHIPU_MODEL_VERSION={version} +ZHIPU_PROXY_API_KEY={your-zhipu-sk} +``` + + + + +Download embedding model + +```python +cd DB-GPT +mkdir models && cd models + +# embedding model +git clone https://huggingface.co/GanymedeNil/text2vec-large-chinese +or +git clone https://huggingface.co/moka-ai/m3e-large +``` + +Configure the proxy and modify LLM_MODEL, MODEL_VERSION, API_KEY and API_SECRET in the `.env` file + +```python +# .env +LLM_MODEL=wenxin_proxyllm +WEN_XIN_MODEL_VERSION={version} # ERNIE-Bot or ERNIE-Bot-turbo +WEN_XIN_API_KEY={your-wenxin-sk} +WEN_XIN_API_SECRET={your-wenxin-sct} +``` + + + + +:::info note + +⚠️ 
Be careful not to overwrite the contents of the `.env` configuration file +::: \ No newline at end of file diff --git a/docs/sidebars.js b/docs/sidebars.js index f35ad82fb..948df697e 100755 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -162,8 +162,12 @@ const sidebars = { }, { type: 'category', - label: 'Adanced Usage', + label: 'Advanced Usage', items: [ + { + type: 'doc', + id: 'installation/advanced_usage/More_proxyllms', + }, { type: 'doc', id: 'installation/advanced_usage/vLLM_inference',