Skip to content

Commit

Permalink
fix: fix code format error
Browse files Browse the repository at this point in the history
  • Loading branch information
csunny committed Jan 31, 2024
1 parent 45cf7a9 commit 2d39db3
Show file tree
Hide file tree
Showing 3 changed files with 177 additions and 5 deletions.
12 changes: 8 additions & 4 deletions dbgpt/model/utils/chatgpt_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,9 @@ def _initialize_openai_v1(init_params: OpenAIParameters):
os.getenv("AZURE_OPENAI_KEY") if api_type == "azure" else None,
)
api_version = api_version or os.getenv("OPENAI_API_VERSION")
api_azure_deployment = init_params.api_azure_deployment or os.getenv("API_AZURE_DEPLOYMENT")
api_azure_deployment = init_params.api_azure_deployment or os.getenv(
"API_AZURE_DEPLOYMENT"
)
if not base_url and full_url:
base_url = full_url.split("/chat/completions")[0]

Expand All @@ -85,7 +87,7 @@ def _initialize_openai_v1(init_params: OpenAIParameters):
openai_params = {
"api_key": api_key,
"base_url": base_url,
"api_azure_deployment": api_azure_deployment
"api_azure_deployment": api_azure_deployment,
}
return openai_params, api_type, api_version

Expand All @@ -111,7 +113,9 @@ def _initialize_openai(params: OpenAIParameters):
)
api_version = params.api_version or os.getenv("OPENAI_API_VERSION")

api_azure_deployment = params.api_azure_deployment or os.getenv("API_AZURE_DEPLOYMENT")
api_azure_deployment = params.api_azure_deployment or os.getenv(
"API_AZURE_DEPLOYMENT"
)

if not api_base and params.full_url:
# Adapt previous proxy_server_url configuration
Expand All @@ -127,7 +131,7 @@ def _initialize_openai(params: OpenAIParameters):
if params.proxies:
openai.proxy = params.proxies
if params.api_azure_deployment:
openai.api_azure_deployment =api_azure_deployment
openai.api_azure_deployment = api_azure_deployment


def _build_openai_client(init_params: OpenAIParameters) -> Tuple[str, ClientType]:
Expand Down
164 changes: 164 additions & 0 deletions docs/docs/installation/advanced_usage/More_proxyllms.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,164 @@
# ProxyLLMs
DB-GPT can be deployed on servers with lower hardware requirements through proxy LLMs. DB-GPT now supports many proxy LLMs, such as OpenAI, Azure, Wenxin, Tongyi, Zhipu, and so on.

### Proxy model

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

<Tabs
defaultValue="openai"
values={[
{label: 'Open AI', value: 'openai'},
{label: 'Azure', value: 'Azure'},
{label: 'Qwen', value: 'qwen'},
{label: 'ChatGLM', value: 'chatglm'},
{label: 'WenXin', value: 'erniebot'},
]}>
<TabItem value="openai" label="open ai">
Install dependencies

```bash
pip install -e ".[openai]"
```

Download embedding model

```bash
cd DB-GPT
mkdir models && cd models
git clone https://huggingface.co/GanymedeNil/text2vec-large-chinese
```

Configure the proxy by modifying LLM_MODEL, PROXY_API_KEY and PROXY_SERVER_URL in the `.env` file

```bash
# .env
LLM_MODEL=chatgpt_proxyllm
PROXY_API_KEY={your-openai-sk}
PROXY_SERVER_URL=https://api.openai.com/v1/chat/completions
# If you use gpt-4
# PROXYLLM_BACKEND=gpt-4
```
</TabItem>

<TabItem value="Azure" label="Azure">
Install dependencies

```bash
pip install -e ".[openai]"
```

Download embedding model

```bash
cd DB-GPT
mkdir models && cd models
git clone https://huggingface.co/GanymedeNil/text2vec-large-chinese # change this to other embedding model if needed.
```

Configure the proxy by modifying LLM_MODEL, PROXY_API_KEY, PROXY_API_BASE and the other Azure settings in the `.env` file

```bash
# .env
LLM_MODEL=proxyllm
PROXY_API_KEY=xxxx
PROXY_API_BASE=https://xxxxxx.openai.azure.com/
PROXY_API_TYPE=azure
PROXY_SERVER_URL=xxxx
PROXY_API_VERSION=2023-05-15
PROXYLLM_BACKEND=gpt-35-turbo
API_AZURE_DEPLOYMENT=xxxx[deployment_name]
```
</TabItem>

<TabItem value="qwen" label="通义千问">
Install dependencies

```bash
pip install dashscope
```

Download embedding model

```bash
cd DB-GPT
mkdir models && cd models

# embedding model
git clone https://huggingface.co/GanymedeNil/text2vec-large-chinese
# or
git clone https://huggingface.co/moka-ai/m3e-large
```

Configure the proxy by modifying LLM_MODEL, TONGYI_PROXY_API_KEY and PROXY_SERVER_URL in the `.env` file

```bash
# .env
# Aliyun tongyiqianwen
LLM_MODEL=tongyi_proxyllm
TONGYI_PROXY_API_KEY={your-tongyi-sk}
PROXY_SERVER_URL={your_service_url}
```
</TabItem>
<TabItem value="chatglm" label="chatglm" >
Install dependencies

```bash
pip install zhipuai
```

Download embedding model

```bash
cd DB-GPT
mkdir models && cd models

# embedding model
git clone https://huggingface.co/GanymedeNil/text2vec-large-chinese
# or
git clone https://huggingface.co/moka-ai/m3e-large
```

Configure the proxy by modifying LLM_MODEL, ZHIPU_MODEL_VERSION, ZHIPU_PROXY_API_KEY and PROXY_SERVER_URL in the `.env` file

```bash
# .env
LLM_MODEL=zhipu_proxyllm
PROXY_SERVER_URL={your_service_url}
ZHIPU_MODEL_VERSION={version}
ZHIPU_PROXY_API_KEY={your-zhipu-sk}
```
</TabItem>

<TabItem value="erniebot" label="文心一言" default>

Download embedding model

```bash
cd DB-GPT
mkdir models && cd models

# embedding model
git clone https://huggingface.co/GanymedeNil/text2vec-large-chinese
# or
git clone https://huggingface.co/moka-ai/m3e-large
```

Configure the proxy by modifying LLM_MODEL, WEN_XIN_MODEL_VERSION, WEN_XIN_API_KEY and WEN_XIN_API_SECRET in the `.env` file

```bash
# .env
LLM_MODEL=wenxin_proxyllm
WEN_XIN_MODEL_VERSION={version} # ERNIE-Bot or ERNIE-Bot-turbo
WEN_XIN_API_KEY={your-wenxin-sk}
WEN_XIN_API_SECRET={your-wenxin-sct}
```
</TabItem>
</Tabs>


:::info note

⚠️ Be careful not to overwrite the contents of the `.env` configuration file
:::
6 changes: 5 additions & 1 deletion docs/sidebars.js
Original file line number Diff line number Diff line change
Expand Up @@ -162,8 +162,12 @@ const sidebars = {
},
{
type: 'category',
label: 'Adanced Usage',
label: 'Advanced Usage',
items: [
{
type: 'doc',
id: 'installation/advanced_usage/More_proxyllms',
},
{
type: 'doc',
id: 'installation/advanced_usage/vLLM_inference',
Expand Down

0 comments on commit 2d39db3

Please sign in to comment.