feat(model): Support DeepSeek proxy LLM (#1491)
fangyinc authored May 7, 2024
1 parent 863b540 commit b38305b
Showing 7 changed files with 152 additions and 1 deletion.
5 changes: 5 additions & 0 deletions .env.template
@@ -219,6 +219,11 @@ TONGYI_PROXY_API_KEY={your-tongyi-sk}
# MOONSHOT_API_BASE=https://api.moonshot.cn/v1
# MOONSHOT_API_KEY={your-moonshot-api-key}

## DeepSeek Proxyllm, https://platform.deepseek.com/api-docs/
# DEEPSEEK_MODEL_VERSION=deepseek-chat
# DEEPSEEK_API_BASE=https://api.deepseek.com/v1
# DEEPSEEK_API_KEY={your-deepseek-api-key}


#*******************************************************************#
#** SUMMARY_CONFIG **#
4 changes: 3 additions & 1 deletion README.zh.md
@@ -167,7 +167,9 @@
- 🔥🔥🔥 [Yi-34B-Chat](https://huggingface.co/01-ai/Yi-34B-Chat)
- [More open-source models](https://www.yuque.com/eosphoros/dbgpt-docs/iqaaqwriwhp6zslc#qQktR)

- Support online proxy models
- [x] [DeepSeek.deepseek-chat](https://platform.deepseek.com/api-docs/)
- [x] [Ollama.API](https://github.com/ollama/ollama/blob/main/docs/api.md)
- [x] [月之暗面.Moonshot](https://platform.moonshot.cn/docs/)
- [x] [零一万物.Yi](https://platform.lingyiwanwu.com/docs)
- [x] [OpenAI·ChatGPT](https://api.openai.com/)
10 changes: 10 additions & 0 deletions dbgpt/_private/config.py
@@ -128,6 +128,16 @@ def __init__(self) -> None:
os.environ["moonshot_proxyllm_api_base"] = os.getenv(
"MOONSHOT_API_BASE", "https://api.moonshot.cn/v1"
)
# Deepseek proxy
self.deepseek_proxy_api_key = os.getenv("DEEPSEEK_API_KEY")
if self.deepseek_proxy_api_key:
os.environ["deepseek_proxyllm_proxy_api_key"] = self.deepseek_proxy_api_key
os.environ["deepseek_proxyllm_proxyllm_backend"] = os.getenv(
"DEEPSEEK_MODEL_VERSION", "deepseek-chat"
)
os.environ["deepseek_proxyllm_api_base"] = os.getenv(
"DEEPSEEK_API_BASE", "https://api.deepseek.com/v1"
)

self.proxy_server_url = os.getenv("PROXY_SERVER_URL")

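Taken together, these lines mean that merely exporting DEEPSEEK_API_KEY activates the proxy with sensible defaults. A minimal sketch of the effect (assuming the Config class that dbgpt/_private/config.py defines; the key below is a placeholder):

import os

os.environ["DEEPSEEK_API_KEY"] = "sk-placeholder"  # placeholder only, not a real key

from dbgpt._private.config import Config

_ = Config()
# __init__ mirrors the key into model-scoped variables, applying the defaults above:
assert os.environ["deepseek_proxyllm_proxyllm_backend"] == "deepseek-chat"
assert os.environ["deepseek_proxyllm_api_base"] == "https://api.deepseek.com/v1"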
2 changes: 2 additions & 0 deletions dbgpt/configs/model_config.py
@@ -70,6 +70,8 @@ def get_device() -> str:
    # https://platform.moonshot.cn/docs/
    "moonshot_proxyllm": "moonshot_proxyllm",
    "ollama_proxyllm": "ollama_proxyllm",
    # https://platform.deepseek.com/api-docs/
    "deepseek_proxyllm": "deepseek_proxyllm",
    "llama-2-7b": os.path.join(MODEL_PATH, "Llama-2-7b-chat-hf"),
    "llama-2-13b": os.path.join(MODEL_PATH, "Llama-2-13b-chat-hf"),
    "llama-2-70b": os.path.join(MODEL_PATH, "Llama-2-70b-chat-hf"),
26 changes: 26 additions & 0 deletions dbgpt/model/adapter/proxy_adapter.py
@@ -294,6 +294,31 @@ def get_async_generate_stream_function(self, model, model_path: str):
        return moonshot_generate_stream


class DeepseekProxyLLMModelAdapter(ProxyLLMModelAdapter):
    """DeepSeek proxy LLM model adapter.

    See Also: `DeepSeek Documentation <https://platform.deepseek.com/api-docs/>`_
    """

    def support_async(self) -> bool:
        return True

    def do_match(self, lower_model_name_or_path: Optional[str] = None):
        return lower_model_name_or_path == "deepseek_proxyllm"

    def get_llm_client_class(
        self, params: ProxyModelParameters
    ) -> Type[ProxyLLMClient]:
        from dbgpt.model.proxy.llms.deepseek import DeepseekLLMClient

        return DeepseekLLMClient

    def get_async_generate_stream_function(self, model, model_path: str):
        from dbgpt.model.proxy.llms.deepseek import deepseek_generate_stream

        return deepseek_generate_stream


register_model_adapter(OpenAIProxyLLMModelAdapter)
register_model_adapter(TongyiProxyLLMModelAdapter)
register_model_adapter(OllamaLLMModelAdapter)
@@ -305,3 +330,4 @@ def get_async_generate_stream_function(self, model, model_path: str):
register_model_adapter(BaichuanProxyLLMModelAdapter)
register_model_adapter(YiProxyLLMModelAdapter)
register_model_adapter(MoonshotProxyLLMModelAdapter)
register_model_adapter(DeepseekProxyLLMModelAdapter)
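The adapter matches only the exact registered alias, so it cannot shadow the locally deployed deepseek checkpoints; a small sketch using only the methods defined above:

adapter = DeepseekProxyLLMModelAdapter()
assert adapter.do_match("deepseek_proxyllm") is True
assert adapter.do_match("deepseek-chat") is False  # only the exact alias matches
assert adapter.support_async() is True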
2 changes: 2 additions & 0 deletions dbgpt/model/proxy/__init__.py
@@ -12,6 +12,7 @@ def __lazy_import(name):
"YiLLMClient": "dbgpt.model.proxy.llms.yi",
"MoonshotLLMClient": "dbgpt.model.proxy.llms.moonshot",
"OllamaLLMClient": "dbgpt.model.proxy.llms.ollama",
"DeepseekLLMClient": "dbgpt.model.proxy.llms.deepseek",
}

if name in module_path:
@@ -35,4 +36,5 @@ def __getattr__(name):
"YiLLMClient",
"MoonshotLLMClient",
"OllamaLLMClient",
"DeepseekLLMClient",
]
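With the table entry above, the client resolves lazily on first attribute access, so importing the package root stays cheap:

from dbgpt.model.proxy import DeepseekLLMClient  # resolved via __lazy_import on first access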
104 changes: 104 additions & 0 deletions dbgpt/model/proxy/llms/deepseek.py
@@ -0,0 +1,104 @@
import os
from typing import TYPE_CHECKING, Any, Dict, Optional, Union, cast

from dbgpt.core import ModelRequest, ModelRequestContext
from dbgpt.model.proxy.llms.proxy_model import ProxyModel

from .chatgpt import OpenAILLMClient

if TYPE_CHECKING:
    from httpx._types import ProxiesTypes
    from openai import AsyncAzureOpenAI, AsyncOpenAI

    ClientType = Union[AsyncAzureOpenAI, AsyncOpenAI]

# 32K model
_DEFAULT_MODEL = "deepseek-chat"


async def deepseek_generate_stream(
    model: ProxyModel, tokenizer, params, device, context_len=2048
):
    client: DeepseekLLMClient = cast(DeepseekLLMClient, model.proxy_llm_client)
    context = ModelRequestContext(stream=True, user_name=params.get("user_name"))
    request = ModelRequest.build_request(
        client.default_model,
        messages=params["messages"],
        temperature=params.get("temperature"),
        context=context,
        max_new_tokens=params.get("max_new_tokens"),
    )
    async for r in client.generate_stream(request):
        yield r


class DeepseekLLMClient(OpenAILLMClient):
    """DeepSeek LLM Client.

    DeepSeek's API is compatible with OpenAI's API, so we inherit from
    OpenAILLMClient.

    API Reference: https://platform.deepseek.com/api-docs/
    """

    def __init__(
        self,
        api_key: Optional[str] = None,
        api_base: Optional[str] = None,
        api_type: Optional[str] = None,
        api_version: Optional[str] = None,
        model: Optional[str] = _DEFAULT_MODEL,
        proxies: Optional["ProxiesTypes"] = None,
        timeout: Optional[int] = 240,
        model_alias: Optional[str] = "deepseek_proxyllm",
        context_length: Optional[int] = None,
        openai_client: Optional["ClientType"] = None,
        openai_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs,
    ):
        api_base = (
            api_base or os.getenv("DEEPSEEK_API_BASE") or "https://api.deepseek.com/v1"
        )
        api_key = api_key or os.getenv("DEEPSEEK_API_KEY")
        model = model or _DEFAULT_MODEL
        if not context_length:
            if "deepseek-chat" in model:
                context_length = 1024 * 32
            elif "deepseek-coder" in model:
                context_length = 1024 * 16
            else:
                # 8k
                context_length = 1024 * 8

        if not api_key:
            raise ValueError(
                "DeepSeek API key is required, please set the 'DEEPSEEK_API_KEY' "
                "environment variable or pass it to the client."
            )
        super().__init__(
            api_key=api_key,
            api_base=api_base,
            api_type=api_type,
            api_version=api_version,
            model=model,
            proxies=proxies,
            timeout=timeout,
            model_alias=model_alias,
            context_length=context_length,
            openai_client=openai_client,
            openai_kwargs=openai_kwargs,
            **kwargs,
        )

    def check_sdk_version(self, version: str) -> None:
        if version < "1.0":
            raise ValueError(
                "DeepSeek API requires openai>=1.0, please upgrade it by "
                "`pip install --upgrade 'openai>=1.0'`"
            )

    @property
    def default_model(self) -> str:
        model = self._model
        if not model:
            model = _DEFAULT_MODEL
        return model
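A minimal end-to-end usage sketch of the new client, assuming a valid DEEPSEEK_API_KEY in the environment; ModelMessage and its "human" role value are assumptions based on dbgpt.core, not part of this diff:

import asyncio

from dbgpt.core import ModelMessage, ModelRequest
from dbgpt.model.proxy import DeepseekLLMClient


async def main():
    # Picks up DEEPSEEK_API_KEY / DEEPSEEK_API_BASE from the environment.
    client = DeepseekLLMClient(model="deepseek-chat")
    request = ModelRequest.build_request(
        client.default_model,
        # The "human" role value is an assumption here.
        messages=[ModelMessage(role="human", content="Hello, DeepSeek!")],
        temperature=0.7,
        max_new_tokens=256,
    )
    async for output in client.generate_stream(request):
        print(output)


asyncio.run(main())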
