feat(model): Support Baichuan API (eosphoros-ai#1009)
Co-authored-by: BaiChuanHelper <[email protected]>
2 people authored and Hopshine committed Sep 10, 2024
1 parent d0e4da6 commit e3b44a0
Showing 4 changed files with 41 additions and 44 deletions.
1 change: 1 addition & 0 deletions README.zh.md
@@ -119,6 +119,7 @@ DB-GPT is an open-source large model framework for the database domain.
 
 - Support online proxy models
   - [x] [OpenAI·ChatGPT](https://api.openai.com/)
+  - [x] [百川·Baichuan](https://platform.baichuan-ai.com/)
  - [x] [阿里·通义](https://www.aliyun.com/product/dashscope)
  - [x] [百度·文心](https://cloud.baidu.com/product/wenxinworkshop?track=dingbutonglan)
  - [x] [智谱·ChatGLM](http://open.bigmodel.cn/)
6 changes: 2 additions & 4 deletions dbgpt/_private/config.py
@@ -88,12 +88,10 @@ def __init__(self) -> None:

         # baichuan proxy
         self.bc_proxy_api_key = os.getenv("BAICHUAN_PROXY_API_KEY")
-        self.bc_proxy_api_secret = os.getenv("BAICHUAN_PROXY_API_SECRET")
-        self.bc_model_version = os.getenv("BAICHUN_MODEL_NAME")
+        self.bc_model_name = os.getenv("BAICHUN_MODEL_NAME", "Baichuan2-Turbo-192k")
         if self.bc_proxy_api_key and self.bc_proxy_api_secret:
             os.environ["bc_proxyllm_proxy_api_key"] = self.bc_proxy_api_key
-            os.environ["bc_proxyllm_proxy_api_secret"] = self.bc_proxy_api_secret
-            os.environ["bc_proxyllm_proxyllm_backend"] = self.bc_model_version
+            os.environ["bc_proxyllm_proxyllm_backend"] = self.bc_model_name
 
         # gemini proxy
         self.gemini_proxy_api_key = os.getenv("GEMINI_PROXY_API_KEY")
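In practice, the two settings above are supplied through DB-GPT's `.env` file. A minimal sketch, assuming the `bc_proxyllm` model selector implied by the `bc_proxyllm_*` keys the code writes; note that the model-name variable really is spelled `BAICHUN_MODEL_NAME` in this code, without the second "A":

    # .env (sketch; variable names taken from the diff above)
    LLM_MODEL=bc_proxyllm
    BAICHUAN_PROXY_API_KEY=your-baichuan-api-key
    BAICHUN_MODEL_NAME=Baichuan2-Turbo-192k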
77 changes: 37 additions & 40 deletions dbgpt/model/proxy/llms/baichuan.py
@@ -1,74 +1,51 @@
-import hashlib
-import json
-import time
 import requests
+import json
 from typing import List
 from dbgpt.model.proxy.llms.proxy_model import ProxyModel
 from dbgpt.core.interface.message import ModelMessage, ModelMessageRoleType
 from dbgpt.model.parameter import ProxyModelParameters
 
-BAICHUAN_DEFAULT_MODEL = "Baichuan2-53B"
+BAICHUAN_DEFAULT_MODEL = "Baichuan2-Turbo-192k"
 
-
-def _calculate_md5(text: str) -> str:
-    """Calculate md5"""
-    md5 = hashlib.md5()
-    md5.update(text.encode("utf-8"))
-    encrypted = md5.hexdigest()
-    return encrypted
-
-
-def _sign(data: dict, secret_key: str, timestamp: str):
-    data_str = json.dumps(data)
-    signature = _calculate_md5(secret_key + data_str + timestamp)
-    return signature
-
-
-def baichuan_generate_stream(
-    model: ProxyModel, tokenizer, params, device, context_len=4096
-):
+def baichuan_generate_stream(model: ProxyModel, tokenizer=None, params=None, device=None, context_len=4096):
+    url = "https://api.baichuan-ai.com/v1/chat/completions"
+
     model_params = model.get_params()
-    url = "https://api.baichuan-ai.com/v1/stream/chat"
-
     model_name = model_params.proxyllm_backend or BAICHUAN_DEFAULT_MODEL
     proxy_api_key = model_params.proxy_api_key
-    proxy_api_secret = model_params.proxy_api_secret
 
     history = []
     messages: List[ModelMessage] = params["messages"]
 
     # Add history conversation
     for message in messages:
         if message.role == ModelMessageRoleType.HUMAN:
             history.append({"role": "user", "content": message.content})
         elif message.role == ModelMessageRoleType.SYSTEM:
-            history.append({"role": "system", "content": message.content})
+            # As of today, system message is not supported.
+            history.append({"role": "user", "content": message.content})
         elif message.role == ModelMessageRoleType.AI:
-            history.append({"role": "assistant", "content": "message.content"})
+            history.append({"role": "assistant", "content": message.content})
         else:
             pass
 
     payload = {
         "model": model_name,
         "messages": history,
-        "parameters": {
-            "temperature": params.get("temperature"),
-            "top_k": params.get("top_k", 10),
-        },
+        "temperature": params.get("temperature", 0.3),
+        "top_k": params.get("top_k", 5),
+        "top_p": params.get("top_p", 0.85),
+        "stream": True,
     }
 
-    timestamp = int(time.time())
-    _signature = _sign(payload, proxy_api_secret, str(timestamp))
-
     headers = {
         "Content-Type": "application/json",
         "Authorization": "Bearer " + proxy_api_key,
-        "X-BC-Request-Id": params.get("request_id") or "dbgpt",
-        "X-BC-Timestamp": str(timestamp),
-        "X-BC-Signature": _signature,
-        "X-BC-Sign-Algo": "MD5",
     }
 
-    res = requests.post(url=url, json=payload, headers=headers, stream=True)
-    print(f"Send request to {url} with real model {model_name}")
+    print(f"Sending request to {url} with model {model_name}")
+    res = requests.post(url=url, json=payload, headers=headers)
 
     text = ""
     for line in res.iter_lines():
@@ -81,7 +58,27 @@ def baichuan_generate_stream(
                 decoded_line = json_data.decode("utf-8")
                 if decoded_line.lower() != "[DONE]".lower():
                     obj = json.loads(json_data)
-                    if obj["data"]["messages"][0].get("content") is not None:
-                        content = obj["data"]["messages"][0].get("content")
+                    if obj["choices"][0]["delta"].get("content") is not None:
+                        content = obj["choices"][0]["delta"].get("content")
                         text += content
                         yield text
+
+
+def main():
+    model_params = ProxyModelParameters(
+        model_name="not-used",
+        model_path="not-used",
+        proxy_server_url="not-used",
+        proxy_api_key="YOUR_BAICHUAN_API_KEY",
+        proxyllm_backend="Baichuan2-Turbo-192k"
+    )
+    final_text = ""
+    for part in baichuan_generate_stream(
+        model=ProxyModel(model_params=model_params),
+        params={"messages": [ModelMessage(
+            role=ModelMessageRoleType.HUMAN,
+            content="背诵《论语》第一章")]}):
+        final_text = part
+    print(final_text)
+
+
+if __name__ == "__main__":
+    main()
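The rewrite above replaces Baichuan's old signed endpoint (`/v1/stream/chat`, with MD5-signed payloads and `X-BC-*` headers) by the OpenAI-compatible `/v1/chat/completions` endpoint: one Bearer-authenticated POST, answered by `data: `-prefixed server-sent-event lines that each carry an incremental `choices[0].delta.content` fragment and end with a `[DONE]` sentinel. The new `main()` smoke test asks the model, in Chinese, to recite the first chapter of the Analects. A minimal standalone sketch of that protocol, independent of DB-GPT (endpoint, payload fields, and response shape are taken from the diff; `YOUR_API_KEY` is a placeholder):

    import json
    import requests

    url = "https://api.baichuan-ai.com/v1/chat/completions"
    headers = {
        "Content-Type": "application/json",
        "Authorization": "Bearer YOUR_API_KEY",  # placeholder API key
    }
    payload = {
        "model": "Baichuan2-Turbo-192k",
        "messages": [{"role": "user", "content": "Hello"}],
        "stream": True,
    }

    # stream=True makes requests yield the SSE lines as they arrive
    res = requests.post(url, json=payload, headers=headers, stream=True)
    for line in res.iter_lines():
        if not line or not line.startswith(b"data: "):
            continue  # skip keep-alive blanks and non-data lines
        chunk = line[len(b"data: "):]
        if chunk.strip() == b"[DONE]":
            break  # end-of-stream sentinel
        obj = json.loads(chunk)
        delta = obj["choices"][0]["delta"].get("content")
        if delta is not None:
            print(delta, end="", flush=True)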
1 change: 1 addition & 0 deletions docs/docs/modules/smmf.md
@@ -67,6 +67,7 @@ In DB-GPT, seamless support for FastChat, vLLM and llama.cpp is directly provided
 
 #### Proxy Models
 - [OpenAI·ChatGPT](https://api.openai.com/)
+- [百川·Baichuan](https://platform.baichuan-ai.com/)
 - [Alibaba·通义](https://www.aliyun.com/product/dashscope)
 - [Google·Bard](https://bard.google.com/)
 - [Baidu·文心](https://cloud.baidu.com/product/wenxinworkshop?track=dingbutonglan)
