Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(Model): unified proxyllm apisecret #862

Merged
merged 2 commits on Nov 28, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions pilot/configs/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def __init__(self) -> None:

# wenxin
self.wenxin_proxy_api_key = os.getenv("WEN_XIN_API_KEY")
self.wenxin_proxy_api_secret = os.getenv("WEN_XIN_SECRET_KEY")
self.wenxin_proxy_api_secret = os.getenv("WEN_XIN_API_SECRET")
self.wenxin_model_version = os.getenv("WEN_XIN_MODEL_VERSION")
if self.wenxin_proxy_api_key and self.wenxin_proxy_api_secret:
os.environ["wenxin_proxyllm_proxy_api_key"] = self.wenxin_proxy_api_key
Expand All @@ -84,7 +84,7 @@ def __init__(self) -> None:
os.environ["spark_proxyllm_proxy_api_key"] = self.spark_proxy_api_key
os.environ["spark_proxyllm_proxy_api_secret"] = self.spark_proxy_api_secret
os.environ["spark_proxyllm_proxyllm_backend"] = self.spark_api_version
os.environ["spark_proxyllm_proxy_app_id"] = self.spark_proxy_api_appid
os.environ["spark_proxyllm_proxy_api_app_id"] = self.spark_proxy_api_appid

# baichuan proxy
self.bc_proxy_api_key = os.getenv("BAICHUAN_PROXY_API_KEY")
Expand Down
37 changes: 3 additions & 34 deletions pilot/model/proxy/llms/wenxin.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,9 +75,8 @@ def wenxin_generate_stream(
if not model_version:
yield f"Unsupport model version {model_name}"

keys: [] = model_params.proxy_api_key.split(";")
proxy_api_key = keys[0]
proxy_api_secret = keys[1]
proxy_api_key = model_params.proxy_api_key
proxy_api_secret = model_params.proxy_api_secret
access_token = _build_access_token(proxy_api_key, proxy_api_secret)

headers = {"Content-Type": "application/json", "Accept": "application/json"}
Expand All @@ -88,37 +87,7 @@ def wenxin_generate_stream(
yield "Failed to get access token. please set the correct api_key and secret key."

messages: List[ModelMessage] = params["messages"]
# Add history conversation
# system = ""
# if len(messages) > 1 and messages[0].role == ModelMessageRoleType.SYSTEM:
# role_define = messages.pop(0)
# system = role_define.content
# else:
# message = messages.pop(0)
# if message.role == ModelMessageRoleType.HUMAN:
# history.append({"role": "user", "content": message.content})
# for message in messages:
# if message.role == ModelMessageRoleType.SYSTEM:
# history.append({"role": "user", "content": message.content})
# # elif message.role == ModelMessageRoleType.HUMAN:
# # history.append({"role": "user", "content": message.content})
# elif message.role == ModelMessageRoleType.AI:
# history.append({"role": "assistant", "content": message.content})
# else:
# pass
#
# # temp_his = history[::-1]
# temp_his = history
# last_user_input = None
# for m in temp_his:
# if m["role"] == "user":
# last_user_input = m
# break
#
# if last_user_input:
# history.remove(last_user_input)
# history.append(last_user_input)
#

history, systems = __convert_2_wenxin_messages(messages)
system = ""
if systems and len(systems) > 0:
Expand Down