
Commit

Merge pull request #1410 from binary-husky/frontier
fix spark image understanding api
binary-husky committed Dec 23, 2023
2 parents bb431db + e3e9921 commit 6ca0dd2
Showing 4 changed files with 13 additions and 9 deletions.
2 changes: 2 additions & 0 deletions crazy_functions/crazy_utils.py
@@ -139,6 +139,8 @@ def can_multi_process(llm):
     if llm.startswith('gpt-'): return True
     if llm.startswith('api2d-'): return True
     if llm.startswith('azure-'): return True
+    if llm.startswith('spark'): return True
+    if llm.startswith('zhipuai'): return True
     return False
 
 def request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency(
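The hunk above adds the 'spark' and 'zhipuai' prefixes to the set of models that can_multi_process treats as safe for concurrent requests. A minimal usage sketch, assuming a hypothetical caller-side dispatcher (the ask callable and the worker count are illustrative, not from the repository):

# Hypothetical sketch: how a caller might branch on can_multi_process().
# The `ask` callable and the worker count are illustrative assumptions.
from concurrent.futures import ThreadPoolExecutor

def can_multi_process(llm: str) -> bool:
    # Mirrors the patched predicate: these model families tolerate parallel requests.
    return llm.startswith(('gpt-', 'api2d-', 'azure-', 'spark', 'zhipuai'))

def run_batch(prompts, llm_model, ask):
    # Fan out only when the backend is known to handle concurrent requests.
    if can_multi_process(llm_model):
        with ThreadPoolExecutor(max_workers=4) as pool:
            return list(pool.map(ask, prompts))
    return [ask(p) for p in prompts]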
4 changes: 2 additions & 2 deletions request_llms/bridge_spark.py
@@ -26,7 +26,7 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="",
 
     from .com_sparkapi import SparkRequestInstance
     sri = SparkRequestInstance()
-    for response in sri.generate(inputs, llm_kwargs, history, sys_prompt):
+    for response in sri.generate(inputs, llm_kwargs, history, sys_prompt, use_image_api=False):
         if len(observe_window) >= 1:
             observe_window[0] = response
         if len(observe_window) >= 2:
@@ -52,7 +52,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     # 开始接收回复 (start receiving the response)
     from .com_sparkapi import SparkRequestInstance
     sri = SparkRequestInstance()
-    for response in sri.generate(inputs, llm_kwargs, history, system_prompt):
+    for response in sri.generate(inputs, llm_kwargs, history, system_prompt, use_image_api=True):
         chatbot[-1] = (inputs, response)
         yield from update_ui(chatbot=chatbot, history=history)
 
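Both entry points in bridge_spark.py now pass the new keyword explicitly: predict_no_ui_long_connection opts out of the image API, while the interactive predict path opts in. An illustrative sketch of that split, using a stand-in class rather than the real SparkRequestInstance (which needs iFlytek credentials and network access):

# Illustrative only: FakeSpark stands in for SparkRequestInstance so the effect
# of use_image_api can be shown without credentials or a live connection.
class FakeSpark:
    def generate(self, inputs, llm_kwargs, history, sys_prompt, use_image_api=False):
        has_upload = bool(llm_kwargs.get('most_recent_uploaded'))
        endpoint = 'image' if (use_image_api and has_upload) else 'text'
        yield f"[{endpoint} endpoint] {inputs}"

sri = FakeSpark()
llm_kwargs = {'llm_model': 'sparkv3', 'most_recent_uploaded': {'path': '/tmp/imgs'}}

# Background path (predict_no_ui_long_connection): never the image API.
print(next(sri.generate("hello", llm_kwargs, [], "", use_image_api=False)))
# Interactive chat path (predict): may use the image API when pictures were uploaded.
print(next(sri.generate("describe the picture", llm_kwargs, [], "", use_image_api=True)))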
12 changes: 7 additions & 5 deletions request_llms/com_sparkapi.py
@@ -72,12 +72,12 @@ def __init__(self):
 
         self.result_buf = ""
 
-    def generate(self, inputs, llm_kwargs, history, system_prompt):
+    def generate(self, inputs, llm_kwargs, history, system_prompt, use_image_api=False):
         llm_kwargs = llm_kwargs
         history = history
         system_prompt = system_prompt
         import _thread as thread
-        thread.start_new_thread(self.create_blocking_request, (inputs, llm_kwargs, history, system_prompt))
+        thread.start_new_thread(self.create_blocking_request, (inputs, llm_kwargs, history, system_prompt, use_image_api))
         while True:
             self.time_to_yield_event.wait(timeout=1)
             if self.time_to_yield_event.is_set():
@@ -86,18 +86,20 @@ def generate(self, inputs, llm_kwargs, history, system_prompt):
                 return self.result_buf
 
 
-    def create_blocking_request(self, inputs, llm_kwargs, history, system_prompt):
+    def create_blocking_request(self, inputs, llm_kwargs, history, system_prompt, use_image_api):
         if llm_kwargs['llm_model'] == 'sparkv2':
             gpt_url = self.gpt_url_v2
         elif llm_kwargs['llm_model'] == 'sparkv3':
             gpt_url = self.gpt_url_v3
         else:
             gpt_url = self.gpt_url
         file_manifest = []
-        if llm_kwargs.get('most_recent_uploaded'):
+        if use_image_api and llm_kwargs.get('most_recent_uploaded'):
             if llm_kwargs['most_recent_uploaded'].get('path'):
                 file_manifest = get_pictures_list(llm_kwargs['most_recent_uploaded']['path'])
-                gpt_url = self.gpt_url_img
+        if len(file_manifest) > 0:
+            print('正在使用讯飞图片理解API')  # "using the iFlytek image understanding API"
+            gpt_url = self.gpt_url_img
         wsParam = Ws_Param(self.appid, self.api_key, self.api_secret, gpt_url)
         websocket.enableTrace(False)
         wsUrl = wsParam.create_url()
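The substance of the fix is in create_blocking_request: previously any recent upload switched the request to the image endpoint, whereas now the image URL is chosen only when the caller opted in via use_image_api and at least one picture was actually found. A minimal sketch of that selection logic under stated assumptions (the URL values and the list_pictures helper below are placeholders, not the project's real constants):

# Minimal sketch of the endpoint selection after the fix; URL values and the
# list_pictures helper are placeholders, not the project's real constants.
TEXT_URLS = {'sparkv2': 'URL_V2', 'sparkv3': 'URL_V3'}
DEFAULT_URL = 'URL_V1'
IMAGE_URL = 'URL_IMG'

def pick_spark_endpoint(llm_model, use_image_api, upload_path, list_pictures):
    gpt_url = TEXT_URLS.get(llm_model, DEFAULT_URL)
    file_manifest = []
    # Only look for pictures when the caller explicitly allows the image API.
    if use_image_api and upload_path:
        file_manifest = list_pictures(upload_path)
    # Switch endpoints only if pictures were actually found.
    if file_manifest:
        gpt_url = IMAGE_URL
    return gpt_url, file_manifest

# A text-only request keeps the chat endpoint even if an upload directory exists:
print(pick_spark_endpoint('sparkv3', False, '/tmp/upload', lambda p: ['a.png']))  # ('URL_V3', [])
print(pick_spark_endpoint('sparkv3', True, '/tmp/upload', lambda p: ['a.png']))   # ('URL_IMG', ['a.png'])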
4 changes: 2 additions & 2 deletions themes/green.css
@@ -256,13 +256,13 @@ textarea.svelte-1pie7s6 {
     max-height: 95% !important;
     overflow-y: auto !important;
 }*/
-.app.svelte-1mya07g.svelte-1mya07g {
+/* .app.svelte-1mya07g.svelte-1mya07g {
     max-width: 100%;
     position: relative;
     padding: var(--size-4);
     width: 100%;
     height: 100%;
-}
+} */
 
 .gradio-container-3-32-2 h1 {
     font-weight: 700 !important;
