From 08e184ea559ca75c5cd98fab579328fca9c4170c Mon Sep 17 00:00:00 2001
From: 505030475 <505030475@qq.com>
Date: Sat, 13 May 2023 00:28:29 +0800
Subject: [PATCH] Add image generation interface plugin
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 crazy_functional.py                           | 10 +++
 ...76\347\211\207\347\224\237\346\210\220.py" | 64 ++++++++++++++++++
 2 files changed, 74 insertions(+)
 create mode 100644 "crazy_functions/\345\233\276\347\211\207\347\224\237\346\210\220.py"

diff --git a/crazy_functional.py b/crazy_functional.py
index 23cbd30ee..3e7b12f15 100644
--- a/crazy_functional.py
+++ b/crazy_functional.py
@@ -236,5 +236,15 @@ def get_crazy_functions():
             "Function": HotReload(同时问询_指定模型)
         },
     })
+    from crazy_functions.图片生成 import 图片生成
+    function_plugins.update({
+        "图片生成(先切换模型到openai或api2d)": {
+            "Color": "stop",
+            "AsButton": False,
+            "AdvancedArgs": True,   # Show the advanced-argument input area when invoked (default False)
+            "ArgsReminder": "在这里输入分辨率, 如256x256(默认)",  # Hint text shown in the advanced-argument input area
+            "Function": HotReload(图片生成)
+        },
+    })
     ###################### 第n组插件 ###########################
     return function_plugins
diff --git "a/crazy_functions/\345\233\276\347\211\207\347\224\237\346\210\220.py" "b/crazy_functions/\345\233\276\347\211\207\347\224\237\346\210\220.py"
new file mode 100644
index 000000000..ae832c597
--- /dev/null
+++ "b/crazy_functions/\345\233\276\347\211\207\347\224\237\346\210\220.py"
@@ -0,0 +1,64 @@
+from toolbox import CatchException, update_ui, get_conf, select_api_key
+from .crazy_utils import request_gpt_model_in_new_thread_with_ui_alive
+import datetime
+
+
+def gen_image(llm_kwargs, prompt, resolution="256x256"):
+    import requests, json, time, os
+    from request_llm.bridge_all import model_info
+
+    proxies, = get_conf('proxies')
+    # Set up the OpenAI API key and model
+    api_key = select_api_key(llm_kwargs['api_key'], llm_kwargs['llm_model'])
+    chat_endpoint = model_info[llm_kwargs['llm_model']]['endpoint']
+    # e.g. 'https://api.openai.com/v1/chat/completions'
+    img_endpoint = chat_endpoint.replace('chat/completions', 'images/generations')
+    # Generate the image
+    url = img_endpoint
+    headers = {
+        'Authorization': f"Bearer {api_key}",
+        'Content-Type': 'application/json'
+    }
+    data = {
+        'prompt': prompt,
+        'n': 1,
+        'size': resolution,   # image size requested by the caller (default 256x256)
+        'response_format': 'url'
+    }
+    response = requests.post(url, headers=headers, json=data, proxies=proxies)
+    print(response.content)
+    image_url = json.loads(response.content.decode('utf8'))['data'][0]['url']
+
+    # Save the generated image locally
+    r = requests.get(image_url, proxies=proxies)
+    file_path = 'gpt_log/image_gen/'
+    os.makedirs(file_path, exist_ok=True)
+    file_name = 'Image' + time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) + '.png'
+    with open(file_path+file_name, 'wb+') as f: f.write(r.content)
+
+
+    return image_url, file_path+file_name
+
+
+
+@CatchException
+def 图片生成(prompt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
+    """
+    prompt          Text the user typed into the input field, e.g. a passage to translate or a path to files to process
+    llm_kwargs      GPT model parameters such as temperature and top_p, usually passed through unchanged
+    plugin_kwargs   Plugin-specific parameters (carries the advanced argument here)
+    chatbot         Handle to the chat display box, used to show output to the user
+    history         Chat history, i.e. the preceding context
+    system_prompt   Silent system prompt passed to GPT
+    web_port        Port the application is currently running on
+    """
+    history = []    # Clear history to avoid overflowing the input
+    chatbot.append(("这是什么功能?", "[Local Message] 生成图像, 请先把模型切换至gpt-xxxx或者api2d-xxxx。如果中文效果不理想, 尝试Prompt。正在处理中 ....."))
+    yield from update_ui(chatbot=chatbot, history=history)   # Refresh the UI promptly, since the image request will take a while
+    resolution = plugin_kwargs.get("advanced_arg", '256x256')
+    image_url, image_path = gen_image(llm_kwargs, prompt, resolution)
+    chatbot.append([prompt,
+        f'`{image_url}`\n\n'+
+        f'<br/><div align="center"><img src="file={image_path}"></div>'
+    ])
+    yield from update_ui(chatbot=chatbot, history=history)   # Refresh the UI to show the result
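
Below is a minimal standalone sketch of the same images/generations request that gen_image() issues, stripped of the toolbox/proxy plumbing. The endpoint URL, the OPENAI_API_KEY environment variable, and the generate_image name are illustrative assumptions, not taken from the patch.

    import os, requests

    def generate_image(prompt, resolution="256x256",
                       endpoint="https://api.openai.com/v1/images/generations"):
        # One image, returned as a URL, mirroring the request body built in gen_image().
        headers = {
            "Authorization": f"Bearer {os.environ['OPENAI_API_KEY']}",  # assumed env var, not from the patch
            "Content-Type": "application/json",
        }
        data = {"prompt": prompt, "n": 1, "size": resolution, "response_format": "url"}
        response = requests.post(endpoint, headers=headers, json=data, timeout=60)
        response.raise_for_status()
        return response.json()["data"][0]["url"]

    if __name__ == "__main__":
        # Example call; use a resolution the API supports (256x256, 512x512, 1024x1024).
        print(generate_image("a watercolor lighthouse at dawn", resolution="512x512"))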