From 81a6b8c1509a6db2a7e40b09302f84da223fe456 Mon Sep 17 00:00:00 2001 From: Xie Yanbo Date: Wed, 6 Nov 2024 16:22:01 +0800 Subject: [PATCH] support xAI --- README-CN.md | 1 + README.md | 1 + book_maker/cli.py | 10 ++++++++++ book_maker/translator/__init__.py | 2 ++ book_maker/translator/xai_translator.py | 20 ++++++++++++++++++++ 5 files changed, 34 insertions(+) create mode 100644 book_maker/translator/xai_translator.py diff --git a/README-CN.md b/README-CN.md index 4df4cec4..214be601 100644 --- a/README-CN.md +++ b/README-CN.md @@ -27,6 +27,7 @@ bilingual_book_maker 是一个 AI 翻译工具,使用 ChatGPT 帮助用户制 - 可以使用彩云进行翻译 `--model caiyun --caiyun_key ${caiyun_key}` - 可以使用 Gemini 进行翻译 `--model gemini --gemini_key ${gemini_key}` - 可以使用腾讯交互翻译(免费)进行翻译`--model tencentransmart` +- 可以使用[xAI](https://x.ai)进行翻译`--model xai --xai_key ${xai_key}` - 可以使用 [Ollama](https://github.com/ollama/ollama) 自托管模型进行翻译,使用 `--ollama_model ${ollama_model_name}` - 如果 ollama server 不运行在本地,使用 `--api_base http://x.x.x.x:port/v1` 指向 ollama server 地址 - 使用 `--test` 命令如果大家没付费可以加上这个先看看效果(有 limit 稍微有些慢) diff --git a/README.md b/README.md index e5d6dc32..4f977a11 100644 --- a/README.md +++ b/README.md @@ -34,6 +34,7 @@ Find more info here for using liteLLM: https://github.com/BerriAI/litellm/blob/m - If you want to use a specific model alias with Gemini (eg `gemini-1.5-flash-002` or `gemini-1.5-flash-8b-exp-0924`), you can use `--model gemini --model_list gemini-1.5-flash-002,gemini-1.5-flash-8b-exp-0924`. `--model_list` takes a comma-separated list of model aliases. 
- Support [Claude](https://console.anthropic.com/docs) model, use `--model claude --claude_key ${claude_key}` - Support [Tencent TranSmart](https://transmart.qq.com) model (Free), use `--model tencentransmart` +- Support [xAI](https://x.ai) model, use `--model xai --xai_key ${xai_key}` - Support [Ollama](https://github.com/ollama/ollama) self-host models, use `--ollama_model ${ollama_model_name}` - If ollama server is not running on localhost, use `--api_base http://x.x.x.x:port/v1` to point to the ollama server address - Use `--test` option to preview the result if you haven't paid for the service. Note that there is a limit and it may take some time. diff --git a/book_maker/cli.py b/book_maker/cli.py index 5404b5f3..10f3b57b 100644 --- a/book_maker/cli.py +++ b/book_maker/cli.py @@ -122,6 +122,14 @@ def main(): help="You can get Groq Key from https://console.groq.com/keys", ) + # for xAI + parser.add_argument( + "--xai_key", + dest="xai_key", + type=str, + help="You can get xAI Key from https://console.x.ai/", + ) + parser.add_argument( "--test", dest="test", @@ -376,6 +384,8 @@ def main(): API_KEY = options.gemini_key or env.get("BBM_GOOGLE_GEMINI_KEY") elif options.model == "groq": API_KEY = options.groq_key or env.get("BBM_GROQ_API_KEY") + elif options.model == "xai": + API_KEY = options.xai_key or env.get("BBM_XAI_API_KEY") else: API_KEY = "" diff --git a/book_maker/translator/__init__.py b/book_maker/translator/__init__.py index 3044027e..003dbb8e 100644 --- a/book_maker/translator/__init__.py +++ b/book_maker/translator/__init__.py @@ -8,6 +8,7 @@ from book_maker.translator.groq_translator import GroqClient from book_maker.translator.tencent_transmart_translator import TencentTranSmart from book_maker.translator.custom_api_translator import CustomAPI +from book_maker.translator.xai_translator import XAIClient MODEL_DICT = { "openai": ChatGPTAPI, @@ -25,5 +26,6 @@ "groq": GroqClient, "tencentransmart": TencentTranSmart, "customapi": CustomAPI, + "xai": 
XAIClient, # add more here } diff --git a/book_maker/translator/xai_translator.py b/book_maker/translator/xai_translator.py new file mode 100644 index 00000000..faa8332f --- /dev/null +++ b/book_maker/translator/xai_translator.py @@ -0,0 +1,18 @@ +from openai import OpenAI +from .chatgptapi_translator import ChatGPTAPI + + +XAI_MODEL_LIST = [ + "grok-beta", +] + + +class XAIClient(ChatGPTAPI): + def __init__(self, key, language, api_base=None, **kwargs) -> None: + super().__init__(key, language) + self.model_list = XAI_MODEL_LIST + self.api_url = str(api_base) if api_base else "https://api.x.ai/v1" + self.openai_client = OpenAI(api_key=key, base_url=self.api_url) + + def rotate_model(self): + self.model = self.model_list[0]