support xAI (#430)

Mirror of https://github.com/yihong0618/bilingual_book_maker.git (synced 2025-06-05 19:15:34 +00:00)
Commit 78fc7985d5 (parent 9261d92e20)
@@ -27,6 +27,7 @@ bilingual_book_maker 是一个 AI 翻译工具,使用 ChatGPT 帮助用户制
 - 可以使用彩云进行翻译 `--model caiyun --caiyun_key ${caiyun_key}`
 - 可以使用 Gemini 进行翻译 `--model gemini --gemini_key ${gemini_key}`
 - 可以使用腾讯交互翻译(免费)进行翻译`--model tencentransmart`
+- 可以使用[xAI](https://x.ai)进行翻译`--model xai --xai_key ${xai_key}`
 - 可以使用 [Ollama](https://github.com/ollama/ollama) 自托管模型进行翻译,使用 `--ollama_model ${ollama_model_name}`
 - 如果 ollama server 不运行在本地,使用 `--api_base http://x.x.x.x:port/v1` 指向 ollama server 地址
 - 使用 `--test` 命令如果大家没付费可以加上这个先看看效果(有 limit 稍微有些慢)
@@ -34,6 +34,7 @@ Find more info here for using liteLLM: https://github.com/BerriAI/litellm/blob/m
 - If you want to use a specific model alias with Gemini (eg `gemini-1.5-flash-002` or `gemini-1.5-flash-8b-exp-0924`), you can use `--model gemini --model_list gemini-1.5-flash-002,gemini-1.5-flash-8b-exp-0924`. `--model_list` takes a comma-separated list of model aliases.
 - Support [Claude](https://console.anthropic.com/docs) model, use `--model claude --claude_key ${claude_key}`
 - Support [Tencent TranSmart](https://transmart.qq.com) model (Free), use `--model tencentransmart`
+- Support [xAI](https://x.ai) model, use `--model xai --xai_key ${xai_key}`
 - Support [Ollama](https://github.com/ollama/ollama) self-host models, use `--ollama_model ${ollama_model_name}`
 - If ollama server is not running on localhost, use `--api_base http://x.x.x.x:port/v1` to point to the ollama server address
 - Use `--test` option to preview the result if you haven't paid for the service. Note that there is a limit and it may take some time.
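As a quick way to try the newly documented option, here is a minimal sketch of the invocation described in the README lines above; the entry-point name and the book path are assumptions about a local checkout, while the flags themselves (`--model xai`, `--xai_key`, `--test`) come straight from the list.

```python
# Minimal sketch of the README invocation above; make_book.py and the EPUB path are
# assumptions about a local checkout, the flags are the ones documented in this hunk.
import subprocess
import sys

cmd = [
    sys.executable,
    "make_book.py",                                  # assumed repo entry point
    "--book_name", "test_books/animal_farm.epub",    # illustrative input book
    "--model", "xai",
    "--xai_key", "YOUR_XAI_KEY",                     # or export BBM_XAI_API_KEY (see cli.py below)
    "--test",                                        # preview a few paragraphs before a full run
]
subprocess.run(cmd, check=True)
```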
@@ -122,6 +122,14 @@ def main():
         help="You can get Groq Key from https://console.groq.com/keys",
     )
+
+    # for xAI
+    parser.add_argument(
+        "--xai_key",
+        dest="xai_key",
+        type=str,
+        help="You can get xAI Key from https://console.x.ai/",
+    )
     parser.add_argument(
         "--test",
         dest="test",
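For clarity, the added `--xai_key` registration behaves like any other argparse string option; a self-contained sketch outside the real `main()`:

```python
# Standalone mirror of the --xai_key registration added above, runnable on its own.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--xai_key",
    dest="xai_key",
    type=str,
    help="You can get xAI Key from https://console.x.ai/",
)

options = parser.parse_args(["--xai_key", "xai-example-key"])
print(options.xai_key)  # -> xai-example-key
```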
@@ -376,6 +384,8 @@ So you are close to reaching the limit. You have to choose your own value, there
         API_KEY = options.gemini_key or env.get("BBM_GOOGLE_GEMINI_KEY")
     elif options.model == "groq":
         API_KEY = options.groq_key or env.get("BBM_GROQ_API_KEY")
+    elif options.model == "xai":
+        API_KEY = options.xai_key or env.get("BBM_XAI_API_KEY")
     else:
         API_KEY = ""
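The branch above means the key may come from the `--xai_key` flag or, failing that, from the `BBM_XAI_API_KEY` environment variable; a small sketch of that precedence with a stand-in for the parsed options:

```python
# Sketch of the key-resolution precedence added above: the CLI flag wins,
# BBM_XAI_API_KEY is the fallback. SimpleNamespace stands in for the argparse namespace.
import os
from types import SimpleNamespace

env = os.environ
options = SimpleNamespace(xai_key=None)   # as if --xai_key was not passed

API_KEY = options.xai_key or env.get("BBM_XAI_API_KEY")
print(API_KEY or "no xAI key configured")
```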
@@ -8,6 +8,7 @@ from book_maker.translator.gemini_translator import Gemini
 from book_maker.translator.groq_translator import GroqClient
 from book_maker.translator.tencent_transmart_translator import TencentTranSmart
 from book_maker.translator.custom_api_translator import CustomAPI
+from book_maker.translator.xai_translator import XAIClient

 MODEL_DICT = {
     "openai": ChatGPTAPI,
@@ -25,5 +26,6 @@ MODEL_DICT = {
     "groq": GroqClient,
     "tencentransmart": TencentTranSmart,
     "customapi": CustomAPI,
+    "xai": XAIClient,
     # add more here
 }
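Registering `"xai"` in `MODEL_DICT` is what makes `--model xai` resolve to the new client. Roughly how the registry is consumed (the real call site in `cli.py` passes more options, so treat this as a sketch):

```python
# Hedged sketch of the MODEL_DICT lookup; the actual cli.py call site passes more arguments.
from book_maker.translator.xai_translator import XAIClient

MODEL_DICT = {"xai": XAIClient}                    # trimmed to the entry added in this commit

translate_model = MODEL_DICT.get("xai")            # -> XAIClient
if translate_model is None:
    raise SystemExit("unsupported --model value")

translator = translate_model("YOUR_XAI_KEY", "Simplified Chinese")  # key + target language
```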
book_maker/translator/xai_translator.py  (new file, 20 lines)
@@ -0,0 +1,20 @@
+from openai import OpenAI
+from .chatgptapi_translator import ChatGPTAPI
+from os import linesep
+from itertools import cycle
+
+
+XAI_MODEL_LIST = [
+    "grok-beta",
+]
+
+
+class XAIClient(ChatGPTAPI):
+    def __init__(self, key, language, api_base=None, **kwargs) -> None:
+        super().__init__(key, language)
+        self.model_list = XAI_MODEL_LIST
+        self.api_url = str(api_base) if api_base else "https://api.x.ai/v1"
+        self.openai_client = OpenAI(api_key=key, base_url=self.api_url)
+
+    def rotate_model(self):
+        self.model = self.model_list[0]
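The new translator only swaps the model list, endpoint, and OpenAI-compatible client; prompting and the `translate` machinery are inherited from `ChatGPTAPI`. A hedged usage sketch, assuming the parent's `translate(text)` interface and a valid key:

```python
# Hedged usage sketch; translate() comes from ChatGPTAPI, so its exact behaviour
# (prompting, retries) follows the parent class. Key and language are placeholders.
from book_maker.translator.xai_translator import XAIClient

client = XAIClient("YOUR_XAI_KEY", "Simplified Chinese")
client.rotate_model()   # pins self.model to "grok-beta", the only entry in XAI_MODEL_LIST
print(client.translate("The quick brown fox jumps over the lazy dog."))
```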