diff --git a/README-CN.md b/README-CN.md
index a24b591..b1e4468 100644
--- a/README-CN.md
+++ b/README-CN.md
@@ -21,6 +21,7 @@ bilingual_book_maker 是一个 AI 翻译工具,使用 ChatGPT 帮助用户制
 - 本地放了一个 `test_books/animal_farm.epub` 给大家测试
 - 默认用了 [GPT-3.5-turbo](https://openai.com/blog/introducing-chatgpt-and-whisper-apis) 模型,也就是 ChatGPT 正在使用的模型,用 `--model gpt3` 来使用 gpt3 模型
 - 可以使用 DeepL 封装的 api 进行翻译,需要付费,[DeepL Translator](https://rapidapi.com/splintPRO/api/deepl-translator) 来获得 token `--model deepl --deepl_key ${deepl_key}`
+- 可以使用 [Claude](https://console.anthropic.com/docs) 模型进行翻译 `--model claude --claude_key ${claude_key}`
 - 可以使用 google 来翻译 `--model google`
 - 可用使用彩云进行翻译 `--model caiyun --caiyun_key ${caiyun_key}`
 - 使用 `--test` 命令如果大家没付费可以加上这个先看看效果(有 limit 稍微有些慢)
@@ -62,8 +63,10 @@ export OPENAI_API_KEY=${your_api_key}
 python3 make_book.py --book_name test_books/animal_farm.epub --model gpt3 --language ja

 # Use the DeepL model with Japanese
-python3 make_book.py --book_name test_books/animal_farm.epub --model deepl --deepl_token ${deepl_key}--language ja
+python3 make_book.py --book_name test_books/animal_farm.epub --model deepl --deepl_key ${deepl_key} --language ja
+# Use the Claude model with Japanese
+python3 make_book.py --book_name test_books/animal_farm.epub --model claude --claude_key ${claude_key} --language ja

 # Translate contents in <div> and <p> tags
 python3 make_book.py --book_name test_books/animal_farm.epub --translate-tags div,p

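The Chinese README above and the English README below document the same new flags. A quick way to exercise the Claude backend on the bundled sample book is to combine it with the `--test` option mentioned in the feature list; this is a sketch reusing the README's own example values:

```sh
# Preview a few translated paragraphs with Claude before running a full (paid) translation.
python3 make_book.py --book_name test_books/animal_farm.epub --model claude --claude_key ${claude_key} --test --language ja
```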
diff --git a/README.md b/README.md
index 330a5b3..e5d2f03 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,8 @@ The bilingual_book_maker is an AI translation tool that uses ChatGPT to assist u

   Or, just set environment variable `BMM_OPENAI_API_KEY` instead.
 - A sample book, `test_books/animal_farm.epub`, is provided for testing purposes.
 - The default underlying model is [GPT-3.5-turbo](https://openai.com/blog/introducing-chatgpt-and-whisper-apis), which is used by ChatGPT currently. Use `--model gpt3` to change the underlying model to `GPT3`
-5. support DeepL model [DeepL Translator](https://rapidapi.com/splintPRO/api/deepl-translator) need pay to get the token use `--model deepl --deepl_key ${deepl_key}`
+- Support the DeepL model via [DeepL Translator](https://rapidapi.com/splintPRO/api/deepl-translator); you need to pay to get a token. Use `--model deepl --deepl_key ${deepl_key}`
+- Support the [Claude](https://console.anthropic.com/docs) model; use `--model claude --claude_key ${claude_key}`
 - Use `--test` option to preview the result if you haven't paid for the service. Note that there is a limit and it may take some time.
 - Set the target language like `--language "Simplified Chinese"`. Default target language is `"Simplified Chinese"`. Read available languages by helper message: `python make_book.py --help`
@@ -70,8 +71,10 @@ export OPENAI_API_KEY=${your_api_key}
 python3 make_book.py --book_name test_books/animal_farm.epub --model gpt3 --language ja

 # Use the DeepL model with Japanese
-python3 make_book.py --book_name test_books/animal_farm.epub --model deepl --deepl_token ${deepl_token}--language ja
+python3 make_book.py --book_name test_books/animal_farm.epub --model deepl --deepl_key ${deepl_key} --language ja
+# Use the Claude model with Japanese
+python3 make_book.py --book_name test_books/animal_farm.epub --model claude --claude_key ${claude_key} --language ja

 # Translate contents in <div> and <p> tags
 python3 make_book.py --book_name test_books/animal_farm.epub --translate-tags div,p

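As the `cli.py` hunk below shows, the Claude key can also come from the `BBM_CLAUDE_API_KEY` environment variable instead of the `--claude_key` flag. A minimal sketch of that usage, reusing the README's example book and language:

```sh
# Equivalent to passing --claude_key; cli.py falls back to BBM_CLAUDE_API_KEY when the flag is absent.
export BBM_CLAUDE_API_KEY=${claude_key}
python3 make_book.py --book_name test_books/animal_farm.epub --model claude --language ja
```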
diff --git a/book_maker/cli.py b/book_maker/cli.py
index 07bdf47..ed43488 100644
--- a/book_maker/cli.py
+++ b/book_maker/cli.py
@@ -92,6 +92,12 @@ def main():
         type=str,
         help="you can apply deepl key from here (https://rapidapi.com/splintPRO/api/deepl-translator",
     )
+    parser.add_argument(
+        "--claude_key",
+        dest="claude_key",
+        type=str,
+        help="you can find claude key from here (https://console.anthropic.com/account/keys)",
+    )

     parser.add_argument(
         "--test",
@@ -273,6 +279,10 @@ So you are close to reaching the limit. You have to choose your own value, there
         API_KEY = options.deepl_key or env.get("BBM_DEEPL_API_KEY")
         if not API_KEY:
             raise Exception("Please provid deepl key")
+    elif options.model == "claude":
+        API_KEY = options.claude_key or env.get("BBM_CLAUDE_API_KEY")
+        if not API_KEY:
+            raise Exception("Please provide claude key")
     else:
         API_KEY = ""

diff --git a/book_maker/translator/__init__.py b/book_maker/translator/__init__.py
index 9c76ea7..34f2926 100644
--- a/book_maker/translator/__init__.py
+++ b/book_maker/translator/__init__.py
@@ -3,6 +3,7 @@ from book_maker.translator.chatgptapi_translator import ChatGPTAPI
 from book_maker.translator.deepl_translator import DeepL
 from book_maker.translator.google_translator import Google
 from book_maker.translator.gpt3_translator import GPT3
+from book_maker.translator.claude_translator import Claude

 MODEL_DICT = {
     "chatgptapi": ChatGPTAPI,
@@ -10,5 +11,6 @@ MODEL_DICT = {
     "google": Google,
     "caiyun": Caiyun,
     "deepl": DeepL,
+    "claude": Claude,
     # add more here
 }
diff --git a/book_maker/translator/claude_translator.py b/book_maker/translator/claude_translator.py
new file mode 100644
index 0000000..958536c
--- /dev/null
+++ b/book_maker/translator/claude_translator.py
@@ -0,0 +1,52 @@
+import re
+import requests
+from rich import print
+
+from .base_translator import Base
+
+
+class Claude(Base):
+    def __init__(
+        self, key, language, api_base=None, prompt_template=None, **kwargs
+    ) -> None:
+        super().__init__(key, language)
+        self.api_url = (
+            f"{api_base}v1/complete"
+            if api_base
+            else "https://api.anthropic.com/v1/complete"
+        )
+        self.headers = {
+            "Content-Type": "application/json",
+            "x-api-key": key,
+        }
+        self.data = {
+            "prompt": "",
+            "model": "claude-v1.3",
+            "max_tokens_to_sample": 1024,
+            "temperature": 1,
+            "stop_sequences": ["\n\nHuman:"],
+        }
+        self.session = requests.session()
+        self.language = language
+        self.prompt_template = (
+            prompt_template
+            or "\n\nHuman: Help me translate the text within triple backticks into {language} and provide only the translated result.\n```{text}```\n\nAssistant: "
+        )
+
+    def rotate_key(self):
+        pass
+
+    def translate(self, text):
+        print(text)
+        self.rotate_key()
+        self.data["prompt"] = self.prompt_template.format(
+            text=text,
+            language=self.language,
+        )
+        r = self.session.post(self.api_url, headers=self.headers, json=self.data)
+        if not r.ok:
+            return text
+        t_text = r.json().get("completion", text).strip()
+
+        print("[bold green]" + re.sub("\n{3,}", "\n\n", t_text) + "[/bold green]")
+        return t_text
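For reference, the request that the new `claude_translator.py` builds is roughly equivalent to the following `curl` call. This is a sketch only: `$CLAUDE_KEY` is a placeholder and the prompt text is shortened, while the endpoint, headers, and body fields mirror `self.api_url`, `self.headers`, and `self.data` above.

```sh
# Sketch of the HTTP request Claude.translate() sends ($CLAUDE_KEY is a placeholder).
curl https://api.anthropic.com/v1/complete \
  -H "Content-Type: application/json" \
  -H "x-api-key: $CLAUDE_KEY" \
  -d '{
    "prompt": "\n\nHuman: Help me translate the text within triple backticks into Simplified Chinese and provide only the translated result.\n```Hello```\n\nAssistant: ",
    "model": "claude-v1.3",
    "max_tokens_to_sample": 1024,
    "temperature": 1,
    "stop_sequences": ["\n\nHuman:"]
  }'
# The translator reads the "completion" field of the JSON response and prints it.
```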