feat: support deepl free

yihong0618 2023-05-14 22:35:09 +08:00
parent cd56b1e0ab
commit fe20a5c848
7 changed files with 108 additions and 2 deletions

View File

@@ -21,6 +21,7 @@ bilingual_book_maker is an AI translation tool that uses ChatGPT to help users make
- A sample `test_books/animal_farm.epub` is included in the repo for testing
- Uses the [GPT-3.5-turbo](https://openai.com/blog/introducing-chatgpt-and-whisper-apis) model by default, i.e. the model ChatGPT currently uses; use `--model gpt3` to switch to the gpt3 model
- You can translate with the wrapped DeepL API (paid); get a token from [DeepL Translator](https://rapidapi.com/splintPRO/api/deepl-translator) and use `--model deepl --deepl_key ${deepl_key}`
- You can use DeepL free with `--model deeplfree`
- You can translate with the [Claude](https://console.anthropic.com/docs) model: `--model claude --claude_key ${claude_key}`
- You can translate with Google: `--model google`
- You can translate with Caiyun: `--model caiyun --caiyun_key ${caiyun_key}`

View File

@@ -22,6 +22,7 @@ The bilingual_book_maker is an AI translation tool that uses ChatGPT to assist u
- The default underlying model is [GPT-3.5-turbo](https://openai.com/blog/introducing-chatgpt-and-whisper-apis), which is what ChatGPT currently uses. Use `--model gpt4` to change the underlying model to `GPT4` and `--model gpt3` to change it to `GPT3`.
If using `GPT4`, you can add `--use_context` to add a context paragraph to each passage sent to the model for translation (see below)
- Support for the DeepL model via [DeepL Translator](https://rapidapi.com/splintPRO/api/deepl-translator); you need to pay for a token, then use `--model deepl --deepl_key ${deepl_key}`
- Support for the DeepL free model: `--model deeplfree` (see the sketch after this list)
- The default underlying model is [GPT-3.5-turbo](https://openai.com/blog/introducing-chatgpt-and-whisper-apis), which is what ChatGPT currently uses. Use `--model gpt3` to change the underlying model to `GPT3`
- Support for the DeepL model via [DeepL Translator](https://rapidapi.com/splintPRO/api/deepl-translator); you need to pay for a token, then use `--model deepl --deepl_key ${deepl_key}`
- Support for the [Claude](https://console.anthropic.com/docs) model; use `--model claude --claude_key ${claude_key}`
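A minimal invocation sketch for the new backend, mirroring the flags used in this commit's test. The sample book path and the absence of a key flag are assumptions (the free endpoint is keyless); this is an illustration, not documentation of the CLI.

import subprocess
import sys

# Hypothetical run of the CLI with the new DeepL free backend; no --deepl_key
# is passed because the free endpoint needs no token. The book path points at
# the sample epub shipped in the repository.
subprocess.run(
    [
        sys.executable,
        "make_book.py",
        "--book_name", "test_books/animal_farm.epub",
        "--model", "deeplfree",
    ],
    check=True,
)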

View File

@@ -1,6 +1,7 @@
from book_maker.translator.caiyun_translator import Caiyun
from book_maker.translator.chatgptapi_translator import ChatGPTAPI
from book_maker.translator.deepl_translator import DeepL
from book_maker.translator.deepl_free_translator import DeepLFree
from book_maker.translator.google_translator import Google
from book_maker.translator.gpt3_translator import GPT3
from book_maker.translator.gpt4_translator import GPT4

@@ -12,6 +13,7 @@ MODEL_DICT = {
    "google": Google,
    "caiyun": Caiyun,
    "deepl": DeepL,
    "deeplfree": DeepLFree,
    "gpt4": GPT4,
    "claude": Claude,
    # add more here
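For context, a rough sketch of how a CLI might resolve the `--model` value through this MODEL_DICT registry. The import path and the helper below are assumptions for illustration, not code from this commit.

from book_maker.translator import MODEL_DICT  # assumed location of the registry


def build_translator(model_name: str, key: str, language: str):
    # Look up the translator class registered above and instantiate it with the
    # shared (key, language) constructor used by the DeepLFree class below.
    translate_model = MODEL_DICT.get(model_name)
    if translate_model is None:
        raise ValueError(f"Unsupported model: {model_name}")
    return translate_model(key, language)


# e.g. build_translator("deeplfree", "", "zh") would return a DeepLFree instance.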

View File

@@ -0,0 +1,67 @@
import time
import random
import re

from book_maker.utils import LANGUAGES, TO_LANGUAGE_CODE
from .base_translator import Base
from rich import print
from PyDeepLX import PyDeepLX


class DeepLFree(Base):
    """
    DeepL free translator
    """

    def __init__(self, key, language, **kwargs) -> None:
        super().__init__(key, language)
        # Normalize a language name to its code before checking support.
        l = language if language in LANGUAGES else TO_LANGUAGE_CODE.get(language)
        if l not in [
            "bg",
            "zh",
            "cs",
            "da",
            "nl",
            "en-US",
            "en-GB",
            "et",
            "fi",
            "fr",
            "de",
            "el",
            "hu",
            "id",
            "it",
            "ja",
            "lv",
            "lt",
            "pl",
            "pt-PT",
            "pt-BR",
            "ro",
            "ru",
            "sk",
            "sl",
            "es",
            "sv",
            "tr",
            "uk",
            "ko",
            "nb",
        ]:
            raise Exception(f"DeepL does not support {l}")
        self.language = l
        # Candidate delays between requests to the free endpoint.
        self.time_random = [0.3, 0.5, 1, 1.3, 1.5, 2]

    def rotate_key(self):
        pass

    def translate(self, text):
        print(text)
        t_text = PyDeepLX.translate(text, "EN", self.language)
        # spider rule: sleep for a random interval between requests
        time.sleep(random.choice(self.time_random))
        print("[bold green]" + re.sub("\n{3,}", "\n\n", t_text) + "[/bold green]")
        return t_text
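A brief usage sketch for the class above, driving it directly rather than through make_book.py. The empty key (the free endpoint takes no token) and the "zh" target are illustrative assumptions.

from book_maker.translator.deepl_free_translator import DeepLFree

# Assumed: the key is unused by the free endpoint, so an empty string is passed;
# "zh" is one target language from the supported list above.
translator = DeepLFree(key="", language="zh")
print(translator.translate("The quick brown fox jumps over the lazy dog."))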

View File

@@ -12,7 +12,7 @@ from rich import print


class DeepL(Base):
    """
-    caiyun translator
+    DeepL translator
    """

    def __init__(self, key, language, **kwargs) -> None:

View File

@@ -2,7 +2,16 @@
from setuptools import find_packages, setup

-packages = ["bs4", "openai", "requests", "ebooklib", "rich", "tqdm", "tiktoken"]
+packages = [
+    "bs4",
+    "openai",
+    "requests",
+    "ebooklib",
+    "rich",
+    "tqdm",
+    "tiktoken",
+    "PyDeepLX",
+]

setup(

View File

@@ -40,6 +40,32 @@ def test_google_translate_epub(test_book_dir, tmpdir):
    assert os.path.getsize(os.path.join(tmpdir, "Liber_Esther_bilingual.epub")) != 0


def test_deepl_free_translate_epub(test_book_dir, tmpdir):
    """Test deepl free translate epub"""
    shutil.copyfile(
        os.path.join(test_book_dir, "Liber_Esther.epub"),
        os.path.join(tmpdir, "Liber_Esther.epub"),
    )
    subprocess.run(
        [
            sys.executable,
            "make_book.py",
            "--book_name",
            os.path.join(tmpdir, "Liber_Esther.epub"),
            "--test",
            "--test_num",
            "20",
            "--model",
            "deeplfree",
        ],
        env=os.environ.copy(),
    )
    assert os.path.isfile(os.path.join(tmpdir, "Liber_Esther_bilingual.epub"))
    assert os.path.getsize(os.path.join(tmpdir, "Liber_Esther_bilingual.epub")) != 0


def test_google_translate_epub_cli():
    pass