Mirror of https://github.com/yihong0618/bilingual_book_maker.git (synced 2025-06-07 03:55:30 +00:00)

feat Gemini (#366)

* feat: gemini init
* fix: useless code
* feat: gemini model

Signed-off-by: yihong0618 <zouzou0208@gmail.com>

This commit is contained in:
parent e5bfee9cec
commit 9a20b17970

BIN .lemo.temp.bin (binary file not shown)
@@ -25,6 +25,7 @@ bilingual_book_maker is an AI translation tool that uses ChatGPT to help users…
 - Use the [Claude](https://console.anthropic.com/docs) model for translation: `--model claude --claude_key ${claude_key}`
 - Use Google Translate: `--model google`
 - Use Caiyun for translation: `--model caiyun --caiyun_key ${caiyun_key}`
+- Use Gemini for translation: `--model gemini --gemini_key ${gemini_key}`
 - If you haven't paid yet, you can add the `--test` option to preview the result first (there is a rate limit, so it is a bit slow)
 - Use `--language` to set the target language, e.g. `--language "Simplified Chinese"`; the default is `"Simplified Chinese"`.
 Read the help message to see the available target languages: `python make_book.py --help`
@@ -57,6 +58,9 @@ python3 make_book.py --book_name test_books/animal_farm.epub --openai_key ${open…
 # Or translate the whole book
 python3 make_book.py --book_name test_books/animal_farm.epub --openai_key ${openai_key} --language zh-hans
+
+# Or translate the whole book using Gemini
+python3 make_book.py --book_name test_books/animal_farm.epub --gemini_key ${gemini_key} --model gemini

 # Set an environment variable to skip --openai_key
 export OPENAI_API_KEY=${your_api_key}
@@ -7,7 +7,7 @@ The bilingual_book_maker is an AI translation tool that uses ChatGPT to assist u…

 ## Supported Models
-gpt-4, gpt-3.5-turbo, claude-2, palm, llama-2, azure-openai, command-nightly
+gpt-4, gpt-3.5-turbo, claude-2, palm, llama-2, azure-openai, command-nightly, gemini
 For using Non-OpenAI models, use class `liteLLM()` - liteLLM supports all models above.
 Find more info here for using liteLLM: https://github.com/BerriAI/litellm/blob/main/setup.py
@@ -28,6 +28,7 @@ Find more info here for using liteLLM: https://github.com/BerriAI/litellm/blob/m…
 If using `GPT4`, you can add `--use_context` to add a context paragraph to each passage sent to the model for translation (see below)
 - support DeepL model [DeepL Translator](https://rapidapi.com/splintPRO/api/dpl-translator) need pay to get the token use `--model deepl --deepl_key ${deepl_key}`
 - support DeepL free model `--model deeplfree`
+- support Google [Gemini](https://makersuite.google.com/app/apikey) model `--model gemini --gemini_key ${gemini_key}`
 - Support [Claude](https://console.anthropic.com/docs) model, use `--model claude --claude_key ${claude_key}`
 - Use `--test` option to preview the result if you haven't paid for the service. Note that there is a limit and it may take some time.
 - Set the target language like `--language "Simplified Chinese"`. Default target language is `"Simplified Chinese"`.
@@ -72,6 +73,9 @@ python3 make_book.py --book_name test_books/Lex_Fridman_episode_322.srt --openai…
 # Or translate the whole book
 python3 make_book.py --book_name test_books/animal_farm.epub --openai_key ${openai_key} --language zh-hans
+
+# Or translate the whole book using Gemini
+python3 make_book.py --book_name test_books/animal_farm.epub --gemini_key ${gemini_key} --model gemini

 # Set env OPENAI_API_KEY to ignore option --openai_key
 export OPENAI_API_KEY=${your_api_key}
@@ -81,6 +85,7 @@ python3 make_book.py --book_name test_books/animal_farm.epub --model gpt4 --use_…
 # Use the DeepL model with Japanese
 python3 make_book.py --book_name test_books/animal_farm.epub --model deepl --deepl_key ${deepl_key} --language ja

 # Use the Claude model with Japanese
 python3 make_book.py --book_name test_books/animal_farm.epub --model claude --claude_key ${claude_key} --language ja
@@ -106,6 +106,14 @@ def main():
         help="you should build your own translation api",
     )
+
+    # for Google Gemini
+    parser.add_argument(
+        "--gemini_key",
+        dest="gemini_key",
+        type=str,
+        help="You can get Gemini Key from https://makersuite.google.com/app/apikey",
+    )
     parser.add_argument(
         "--test",
         dest="test",
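To illustrate how the new flag surfaces at runtime, here is a minimal, self-contained sketch (only the one new argument is wired up, not the full cli.py parser): the value of `--gemini_key` lands on `options.gemini_key`, which the key-selection branch in the next hunk reads.

```python
# Minimal sketch, not the full cli.py parser: the new --gemini_key flag
# parses into options.gemini_key.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--gemini_key",
    dest="gemini_key",
    type=str,
    help="You can get Gemini Key from https://makersuite.google.com/app/apikey",
)

options = parser.parse_args(["--gemini_key", "your-gemini-api-key"])
print(options.gemini_key)  # -> your-gemini-api-key
```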
@@ -308,6 +316,8 @@ So you are close to reaching the limit. You have to choose your own value, there…
         API_KEY = options.custom_api or env.get("BBM_CUSTOM_API")
         if not API_KEY:
             raise Exception("Please provide custom translate api")
+    elif options.model == "gemini":
+        API_KEY = options.gemini_key or env.get("BBM_GOOGLE_GEMINI_KEY")
     else:
         API_KEY = ""
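The branch above also accepts the key from an environment variable, so `--gemini_key` can be omitted. A hedged sketch of that lookup, assuming `env` in cli.py behaves like `os.environ` (the key value is a placeholder):

```python
# Hedged sketch of the key lookup added above, assuming `env` behaves like
# os.environ; either --gemini_key or BBM_GOOGLE_GEMINI_KEY supplies the key.
import os

env = os.environ
env["BBM_GOOGLE_GEMINI_KEY"] = "your-gemini-api-key"  # placeholder value

gemini_key_option = None  # i.e. --gemini_key was not passed on the CLI
API_KEY = gemini_key_option or env.get("BBM_GOOGLE_GEMINI_KEY")
print(API_KEY)  # -> your-gemini-api-key
```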
@@ -4,6 +4,7 @@ from book_maker.translator.deepl_translator import DeepL
 from book_maker.translator.deepl_free_translator import DeepLFree
 from book_maker.translator.google_translator import Google
 from book_maker.translator.claude_translator import Claude
+from book_maker.translator.gemini_translator import Gemini
 from book_maker.translator.custom_api_translator import CustomAPI

 MODEL_DICT = {
@@ -14,6 +15,7 @@ MODEL_DICT = {
     "deeplfree": DeepLFree,
     "gpt4": ChatGPTAPI,
     "claude": Claude,
-    "customapi": CustomAPI
+    "gemini": Gemini,
+    "customapi": CustomAPI,
     # add more here
 }
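MODEL_DICT maps the `--model` value to a translator class, so registering `"gemini": Gemini` here is what makes `--model gemini` resolve to the new translator. A rough sketch of that dispatch (the actual call site is elsewhere in book_maker and is assumed, not shown in this diff):

```python
# Rough dispatch sketch; the real lookup lives elsewhere in book_maker and is
# only assumed here. MODEL_DICT is the dict defined in the hunk above.
model_name = "gemini"  # value of --model
translate_model = MODEL_DICT.get(model_name)
if translate_model is None:
    raise Exception(f"unsupported model: {model_name}")

# After this commit, translate_model is the Gemini class from
# book_maker/translator/gemini_translator.py (added below).
translator = translate_model("your-gemini-api-key", "Simplified Chinese")
```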
@@ -128,7 +128,8 @@ class ChatGPTAPI(Base):
                 print(f"Get {attempt_count} consecutive exceptions")
                 raise
             except Exception as e:
-                print(str(e), "!!")
+                print(str(e))
+                return

         # todo: Determine whether to print according to the cli option
         if needprint:
book_maker/translator/gemini_translator.py (new file, 83 lines)
@@ -0,0 +1,83 @@
import re
import time

import google.generativeai as genai
from google.generativeai.types.generation_types import (
    StopCandidateException,
    BlockedPromptException,
)
from rich import print

from .base_translator import Base

generation_config = {
    "temperature": 0.7,
    "top_p": 1,
    "top_k": 1,
    "max_output_tokens": 2048,
}

safety_settings = [
    {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_MEDIUM_AND_ABOVE"},
    {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_MEDIUM_AND_ABOVE"},
    {
        "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
        "threshold": "BLOCK_MEDIUM_AND_ABOVE",
    },
    {
        "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
        "threshold": "BLOCK_MEDIUM_AND_ABOVE",
    },
]


class Gemini(Base):
    """
    Google gemini translator
    """

    DEFAULT_PROMPT = "Please help me to translate,`{text}` to {language}, please return only translated content not include the origin text"

    def __init__(self, key, language, **kwargs) -> None:
        genai.configure(api_key=key)
        super().__init__(key, language)
        model = genai.GenerativeModel(
            model_name="gemini-pro",
            generation_config=generation_config,
            safety_settings=safety_settings,
        )
        self.convo = model.start_chat()

    def rotate_key(self):
        pass

    def translate(self, text):
        t_text = ""
        try:
            self.convo.send_message(
                self.DEFAULT_PROMPT.format(text=text, language=self.language)
            )
            print(text)
            t_text = self.convo.last.text.strip()
        except StopCandidateException as e:
            print("Here")
            match = re.search(r'content\s*{\s*parts\s*{\s*text:\s*"([^"]+)"', str(e))
            if match:
                t_text = match.group(1)
                t_text = re.sub(r"\\n", "\n", t_text)
            else:
                t_text = "Can not translate"
        except BlockedPromptException as e:
            print(str(e))
            t_text = "Can not translate by SAFETY reason.(因安全问题不能翻译)"
        except Exception as e:
            print(str(e))
            t_text = "Can not translate by other reason.(因安全问题不能翻译)"

        if len(self.convo.history) > 10:
            self.convo.history = self.convo.history[2:]

        print("[bold green]" + re.sub("\n{3,}", "\n\n", t_text) + "[/bold green]")
        # for limit
        time.sleep(0.5)
        return t_text
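For a quick manual check of the new translator outside the CLI, something like the following should work (a minimal sketch: the key is a placeholder, and `translate()` performs a real Gemini API call, so it needs network access and the `google-generativeai` package installed):

```python
# Minimal usage sketch of the new Gemini translator; the key is a placeholder
# and translate() makes a real Gemini API request.
from book_maker.translator.gemini_translator import Gemini

translator = Gemini(key="your-gemini-api-key", language="Simplified Chinese")
print(translator.translate("The animals of Manor Farm held a meeting."))
```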