Mirror of https://github.com/yihong0618/bilingual_book_maker.git (synced 2025-06-06 11:35:49 +00:00)

Commit a9220ea6bf (parent 307898f5cd)

feat: support groq translator (#399)

* feat: support Tencent TranSmart
* feat: support groq translator
@@ -114,6 +114,14 @@ def main():
         help="You can get Gemini Key from https://makersuite.google.com/app/apikey",
     )

+    # for Groq
+    parser.add_argument(
+        "--groq_key",
+        dest="groq_key",
+        type=str,
+        help="You can get Groq Key from https://console.groq.com/keys",
+    )
+
     parser.add_argument(
         "--test",
         dest="test",
@@ -341,6 +349,8 @@ So you are close to reaching the limit. You have to choose your own value, there
         raise Exception("Please provide custom translate api")
     elif options.model == "gemini":
         API_KEY = options.gemini_key or env.get("BBM_GOOGLE_GEMINI_KEY")
+    elif options.model == "groq":
+        API_KEY = options.groq_key or env.get("BBM_GROQ_API_KEY")
     else:
         API_KEY = ""

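The new branch mirrors the existing Gemini handling: a key passed via --groq_key wins, otherwise the BBM_GROQ_API_KEY environment variable is used. A minimal, self-contained sketch of that fallback (only the flag name and the environment variable come from the diff; the surrounding scaffolding is illustrative):

# Sketch of the --groq_key / BBM_GROQ_API_KEY fallback added above.
# Only the flag and the env var name come from the diff; the rest is scaffolding.
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument("--groq_key", dest="groq_key", type=str)
options = parser.parse_args([])  # e.g. no flag given on the command line

api_key = options.groq_key or os.environ.get("BBM_GROQ_API_KEY", "")
if not api_key:
    raise SystemExit("Provide --groq_key or set BBM_GROQ_API_KEY")
print("Groq key found")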
@@ -423,7 +433,7 @@ So you are close to reaching the limit. You have to choose your own value, there
         if not options.api_base:
             raise ValueError("`api_base` must be provided when using `deployment_id`")
         e.translate_model.set_deployment_id(options.deployment_id)
-    if options.model == "openai":
+    if options.model in ("openai", "groq"):
         # Currently only supports `openai` when you also have --model_list set
         if options.model_list:
            e.translate_model.set_model_list(options.model_list.split(","))
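This widens the --model_list handling so a user-supplied model list is also honored for groq. A tiny sketch of the round-robin rotation that list feeds, using the same itertools.cycle pattern GroqClient.rotate_model uses further down; the model names here are just examples:

# Round-robin over a user-supplied model list, as now enabled for groq.
from itertools import cycle

models = cycle("llama3-8b-8192,llama3-70b-8192".split(","))
for _ in range(3):
    print(next(models))  # llama3-8b-8192, llama3-70b-8192, llama3-8b-8192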
|
@ -5,6 +5,7 @@ from book_maker.translator.deepl_free_translator import DeepLFree
|
|||||||
from book_maker.translator.google_translator import Google
|
from book_maker.translator.google_translator import Google
|
||||||
from book_maker.translator.claude_translator import Claude
|
from book_maker.translator.claude_translator import Claude
|
||||||
from book_maker.translator.gemini_translator import Gemini
|
from book_maker.translator.gemini_translator import Gemini
|
||||||
|
from book_maker.translator.groq_translator import GroqClient
|
||||||
from book_maker.translator.tencent_transmart_translator import TencentTranSmart
|
from book_maker.translator.tencent_transmart_translator import TencentTranSmart
|
||||||
from book_maker.translator.custom_api_translator import CustomAPI
|
from book_maker.translator.custom_api_translator import CustomAPI
|
||||||
|
|
||||||
@@ -18,6 +19,7 @@ MODEL_DICT = {
     "deeplfree": DeepLFree,
     "claude": Claude,
     "gemini": Gemini,
+    "groq": GroqClient,
     "tencentransmart": TencentTranSmart,
     "customapi": CustomAPI,
     # add more here
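With the registry entry in place, "--model groq" can resolve to GroqClient. An illustrative lookup against that registry (not the project's actual loader; the import path assumes MODEL_DICT is defined in book_maker/translator/__init__.py):

# Illustrative lookup against the MODEL_DICT registry extended above.
from book_maker.translator import MODEL_DICT  # assumes MODEL_DICT lives in __init__.py

model_name = "groq"  # hypothetical value of options.model
translator_cls = MODEL_DICT.get(model_name)
if translator_cls is None:
    raise ValueError(f"Unsupported model: {model_name}")
print(translator_cls.__name__)  # GroqClient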
book_maker/translator/groq_translator.py (new file, 45 lines)
@@ -0,0 +1,45 @@
+from groq import Groq
+from .chatgptapi_translator import ChatGPTAPI
+from os import linesep
+from itertools import cycle
+
+
+GROQ_MODEL_LIST = [
+    "llama3-8b-8192",
+    "llama3-70b-8192",
+    "mixtral-8x7b-32768",
+    "gemma-7b-it",
+]
+
+
+class GroqClient(ChatGPTAPI):
+    def rotate_model(self):
+        if not self.model_list:
+            model_list = list(set(GROQ_MODEL_LIST))
+            print(f"Using model list {model_list}")
+            self.model_list = cycle(model_list)
+        self.model = next(self.model_list)
+
+    def create_chat_completion(self, text):
+        self.groq_client = Groq(api_key=next(self.keys))
+
+        content = f"{self.prompt_template.format(text=text, language=self.language, crlf=linesep)}"
+        sys_content = self.system_content or self.prompt_sys_msg.format(crlf="\n")
+
+        messages = [
+            {"role": "system", "content": sys_content},
+            {"role": "user", "content": content},
+        ]
+
+        if self.deployment_id:
+            return self.groq_client.chat.completions.create(
+                engine=self.deployment_id,
+                messages=messages,
+                temperature=self.temperature,
+                azure=True,
+            )
+        return self.groq_client.chat.completions.create(
+            model=self.model,
+            messages=messages,
+            temperature=self.temperature,
+        )
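For reference, a standalone sketch of the underlying Groq chat-completions call that create_chat_completion wraps, assuming a valid GROQ_API_KEY in the environment; the prompt text, target language, and model choice are placeholders:

# Standalone sketch of the Groq SDK call wrapped by GroqClient above.
# Assumes GROQ_API_KEY is set; prompt and model choice are placeholders.
import os
from groq import Groq

client = Groq(api_key=os.environ["GROQ_API_KEY"])
resp = client.chat.completions.create(
    model="llama3-8b-8192",  # one of the entries in GROQ_MODEL_LIST
    messages=[
        {"role": "system", "content": "You are a translator."},
        {"role": "user", "content": "Please translate into Simplified Chinese: Hello, world."},
    ],
    temperature=1.0,
)
print(resp.choices[0].message.content)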
pdm.lock (generated)
@@ -5,7 +5,7 @@
 groups = ["default"]
 strategy = ["cross_platform", "inherit_metadata"]
 lock_version = "4.4.1"
-content_hash = "sha256:e79301208d739e75b71b215fa08dea8b83fe165669b183636ab5066c4f1f991e"
+content_hash = "sha256:7792e48118ca2396a823aeef510a3bbec973033b44e04c7d81368e0285275716"

 [[package]]
 name = "aiohttp"
@@ -747,6 +747,25 @@ files = [
     {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"},
 ]

+[[package]]
+name = "groq"
+version = "0.5.0"
+requires_python = ">=3.7"
+summary = "The official Python library for the groq API"
+groups = ["default"]
+dependencies = [
+    "anyio<5,>=3.5.0",
+    "distro<2,>=1.7.0",
+    "httpx<1,>=0.23.0",
+    "pydantic<3,>=1.9.0",
+    "sniffio",
+    "typing-extensions<5,>=4.7",
+]
+files = [
+    {file = "groq-0.5.0-py3-none-any.whl", hash = "sha256:a7e6be1118bcdfea3ed071ec00f505a34d4e6ec28c435adb5a5afd33545683a1"},
+    {file = "groq-0.5.0.tar.gz", hash = "sha256:d476cdc3383b45d2a4dc1876142a9542e663ea1029f9e07a05de24f895cae48c"},
+]
+
 [[package]]
 name = "grpcio"
 version = "1.63.0"
@@ -958,7 +977,7 @@ files = [

 [[package]]
 name = "litellm"
-version = "1.35.34"
+version = "1.35.38"
 requires_python = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
 summary = "Library to easily interface with LLM API providers"
 groups = ["default"]
@@ -974,8 +993,8 @@ dependencies = [
     "tokenizers",
 ]
 files = [
-    {file = "litellm-1.35.34-py3-none-any.whl", hash = "sha256:832849e6e9cb706fe8163b7362fedf35f2b46bd6409e5b3678fd645d1cb0436e"},
-    {file = "litellm-1.35.34.tar.gz", hash = "sha256:57194748f51ec55bf38b4260a374dbfcf832b8347c8f16ca75da5b852c089318"},
+    {file = "litellm-1.35.38-py3-none-any.whl", hash = "sha256:79ab3403c945b340a751d889cf49030fee050487dff6294a21fb9586c49e3faf"},
+    {file = "litellm-1.35.38.tar.gz", hash = "sha256:1a0b195c74d45ba0c2391c5be533c211ee1bcdba6be09e6950037432f62f79ea"},
 ]

 [[package]]
@@ -1244,7 +1263,7 @@ files = [

 [[package]]
 name = "openai"
-version = "1.25.0"
+version = "1.25.2"
 requires_python = ">=3.7.1"
 summary = "The official Python library for the openai API"
 groups = ["default"]
@@ -1258,8 +1277,8 @@ dependencies = [
     "typing-extensions<5,>=4.7",
 ]
 files = [
-    {file = "openai-1.25.0-py3-none-any.whl", hash = "sha256:d0cfdf6afb31a5dabf3b95966cb31f3c757a0edaf3228715409cb404b9933de0"},
-    {file = "openai-1.25.0.tar.gz", hash = "sha256:22c35b26b8281cd2759b1a4c05ac99e2f2b26a9df71f90a0b4ddb75aa27adc81"},
+    {file = "openai-1.25.2-py3-none-any.whl", hash = "sha256:8df66384343e81ae49f5b9ee7d2d67df3b98d578d1e3a50994111e1c6062bf5e"},
+    {file = "openai-1.25.2.tar.gz", hash = "sha256:a2b4a6ae9afb8b5578b5b69315ecc6c01564d4c8dbe18fe2375bb9547ff5038c"},
 ]

 [[package]]
@@ -1819,7 +1838,7 @@ files = [

 [[package]]
 name = "tqdm"
-version = "4.66.2"
+version = "4.66.4"
 requires_python = ">=3.7"
 summary = "Fast, Extensible Progress Meter"
 groups = ["default"]
@@ -1827,8 +1846,8 @@ dependencies = [
     "colorama; platform_system == \"Windows\"",
 ]
 files = [
-    {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"},
-    {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"},
+    {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"},
+    {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"},
 ]

 [[package]]
@@ -27,6 +27,7 @@ dependencies = [
     "rich",
     "tiktoken",
     "tqdm",
+    "groq>=0.5.0",
 ]

 [project.scripts]
@@ -32,6 +32,7 @@ google-auth==2.29.0
 google-auth-httplib2==0.2.0
 google-generativeai==0.5.2
 googleapis-common-protos==1.63.0
+groq==0.5.0
 grpcio==1.63.0
 grpcio-status==1.62.2
 h11==0.14.0
@@ -43,13 +44,13 @@ idna==3.7
 importlib-metadata==7.1.0
 jinja2==3.1.3
 langdetect==1.0.9
-litellm==1.35.34
+litellm==1.35.38
 lxml==5.2.1
 markdown-it-py==3.0.0
 markupsafe==2.1.5
 mdurl==0.1.2
 multidict==6.0.5
-openai==1.25.0
+openai==1.25.2
 packaging==24.0
 proto-plus==1.23.0
 protobuf==4.25.3
@@ -73,7 +74,7 @@ socksio==1.0.0
 soupsieve==2.5
 tiktoken==0.6.0
 tokenizers==0.19.1
-tqdm==4.66.2
+tqdm==4.66.4
 typing-extensions==4.11.0
 uritemplate==4.1.1
 urllib3==2.2.1