feat: add support for --interval option to gemini

Risin 2024-10-15 18:40:44 +09:00
parent 696ea341b5
commit 5ad87bca4f
3 changed files with 13 additions and 2 deletions


@@ -316,6 +316,12 @@ So you are close to reaching the limit. You have to choose your own value, there
         action="store_true",
         help="Use pre-generated batch translations to create files. Run with --batch first before using this option",
     )
+    parser.add_argument(
+        "--interval",
+        type=float,
+        default=0.01,
+        help="Request interval in seconds (e.g., 0.1 for 100ms). Currently only supported for Gemini models. Default: 0.01",
+    )
     options = parser.parse_args()
@@ -422,6 +428,7 @@ So you are close to reaching the limit. You have to choose your own value, there
         single_translate=options.single_translate,
         context_flag=options.context_flag,
         temperature=options.temperature,
+        interval=options.interval,
     )
     # other options
     if options.allow_navigable_strings:

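For illustration, a minimal standalone sketch of how the new flag is parsed. It uses a throwaway parser containing only the --interval option added above, not the project's real argument parser:

# Minimal sketch: a throwaway parser with only the new option.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--interval",
    type=float,
    default=0.01,
    help="Request interval in seconds (e.g., 0.1 for 100ms).",
)

print(parser.parse_args(["--interval", "0.1"]).interval)  # 0.1 (parsed as float)
print(parser.parse_args([]).interval)                     # 0.01 (the default)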

@@ -35,6 +35,7 @@ class EPUBBookLoader(BaseBookLoader):
         context_flag=False,
         temperature=1.0,
         context_paragraph_limit=0,
+        interval=0.01,
     ):
         self.epub_name = epub_name
         self.new_epub = epub.EpubBook()
@@ -45,6 +46,7 @@ class EPUBBookLoader(BaseBookLoader):
             context_flag=context_flag,
             context_paragraph_limit=context_paragraph_limit,
             temperature=temperature,
+            interval=interval,
             **prompt_config_to_kwargs(prompt_config),
         )
         self.is_test = is_test


@@ -43,9 +43,11 @@ class Gemini(Base):
         key,
         language,
         temperature=1.0,
+        interval=0.01,
         **kwargs,
     ) -> None:
         super().__init__(key, language)
+        self.interval = interval
         generation_config["temperature"] = temperature
         model = genai.GenerativeModel(
             model_name="gemini-pro",
@@ -90,8 +92,8 @@ class Gemini(Base):
             self.convo.history = self.convo.history[2:]
         print("[bold green]" + re.sub("\n{3,}", "\n\n", t_text) + "[/bold green]")
-        # for limit
-        time.sleep(0.5)
+        # for rate limit(RPM)
+        time.sleep(self.interval)
         if num:
             t_text = str(num) + "\n" + t_text
         return t_text
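As a rough guide to picking a value, the sleep length maps directly onto a requests-per-minute (RPM) budget. The sketch below is illustrative only; the RPM figures in it are examples, not limits stated anywhere in this commit:

# Illustrative helper: derive a sleep interval from an RPM budget.
# The RPM values used here are examples, not limits taken from this commit.
import time

def interval_for_rpm(rpm: float) -> float:
    """Seconds to wait between requests to stay under `rpm` requests per minute."""
    return 60.0 / rpm

print(interval_for_rpm(60))   # 1.0  -> run with --interval 1.0
print(interval_for_rpm(600))  # 0.1  -> run with --interval 0.1

# Same pattern as the translator above: pause after each request.
time.sleep(interval_for_rpm(600))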