Mirror of https://github.com/yihong0618/bilingual_book_maker.git
Synced 2025-06-05 19:15:34 +00:00
support: gpt4o (#425)
* support: gpt4o
* fix: remove wrong code
* fix: fix ci error
This commit is contained in:
parent 9e4e7b59c7
commit 6912206cb1
@@ -331,7 +331,7 @@ So you are close to reaching the limit. You have to choose your own value, there
     translate_model = MODEL_DICT.get(options.model)
     assert translate_model is not None, "unsupported model"
     API_KEY = ""
-    if options.model in ["openai", "chatgptapi", "gpt4", "gpt4omini"]:
+    if options.model in ["openai", "chatgptapi", "gpt4", "gpt4omini", "gpt4o"]:
         if OPENAI_API_KEY := (
             options.openai_key
             or env.get(
@@ -449,6 +449,7 @@ So you are close to reaching the limit. You have to choose your own value, there
             "chatgptapi",
             "gpt4",
             "gpt4omini",
+            "gpt4o",
         ], "only support chatgptapi for deployment_id"
         if not options.api_base:
             raise ValueError("`api_base` must be provided when using `deployment_id`")
@@ -471,6 +472,8 @@ So you are close to reaching the limit. You have to choose your own value, there
         e.translate_model.set_gpt4_models()
     if options.model == "gpt4omini":
         e.translate_model.set_gpt4omini_models()
+    if options.model == "gpt4o":
+        e.translate_model.set_gpt4o_models()
     if options.block_size > 0:
         e.block_size = options.block_size
     if options.batch_flag:
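Taken together with the MODEL_DICT entry added in the next hunk, this dispatch means a gpt4o selection is served by the same ChatGPTAPI translator as the other OpenAI chat models, only with a different model list. The following is a minimal sketch of that flow, not repository code: the option and method names come from this diff, while the stand-in class and its constructor signature are assumptions.

from itertools import cycle


# Hypothetical stand-in for the real ChatGPTAPI translator; only the pieces
# relevant to model selection are modelled, and the signature is assumed.
class FakeChatGPTAPI:
    def __init__(self, key, language):
        self.key = key
        self.language = language
        self.model_list = None

    def set_gpt4o_models(self):
        # The real method intersects the account's available models with
        # GPT4o_MODEL_LIST; this stub just uses the canonical name.
        self.model_list = cycle(["gpt-4o"])


MODEL_DICT = {"gpt4o": FakeChatGPTAPI}  # mirrors the new dictionary entry

model_name = "gpt4o"  # what selecting the gpt4o model on the command line would set
translate_model = MODEL_DICT.get(model_name)
assert translate_model is not None, "unsupported model"

translator = translate_model("sk-...", "zh-hans")
if model_name == "gpt4o":
    translator.set_gpt4o_models()  # mirrors the new dispatch added above
print(next(translator.model_list))  # -> gpt-4o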
@@ -14,6 +14,7 @@ MODEL_DICT = {
     "chatgptapi": ChatGPTAPI,
     "gpt4": ChatGPTAPI,
     "gpt4omini": ChatGPTAPI,
+    "gpt4o": ChatGPTAPI,
     "google": Google,
     "caiyun": Caiyun,
     "deepl": DeepL,
@@ -42,6 +42,12 @@ GPT4oMINI_MODEL_LIST = [
     "gpt-4o-mini",
     "gpt-4o-mini-2024-07-18",
 ]
+GPT4o_MODEL_LIST = [
+    "gpt-4o",
+    "gpt-4o-2024-05-13",
+    "gpt-4o-2024-08-06",
+    "chatgpt-4o-latest",
+]
 
 
 class ChatGPTAPI(Base):
@@ -404,6 +410,18 @@ class ChatGPTAPI(Base):
         print(f"Using model list {model_list}")
         self.model_list = cycle(model_list)
 
+    def set_gpt4o_models(self):
+        # for issue #375 azure can not use model list
+        if self.deployment_id:
+            self.model_list = cycle(["gpt-4o"])
+        else:
+            my_model_list = [
+                i["id"] for i in self.openai_client.models.list().model_dump()["data"]
+            ]
+            model_list = list(set(my_model_list) & set(GPT4o_MODEL_LIST))
+            print(f"Using model list {model_list}")
+            self.model_list = cycle(model_list)
+
     def set_model_list(self, model_list):
         model_list = list(set(model_list))
         print(f"Using model list {model_list}")
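The new set_gpt4o_models fetches the model IDs the account can actually use, intersects them with the hard-coded GPT4o_MODEL_LIST, and wraps the result in itertools.cycle. Below is a self-contained sketch of that selection pattern with the OpenAI client stubbed out; the sample IDs in "available" are invented for illustration and are not repository code.

from itertools import cycle

GPT4o_MODEL_LIST = [
    "gpt-4o",
    "gpt-4o-2024-05-13",
    "gpt-4o-2024-08-06",
    "chatgpt-4o-latest",
]

# Stand-in for openai_client.models.list().model_dump()["data"]; in the real
# method these IDs come from the OpenAI account, here they are made up.
available = [{"id": "gpt-4o"}, {"id": "gpt-4o-2024-08-06"}, {"id": "gpt-3.5-turbo"}]

my_model_list = [m["id"] for m in available]
model_list = list(set(my_model_list) & set(GPT4o_MODEL_LIST))
print(f"Using model list {model_list}")  # e.g. ['gpt-4o', 'gpt-4o-2024-08-06']

models = cycle(model_list)  # same structure the translator keeps in self.model_list
print(next(models), next(models), next(models))  # keeps rotating through the variants

Because cycle() never runs out, a retry path can simply call next() to switch to another accepted GPT-4o variant; when deployment_id is set (the Azure case), the method skips the listing entirely and cycles over just "gpt-4o", which the in-code comment ties to issue #375.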