mirror of
https://github.com/yihong0618/bilingual_book_maker.git
synced 2025-06-01 00:50:12 +00:00
feat(model): support openai o series model (o1-preview, o1, o1-mini, o3-mini)
This commit is contained in:
parent
e6b0de14db
commit
8bfd1b146d
@ -388,7 +388,7 @@ So you are close to reaching the limit. You have to choose your own value, there
|
||||
translate_model = MODEL_DICT.get(options.model)
|
||||
assert translate_model is not None, "unsupported model"
|
||||
API_KEY = ""
|
||||
if options.model in ["openai", "chatgptapi", "gpt4", "gpt4omini", "gpt4o"]:
|
||||
if options.model in ["openai", "chatgptapi", "gpt4", "gpt4omini", "gpt4o", "o1preview", "o1", "o1mini", "o3mini"]:
|
||||
if OPENAI_API_KEY := (
|
||||
options.openai_key
|
||||
or env.get(
|
||||
@ -510,6 +510,10 @@ So you are close to reaching the limit. You have to choose your own value, there
|
||||
"gpt4",
|
||||
"gpt4omini",
|
||||
"gpt4o",
|
||||
"o1",
|
||||
"o1preview",
|
||||
"o1mini",
|
||||
"o3mini",
|
||||
], "only support chatgptapi for deployment_id"
|
||||
if not options.api_base:
|
||||
raise ValueError("`api_base` must be provided when using `deployment_id`")
|
||||
@ -534,6 +538,14 @@ So you are close to reaching the limit. You have to choose your own value, there
|
||||
e.translate_model.set_gpt4omini_models()
|
||||
if options.model == "gpt4o":
|
||||
e.translate_model.set_gpt4o_models()
|
||||
if options.model == "o1preview":
|
||||
e.translate_model.set_o1preview_models()
|
||||
if options.model == "o1":
|
||||
e.translate_model.set_o1_models()
|
||||
if options.model == "o1mini":
|
||||
e.translate_model.set_o1mini_models()
|
||||
if options.model == "o3mini":
|
||||
e.translate_model.set_o3mini_models()
|
||||
if options.model.startswith("claude-"):
|
||||
e.translate_model.set_claude_model(options.model)
|
||||
if options.block_size > 0:
|
||||
|
@ -16,6 +16,10 @@ MODEL_DICT = {
|
||||
"gpt4": ChatGPTAPI,
|
||||
"gpt4omini": ChatGPTAPI,
|
||||
"gpt4o": ChatGPTAPI,
|
||||
"o1preview": ChatGPTAPI,
|
||||
"o1": ChatGPTAPI,
|
||||
"o1mini": ChatGPTAPI,
|
||||
"o3mini": ChatGPTAPI,
|
||||
"google": Google,
|
||||
"caiyun": Caiyun,
|
||||
"deepl": DeepL,
|
||||
|
@ -48,7 +48,21 @@ GPT4o_MODEL_LIST = [
|
||||
"gpt-4o-2024-08-06",
|
||||
"chatgpt-4o-latest",
|
||||
]
|
||||
|
||||
O1PREVIEW_MODEL_LIST = [
|
||||
"o1-preview",
|
||||
"o1-preview-2024-09-12",
|
||||
]
|
||||
O1_MODEL_LIST = [
|
||||
"o1",
|
||||
"o1-2024-12-17",
|
||||
]
|
||||
O1MINI_MODEL_LIST = [
|
||||
"o1-mini",
|
||||
"o1-mini-2024-09-12",
|
||||
]
|
||||
O3MINI_MODEL_LIST = [
|
||||
"o3-mini",
|
||||
]
|
||||
|
||||
class ChatGPTAPI(Base):
|
||||
DEFAULT_PROMPT = "Please help me to translate,`{text}` to {language}, please return only translated content not include the origin text"
|
||||
@ -422,6 +436,54 @@ class ChatGPTAPI(Base):
|
||||
print(f"Using model list {model_list}")
|
||||
self.model_list = cycle(model_list)
|
||||
|
||||
def set_o1preview_models(self):
    """Rotate translation requests over the o1-preview model family.

    With an Azure ``deployment_id`` the models endpoint cannot be
    enumerated (issue #375), so the bare model name is used instead.
    """
    if self.deployment_id:
        self.model_list = cycle(["o1-preview"])
        return
    available = {
        entry["id"]
        for entry in self.openai_client.models.list().model_dump()["data"]
    }
    model_list = list(available & set(O1PREVIEW_MODEL_LIST))
    print(f"Using model list {model_list}")
    self.model_list = cycle(model_list)
|
||||
|
||||
def set_o1_models(self):
    """Rotate translation requests over the o1 model family.

    With an Azure ``deployment_id`` the models endpoint cannot be
    enumerated (issue #375), so the bare model name is used instead.
    """
    if self.deployment_id:
        self.model_list = cycle(["o1"])
        return
    available = {
        entry["id"]
        for entry in self.openai_client.models.list().model_dump()["data"]
    }
    model_list = list(available & set(O1_MODEL_LIST))
    print(f"Using model list {model_list}")
    self.model_list = cycle(model_list)
|
||||
|
||||
def set_o1mini_models(self):
    """Rotate translation requests over the o1-mini model family.

    With an Azure ``deployment_id`` the models endpoint cannot be
    enumerated (issue #375), so the bare model name is used instead.
    """
    if self.deployment_id:
        self.model_list = cycle(["o1-mini"])
        return
    available = {
        entry["id"]
        for entry in self.openai_client.models.list().model_dump()["data"]
    }
    model_list = list(available & set(O1MINI_MODEL_LIST))
    print(f"Using model list {model_list}")
    self.model_list = cycle(model_list)
|
||||
|
||||
def set_o3mini_models(self):
    """Rotate translation requests over the o3-mini model family.

    With an Azure ``deployment_id`` the models endpoint cannot be
    enumerated (issue #375), so the bare model name is used instead.
    """
    if self.deployment_id:
        self.model_list = cycle(["o3-mini"])
        return
    available = {
        entry["id"]
        for entry in self.openai_client.models.list().model_dump()["data"]
    }
    model_list = list(available & set(O3MINI_MODEL_LIST))
    print(f"Using model list {model_list}")
    self.model_list = cycle(model_list)
|
||||
|
||||
def set_model_list(self, model_list):
|
||||
model_list = list(set(model_list))
|
||||
print(f"Using model list {model_list}")
|
||||
|
@ -2,7 +2,7 @@
|
||||
## Models
|
||||
`-m, --model <Model>` <br>
|
||||
|
||||
Currently `bbook_maker` supports these models: `chatgptapi` , `gpt3` , `google` , `caiyun` , `deepl` , `deeplfree` , `gpt4` , `gpt4omini` , `claude` , `customapi`.
|
||||
Currently `bbook_maker` supports these models: `chatgptapi` , `gpt3` , `google` , `caiyun` , `deepl` , `deeplfree` , `gpt4` , `gpt4omini` , `o1preview` , `o1` , `o1mini` , `o3mini` , `claude` , `customapi`.
|
||||
Default model is `chatgptapi` .
|
||||
|
||||
### OPENAI models
|
||||
|
Loading…
x
Reference in New Issue
Block a user