Mirror of https://github.com/yihong0618/bilingual_book_maker.git (synced 2025-06-02 09:30:24 +00:00)

feat: support model gpt4o-mini (#419)

This commit is contained in:
parent dd3da508a6
commit 32f4eb3566
@@ -312,7 +312,7 @@ So you are close to reaching the limit. You have to choose your own value, there
     translate_model = MODEL_DICT.get(options.model)
     assert translate_model is not None, "unsupported model"
     API_KEY = ""
-    if options.model in ["openai", "chatgptapi", "gpt4"]:
+    if options.model in ["openai", "chatgptapi", "gpt4", "gpt4omini"]:
         if OPENAI_API_KEY := (
             options.openai_key
             or env.get(
@@ -429,6 +429,7 @@ So you are close to reaching the limit. You have to choose your own value, there
         assert options.model in [
             "chatgptapi",
             "gpt4",
+            "gpt4omini",
         ], "only support chatgptapi for deployment_id"
         if not options.api_base:
             raise ValueError("`api_base` must be provided when using `deployment_id`")
@@ -439,7 +440,7 @@ So you are close to reaching the limit. You have to choose your own value, there
             e.translate_model.set_model_list(options.model_list.split(","))
         else:
             raise ValueError(
-                "When using `openai` model, you must also provide `--model_list`. For default model sets use `--model chatgptapi` or `--model gpt4`",
+                "When using `openai` model, you must also provide `--model_list`. For default model sets use `--model chatgptapi` or `--model gpt4` or `--model gpt4omini`",
             )
     # TODO refactor, quick fix for gpt4 model
     if options.model == "chatgptapi":
@@ -449,6 +450,8 @@ So you are close to reaching the limit. You have to choose your own value, there
         e.translate_model.set_gpt35_models()
     if options.model == "gpt4":
         e.translate_model.set_gpt4_models()
+    if options.model == "gpt4omini":
+        e.translate_model.set_gpt4omini_models()
     if options.block_size > 0:
         e.block_size = options.block_size

@@ -13,6 +13,7 @@ MODEL_DICT = {
     "openai": ChatGPTAPI,
     "chatgptapi": ChatGPTAPI,
     "gpt4": ChatGPTAPI,
+    "gpt4omini": ChatGPTAPI,
     "google": Google,
     "caiyun": Caiyun,
     "deepl": DeepL,

@@ -32,6 +32,11 @@ GPT4_MODEL_LIST = [
     "gpt-4-32k-0613",
 ]

+GPT4oMINI_MODEL_LIST = [
+    "gpt-4o-mini",
+    "gpt-4o-mini-2024-07-18",
+]
+

 class ChatGPTAPI(Base):
     DEFAULT_PROMPT = "Please help me to translate,`{text}` to {language}, please return only translated content not include the origin text"
@@ -335,6 +340,18 @@ class ChatGPTAPI(Base):
         print(f"Using model list {model_list}")
         self.model_list = cycle(model_list)

+    def set_gpt4omini_models(self):
+        # for issue #375 azure can not use model list
+        if self.deployment_id:
+            self.model_list = cycle(["gpt-4o-mini"])
+        else:
+            my_model_list = [
+                i["id"] for i in self.openai_client.models.list().model_dump()["data"]
+            ]
+            model_list = list(set(my_model_list) & set(GPT4oMINI_MODEL_LIST))
+            print(f"Using model list {model_list}")
+            self.model_list = cycle(model_list)
+
     def set_model_list(self, model_list):
         model_list = list(set(model_list))
         print(f"Using model list {model_list}")

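For reference, the new `set_gpt4omini_models` picks a model by intersecting the ids the account can see with `GPT4oMINI_MODEL_LIST` and then rotating through the matches with `itertools.cycle`. A minimal standalone sketch of that selection step, using a made-up `available_ids` list in place of a real `openai_client.models.list()` call:

from itertools import cycle

GPT4oMINI_MODEL_LIST = [
    "gpt-4o-mini",
    "gpt-4o-mini-2024-07-18",
]

# Hypothetical stand-in for the ids returned by openai_client.models.list()
available_ids = ["gpt-3.5-turbo", "gpt-4o-mini", "text-embedding-3-small"]

# Keep only the gpt-4o-mini variants the account can actually use,
# then rotate through them on successive requests.
model_list = list(set(available_ids) & set(GPT4oMINI_MODEL_LIST))
print(f"Using model list {model_list}")  # ['gpt-4o-mini']
models = cycle(model_list)
print(next(models))  # gpt-4o-mini
print(next(models))  # gpt-4o-mini again; a single match just repeats
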
@@ -2,7 +2,7 @@
 ## Models
 `-m, --model <Model>` <br>

-Currently `bbook_maker` supports these models: `chatgptapi` , `gpt3` , `google` , `caiyun` , `deepl` , `deeplfree` , `gpt4` , `claude` , `customapi`.
+Currently `bbook_maker` supports these models: `chatgptapi` , `gpt3` , `google` , `caiyun` , `deepl` , `deeplfree` , `gpt4` , `gpt4omini` , `claude` , `customapi`.
 Default model is `chatgptapi` .

 ### OPENAI models

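Taken together with the CLI changes above, the new model should be selectable like the other OpenAI-backed options. A hedged usage sketch, assuming the repository's usual `make_book.py` entry point and an OpenAI key already available via `--openai_key` or the environment (the epub path and target language are placeholders):

python3 make_book.py --book_name your_book.epub --model gpt4omini --language zh-hans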