Mirror of https://github.com/yihong0618/bilingual_book_maker.git (synced 2025-06-07 03:55:30 +00:00)

parent 66e12f1e54
commit 365d6628bf
@@ -403,8 +403,11 @@ So you are close to reaching the limit. You have to choose your own value, there
             raise ValueError("`api_base` must be provided when using `deployment_id`")
         e.translate_model.set_deployment_id(options.deployment_id)
     # TODO refactor, quick fix for gpt4 model
+    if options.model == "chatgptapi":
+        print(21232)
+        e.translate_model.set_gpt35_models()
     if options.model == "gpt4":
-        e.translate_model.set_gpt4_models("gpt4")
+        e.translate_model.set_gpt4_models()
     if options.block_size > 0:
         e.block_size = options.block_size

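Aside from the new "chatgptapi" branch, the call site changes from set_gpt4_models("gpt4") to set_gpt4_models() because the translator method loses its model argument in the last hunk of this commit. A minimal stand-in sketch, not the project's ChatGPTAPI class, showing why the two changes have to land together:

    class _Translator:
        """Illustrative stub only; the real methods live in ChatGPTAPI."""

        def set_gpt35_models(self):
            self.model_list = ["gpt-3.5 placeholder"]

        def set_gpt4_models(self):           # new signature: no model argument
            self.model_list = ["gpt-4"]

    t = _Translator()
    t.set_gpt4_models()                      # matches the updated CLI call
    # t.set_gpt4_models("gpt4")              # the old CLI call would now raise TypeError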
@@ -66,13 +66,7 @@ class ChatGPTAPI(Base):
         self.system_content = environ.get("OPENAI_API_SYS_MSG") or ""
         self.deployment_id = None
         self.temperature = temperature
-        # gpt3 all models for save the limit
-        my_model_list = [
-            i["id"] for i in self.openai_client.models.list().model_dump()["data"]
-        ]
-        model_list = list(set(my_model_list) & set(GPT35_MODEL_LIST))
-        print(f"Using model list {model_list}")
-        self.model_list = cycle(model_list)
+        self.model_list = None

     def rotate_key(self):
         self.openai_client.api_key = next(self.keys)
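This hunk drops the eager models.list() call from __init__ and leaves self.model_list = None; the list is now built lazily by the setter methods added in the next hunk, so an Azure deployment_id configuration never has to enumerate models. A rough sketch of that deferred pattern with stand-in names (not repo code):

    from itertools import cycle

    class _LazyModels:
        """Sketch: postpone model selection until a model family is chosen."""

        def __init__(self, deployment_id=None):
            self.deployment_id = deployment_id
            self.model_list = None                  # nothing fetched at construction time

        def set_models(self, candidates):
            if self.deployment_id:                  # fixed Azure deployment: skip the listing call
                self.model_list = cycle(candidates[:1])
            else:                                   # public API: rotate over the allowed models
                self.model_list = cycle(candidates)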
@@ -314,7 +308,23 @@ class ChatGPTAPI(Base):
             azure_deployment=self.deployment_id,
         )

-    def set_gpt4_models(self, model="gpt4"):
+    def set_gpt35_models(self):
+        # gpt3 all models for save the limit
+        if self.deployment_id:
+            self.model_list = cycle(["gpt-35-turbo"])
+        else:
+            my_model_list = [
+                i["id"] for i in self.openai_client.models.list().model_dump()["data"]
+            ]
+            model_list = list(set(my_model_list) & set(GPT35_MODEL_LIST))
+            print(f"Using model list {model_list}")
+            self.model_list = cycle(model_list)
+
+    def set_gpt4_models(self):
+        # for issue #375 azure can not use model list
+        if self.deployment_id:
+            self.model_list = cycle(["gpt-4"])
+        else:
             my_model_list = [
                 i["id"] for i in self.openai_client.models.list().model_dump()["data"]
             ]
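The hunk is truncated inside set_gpt4_models: judging from the parallel set_gpt35_models above, the non-Azure branch presumably goes on to intersect my_model_list with a GPT-4 model constant and wrap the result in cycle(...), but that continuation is not shown here. For reference, a small standard-library sketch of the round-robin behaviour these setters rely on (model names are placeholders):

    from itertools import cycle

    available = {"gpt-3.5-turbo", "gpt-3.5-turbo-16k", "text-davinci-003"}
    allowed = {"gpt-3.5-turbo", "gpt-3.5-turbo-16k"}

    # Intersect what the account exposes with the allowed list, then rotate forever.
    model_list = cycle(sorted(available & allowed))
    for _ in range(4):
        print(next(model_list))  # gpt-3.5-turbo, gpt-3.5-turbo-16k, gpt-3.5-turbo, gpt-3.5-turbo-16k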