From 32f4eb35668fcdf16f519830a738349b5bc721c8 Mon Sep 17 00:00:00 2001
From: mkXultra <34665721+mkXultra@users.noreply.github.com>
Date: Thu, 8 Aug 2024 19:16:58 +0900
Subject: [PATCH] feat: support model gpt4o-mini (#419)

---
 book_maker/cli.py                              |  7 +++++--
 book_maker/translator/__init__.py              |  1 +
 book_maker/translator/chatgptapi_translator.py | 17 +++++++++++++++++
 docs/model_lang.md                             |  2 +-
 4 files changed, 24 insertions(+), 3 deletions(-)

diff --git a/book_maker/cli.py b/book_maker/cli.py
index 57ac7b76..8898fb32 100644
--- a/book_maker/cli.py
+++ b/book_maker/cli.py
@@ -312,7 +312,7 @@ def main():
     translate_model = MODEL_DICT.get(options.model)
     assert translate_model is not None, "unsupported model"
     API_KEY = ""
-    if options.model in ["openai", "chatgptapi", "gpt4"]:
+    if options.model in ["openai", "chatgptapi", "gpt4", "gpt4omini"]:
         if OPENAI_API_KEY := (
             options.openai_key
             or env.get(
@@ -429,6 +429,7 @@ def main():
         assert options.model in [
             "chatgptapi",
             "gpt4",
+            "gpt4omini",
         ], "only support chatgptapi for deployment_id"
         if not options.api_base:
             raise ValueError("`api_base` must be provided when using `deployment_id`")
@@ -439,7 +440,7 @@ def main():
             e.translate_model.set_model_list(options.model_list.split(","))
         else:
             raise ValueError(
-                "When using `openai` model, you must also provide `--model_list`. For default model sets use `--model chatgptapi` or `--model gpt4`",
+                "When using `openai` model, you must also provide `--model_list`. For default model sets use `--model chatgptapi` or `--model gpt4` or `--model gpt4omini`",
             )
     # TODO refactor, quick fix for gpt4 model
     if options.model == "chatgptapi":
@@ -449,6 +450,8 @@ def main():
             e.translate_model.set_gpt35_models()
     if options.model == "gpt4":
         e.translate_model.set_gpt4_models()
+    if options.model == "gpt4omini":
+        e.translate_model.set_gpt4omini_models()
     if options.block_size > 0:
         e.block_size = options.block_size
 
diff --git a/book_maker/translator/__init__.py b/book_maker/translator/__init__.py
index d3efcc34..d7e84cbe 100644
--- a/book_maker/translator/__init__.py
+++ b/book_maker/translator/__init__.py
@@ -13,6 +13,7 @@
     "openai": ChatGPTAPI,
     "chatgptapi": ChatGPTAPI,
     "gpt4": ChatGPTAPI,
+    "gpt4omini": ChatGPTAPI,
     "google": Google,
     "caiyun": Caiyun,
     "deepl": DeepL,
diff --git a/book_maker/translator/chatgptapi_translator.py b/book_maker/translator/chatgptapi_translator.py
index 22f27962..def1c22c 100644
--- a/book_maker/translator/chatgptapi_translator.py
+++ b/book_maker/translator/chatgptapi_translator.py
@@ -32,6 +32,11 @@
     "gpt-4-32k-0613",
 ]
 
+GPT4oMINI_MODEL_LIST = [
+    "gpt-4o-mini",
+    "gpt-4o-mini-2024-07-18",
+]
+
 
 class ChatGPTAPI(Base):
     DEFAULT_PROMPT = "Please help me to translate,`{text}` to {language}, please return only translated content not include the origin text"
@@ -335,6 +340,18 @@ def set_gpt4_models(self):
         print(f"Using model list {model_list}")
         self.model_list = cycle(model_list)
 
+    def set_gpt4omini_models(self):
+        # for issue #375 azure can not use model list
+        if self.deployment_id:
+            self.model_list = cycle(["gpt-4o-mini"])
+        else:
+            my_model_list = [
+                i["id"] for i in self.openai_client.models.list().model_dump()["data"]
+            ]
+            model_list = list(set(my_model_list) & set(GPT4oMINI_MODEL_LIST))
+            print(f"Using model list {model_list}")
+            self.model_list = cycle(model_list)
+
     def set_model_list(self, model_list):
         model_list = list(set(model_list))
         print(f"Using model list {model_list}")
diff --git a/docs/model_lang.md b/docs/model_lang.md
index 5974ffe9..336da505 100644
--- a/docs/model_lang.md
+++ b/docs/model_lang.md
@@ -2,7 +2,7 @@
 
 ## Models
 `-m, --model <Model>`
-Currently `bbook_maker` supports these models: `chatgptapi` , `gpt3` , `google` , `caiyun` , `deepl` , `deeplfree` , `gpt4` , `claude` , `customapi`.
+Currently `bbook_maker` supports these models: `chatgptapi` , `gpt3` , `google` , `caiyun` , `deepl` , `deeplfree` , `gpt4` , `gpt4omini` , `claude` , `customapi`.
 Default model is `chatgptapi` .
 
 ### OPENAI models
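
For reviewers, here is a minimal, self-contained sketch of the selection logic the new `set_gpt4omini_models` method uses when no Azure `deployment_id` is set: intersect the model IDs the account exposes with `GPT4oMINI_MODEL_LIST`, then rotate through the matches with `itertools.cycle`. The `available_ids` list below is a hypothetical stand-in for the live `openai_client.models.list()` response and is not part of the patch.

```python
from itertools import cycle

# Mirrors GPT4oMINI_MODEL_LIST introduced in chatgptapi_translator.py.
GPT4oMINI_MODEL_LIST = [
    "gpt-4o-mini",
    "gpt-4o-mini-2024-07-18",
]

# Hypothetical stand-in for the IDs returned by openai_client.models.list();
# a real run would fetch these from the OpenAI API.
available_ids = ["gpt-4o", "gpt-4o-mini", "gpt-3.5-turbo", "gpt-4o-mini-2024-07-18"]

# Keep only the gpt-4o-mini variants the account can actually serve,
# then cycle through them so successive requests rotate over the matches.
model_list = list(set(available_ids) & set(GPT4oMINI_MODEL_LIST))
print(f"Using model list {model_list}")

models = cycle(model_list)
for _ in range(3):
    print(next(models))
```

On Azure (when `deployment_id` is set) the patch skips the lookup entirely and pins the list to `["gpt-4o-mini"]`, since the deployment already fixes the model.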