From 1dc93403acbfa0405649244520416b263c66edaa Mon Sep 17 00:00:00 2001
From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date: Tue, 16 Apr 2024 12:54:34 -0400
Subject: [PATCH] feat: extract chat models to a named enum (#1322)

---
 api.md                                     |   6 +
 src/openai/resources/chat/completions.py   | 201 +-----------------
 src/openai/types/__init__.py               |   1 +
 .../types/chat/completion_create_params.py |  28 +--
 src/openai/types/chat_model.py             |  27 +++
 5 files changed, 45 insertions(+), 218 deletions(-)
 create mode 100644 src/openai/types/chat_model.py

diff --git a/api.md b/api.md
index 38f77592e8..c772fb7c7b 100644
--- a/api.md
+++ b/api.md
@@ -18,6 +18,12 @@ Methods:
 
 # Chat
 
+Types:
+
+```python
+from openai.types import ChatModel
+```
+
 ## Completions
 
 Types:
diff --git a/src/openai/resources/chat/completions.py b/src/openai/resources/chat/completions.py
index 1a23e7876e..3b070b716e 100644
--- a/src/openai/resources/chat/completions.py
+++ b/src/openai/resources/chat/completions.py
@@ -8,6 +8,7 @@
 import httpx
 
 from ... import _legacy_response
+from ...types import ChatModel
 from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
 from ..._utils import (
     required_args,
@@ -47,30 +48,7 @@ def create(
         self,
         *,
         messages: Iterable[ChatCompletionMessageParam],
-        model: Union[
-            str,
-            Literal[
-                "gpt-4-turbo",
-                "gpt-4-turbo-2024-04-09",
-                "gpt-4-0125-preview",
-                "gpt-4-turbo-preview",
-                "gpt-4-1106-preview",
-                "gpt-4-vision-preview",
-                "gpt-4",
-                "gpt-4-0314",
-                "gpt-4-0613",
-                "gpt-4-32k",
-                "gpt-4-32k-0314",
-                "gpt-4-32k-0613",
-                "gpt-3.5-turbo",
-                "gpt-3.5-turbo-16k",
-                "gpt-3.5-turbo-0301",
-                "gpt-3.5-turbo-0613",
-                "gpt-3.5-turbo-1106",
-                "gpt-3.5-turbo-0125",
-                "gpt-3.5-turbo-16k-0613",
-            ],
-        ],
+        model: Union[str, ChatModel],
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
         functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN,
@@ -238,30 +216,7 @@ def create(
         self,
         *,
         messages: Iterable[ChatCompletionMessageParam],
-        model: Union[
-            str,
-            Literal[
-                "gpt-4-turbo",
-                "gpt-4-turbo-2024-04-09",
-                "gpt-4-0125-preview",
-                "gpt-4-turbo-preview",
-                "gpt-4-1106-preview",
-                "gpt-4-vision-preview",
-                "gpt-4",
-                "gpt-4-0314",
-                "gpt-4-0613",
-                "gpt-4-32k",
-                "gpt-4-32k-0314",
-                "gpt-4-32k-0613",
-                "gpt-3.5-turbo",
-                "gpt-3.5-turbo-16k",
-                "gpt-3.5-turbo-0301",
-                "gpt-3.5-turbo-0613",
-                "gpt-3.5-turbo-1106",
-                "gpt-3.5-turbo-0125",
-                "gpt-3.5-turbo-16k-0613",
-            ],
-        ],
+        model: Union[str, ChatModel],
         stream: Literal[True],
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
@@ -429,30 +384,7 @@ def create(
         self,
         *,
         messages: Iterable[ChatCompletionMessageParam],
-        model: Union[
-            str,
-            Literal[
-                "gpt-4-turbo",
-                "gpt-4-turbo-2024-04-09",
-                "gpt-4-0125-preview",
-                "gpt-4-turbo-preview",
-                "gpt-4-1106-preview",
-                "gpt-4-vision-preview",
-                "gpt-4",
-                "gpt-4-0314",
-                "gpt-4-0613",
-                "gpt-4-32k",
-                "gpt-4-32k-0314",
-                "gpt-4-32k-0613",
-                "gpt-3.5-turbo",
-                "gpt-3.5-turbo-16k",
-                "gpt-3.5-turbo-0301",
-                "gpt-3.5-turbo-0613",
-                "gpt-3.5-turbo-1106",
-                "gpt-3.5-turbo-0125",
-                "gpt-3.5-turbo-16k-0613",
-            ],
-        ],
+        model: Union[str, ChatModel],
         stream: bool,
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
@@ -620,30 +552,7 @@ def create(
         self,
         *,
         messages: Iterable[ChatCompletionMessageParam],
-        model: Union[
-            str,
-            Literal[
-                "gpt-4-turbo",
-                "gpt-4-turbo-2024-04-09",
-                "gpt-4-0125-preview",
-                "gpt-4-turbo-preview",
-                "gpt-4-1106-preview",
-                "gpt-4-vision-preview",
-                "gpt-4",
-                "gpt-4-0314",
-                "gpt-4-0613",
-                "gpt-4-32k",
-                "gpt-4-32k-0314",
-                "gpt-4-32k-0613",
-                "gpt-3.5-turbo",
-                "gpt-3.5-turbo-16k",
-                "gpt-3.5-turbo-0301",
-                "gpt-3.5-turbo-0613",
-                "gpt-3.5-turbo-1106",
-                "gpt-3.5-turbo-0125",
-                "gpt-3.5-turbo-16k-0613",
-            ],
-        ],
+        model: Union[str, ChatModel],
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
         functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN,
@@ -719,30 +628,7 @@ async def create(
         self,
         *,
         messages: Iterable[ChatCompletionMessageParam],
-        model: Union[
-            str,
-            Literal[
-                "gpt-4-turbo",
-                "gpt-4-turbo-2024-04-09",
-                "gpt-4-0125-preview",
-                "gpt-4-turbo-preview",
-                "gpt-4-1106-preview",
-                "gpt-4-vision-preview",
-                "gpt-4",
-                "gpt-4-0314",
-                "gpt-4-0613",
-                "gpt-4-32k",
-                "gpt-4-32k-0314",
-                "gpt-4-32k-0613",
-                "gpt-3.5-turbo",
-                "gpt-3.5-turbo-16k",
-                "gpt-3.5-turbo-0301",
-                "gpt-3.5-turbo-0613",
-                "gpt-3.5-turbo-1106",
-                "gpt-3.5-turbo-0125",
-                "gpt-3.5-turbo-16k-0613",
-            ],
-        ],
+        model: Union[str, ChatModel],
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
         functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN,
@@ -910,30 +796,7 @@ async def create(
         self,
         *,
         messages: Iterable[ChatCompletionMessageParam],
-        model: Union[
-            str,
-            Literal[
-                "gpt-4-turbo",
-                "gpt-4-turbo-2024-04-09",
-                "gpt-4-0125-preview",
-                "gpt-4-turbo-preview",
-                "gpt-4-1106-preview",
-                "gpt-4-vision-preview",
-                "gpt-4",
-                "gpt-4-0314",
-                "gpt-4-0613",
-                "gpt-4-32k",
-                "gpt-4-32k-0314",
-                "gpt-4-32k-0613",
-                "gpt-3.5-turbo",
-                "gpt-3.5-turbo-16k",
-                "gpt-3.5-turbo-0301",
-                "gpt-3.5-turbo-0613",
-                "gpt-3.5-turbo-1106",
-                "gpt-3.5-turbo-0125",
-                "gpt-3.5-turbo-16k-0613",
-            ],
-        ],
+        model: Union[str, ChatModel],
         stream: Literal[True],
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
@@ -1101,30 +964,7 @@ async def create(
         self,
         *,
         messages: Iterable[ChatCompletionMessageParam],
-        model: Union[
-            str,
-            Literal[
-                "gpt-4-turbo",
-                "gpt-4-turbo-2024-04-09",
-                "gpt-4-0125-preview",
-                "gpt-4-turbo-preview",
-                "gpt-4-1106-preview",
-                "gpt-4-vision-preview",
-                "gpt-4",
-                "gpt-4-0314",
-                "gpt-4-0613",
-                "gpt-4-32k",
-                "gpt-4-32k-0314",
-                "gpt-4-32k-0613",
-                "gpt-3.5-turbo",
-                "gpt-3.5-turbo-16k",
-                "gpt-3.5-turbo-0301",
-                "gpt-3.5-turbo-0613",
-                "gpt-3.5-turbo-1106",
-                "gpt-3.5-turbo-0125",
-                "gpt-3.5-turbo-16k-0613",
-            ],
-        ],
+        model: Union[str, ChatModel],
         stream: bool,
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
@@ -1292,30 +1132,7 @@ async def create(
         self,
         *,
         messages: Iterable[ChatCompletionMessageParam],
-        model: Union[
-            str,
-            Literal[
-                "gpt-4-turbo",
-                "gpt-4-turbo-2024-04-09",
-                "gpt-4-0125-preview",
-                "gpt-4-turbo-preview",
-                "gpt-4-1106-preview",
-                "gpt-4-vision-preview",
-                "gpt-4",
-                "gpt-4-0314",
-                "gpt-4-0613",
-                "gpt-4-32k",
-                "gpt-4-32k-0314",
-                "gpt-4-32k-0613",
-                "gpt-3.5-turbo",
-                "gpt-3.5-turbo-16k",
-                "gpt-3.5-turbo-0301",
-                "gpt-3.5-turbo-0613",
-                "gpt-3.5-turbo-1106",
-                "gpt-3.5-turbo-0125",
-                "gpt-3.5-turbo-16k-0613",
-            ],
-        ],
+        model: Union[str, ChatModel],
         frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
         functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN,
diff --git a/src/openai/types/__init__.py b/src/openai/types/__init__.py
index 4bbcdddc2a..b6f35cfecf 100644
--- a/src/openai/types/__init__.py
+++ b/src/openai/types/__init__.py
@@ -11,6 +11,7 @@
     FunctionParameters as FunctionParameters,
 )
 from .embedding import Embedding as Embedding
+from .chat_model import ChatModel as ChatModel
 from .completion import Completion as Completion
 from .moderation import Moderation as Moderation
 from .batch_error import BatchError as BatchError
diff --git a/src/openai/types/chat/completion_create_params.py b/src/openai/types/chat/completion_create_params.py
index 1e0f7f8195..964b246c41 100644
--- a/src/openai/types/chat/completion_create_params.py
+++ b/src/openai/types/chat/completion_create_params.py
@@ -6,6 +6,7 @@
 from typing_extensions import Literal, Required, TypedDict
 
 from ...types import shared_params
+from ..chat_model import ChatModel
 from .chat_completion_tool_param import ChatCompletionToolParam
 from .chat_completion_message_param import ChatCompletionMessageParam
 from .chat_completion_tool_choice_option_param import ChatCompletionToolChoiceOptionParam
@@ -28,32 +29,7 @@ class CompletionCreateParamsBase(TypedDict, total=False):
     [Example Python code](https://cookbook.openai.com/examples/how_to_format_inputs_to_chatgpt_models).
     """
 
-    model: Required[
-        Union[
-            str,
-            Literal[
-                "gpt-4-turbo",
-                "gpt-4-turbo-2024-04-09",
-                "gpt-4-0125-preview",
-                "gpt-4-turbo-preview",
-                "gpt-4-1106-preview",
-                "gpt-4-vision-preview",
-                "gpt-4",
-                "gpt-4-0314",
-                "gpt-4-0613",
-                "gpt-4-32k",
-                "gpt-4-32k-0314",
-                "gpt-4-32k-0613",
-                "gpt-3.5-turbo",
-                "gpt-3.5-turbo-16k",
-                "gpt-3.5-turbo-0301",
-                "gpt-3.5-turbo-0613",
-                "gpt-3.5-turbo-1106",
-                "gpt-3.5-turbo-0125",
-                "gpt-3.5-turbo-16k-0613",
-            ],
-        ]
-    ]
+    model: Required[Union[str, ChatModel]]
     """ID of the model to use.
 
     See the
diff --git a/src/openai/types/chat_model.py b/src/openai/types/chat_model.py
new file mode 100644
index 0000000000..219dab5138
--- /dev/null
+++ b/src/openai/types/chat_model.py
@@ -0,0 +1,27 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing_extensions import Literal
+
+__all__ = ["ChatModel"]
+
+ChatModel = Literal[
+    "gpt-4-turbo",
+    "gpt-4-turbo-2024-04-09",
+    "gpt-4-0125-preview",
+    "gpt-4-turbo-preview",
+    "gpt-4-1106-preview",
+    "gpt-4-vision-preview",
+    "gpt-4",
+    "gpt-4-0314",
+    "gpt-4-0613",
+    "gpt-4-32k",
+    "gpt-4-32k-0314",
+    "gpt-4-32k-0613",
+    "gpt-3.5-turbo",
+    "gpt-3.5-turbo-16k",
+    "gpt-3.5-turbo-0301",
+    "gpt-3.5-turbo-0613",
+    "gpt-3.5-turbo-1106",
+    "gpt-3.5-turbo-0125",
+    "gpt-3.5-turbo-16k-0613",
+]
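
For downstream callers, here is a minimal, illustrative sketch of how code can use the extracted `ChatModel` alias after this change; the `ask` helper and the example prompt are hypothetical, and the snippet assumes this version of the library is installed with `OPENAI_API_KEY` set in the environment:

```python
from typing import Union

from openai import OpenAI
from openai.types import ChatModel

client = OpenAI()  # reads OPENAI_API_KEY from the environment


def ask(model: Union[str, ChatModel], prompt: str) -> str:
    # Hypothetical helper: `model` accepts the ChatModel literal values or any
    # plain string, mirroring the widened `Union[str, ChatModel]` parameter above.
    completion = client.chat.completions.create(
        model=model,
        messages=[{"role": "user", "content": prompt}],
    )
    return completion.choices[0].message.content or ""


# Known model names get autocomplete and type checking; arbitrary strings still pass.
print(ask("gpt-4-turbo", "Say hello."))
```

Because `ChatModel` is a `Literal` alias rather than an `enum.Enum`, plain strings keep working at runtime; the alias only narrows what type checkers and editors suggest.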