Commit 7da727a4
Changed files (40)
src/
  openai/
    _utils/
    resources/
      chat/
        completions/
      containers/
      fine_tuning/
        checkpoints/
      uploads/
      vector_stores/
    types/
      beta/
      evals/
      fine_tuning/
      graders/
      responses/
      vector_stores/
src/openai/_utils/_transform.py
@@ -16,6 +16,7 @@ from ._utils import (
lru_cache,
is_mapping,
is_iterable,
+ is_sequence,
)
from .._files import is_base64_file_input
from ._typing import (
@@ -24,6 +25,7 @@ from ._typing import (
extract_type_arg,
is_iterable_type,
is_required_type,
+ is_sequence_type,
is_annotated_type,
strip_annotated_type,
)
@@ -184,6 +186,8 @@ def _transform_recursive(
(is_list_type(stripped_type) and is_list(data))
# Iterable[T]
or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
+ # Sequence[T]
+ or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str))
):
# dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually
# intended as an iterable, so we don't transform it.
@@ -346,6 +350,8 @@ async def _async_transform_recursive(
(is_list_type(stripped_type) and is_list(data))
# Iterable[T]
or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
+ # Sequence[T]
+ or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str))
):
# dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually
# intended as an iterable, so we don't transform it.
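
These transform hunks are the runtime half of the commit: parameters across the SDK move from List[str] to SequenceNotStr[str] (imported from openai/_types), and values annotated as a sequence type are now transformed element-wise just like lists. SequenceNotStr exists because a plain typing.Sequence[str] would be a footgun: str is itself a sequence of strings, so a bare string would silently type-check where a list of strings is meant. A minimal sketch of the usual protocol trick, as a simplified stand-in for the real definition in openai/_types.py:

    from typing import Iterator, Protocol, TypeVar

    T_co = TypeVar("T_co", covariant=True)

    class SequenceNotStr(Protocol[T_co]):
        # Structural discriminator: list.__contains__ and tuple.__contains__
        # accept `object`, while str.__contains__ only accepts `str`, so str
        # fails to match this protocol and type checkers reject it.
        def __contains__(self, value: object, /) -> bool: ...
        def __getitem__(self, index: int, /) -> T_co: ...
        def __len__(self) -> int: ...
        def __iter__(self) -> Iterator[T_co]: ...

Lists and tuples satisfy such a protocol while bare strings do not, which is exactly the split every parameter changed below wants.
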
src/openai/resources/chat/completions/completions.py
@@ -19,7 +19,7 @@ from .messages import (
MessagesWithStreamingResponse,
AsyncMessagesWithStreamingResponse,
)
-from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr
from ...._utils import required_args, maybe_transform, async_maybe_transform
from ...._compat import cached_property
from ...._resource import SyncAPIResource, AsyncAPIResource
@@ -260,7 +260,7 @@ class Completions(SyncAPIResource):
safety_identifier: str | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
store: Optional[bool] | NotGiven = NOT_GIVEN,
stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
@@ -549,7 +549,7 @@ class Completions(SyncAPIResource):
safety_identifier: str | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
store: Optional[bool] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
@@ -837,7 +837,7 @@ class Completions(SyncAPIResource):
safety_identifier: str | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
store: Optional[bool] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
@@ -1124,7 +1124,7 @@ class Completions(SyncAPIResource):
safety_identifier: str | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
store: Optional[bool] | NotGiven = NOT_GIVEN,
stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
@@ -1696,7 +1696,7 @@ class AsyncCompletions(AsyncAPIResource):
safety_identifier: str | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
store: Optional[bool] | NotGiven = NOT_GIVEN,
stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
@@ -1985,7 +1985,7 @@ class AsyncCompletions(AsyncAPIResource):
safety_identifier: str | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
store: Optional[bool] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
@@ -2273,7 +2273,7 @@ class AsyncCompletions(AsyncAPIResource):
safety_identifier: str | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
store: Optional[bool] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
@@ -2560,7 +2560,7 @@ class AsyncCompletions(AsyncAPIResource):
safety_identifier: str | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
store: Optional[bool] | NotGiven = NOT_GIVEN,
stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
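
The effect on the chat surface: stop now accepts any non-string sequence, not just a list. A hedged usage sketch (model name and prompt are illustrative; assumes OPENAI_API_KEY is set in the environment):

    from openai import OpenAI

    client = OpenAI()

    completion = client.chat.completions.create(
        model="gpt-4o",  # illustrative model name
        messages=[{"role": "user", "content": "Count upward from one."}],
        stop=("four", "FOUR"),  # a tuple now type-checks; previously only List[str] did
    )
    print(completion.choices[0].message.content)
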
src/openai/resources/containers/containers.py
@@ -2,14 +2,13 @@
from __future__ import annotations
-from typing import List
from typing_extensions import Literal
import httpx
from ... import _legacy_response
from ...types import container_list_params, container_create_params
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -60,7 +59,7 @@ class Containers(SyncAPIResource):
*,
name: str,
expires_after: container_create_params.ExpiresAfter | NotGiven = NOT_GIVEN,
- file_ids: List[str] | NotGiven = NOT_GIVEN,
+ file_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -256,7 +255,7 @@ class AsyncContainers(AsyncAPIResource):
*,
name: str,
expires_after: container_create_params.ExpiresAfter | NotGiven = NOT_GIVEN,
- file_ids: List[str] | NotGiven = NOT_GIVEN,
+ file_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
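
Same substitution for container creation; a sketch with placeholder file IDs:

    from openai import OpenAI

    client = OpenAI()

    container = client.containers.create(
        name="demo-container",
        file_ids=("file-abc", "file-def"),  # placeholder IDs; any non-str sequence is accepted
    )
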
src/openai/resources/fine_tuning/checkpoints/permissions.py
@@ -2,13 +2,12 @@
from __future__ import annotations
-from typing import List
from typing_extensions import Literal
import httpx
from .... import _legacy_response
-from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr
from ...._utils import maybe_transform, async_maybe_transform
from ...._compat import cached_property
from ...._resource import SyncAPIResource, AsyncAPIResource
@@ -47,7 +46,7 @@ class Permissions(SyncAPIResource):
self,
fine_tuned_model_checkpoint: str,
*,
- project_ids: List[str],
+ project_ids: SequenceNotStr[str],
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -215,7 +214,7 @@ class AsyncPermissions(AsyncAPIResource):
self,
fine_tuned_model_checkpoint: str,
*,
- project_ids: List[str],
+ project_ids: SequenceNotStr[str],
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
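
For checkpoint permissions, project_ids is the loosened parameter. A sketch with placeholder identifiers:

    from openai import OpenAI

    client = OpenAI()

    client.fine_tuning.checkpoints.permissions.create(
        fine_tuned_model_checkpoint="ft:gpt-4o-mini:org::ckpt-step-100",  # placeholder checkpoint name
        project_ids=("proj_one", "proj_two"),  # tuple accepted under SequenceNotStr
    )
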
src/openai/resources/uploads/uploads.py
@@ -8,7 +8,6 @@ import logging
import builtins
from typing import List, overload
from pathlib import Path
-
import anyio
import httpx
@@ -22,7 +21,7 @@ from .parts import (
AsyncPartsWithStreamingResponse,
)
from ...types import FilePurpose, upload_create_params, upload_complete_params
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -281,7 +280,7 @@ class Uploads(SyncAPIResource):
self,
upload_id: str,
*,
- part_ids: List[str],
+ part_ids: SequenceNotStr[str],
md5: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
@@ -589,7 +588,7 @@ class AsyncUploads(AsyncAPIResource):
self,
upload_id: str,
*,
- part_ids: List[str],
+ part_ids: SequenceNotStr[str],
md5: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
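
Uploads.complete keeps its ordered semantics; only the accepted container type widens. A sketch with placeholder IDs:

    from openai import OpenAI

    client = OpenAI()

    upload = client.uploads.complete(
        upload_id="upload_abc123",      # placeholder ID
        part_ids=("part_1", "part_2"),  # order still matters; a tuple now type-checks
    )
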
src/openai/resources/vector_stores/file_batches.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import asyncio
-from typing import Dict, List, Iterable, Optional
+from typing import Dict, Iterable, Optional
from typing_extensions import Union, Literal
from concurrent.futures import Future, ThreadPoolExecutor, as_completed
@@ -12,7 +12,7 @@ import sniffio
from ... import _legacy_response
from ...types import FileChunkingStrategyParam
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes
+from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes, SequenceNotStr
from ..._utils import is_given, maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -52,7 +52,7 @@ class FileBatches(SyncAPIResource):
self,
vector_store_id: str,
*,
- file_ids: List[str],
+ file_ids: SequenceNotStr[str],
attributes: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN,
chunking_strategy: FileChunkingStrategyParam | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -389,7 +389,7 @@ class AsyncFileBatches(AsyncAPIResource):
self,
vector_store_id: str,
*,
- file_ids: List[str],
+ file_ids: SequenceNotStr[str],
attributes: Optional[Dict[str, Union[str, float, bool]]] | NotGiven = NOT_GIVEN,
chunking_strategy: FileChunkingStrategyParam | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
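
File batch creation follows suit; a sketch with placeholder IDs:

    from openai import OpenAI

    client = OpenAI()

    batch = client.vector_stores.file_batches.create(
        vector_store_id="vs_abc123",        # placeholder ID
        file_ids=("file-abc", "file-def"),  # tuple accepted
    )
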
src/openai/resources/vector_stores/vector_stores.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import List, Union, Optional
+from typing import Union, Optional
from typing_extensions import Literal
import httpx
@@ -23,7 +23,7 @@ from ...types import (
vector_store_search_params,
vector_store_update_params,
)
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -80,7 +80,7 @@ class VectorStores(SyncAPIResource):
*,
chunking_strategy: FileChunkingStrategyParam | NotGiven = NOT_GIVEN,
expires_after: vector_store_create_params.ExpiresAfter | NotGiven = NOT_GIVEN,
- file_ids: List[str] | NotGiven = NOT_GIVEN,
+ file_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN,
metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
name: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -332,7 +332,7 @@ class VectorStores(SyncAPIResource):
self,
vector_store_id: str,
*,
- query: Union[str, List[str]],
+ query: Union[str, SequenceNotStr[str]],
filters: vector_store_search_params.Filters | NotGiven = NOT_GIVEN,
max_num_results: int | NotGiven = NOT_GIVEN,
ranking_options: vector_store_search_params.RankingOptions | NotGiven = NOT_GIVEN,
@@ -425,7 +425,7 @@ class AsyncVectorStores(AsyncAPIResource):
*,
chunking_strategy: FileChunkingStrategyParam | NotGiven = NOT_GIVEN,
expires_after: vector_store_create_params.ExpiresAfter | NotGiven = NOT_GIVEN,
- file_ids: List[str] | NotGiven = NOT_GIVEN,
+ file_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN,
metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
name: str | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -677,7 +677,7 @@ class AsyncVectorStores(AsyncAPIResource):
self,
vector_store_id: str,
*,
- query: Union[str, List[str]],
+ query: Union[str, SequenceNotStr[str]],
filters: vector_store_search_params.Filters | NotGiven = NOT_GIVEN,
max_num_results: int | NotGiven = NOT_GIVEN,
ranking_options: vector_store_search_params.RankingOptions | NotGiven = NOT_GIVEN,
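
Both create and search are touched here; the multi-query form of search is the interesting one. A sketch with placeholder values:

    from openai import OpenAI

    client = OpenAI()

    results = client.vector_stores.search(
        vector_store_id="vs_abc123",              # placeholder ID
        query=("error handling", "retry logic"),  # tuple of query strings
    )
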
src/openai/resources/completions.py
@@ -2,14 +2,14 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable, Optional
+from typing import Dict, Union, Iterable, Optional
from typing_extensions import Literal, overload
import httpx
from .. import _legacy_response
from ..types import completion_create_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr
from .._utils import required_args, maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -49,7 +49,7 @@ class Completions(SyncAPIResource):
self,
*,
model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]], None],
best_of: Optional[int] | NotGiven = NOT_GIVEN,
echo: Optional[bool] | NotGiven = NOT_GIVEN,
frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
@@ -59,7 +59,7 @@ class Completions(SyncAPIResource):
n: Optional[int] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
suffix: Optional[str] | NotGiven = NOT_GIVEN,
@@ -204,7 +204,7 @@ class Completions(SyncAPIResource):
self,
*,
model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]], None],
stream: Literal[True],
best_of: Optional[int] | NotGiven = NOT_GIVEN,
echo: Optional[bool] | NotGiven = NOT_GIVEN,
@@ -215,7 +215,7 @@ class Completions(SyncAPIResource):
n: Optional[int] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
suffix: Optional[str] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
@@ -359,7 +359,7 @@ class Completions(SyncAPIResource):
self,
*,
model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]], None],
stream: bool,
best_of: Optional[int] | NotGiven = NOT_GIVEN,
echo: Optional[bool] | NotGiven = NOT_GIVEN,
@@ -370,7 +370,7 @@ class Completions(SyncAPIResource):
n: Optional[int] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
suffix: Optional[str] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
@@ -514,7 +514,7 @@ class Completions(SyncAPIResource):
self,
*,
model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]], None],
best_of: Optional[int] | NotGiven = NOT_GIVEN,
echo: Optional[bool] | NotGiven = NOT_GIVEN,
frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
@@ -524,7 +524,7 @@ class Completions(SyncAPIResource):
n: Optional[int] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
suffix: Optional[str] | NotGiven = NOT_GIVEN,
@@ -599,7 +599,7 @@ class AsyncCompletions(AsyncAPIResource):
self,
*,
model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]], None],
best_of: Optional[int] | NotGiven = NOT_GIVEN,
echo: Optional[bool] | NotGiven = NOT_GIVEN,
frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
@@ -609,7 +609,7 @@ class AsyncCompletions(AsyncAPIResource):
n: Optional[int] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
suffix: Optional[str] | NotGiven = NOT_GIVEN,
@@ -754,7 +754,7 @@ class AsyncCompletions(AsyncAPIResource):
self,
*,
model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]], None],
stream: Literal[True],
best_of: Optional[int] | NotGiven = NOT_GIVEN,
echo: Optional[bool] | NotGiven = NOT_GIVEN,
@@ -765,7 +765,7 @@ class AsyncCompletions(AsyncAPIResource):
n: Optional[int] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
suffix: Optional[str] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
@@ -909,7 +909,7 @@ class AsyncCompletions(AsyncAPIResource):
self,
*,
model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]], None],
stream: bool,
best_of: Optional[int] | NotGiven = NOT_GIVEN,
echo: Optional[bool] | NotGiven = NOT_GIVEN,
@@ -920,7 +920,7 @@ class AsyncCompletions(AsyncAPIResource):
n: Optional[int] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
suffix: Optional[str] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
@@ -1064,7 +1064,7 @@ class AsyncCompletions(AsyncAPIResource):
self,
*,
model: Union[str, Literal["gpt-3.5-turbo-instruct", "davinci-002", "babbage-002"]],
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None],
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]], None],
best_of: Optional[int] | NotGiven = NOT_GIVEN,
echo: Optional[bool] | NotGiven = NOT_GIVEN,
frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
@@ -1074,7 +1074,7 @@ class AsyncCompletions(AsyncAPIResource):
n: Optional[int] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
- stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
+ stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
suffix: Optional[str] | NotGiven = NOT_GIVEN,
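
On the legacy completions endpoint, both prompt and stop widen. A sketch (prompts are illustrative):

    from openai import OpenAI

    client = OpenAI()

    completion = client.completions.create(
        model="gpt-3.5-turbo-instruct",
        prompt=("Say hello.", "Say goodbye."),  # batch of prompts as a tuple
        stop=("\n",),                           # tuple of stop sequences
    )
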
src/openai/resources/embeddings.py
@@ -4,14 +4,14 @@ from __future__ import annotations
import array
import base64
-from typing import List, Union, Iterable, cast
+from typing import Union, Iterable, cast
from typing_extensions import Literal
import httpx
from .. import _legacy_response
from ..types import embedding_create_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr
from .._utils import is_given, maybe_transform
from .._compat import cached_property
from .._extras import numpy as np, has_numpy
@@ -47,7 +47,7 @@ class Embeddings(SyncAPIResource):
def create(
self,
*,
- input: Union[str, List[str], Iterable[int], Iterable[Iterable[int]]],
+ input: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]]],
model: Union[str, EmbeddingModel],
dimensions: int | NotGiven = NOT_GIVEN,
encoding_format: Literal["float", "base64"] | NotGiven = NOT_GIVEN,
@@ -166,7 +166,7 @@ class AsyncEmbeddings(AsyncAPIResource):
async def create(
self,
*,
- input: Union[str, List[str], Iterable[int], Iterable[Iterable[int]]],
+ input: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]]],
model: Union[str, EmbeddingModel],
dimensions: int | NotGiven = NOT_GIVEN,
encoding_format: Literal["float", "base64"] | NotGiven = NOT_GIVEN,
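
Embeddings commonly receive batches, so this is one of the more useful loosenings. A sketch:

    from openai import OpenAI

    client = OpenAI()

    response = client.embeddings.create(
        model="text-embedding-3-small",
        input=("first document", "second document"),  # any non-str sequence of strings
    )
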
src/openai/resources/images.py
@@ -2,14 +2,14 @@
from __future__ import annotations
-from typing import List, Union, Mapping, Optional, cast
+from typing import Union, Mapping, Optional, cast
from typing_extensions import Literal, overload
import httpx
from .. import _legacy_response
from ..types import image_edit_params, image_generate_params, image_create_variation_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes
+from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes, SequenceNotStr
from .._utils import extract_files, required_args, maybe_transform, deepcopy_minimal, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -121,7 +121,7 @@ class Images(SyncAPIResource):
def edit(
self,
*,
- image: Union[FileTypes, List[FileTypes]],
+ image: Union[FileTypes, SequenceNotStr[FileTypes]],
prompt: str,
background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
input_fidelity: Optional[Literal["high", "low"]] | NotGiven = NOT_GIVEN,
@@ -234,7 +234,7 @@ class Images(SyncAPIResource):
def edit(
self,
*,
- image: Union[FileTypes, List[FileTypes]],
+ image: Union[FileTypes, SequenceNotStr[FileTypes]],
prompt: str,
stream: Literal[True],
background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
@@ -347,7 +347,7 @@ class Images(SyncAPIResource):
def edit(
self,
*,
- image: Union[FileTypes, List[FileTypes]],
+ image: Union[FileTypes, SequenceNotStr[FileTypes]],
prompt: str,
stream: bool,
background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
@@ -460,7 +460,7 @@ class Images(SyncAPIResource):
def edit(
self,
*,
- image: Union[FileTypes, List[FileTypes]],
+ image: Union[FileTypes, SequenceNotStr[FileTypes]],
prompt: str,
background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
input_fidelity: Optional[Literal["high", "low"]] | NotGiven = NOT_GIVEN,
@@ -1009,7 +1009,7 @@ class AsyncImages(AsyncAPIResource):
async def edit(
self,
*,
- image: Union[FileTypes, List[FileTypes]],
+ image: Union[FileTypes, SequenceNotStr[FileTypes]],
prompt: str,
background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
input_fidelity: Optional[Literal["high", "low"]] | NotGiven = NOT_GIVEN,
@@ -1122,7 +1122,7 @@ class AsyncImages(AsyncAPIResource):
async def edit(
self,
*,
- image: Union[FileTypes, List[FileTypes]],
+ image: Union[FileTypes, SequenceNotStr[FileTypes]],
prompt: str,
stream: Literal[True],
background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
@@ -1235,7 +1235,7 @@ class AsyncImages(AsyncAPIResource):
async def edit(
self,
*,
- image: Union[FileTypes, List[FileTypes]],
+ image: Union[FileTypes, SequenceNotStr[FileTypes]],
prompt: str,
stream: bool,
background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
@@ -1348,7 +1348,7 @@ class AsyncImages(AsyncAPIResource):
async def edit(
self,
*,
- image: Union[FileTypes, List[FileTypes]],
+ image: Union[FileTypes, SequenceNotStr[FileTypes]],
prompt: str,
background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
input_fidelity: Optional[Literal["high", "low"]] | NotGiven = NOT_GIVEN,
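
For images.edit, the widened parameter is the image list itself. A sketch assuming two local PNG files (file names are placeholders; multi-image edits are a gpt-image-1 feature per the docstrings in image_edit_params below):

    from openai import OpenAI

    client = OpenAI()

    with open("base.png", "rb") as base, open("overlay.png", "rb") as overlay:
        result = client.images.edit(
            model="gpt-image-1",
            image=(base, overlay),  # tuple of file handles now type-checks
            prompt="Merge these into a single scene.",
        )
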
src/openai/resources/moderations.py
@@ -2,13 +2,13 @@
from __future__ import annotations
-from typing import List, Union, Iterable
+from typing import Union, Iterable
import httpx
from .. import _legacy_response
from ..types import moderation_create_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -44,7 +44,7 @@ class Moderations(SyncAPIResource):
def create(
self,
*,
- input: Union[str, List[str], Iterable[ModerationMultiModalInputParam]],
+ input: Union[str, SequenceNotStr[str], Iterable[ModerationMultiModalInputParam]],
model: Union[str, ModerationModel] | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
@@ -114,7 +114,7 @@ class AsyncModerations(AsyncAPIResource):
async def create(
self,
*,
- input: Union[str, List[str], Iterable[ModerationMultiModalInputParam]],
+ input: Union[str, SequenceNotStr[str], Iterable[ModerationMultiModalInputParam]],
model: Union[str, ModerationModel] | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
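
Moderation input widens the same way. A sketch:

    from openai import OpenAI

    client = OpenAI()

    report = client.moderations.create(
        input=("first text to screen", "second text to screen"),  # tuple of strings
    )
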
src/openai/types/beta/assistant_create_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Union, Iterable, Optional
+from typing import Union, Iterable, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
from ..shared.chat_model import ChatModel
from .assistant_tool_param import AssistantToolParam
from ..shared_params.metadata import Metadata
@@ -123,7 +124,7 @@ class AssistantCreateParams(TypedDict, total=False):
class ToolResourcesCodeInterpreter(TypedDict, total=False):
- file_ids: List[str]
+ file_ids: SequenceNotStr[str]
"""
A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
available to the `code_interpreter` tool. There can be a maximum of 20 files
@@ -170,7 +171,7 @@ class ToolResourcesFileSearchVectorStore(TypedDict, total=False):
If not set, will use the `auto` strategy.
"""
- file_ids: List[str]
+ file_ids: SequenceNotStr[str]
"""
A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to
add to the vector store. There can be a maximum of 10000 files in a vector
@@ -189,7 +190,7 @@ class ToolResourcesFileSearchVectorStore(TypedDict, total=False):
class ToolResourcesFileSearch(TypedDict, total=False):
- vector_store_ids: List[str]
+ vector_store_ids: SequenceNotStr[str]
"""
The
[vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
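
From here down, the same substitution runs through the param TypedDicts. A sketch of what it buys callers who build params dicts by hand, using the ToolResourcesCodeInterpreter shape shown in the hunk above (file IDs are placeholders):

    from openai.types.beta import assistant_create_params

    code_interpreter: assistant_create_params.ToolResourcesCodeInterpreter = {
        "file_ids": ("file-abc", "file-def"),  # tuple now satisfies SequenceNotStr[str]
    }
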
src/openai/types/beta/assistant_update_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Union, Iterable, Optional
+from typing import Union, Iterable, Optional
from typing_extensions import Literal, TypedDict
+from ..._types import SequenceNotStr
from .assistant_tool_param import AssistantToolParam
from ..shared_params.metadata import Metadata
from ..shared.reasoning_effort import ReasoningEffort
@@ -158,7 +159,7 @@ class AssistantUpdateParams(TypedDict, total=False):
class ToolResourcesCodeInterpreter(TypedDict, total=False):
- file_ids: List[str]
+ file_ids: SequenceNotStr[str]
"""
Overrides the list of
[file](https://platform.openai.com/docs/api-reference/files) IDs made available
@@ -168,7 +169,7 @@ class ToolResourcesCodeInterpreter(TypedDict, total=False):
class ToolResourcesFileSearch(TypedDict, total=False):
- vector_store_ids: List[str]
+ vector_store_ids: SequenceNotStr[str]
"""
Overrides the
[vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
src/openai/types/beta/thread_create_and_run_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Union, Iterable, Optional
+from typing import Union, Iterable, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
from ..shared.chat_model import ChatModel
from .assistant_tool_param import AssistantToolParam
from ..shared_params.metadata import Metadata
@@ -217,7 +218,7 @@ class ThreadMessage(TypedDict, total=False):
class ThreadToolResourcesCodeInterpreter(TypedDict, total=False):
- file_ids: List[str]
+ file_ids: SequenceNotStr[str]
"""
A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
available to the `code_interpreter` tool. There can be a maximum of 20 files
@@ -265,7 +266,7 @@ class ThreadToolResourcesFileSearchVectorStore(TypedDict, total=False):
If not set, will use the `auto` strategy.
"""
- file_ids: List[str]
+ file_ids: SequenceNotStr[str]
"""
A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to
add to the vector store. There can be a maximum of 10000 files in a vector
@@ -284,7 +285,7 @@ class ThreadToolResourcesFileSearchVectorStore(TypedDict, total=False):
class ThreadToolResourcesFileSearch(TypedDict, total=False):
- vector_store_ids: List[str]
+ vector_store_ids: SequenceNotStr[str]
"""
The
[vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
@@ -334,7 +335,7 @@ class Thread(TypedDict, total=False):
class ToolResourcesCodeInterpreter(TypedDict, total=False):
- file_ids: List[str]
+ file_ids: SequenceNotStr[str]
"""
A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
available to the `code_interpreter` tool. There can be a maximum of 20 files
@@ -343,7 +344,7 @@ class ToolResourcesCodeInterpreter(TypedDict, total=False):
class ToolResourcesFileSearch(TypedDict, total=False):
- vector_store_ids: List[str]
+ vector_store_ids: SequenceNotStr[str]
"""
The ID of the
[vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
src/openai/types/beta/thread_create_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Union, Iterable, Optional
+from typing import Union, Iterable, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
from ..shared_params.metadata import Metadata
from .code_interpreter_tool_param import CodeInterpreterToolParam
from .threads.message_content_part_param import MessageContentPartParam
@@ -96,7 +97,7 @@ class Message(TypedDict, total=False):
class ToolResourcesCodeInterpreter(TypedDict, total=False):
- file_ids: List[str]
+ file_ids: SequenceNotStr[str]
"""
A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
available to the `code_interpreter` tool. There can be a maximum of 20 files
@@ -143,7 +144,7 @@ class ToolResourcesFileSearchVectorStore(TypedDict, total=False):
If not set, will use the `auto` strategy.
"""
- file_ids: List[str]
+ file_ids: SequenceNotStr[str]
"""
A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to
add to the vector store. There can be a maximum of 10000 files in a vector
@@ -162,7 +163,7 @@ class ToolResourcesFileSearchVectorStore(TypedDict, total=False):
class ToolResourcesFileSearch(TypedDict, total=False):
- vector_store_ids: List[str]
+ vector_store_ids: SequenceNotStr[str]
"""
The
[vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
src/openai/types/beta/thread_update_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Optional
+from typing import Optional
from typing_extensions import TypedDict
+from ..._types import SequenceNotStr
from ..shared_params.metadata import Metadata
__all__ = ["ThreadUpdateParams", "ToolResources", "ToolResourcesCodeInterpreter", "ToolResourcesFileSearch"]
@@ -31,7 +32,7 @@ class ThreadUpdateParams(TypedDict, total=False):
class ToolResourcesCodeInterpreter(TypedDict, total=False):
- file_ids: List[str]
+ file_ids: SequenceNotStr[str]
"""
A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
available to the `code_interpreter` tool. There can be a maximum of 20 files
@@ -40,7 +41,7 @@ class ToolResourcesCodeInterpreter(TypedDict, total=False):
class ToolResourcesFileSearch(TypedDict, total=False):
- vector_store_ids: List[str]
+ vector_store_ids: SequenceNotStr[str]
"""
The
[vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
src/openai/types/chat/completion_create_params.py
@@ -5,6 +5,7 @@ from __future__ import annotations
from typing import Dict, List, Union, Iterable, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
from ..shared.chat_model import ChatModel
from ..shared_params.metadata import Metadata
from ..shared.reasoning_effort import ReasoningEffort
@@ -243,7 +244,7 @@ class CompletionCreateParamsBase(TypedDict, total=False):
parameter.
"""
- stop: Union[Optional[str], List[str], None]
+ stop: Union[Optional[str], SequenceNotStr[str], None]
"""Not supported with latest reasoning models `o3` and `o4-mini`.
Up to 4 sequences where the API will stop generating further tokens. The
src/openai/types/evals/run_create_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable, Optional
+from typing import Dict, Union, Iterable, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
from ..responses.tool_param import ToolParam
from ..shared_params.metadata import Metadata
from ..shared.reasoning_effort import ReasoningEffort
@@ -119,13 +120,13 @@ class DataSourceCreateEvalResponsesRunDataSourceSourceResponses(TypedDict, total
temperature: Optional[float]
"""Sampling temperature. This is a query parameter used to select responses."""
- tools: Optional[List[str]]
+ tools: Optional[SequenceNotStr[str]]
"""List of tool names. This is a query parameter used to select responses."""
top_p: Optional[float]
"""Nucleus sampling parameter. This is a query parameter used to select responses."""
- users: Optional[List[str]]
+ users: Optional[SequenceNotStr[str]]
"""List of user identifiers. This is a query parameter used to select responses."""
src/openai/types/fine_tuning/checkpoints/permission_create_params.py
@@ -2,12 +2,13 @@
from __future__ import annotations
-from typing import List
from typing_extensions import Required, TypedDict
+from ...._types import SequenceNotStr
+
__all__ = ["PermissionCreateParams"]
class PermissionCreateParams(TypedDict, total=False):
- project_ids: Required[List[str]]
+ project_ids: Required[SequenceNotStr[str]]
"""The project identifiers to grant access to."""
src/openai/types/fine_tuning/job_create_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Union, Iterable, Optional
+from typing import Union, Iterable, Optional
from typing_extensions import Literal, Required, TypedDict
+from ..._types import SequenceNotStr
from .dpo_method_param import DpoMethodParam
from ..shared_params.metadata import Metadata
from .supervised_method_param import SupervisedMethodParam
@@ -137,7 +138,7 @@ class IntegrationWandb(TypedDict, total=False):
If not set, we will use the Job ID as the name.
"""
- tags: List[str]
+ tags: SequenceNotStr[str]
"""A list of tags to be attached to the newly created run.
These tags are passed through directly to WandB. Some default tags are generated
src/openai/types/graders/label_model_grader_param.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Union, Iterable
+from typing import Union, Iterable
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
from ..responses.response_input_text_param import ResponseInputTextParam
__all__ = ["LabelModelGraderParam", "Input", "InputContent", "InputContentOutputText", "InputContentInputImage"]
@@ -54,7 +55,7 @@ class Input(TypedDict, total=False):
class LabelModelGraderParam(TypedDict, total=False):
input: Required[Iterable[Input]]
- labels: Required[List[str]]
+ labels: Required[SequenceNotStr[str]]
"""The labels to assign to each item in the evaluation."""
model: Required[str]
@@ -63,7 +64,7 @@ class LabelModelGraderParam(TypedDict, total=False):
name: Required[str]
"""The name of the grader."""
- passing_labels: Required[List[str]]
+ passing_labels: Required[SequenceNotStr[str]]
"""The labels that indicate a passing result. Must be a subset of labels."""
type: Required[Literal["label_model"]]
src/openai/types/realtime/realtime_tools_config_param.py
@@ -5,6 +5,8 @@ from __future__ import annotations
from typing import Dict, List, Union, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
+
__all__ = [
"RealtimeToolsConfigParam",
"RealtimeToolsConfigUnionParam",
@@ -45,11 +47,11 @@ class McpAllowedToolsMcpToolFilter(TypedDict, total=False):
it will match this filter.
"""
- tool_names: List[str]
+ tool_names: SequenceNotStr[str]
"""List of allowed tool names."""
-McpAllowedTools: TypeAlias = Union[List[str], McpAllowedToolsMcpToolFilter]
+McpAllowedTools: TypeAlias = Union[SequenceNotStr[str], McpAllowedToolsMcpToolFilter]
class McpRequireApprovalMcpToolApprovalFilterAlways(TypedDict, total=False):
@@ -61,7 +63,7 @@ class McpRequireApprovalMcpToolApprovalFilterAlways(TypedDict, total=False):
it will match this filter.
"""
- tool_names: List[str]
+ tool_names: SequenceNotStr[str]
"""List of allowed tool names."""
@@ -74,7 +76,7 @@ class McpRequireApprovalMcpToolApprovalFilterNever(TypedDict, total=False):
it will match this filter.
"""
- tool_names: List[str]
+ tool_names: SequenceNotStr[str]
"""List of allowed tool names."""
src/openai/types/realtime/realtime_tools_config_union_param.py
@@ -2,9 +2,11 @@
from __future__ import annotations
-from typing import Dict, List, Union, Optional
+from typing import Dict, Union, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
+
__all__ = [
"RealtimeToolsConfigUnionParam",
"Function",
@@ -44,11 +46,11 @@ class McpAllowedToolsMcpToolFilter(TypedDict, total=False):
it will match this filter.
"""
- tool_names: List[str]
+ tool_names: SequenceNotStr[str]
"""List of allowed tool names."""
-McpAllowedTools: TypeAlias = Union[List[str], McpAllowedToolsMcpToolFilter]
+McpAllowedTools: TypeAlias = Union[SequenceNotStr[str], McpAllowedToolsMcpToolFilter]
class McpRequireApprovalMcpToolApprovalFilterAlways(TypedDict, total=False):
@@ -60,7 +62,7 @@ class McpRequireApprovalMcpToolApprovalFilterAlways(TypedDict, total=False):
it will match this filter.
"""
- tool_names: List[str]
+ tool_names: SequenceNotStr[str]
"""List of allowed tool names."""
@@ -73,7 +75,7 @@ class McpRequireApprovalMcpToolApprovalFilterNever(TypedDict, total=False):
it will match this filter.
"""
- tool_names: List[str]
+ tool_names: SequenceNotStr[str]
"""List of allowed tool names."""
src/openai/types/responses/file_search_tool_param.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Union, Optional
+from typing import Union, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
from ..shared_params.compound_filter import CompoundFilter
from ..shared_params.comparison_filter import ComparisonFilter
@@ -29,7 +30,7 @@ class FileSearchToolParam(TypedDict, total=False):
type: Required[Literal["file_search"]]
"""The type of the file search tool. Always `file_search`."""
- vector_store_ids: Required[List[str]]
+ vector_store_ids: Required[SequenceNotStr[str]]
"""The IDs of the vector stores to search."""
filters: Optional[Filters]
src/openai/types/responses/response_computer_tool_call_param.py
@@ -2,9 +2,11 @@
from __future__ import annotations
-from typing import List, Union, Iterable
+from typing import Union, Iterable
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
+
__all__ = [
"ResponseComputerToolCallParam",
"Action",
@@ -86,7 +88,7 @@ class ActionDrag(TypedDict, total=False):
class ActionKeypress(TypedDict, total=False):
- keys: Required[List[str]]
+ keys: Required[SequenceNotStr[str]]
"""The combination of keys the model is requesting to be pressed.
This is an array of strings, each representing a key.
src/openai/types/responses/response_file_search_tool_call_param.py
@@ -2,9 +2,11 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable, Optional
+from typing import Dict, Union, Iterable, Optional
from typing_extensions import Literal, Required, TypedDict
+from ..._types import SequenceNotStr
+
__all__ = ["ResponseFileSearchToolCallParam", "Result"]
@@ -35,7 +37,7 @@ class ResponseFileSearchToolCallParam(TypedDict, total=False):
id: Required[str]
"""The unique ID of the file search tool call."""
- queries: Required[List[str]]
+ queries: Required[SequenceNotStr[str]]
"""The queries used to search for files."""
status: Required[Literal["in_progress", "searching", "completed", "incomplete", "failed"]]
src/openai/types/responses/response_input_item_param.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable, Optional
+from typing import Dict, Union, Iterable, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
from .easy_input_message_param import EasyInputMessageParam
from .response_output_message_param import ResponseOutputMessageParam
from .response_reasoning_item_param import ResponseReasoningItemParam
@@ -135,7 +136,7 @@ class ImageGenerationCall(TypedDict, total=False):
class LocalShellCallAction(TypedDict, total=False):
- command: Required[List[str]]
+ command: Required[SequenceNotStr[str]]
"""The command to run."""
env: Required[Dict[str, str]]
src/openai/types/responses/response_input_param.py
@@ -5,6 +5,7 @@ from __future__ import annotations
from typing import Dict, List, Union, Iterable, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
from .easy_input_message_param import EasyInputMessageParam
from .response_output_message_param import ResponseOutputMessageParam
from .response_reasoning_item_param import ResponseReasoningItemParam
@@ -136,7 +137,7 @@ class ImageGenerationCall(TypedDict, total=False):
class LocalShellCallAction(TypedDict, total=False):
- command: Required[List[str]]
+ command: Required[SequenceNotStr[str]]
"""The command to run."""
env: Required[Dict[str, str]]
src/openai/types/responses/tool_param.py
@@ -2,10 +2,11 @@
from __future__ import annotations
-from typing import Dict, List, Union, Optional
+from typing import Dict, Union, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
from ..chat import ChatCompletionFunctionToolParam
+from ..._types import SequenceNotStr
from .custom_tool_param import CustomToolParam
from .computer_tool_param import ComputerToolParam
from .function_tool_param import FunctionToolParam
@@ -40,11 +41,11 @@ class McpAllowedToolsMcpToolFilter(TypedDict, total=False):
it will match this filter.
"""
- tool_names: List[str]
+ tool_names: SequenceNotStr[str]
"""List of allowed tool names."""
-McpAllowedTools: TypeAlias = Union[List[str], McpAllowedToolsMcpToolFilter]
+McpAllowedTools: TypeAlias = Union[SequenceNotStr[str], McpAllowedToolsMcpToolFilter]
class McpRequireApprovalMcpToolApprovalFilterAlways(TypedDict, total=False):
@@ -56,7 +57,7 @@ class McpRequireApprovalMcpToolApprovalFilterAlways(TypedDict, total=False):
it will match this filter.
"""
- tool_names: List[str]
+ tool_names: SequenceNotStr[str]
"""List of allowed tool names."""
@@ -69,7 +70,7 @@ class McpRequireApprovalMcpToolApprovalFilterNever(TypedDict, total=False):
it will match this filter.
"""
- tool_names: List[str]
+ tool_names: SequenceNotStr[str]
"""List of allowed tool names."""
@@ -152,7 +153,7 @@ class CodeInterpreterContainerCodeInterpreterToolAuto(TypedDict, total=False):
type: Required[Literal["auto"]]
"""Always `auto`."""
- file_ids: List[str]
+ file_ids: SequenceNotStr[str]
"""An optional list of uploaded files to make available to your code."""
src/openai/types/responses/web_search_tool_param.py
@@ -2,14 +2,16 @@
from __future__ import annotations
-from typing import List, Optional
+from typing import Optional
from typing_extensions import Literal, Required, TypedDict
+from ..._types import SequenceNotStr
+
__all__ = ["WebSearchToolParam", "Filters", "UserLocation"]
class Filters(TypedDict, total=False):
- allowed_domains: Optional[List[str]]
+ allowed_domains: Optional[SequenceNotStr[str]]
"""Allowed domains for the search.
If not provided, all domains are allowed. Subdomains of the provided domains are
src/openai/types/vector_stores/file_batch_create_params.py
@@ -2,16 +2,17 @@
from __future__ import annotations
-from typing import Dict, List, Union, Optional
+from typing import Dict, Union, Optional
from typing_extensions import Required, TypedDict
+from ..._types import SequenceNotStr
from ..file_chunking_strategy_param import FileChunkingStrategyParam
__all__ = ["FileBatchCreateParams"]
class FileBatchCreateParams(TypedDict, total=False):
- file_ids: Required[List[str]]
+ file_ids: Required[SequenceNotStr[str]]
"""
A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that
the vector store should use. Useful for tools like `file_search` that can access
src/openai/types/completion_create_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable, Optional
+from typing import Dict, Union, Iterable, Optional
from typing_extensions import Literal, Required, TypedDict
+from .._types import SequenceNotStr
from .chat.chat_completion_stream_options_param import ChatCompletionStreamOptionsParam
__all__ = ["CompletionCreateParamsBase", "CompletionCreateParamsNonStreaming", "CompletionCreateParamsStreaming"]
@@ -21,7 +22,7 @@ class CompletionCreateParamsBase(TypedDict, total=False):
them.
"""
- prompt: Required[Union[str, List[str], Iterable[int], Iterable[Iterable[int]], None]]
+ prompt: Required[Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]], None]]
"""
The prompt(s) to generate completions for, encoded as a string, array of
strings, array of tokens, or array of token arrays.
@@ -119,7 +120,7 @@ class CompletionCreateParamsBase(TypedDict, total=False):
response parameter to monitor changes in the backend.
"""
- stop: Union[Optional[str], List[str], None]
+ stop: Union[Optional[str], SequenceNotStr[str], None]
"""Not supported with latest reasoning models `o3` and `o4-mini`.
Up to 4 sequences where the API will stop generating further tokens. The
src/openai/types/container_create_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List
from typing_extensions import Literal, Required, TypedDict
+from .._types import SequenceNotStr
+
__all__ = ["ContainerCreateParams", "ExpiresAfter"]
@@ -15,7 +16,7 @@ class ContainerCreateParams(TypedDict, total=False):
expires_after: ExpiresAfter
"""Container expiration time in seconds relative to the 'anchor' time."""
- file_ids: List[str]
+ file_ids: SequenceNotStr[str]
"""IDs of files to copy to the container."""
src/openai/types/embedding_create_params.py
@@ -2,16 +2,17 @@
from __future__ import annotations
-from typing import List, Union, Iterable
+from typing import Union, Iterable
from typing_extensions import Literal, Required, TypedDict
+from .._types import SequenceNotStr
from .embedding_model import EmbeddingModel
__all__ = ["EmbeddingCreateParams"]
class EmbeddingCreateParams(TypedDict, total=False):
- input: Required[Union[str, List[str], Iterable[int], Iterable[Iterable[int]]]]
+ input: Required[Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]]]]
"""Input text to embed, encoded as a string or array of tokens.
To embed multiple inputs in a single request, pass an array of strings or array
src/openai/types/eval_create_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable, Optional
+from typing import Dict, Union, Iterable, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from .._types import SequenceNotStr
from .shared_params.metadata import Metadata
from .graders.python_grader_param import PythonGraderParam
from .graders.score_model_grader_param import ScoreModelGraderParam
@@ -159,7 +160,7 @@ class TestingCriterionLabelModel(TypedDict, total=False):
May include variable references to the `item` namespace, ie {{item.name}}.
"""
- labels: Required[List[str]]
+ labels: Required[SequenceNotStr[str]]
"""The labels to classify to each item in the evaluation."""
model: Required[str]
@@ -168,7 +169,7 @@ class TestingCriterionLabelModel(TypedDict, total=False):
name: Required[str]
"""The name of the grader."""
- passing_labels: Required[List[str]]
+ passing_labels: Required[SequenceNotStr[str]]
"""The labels that indicate a passing result. Must be a subset of labels."""
type: Required[Literal["label_model"]]
src/openai/types/image_edit_params.py
@@ -2,17 +2,17 @@
from __future__ import annotations
-from typing import List, Union, Optional
+from typing import Union, Optional
from typing_extensions import Literal, Required, TypedDict
-from .._types import FileTypes
+from .._types import FileTypes, SequenceNotStr
from .image_model import ImageModel
__all__ = ["ImageEditParamsBase", "ImageEditParamsNonStreaming", "ImageEditParamsStreaming"]
class ImageEditParamsBase(TypedDict, total=False):
- image: Required[Union[FileTypes, List[FileTypes]]]
+ image: Required[Union[FileTypes, SequenceNotStr[FileTypes]]]
"""The image(s) to edit. Must be a supported image file or an array of images.
For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than
src/openai/types/moderation_create_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Union, Iterable
+from typing import Union, Iterable
from typing_extensions import Required, TypedDict
+from .._types import SequenceNotStr
from .moderation_model import ModerationModel
from .moderation_multi_modal_input_param import ModerationMultiModalInputParam
@@ -12,7 +13,7 @@ __all__ = ["ModerationCreateParams"]
class ModerationCreateParams(TypedDict, total=False):
- input: Required[Union[str, List[str], Iterable[ModerationMultiModalInputParam]]]
+ input: Required[Union[str, SequenceNotStr[str], Iterable[ModerationMultiModalInputParam]]]
"""Input (or inputs) to classify.
Can be a single string, an array of strings, or an array of multi-modal input
src/openai/types/upload_complete_params.py
@@ -2,14 +2,15 @@
from __future__ import annotations
-from typing import List
from typing_extensions import Required, TypedDict
+from .._types import SequenceNotStr
+
__all__ = ["UploadCompleteParams"]
class UploadCompleteParams(TypedDict, total=False):
- part_ids: Required[List[str]]
+ part_ids: Required[SequenceNotStr[str]]
"""The ordered list of Part IDs."""
md5: str
src/openai/types/vector_store_create_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Optional
+from typing import Optional
from typing_extensions import Literal, Required, TypedDict
+from .._types import SequenceNotStr
from .shared_params.metadata import Metadata
from .file_chunking_strategy_param import FileChunkingStrategyParam
@@ -22,7 +23,7 @@ class VectorStoreCreateParams(TypedDict, total=False):
expires_after: ExpiresAfter
"""The expiration policy for a vector store."""
- file_ids: List[str]
+ file_ids: SequenceNotStr[str]
"""
A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that
the vector store should use. Useful for tools like `file_search` that can access
src/openai/types/vector_store_search_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Union
+from typing import Union
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from .._types import SequenceNotStr
from .shared_params.compound_filter import CompoundFilter
from .shared_params.comparison_filter import ComparisonFilter
@@ -12,7 +13,7 @@ __all__ = ["VectorStoreSearchParams", "Filters", "RankingOptions"]
class VectorStoreSearchParams(TypedDict, total=False):
- query: Required[Union[str, List[str]]]
+ query: Required[Union[str, SequenceNotStr[str]]]
"""A query string for a search"""
filters: Filters