Commit a7f4ba7e
Changed files (5)
src/openai/resources/chat/completions.py
@@ -149,6 +149,11 @@ class Completions(SyncAPIResource):
[GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and
all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`.
+ Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
+ Outputs which guarantees the model will match your supplied JSON schema. Learn
+ more in the
+ [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
+
Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the
message the model generates is valid JSON.
@@ -347,6 +352,11 @@ class Completions(SyncAPIResource):
[GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and
all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`.
+ Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
+ Outputs which guarantees the model will match your supplied JSON schema. Learn
+ more in the
+ [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
+
Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the
message the model generates is valid JSON.
@@ -538,6 +548,11 @@ class Completions(SyncAPIResource):
[GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and
all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`.
+ Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
+ Outputs which guarantees the model will match your supplied JSON schema. Learn
+ more in the
+ [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
+
Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the
message the model generates is valid JSON.
@@ -805,6 +820,11 @@ class AsyncCompletions(AsyncAPIResource):
[GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and
all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`.
+ Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
+ Outputs which guarantees the model will match your supplied JSON schema. Learn
+ more in the
+ [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
+
Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the
message the model generates is valid JSON.
@@ -1003,6 +1023,11 @@ class AsyncCompletions(AsyncAPIResource):
[GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and
all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`.
+ Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
+ Outputs which guarantees the model will match your supplied JSON schema. Learn
+ more in the
+ [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
+
Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the
message the model generates is valid JSON.
@@ -1194,6 +1219,11 @@ class AsyncCompletions(AsyncAPIResource):
[GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and
all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`.
+ Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
+ Outputs which guarantees the model will match your supplied JSON schema. Learn
+ more in the
+ [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
+
Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the
message the model generates is valid JSON.
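The docstring additions above describe the new `{ "type": "json_schema", ... }` response format. As context, here is a minimal sketch of how it might be passed through `chat.completions.create`; the client setup, model choice, schema name, and schema fields are illustrative assumptions, not part of this diff.

```python
from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

# Hypothetical "event" schema: the name and fields below are illustrative only.
response = client.chat.completions.create(
    model="gpt-4o-2024-08-06",
    messages=[
        {"role": "system", "content": "Extract the event details from the user's message."},
        {"role": "user", "content": "Alice and Bob are meeting on Friday at 10am."},
    ],
    response_format={
        "type": "json_schema",
        "json_schema": {
            "name": "event",
            "strict": True,
            "schema": {
                "type": "object",
                "properties": {
                    "title": {"type": "string"},
                    "day": {"type": "string"},
                    "participants": {"type": "array", "items": {"type": "string"}},
                },
                "required": ["title", "day", "participants"],
                "additionalProperties": False,
            },
        },
    },
)

# The message content is a JSON string conforming to the supplied schema.
print(response.choices[0].message.content)
```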
src/openai/types/chat/completion_create_params.py
@@ -126,6 +126,11 @@ class CompletionCreateParamsBase(TypedDict, total=False):
[GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo) and
all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`.
+ Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
+ Outputs which guarantees the model will match your supplied JSON schema. Learn
+ more in the
+ [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
+
Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the
message the model generates is valid JSON.
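For contrast with the pre-existing `json_object` mode mentioned in the same docstring, a hedged sketch of JSON mode: it guarantees syntactically valid JSON but not any particular schema, and the model still needs to be told to emit JSON in a message. The model name and prompts are assumptions.

```python
from openai import OpenAI

client = OpenAI()

response = client.chat.completions.create(
    model="gpt-4o-mini",  # illustrative model choice
    messages=[
        # JSON mode requires instructing the model to produce JSON in a message.
        {"role": "system", "content": "Reply with a JSON object describing the weather."},
        {"role": "user", "content": "What's the weather like in Paris today?"},
    ],
    response_format={"type": "json_object"},
)

print(response.choices[0].message.content)  # valid JSON, shape not enforced
```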
src/openai/types/chat_model.py
@@ -6,8 +6,8 @@ __all__ = ["ChatModel"]
ChatModel: TypeAlias = Literal[
"gpt-4o",
- "gpt-4o-2024-08-06",
"gpt-4o-2024-05-13",
+ "gpt-4o-2024-08-06",
"gpt-4o-mini",
"gpt-4o-mini-2024-07-18",
"gpt-4-turbo",
.stats.yml
@@ -1,2 +1,2 @@
configured_endpoints: 68
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-4097c2f86beb3f3bb021775cd1dfa240e960caf842aeefc2e08da4dc0851ea79.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-97797a9363b9960b5f2fbdc84426a2b91e75533ecd409fe99e37c231180a4339.yml
pyproject.toml
@@ -202,7 +202,6 @@ unfixable = [
"T201",
"T203",
]
-ignore-init-module-imports = true
[tool.ruff.lint.flake8-tidy-imports.banned-api]
"functools.lru_cache".msg = "This function does not retain type information for the wrapped function's arguments; The `lru_cache` function from `_utils` should be used instead"
@@ -214,7 +213,7 @@ combine-as-imports = true
extra-standard-library = ["typing_extensions"]
known-first-party = ["openai", "tests"]
-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
"bin/**.py" = ["T201", "T203"]
"scripts/**.py" = ["T201", "T203"]
"tests/**.py" = ["T201", "T203"]