Commit a6a94e08
Changed files (1)
src/openai/resources/beta/chat/completions.py
@@ -15,7 +15,10 @@ from ...._compat import cached_property
from ...._resource import SyncAPIResource, AsyncAPIResource
from ...._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper
from ...._streaming import Stream
-from ....types.chat import completion_create_params
+from ....types.chat import (
+ ChatCompletionReasoningEffort,
+ completion_create_params,
+)
from ...._base_client import make_request_options
from ....lib._parsing import (
ResponseFormatT,
@@ -79,6 +82,7 @@ class Completions(SyncAPIResource):
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
prediction: Optional[ChatCompletionPredictionContentParam] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
+ reasoning_effort: ChatCompletionReasoningEffort | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
service_tier: Optional[Literal["auto", "default"]] | NotGiven = NOT_GIVEN,
stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN,
@@ -173,6 +177,7 @@ class Completions(SyncAPIResource):
"parallel_tool_calls": parallel_tool_calls,
"prediction": prediction,
"presence_penalty": presence_penalty,
+ "reasoning_effort": reasoning_effort,
"response_format": _type_to_response_format(response_format),
"seed": seed,
"service_tier": service_tier,
@@ -222,6 +227,7 @@ class Completions(SyncAPIResource):
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
prediction: Optional[ChatCompletionPredictionContentParam] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
+ reasoning_effort: ChatCompletionReasoningEffort | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
service_tier: Optional[Literal["auto", "default"]] | NotGiven = NOT_GIVEN,
stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN,
@@ -287,6 +293,7 @@ class Completions(SyncAPIResource):
parallel_tool_calls=parallel_tool_calls,
prediction=prediction,
presence_penalty=presence_penalty,
+ reasoning_effort=reasoning_effort,
seed=seed,
service_tier=service_tier,
store=store,
@@ -350,6 +357,7 @@ class AsyncCompletions(AsyncAPIResource):
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
prediction: Optional[ChatCompletionPredictionContentParam] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
+ reasoning_effort: ChatCompletionReasoningEffort | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
service_tier: Optional[Literal["auto", "default"]] | NotGiven = NOT_GIVEN,
stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN,
@@ -444,6 +452,7 @@ class AsyncCompletions(AsyncAPIResource):
"parallel_tool_calls": parallel_tool_calls,
"prediction": prediction,
"presence_penalty": presence_penalty,
+ "reasoning_effort": reasoning_effort,
"response_format": _type_to_response_format(response_format),
"seed": seed,
"service_tier": service_tier,
@@ -493,6 +502,7 @@ class AsyncCompletions(AsyncAPIResource):
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
prediction: Optional[ChatCompletionPredictionContentParam] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
+ reasoning_effort: ChatCompletionReasoningEffort | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
service_tier: Optional[Literal["auto", "default"]] | NotGiven = NOT_GIVEN,
stop: Union[Optional[str], List[str]] | NotGiven = NOT_GIVEN,
@@ -559,6 +569,7 @@ class AsyncCompletions(AsyncAPIResource):
parallel_tool_calls=parallel_tool_calls,
prediction=prediction,
presence_penalty=presence_penalty,
+ reasoning_effort=reasoning_effort,
seed=seed,
service_tier=service_tier,
stop=stop,
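The net effect of the commit is that the typed beta helpers accept and forward the new reasoning_effort keyword. A minimal usage sketch of the parse helper with this parameter, assuming a reasoning-capable model (the model name, prompt, and output schema below are illustrative and not part of the commit):

from pydantic import BaseModel
from openai import OpenAI

client = OpenAI()

class Answer(BaseModel):
    # Illustrative structured-output schema for the parse helper
    summary: str

completion = client.beta.chat.completions.parse(
    model="o1",  # assumed reasoning-capable model
    messages=[{"role": "user", "content": "Summarize binary search."}],
    reasoning_effort="medium",  # new keyword; a ChatCompletionReasoningEffort value such as "medium"
    response_format=Answer,
)
print(completion.choices[0].message.parsed)

The same keyword is plumbed through the stream() helpers on both Completions and AsyncCompletions, so it can be passed to client.beta.chat.completions.stream(...) in the same way.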