Commit d50550bd
Changed files (2)
src/openai/resources/beta/threads/runs/runs.py
tests/lib/test_assistants.py
src/openai/resources/beta/threads/runs/runs.py
@@ -950,6 +950,7 @@ class Runs(SyncAPIResource):
max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
metadata: Optional[object] | NotGiven = NOT_GIVEN,
model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
+ parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -979,6 +980,7 @@ class Runs(SyncAPIResource):
max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
metadata: Optional[object] | NotGiven = NOT_GIVEN,
model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
+ parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -1008,6 +1010,7 @@ class Runs(SyncAPIResource):
max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
metadata: Optional[object] | NotGiven = NOT_GIVEN,
model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
+ parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -1051,6 +1054,7 @@ class Runs(SyncAPIResource):
"tool_choice": tool_choice,
"stream": True,
"tools": tools,
+ "parallel_tool_calls": parallel_tool_calls,
"truncation_strategy": truncation_strategy,
"top_p": top_p,
},
@@ -2246,6 +2250,7 @@ class AsyncRuns(AsyncAPIResource):
max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
metadata: Optional[object] | NotGiven = NOT_GIVEN,
model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
+ parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -2275,6 +2280,7 @@ class AsyncRuns(AsyncAPIResource):
max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
metadata: Optional[object] | NotGiven = NOT_GIVEN,
model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
+ parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -2304,6 +2310,7 @@ class AsyncRuns(AsyncAPIResource):
max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
metadata: Optional[object] | NotGiven = NOT_GIVEN,
model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
+ parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -2349,6 +2356,7 @@ class AsyncRuns(AsyncAPIResource):
"tool_choice": tool_choice,
"stream": True,
"tools": tools,
+ "parallel_tool_calls": parallel_tool_calls,
"truncation_strategy": truncation_strategy,
"top_p": top_p,
},
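Taken together, these hunks thread the new `parallel_tool_calls` keyword through each `stream()` overload in both `Runs` and `AsyncRuns` and forward it in the request body, matching what `create()` already accepts. A minimal usage sketch follows; it assumes a configured client, the thread and assistant ids are placeholders, and `stream.until_done()` simply consumes the event stream:

```python
from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

# After this commit, parallel tool calls can be disabled on a streamed run.
# Omitting the argument leaves it as NOT_GIVEN, i.e. the previous behaviour.
with client.beta.threads.runs.stream(
    thread_id="thread_abc123",    # placeholder id
    assistant_id="asst_abc123",   # placeholder id
    parallel_tool_calls=False,    # the parameter added by this commit
) as stream:
    stream.until_done()
```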
tests/lib/test_assistants.py
@@ -28,6 +28,17 @@ def test_create_and_run_stream_method_definition_in_sync(sync: bool, client: Ope
)
+@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"])
+def test_run_stream_method_definition_in_sync(sync: bool, client: OpenAI, async_client: AsyncOpenAI) -> None:
+ checking_client: OpenAI | AsyncOpenAI = client if sync else async_client
+
+ assert_signatures_in_sync(
+ checking_client.beta.threads.runs.create,
+ checking_client.beta.threads.runs.stream,
+ exclude_params={"stream"},
+ )
+
+
@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"])
def test_create_and_poll_method_definition_in_sync(sync: bool, client: OpenAI, async_client: AsyncOpenAI) -> None:
checking_client: OpenAI | AsyncOpenAI = client if sync else async_client
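The new test reuses `assert_signatures_in_sync` to guard against `runs.stream()` drifting out of step with `runs.create()` again. As a rough illustration of the kind of check it performs, the sketch below compares parameter names with `inspect` directly; it is not the actual helper from the test suite, and no API requests are made:

```python
import inspect

from openai import OpenAI

client = OpenAI(api_key="sk-placeholder")  # placeholder key; nothing is sent


def param_names(func, exclude=frozenset()):
    """Parameter names of a callable's signature, minus any exclusions."""
    return {name for name in inspect.signature(func).parameters if name not in exclude}


# Every keyword accepted by runs.create (now including parallel_tool_calls)
# should also be accepted by runs.stream, apart from the excluded `stream` flag.
missing = param_names(client.beta.threads.runs.create, exclude={"stream"}) - param_names(
    client.beta.threads.runs.stream
)
assert not missing, f"runs.stream is missing parameters: {missing}"
```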