1# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2
3from __future__ import annotations
4
5import os
6from typing import Any, cast
7
8import pytest
9
10from openai import OpenAI, AsyncOpenAI
11from tests.utils import assert_matches_type
12from openai._utils import assert_signatures_in_sync
13from openai.types.responses import (
14 Response,
15 CompactedResponse,
16)
17
# Base URL of the mock API server these tests run against; defaults to a
# local prism mock on port 4010 and can be overridden via TEST_API_BASE_URL.
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
19
20
class TestResponses:
    """Tests for the synchronous `client.responses` resource.

    Every test is parametrized over a "loose" and a "strict" client fixture
    (supplied indirectly by conftest) and exercises three access styles:
    plain method calls, `with_raw_response` (raw HTTP response + `.parse()`),
    and `with_streaming_response` (context-managed response).
    """

    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

    # --- create, overload 1: non-streaming, returns a `Response` ---

    @parametrize
    def test_method_create_overload_1(self, client: OpenAI) -> None:
        response = client.responses.create()
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    def test_method_create_with_all_params_overload_1(self, client: OpenAI) -> None:
        # Exercises every optional parameter of the non-streaming overload
        # (stream=False) with representative values.
        response = client.responses.create(
            background=True,
            conversation="string",
            include=["file_search_call.results"],
            input="string",
            instructions="instructions",
            max_output_tokens=0,
            max_tool_calls=0,
            metadata={"foo": "string"},
            model="gpt-5.1",
            parallel_tool_calls=True,
            previous_response_id="previous_response_id",
            prompt={
                "id": "id",
                "variables": {"foo": "string"},
                "version": "version",
            },
            prompt_cache_key="prompt-cache-key-1234",
            prompt_cache_retention="in-memory",
            reasoning={
                "effort": "none",
                "generate_summary": "auto",
                "summary": "auto",
            },
            safety_identifier="safety-identifier-1234",
            service_tier="auto",
            store=True,
            stream=False,
            stream_options={"include_obfuscation": True},
            temperature=1,
            text={
                "format": {"type": "text"},
                "verbosity": "low",
            },
            tool_choice="none",
            tools=[
                {
                    "name": "name",
                    "parameters": {"foo": "bar"},
                    "strict": True,
                    "type": "function",
                    "description": "description",
                }
            ],
            top_logprobs=0,
            top_p=1,
            truncation="auto",
            user="user-1234",
        )
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    def test_raw_response_create_overload_1(self, client: OpenAI) -> None:
        http_response = client.responses.with_raw_response.create()

        assert http_response.is_closed is True
        assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
        response = http_response.parse()
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    def test_streaming_response_create_overload_1(self, client: OpenAI) -> None:
        with client.responses.with_streaming_response.create() as http_response:
            assert not http_response.is_closed
            assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"

            response = http_response.parse()
            assert_matches_type(Response, response, path=["response"])

        # Leaving the context manager must close the underlying response.
        assert cast(Any, http_response.is_closed) is True

    # --- create, overload 2: stream=True, returns an event stream that must be closed ---

    @parametrize
    def test_method_create_overload_2(self, client: OpenAI) -> None:
        response_stream = client.responses.create(
            stream=True,
        )
        response_stream.response.close()

    @parametrize
    def test_method_create_with_all_params_overload_2(self, client: OpenAI) -> None:
        response_stream = client.responses.create(
            stream=True,
            background=True,
            conversation="string",
            include=["file_search_call.results"],
            input="string",
            instructions="instructions",
            max_output_tokens=0,
            max_tool_calls=0,
            metadata={"foo": "string"},
            model="gpt-5.1",
            parallel_tool_calls=True,
            previous_response_id="previous_response_id",
            prompt={
                "id": "id",
                "variables": {"foo": "string"},
                "version": "version",
            },
            prompt_cache_key="prompt-cache-key-1234",
            prompt_cache_retention="in-memory",
            reasoning={
                "effort": "none",
                "generate_summary": "auto",
                "summary": "auto",
            },
            safety_identifier="safety-identifier-1234",
            service_tier="auto",
            store=True,
            stream_options={"include_obfuscation": True},
            temperature=1,
            text={
                "format": {"type": "text"},
                "verbosity": "low",
            },
            tool_choice="none",
            tools=[
                {
                    "name": "name",
                    "parameters": {"foo": "bar"},
                    "strict": True,
                    "type": "function",
                    "description": "description",
                }
            ],
            top_logprobs=0,
            top_p=1,
            truncation="auto",
            user="user-1234",
        )
        response_stream.response.close()

    @parametrize
    def test_raw_response_create_overload_2(self, client: OpenAI) -> None:
        response = client.responses.with_raw_response.create(
            stream=True,
        )

        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        stream = response.parse()
        stream.close()

    @parametrize
    def test_streaming_response_create_overload_2(self, client: OpenAI) -> None:
        with client.responses.with_streaming_response.create(
            stream=True,
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            stream = response.parse()
            stream.close()

        assert cast(Any, response.is_closed) is True

    # --- retrieve, overload 1: non-streaming ---

    @parametrize
    def test_method_retrieve_overload_1(self, client: OpenAI) -> None:
        response = client.responses.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
        )
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    def test_method_retrieve_with_all_params_overload_1(self, client: OpenAI) -> None:
        response = client.responses.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
            include=["file_search_call.results"],
            include_obfuscation=True,
            starting_after=0,
            stream=False,
        )
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    def test_raw_response_retrieve_overload_1(self, client: OpenAI) -> None:
        http_response = client.responses.with_raw_response.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
        )

        assert http_response.is_closed is True
        assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
        response = http_response.parse()
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    def test_streaming_response_retrieve_overload_1(self, client: OpenAI) -> None:
        with client.responses.with_streaming_response.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
        ) as http_response:
            assert not http_response.is_closed
            assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"

            response = http_response.parse()
            assert_matches_type(Response, response, path=["response"])

        assert cast(Any, http_response.is_closed) is True

    @parametrize
    def test_path_params_retrieve_overload_1(self, client: OpenAI) -> None:
        # An empty path parameter must be rejected client-side before any request.
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `response_id` but received ''"):
            client.responses.with_raw_response.retrieve(
                response_id="",
            )

    # --- retrieve, overload 2: stream=True ---

    @parametrize
    def test_method_retrieve_overload_2(self, client: OpenAI) -> None:
        response_stream = client.responses.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
            stream=True,
        )
        response_stream.response.close()

    @parametrize
    def test_method_retrieve_with_all_params_overload_2(self, client: OpenAI) -> None:
        response_stream = client.responses.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
            stream=True,
            include=["file_search_call.results"],
            include_obfuscation=True,
            starting_after=0,
        )
        response_stream.response.close()

    @parametrize
    def test_raw_response_retrieve_overload_2(self, client: OpenAI) -> None:
        response = client.responses.with_raw_response.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
            stream=True,
        )

        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        stream = response.parse()
        stream.close()

    @parametrize
    def test_streaming_response_retrieve_overload_2(self, client: OpenAI) -> None:
        with client.responses.with_streaming_response.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
            stream=True,
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            stream = response.parse()
            stream.close()

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_path_params_retrieve_overload_2(self, client: OpenAI) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `response_id` but received ''"):
            client.responses.with_raw_response.retrieve(
                response_id="",
                stream=True,
            )

    # --- delete: returns no body (None) ---

    @parametrize
    def test_method_delete(self, client: OpenAI) -> None:
        response = client.responses.delete(
            "resp_677efb5139a88190b512bc3fef8e535d",
        )
        assert response is None

    @parametrize
    def test_raw_response_delete(self, client: OpenAI) -> None:
        http_response = client.responses.with_raw_response.delete(
            "resp_677efb5139a88190b512bc3fef8e535d",
        )

        assert http_response.is_closed is True
        assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
        response = http_response.parse()
        assert response is None

    @parametrize
    def test_streaming_response_delete(self, client: OpenAI) -> None:
        with client.responses.with_streaming_response.delete(
            "resp_677efb5139a88190b512bc3fef8e535d",
        ) as http_response:
            assert not http_response.is_closed
            assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"

            response = http_response.parse()
            assert response is None

        assert cast(Any, http_response.is_closed) is True

    @parametrize
    def test_path_params_delete(self, client: OpenAI) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `response_id` but received ''"):
            client.responses.with_raw_response.delete(
                "",
            )

    # --- cancel: returns the (cancelled) Response ---

    @parametrize
    def test_method_cancel(self, client: OpenAI) -> None:
        response = client.responses.cancel(
            "resp_677efb5139a88190b512bc3fef8e535d",
        )
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    def test_raw_response_cancel(self, client: OpenAI) -> None:
        http_response = client.responses.with_raw_response.cancel(
            "resp_677efb5139a88190b512bc3fef8e535d",
        )

        assert http_response.is_closed is True
        assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
        response = http_response.parse()
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    def test_streaming_response_cancel(self, client: OpenAI) -> None:
        with client.responses.with_streaming_response.cancel(
            "resp_677efb5139a88190b512bc3fef8e535d",
        ) as http_response:
            assert not http_response.is_closed
            assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"

            response = http_response.parse()
            assert_matches_type(Response, response, path=["response"])

        assert cast(Any, http_response.is_closed) is True

    @parametrize
    def test_path_params_cancel(self, client: OpenAI) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `response_id` but received ''"):
            client.responses.with_raw_response.cancel(
                "",
            )

    # --- compact: returns a CompactedResponse ---

    @parametrize
    def test_method_compact(self, client: OpenAI) -> None:
        response = client.responses.compact()
        assert_matches_type(CompactedResponse, response, path=["response"])

    @parametrize
    def test_method_compact_with_all_params(self, client: OpenAI) -> None:
        response = client.responses.compact(
            input="string",
            instructions="instructions",
            model="gpt-5.1",
            previous_response_id="resp_123",
        )
        assert_matches_type(CompactedResponse, response, path=["response"])

    @parametrize
    def test_raw_response_compact(self, client: OpenAI) -> None:
        http_response = client.responses.with_raw_response.compact()

        assert http_response.is_closed is True
        assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
        response = http_response.parse()
        assert_matches_type(CompactedResponse, response, path=["response"])

    @parametrize
    def test_streaming_response_compact(self, client: OpenAI) -> None:
        with client.responses.with_streaming_response.compact() as http_response:
            assert not http_response.is_closed
            assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"

            response = http_response.parse()
            assert_matches_type(CompactedResponse, response, path=["response"])

        assert cast(Any, http_response.is_closed) is True
396
397
@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"])
def test_parse_method_in_sync(sync: bool, client: OpenAI, async_client: AsyncOpenAI) -> None:
    """Check that `responses.parse` keeps its signature in sync with `responses.create`.

    Runs once against the sync client and once against the async client;
    `stream` and `tools` are intentionally excluded from the comparison.
    """
    target: OpenAI | AsyncOpenAI
    if sync:
        target = client
    else:
        target = async_client

    assert_signatures_in_sync(
        target.responses.create,
        target.responses.parse,
        exclude_params={"stream", "tools"},
    )
407
408
class TestAsyncResponses:
    """Tests for the asynchronous `async_client.responses` resource.

    Mirrors `TestResponses` with awaited calls; parametrized over loose/strict
    validation plus an aiohttp-backed HTTP client. Raw-response `.parse()` is
    synchronous, while streaming-response `.parse()` is awaited, and streams
    are closed via the async `aclose()`/`close()` coroutines.
    """

    parametrize = pytest.mark.parametrize(
        "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
    )

    # --- create, overload 1: non-streaming, returns a `Response` ---

    @parametrize
    async def test_method_create_overload_1(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.responses.create()
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    async def test_method_create_with_all_params_overload_1(self, async_client: AsyncOpenAI) -> None:
        # Exercises every optional parameter of the non-streaming overload
        # (stream=False) with representative values.
        response = await async_client.responses.create(
            background=True,
            conversation="string",
            include=["file_search_call.results"],
            input="string",
            instructions="instructions",
            max_output_tokens=0,
            max_tool_calls=0,
            metadata={"foo": "string"},
            model="gpt-5.1",
            parallel_tool_calls=True,
            previous_response_id="previous_response_id",
            prompt={
                "id": "id",
                "variables": {"foo": "string"},
                "version": "version",
            },
            prompt_cache_key="prompt-cache-key-1234",
            prompt_cache_retention="in-memory",
            reasoning={
                "effort": "none",
                "generate_summary": "auto",
                "summary": "auto",
            },
            safety_identifier="safety-identifier-1234",
            service_tier="auto",
            store=True,
            stream=False,
            stream_options={"include_obfuscation": True},
            temperature=1,
            text={
                "format": {"type": "text"},
                "verbosity": "low",
            },
            tool_choice="none",
            tools=[
                {
                    "name": "name",
                    "parameters": {"foo": "bar"},
                    "strict": True,
                    "type": "function",
                    "description": "description",
                }
            ],
            top_logprobs=0,
            top_p=1,
            truncation="auto",
            user="user-1234",
        )
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    async def test_raw_response_create_overload_1(self, async_client: AsyncOpenAI) -> None:
        http_response = await async_client.responses.with_raw_response.create()

        assert http_response.is_closed is True
        assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
        response = http_response.parse()
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    async def test_streaming_response_create_overload_1(self, async_client: AsyncOpenAI) -> None:
        async with async_client.responses.with_streaming_response.create() as http_response:
            assert not http_response.is_closed
            assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"

            response = await http_response.parse()
            assert_matches_type(Response, response, path=["response"])

        # Leaving the async context manager must close the underlying response.
        assert cast(Any, http_response.is_closed) is True

    # --- create, overload 2: stream=True, returns an async event stream ---

    @parametrize
    async def test_method_create_overload_2(self, async_client: AsyncOpenAI) -> None:
        response_stream = await async_client.responses.create(
            stream=True,
        )
        await response_stream.response.aclose()

    @parametrize
    async def test_method_create_with_all_params_overload_2(self, async_client: AsyncOpenAI) -> None:
        response_stream = await async_client.responses.create(
            stream=True,
            background=True,
            conversation="string",
            include=["file_search_call.results"],
            input="string",
            instructions="instructions",
            max_output_tokens=0,
            max_tool_calls=0,
            metadata={"foo": "string"},
            model="gpt-5.1",
            parallel_tool_calls=True,
            previous_response_id="previous_response_id",
            prompt={
                "id": "id",
                "variables": {"foo": "string"},
                "version": "version",
            },
            prompt_cache_key="prompt-cache-key-1234",
            prompt_cache_retention="in-memory",
            reasoning={
                "effort": "none",
                "generate_summary": "auto",
                "summary": "auto",
            },
            safety_identifier="safety-identifier-1234",
            service_tier="auto",
            store=True,
            stream_options={"include_obfuscation": True},
            temperature=1,
            text={
                "format": {"type": "text"},
                "verbosity": "low",
            },
            tool_choice="none",
            tools=[
                {
                    "name": "name",
                    "parameters": {"foo": "bar"},
                    "strict": True,
                    "type": "function",
                    "description": "description",
                }
            ],
            top_logprobs=0,
            top_p=1,
            truncation="auto",
            user="user-1234",
        )
        await response_stream.response.aclose()

    @parametrize
    async def test_raw_response_create_overload_2(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.responses.with_raw_response.create(
            stream=True,
        )

        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        stream = response.parse()
        await stream.close()

    @parametrize
    async def test_streaming_response_create_overload_2(self, async_client: AsyncOpenAI) -> None:
        async with async_client.responses.with_streaming_response.create(
            stream=True,
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            stream = await response.parse()
            await stream.close()

        assert cast(Any, response.is_closed) is True

    # --- retrieve, overload 1: non-streaming ---

    @parametrize
    async def test_method_retrieve_overload_1(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.responses.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
        )
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    async def test_method_retrieve_with_all_params_overload_1(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.responses.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
            include=["file_search_call.results"],
            include_obfuscation=True,
            starting_after=0,
            stream=False,
        )
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    async def test_raw_response_retrieve_overload_1(self, async_client: AsyncOpenAI) -> None:
        http_response = await async_client.responses.with_raw_response.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
        )

        assert http_response.is_closed is True
        assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
        response = http_response.parse()
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    async def test_streaming_response_retrieve_overload_1(self, async_client: AsyncOpenAI) -> None:
        async with async_client.responses.with_streaming_response.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
        ) as http_response:
            assert not http_response.is_closed
            assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"

            response = await http_response.parse()
            assert_matches_type(Response, response, path=["response"])

        assert cast(Any, http_response.is_closed) is True

    @parametrize
    async def test_path_params_retrieve_overload_1(self, async_client: AsyncOpenAI) -> None:
        # An empty path parameter must be rejected client-side before any request.
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `response_id` but received ''"):
            await async_client.responses.with_raw_response.retrieve(
                response_id="",
            )

    # --- retrieve, overload 2: stream=True ---

    @parametrize
    async def test_method_retrieve_overload_2(self, async_client: AsyncOpenAI) -> None:
        response_stream = await async_client.responses.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
            stream=True,
        )
        await response_stream.response.aclose()

    @parametrize
    async def test_method_retrieve_with_all_params_overload_2(self, async_client: AsyncOpenAI) -> None:
        response_stream = await async_client.responses.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
            stream=True,
            include=["file_search_call.results"],
            include_obfuscation=True,
            starting_after=0,
        )
        await response_stream.response.aclose()

    @parametrize
    async def test_raw_response_retrieve_overload_2(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.responses.with_raw_response.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
            stream=True,
        )

        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        stream = response.parse()
        await stream.close()

    @parametrize
    async def test_streaming_response_retrieve_overload_2(self, async_client: AsyncOpenAI) -> None:
        async with async_client.responses.with_streaming_response.retrieve(
            response_id="resp_677efb5139a88190b512bc3fef8e535d",
            stream=True,
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            stream = await response.parse()
            await stream.close()

        assert cast(Any, response.is_closed) is True

    @parametrize
    async def test_path_params_retrieve_overload_2(self, async_client: AsyncOpenAI) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `response_id` but received ''"):
            await async_client.responses.with_raw_response.retrieve(
                response_id="",
                stream=True,
            )

    # --- delete: returns no body (None) ---

    @parametrize
    async def test_method_delete(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.responses.delete(
            "resp_677efb5139a88190b512bc3fef8e535d",
        )
        assert response is None

    @parametrize
    async def test_raw_response_delete(self, async_client: AsyncOpenAI) -> None:
        http_response = await async_client.responses.with_raw_response.delete(
            "resp_677efb5139a88190b512bc3fef8e535d",
        )

        assert http_response.is_closed is True
        assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
        response = http_response.parse()
        assert response is None

    @parametrize
    async def test_streaming_response_delete(self, async_client: AsyncOpenAI) -> None:
        async with async_client.responses.with_streaming_response.delete(
            "resp_677efb5139a88190b512bc3fef8e535d",
        ) as http_response:
            assert not http_response.is_closed
            assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"

            response = await http_response.parse()
            assert response is None

        assert cast(Any, http_response.is_closed) is True

    @parametrize
    async def test_path_params_delete(self, async_client: AsyncOpenAI) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `response_id` but received ''"):
            await async_client.responses.with_raw_response.delete(
                "",
            )

    # --- cancel: returns the (cancelled) Response ---

    @parametrize
    async def test_method_cancel(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.responses.cancel(
            "resp_677efb5139a88190b512bc3fef8e535d",
        )
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    async def test_raw_response_cancel(self, async_client: AsyncOpenAI) -> None:
        http_response = await async_client.responses.with_raw_response.cancel(
            "resp_677efb5139a88190b512bc3fef8e535d",
        )

        assert http_response.is_closed is True
        assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
        response = http_response.parse()
        assert_matches_type(Response, response, path=["response"])

    @parametrize
    async def test_streaming_response_cancel(self, async_client: AsyncOpenAI) -> None:
        async with async_client.responses.with_streaming_response.cancel(
            "resp_677efb5139a88190b512bc3fef8e535d",
        ) as http_response:
            assert not http_response.is_closed
            assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"

            response = await http_response.parse()
            assert_matches_type(Response, response, path=["response"])

        assert cast(Any, http_response.is_closed) is True

    @parametrize
    async def test_path_params_cancel(self, async_client: AsyncOpenAI) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `response_id` but received ''"):
            await async_client.responses.with_raw_response.cancel(
                "",
            )

    # --- compact: returns a CompactedResponse ---

    @parametrize
    async def test_method_compact(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.responses.compact()
        assert_matches_type(CompactedResponse, response, path=["response"])

    @parametrize
    async def test_method_compact_with_all_params(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.responses.compact(
            input="string",
            instructions="instructions",
            model="gpt-5.1",
            previous_response_id="resp_123",
        )
        assert_matches_type(CompactedResponse, response, path=["response"])

    @parametrize
    async def test_raw_response_compact(self, async_client: AsyncOpenAI) -> None:
        http_response = await async_client.responses.with_raw_response.compact()

        assert http_response.is_closed is True
        assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
        response = http_response.parse()
        assert_matches_type(CompactedResponse, response, path=["response"])

    @parametrize
    async def test_streaming_response_compact(self, async_client: AsyncOpenAI) -> None:
        async with async_client.responses.with_streaming_response.compact() as http_response:
            assert not http_response.is_closed
            assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"

            response = await http_response.parse()
            assert_matches_type(CompactedResponse, response, path=["response"])

        assert cast(Any, http_response.is_closed) is True