# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

import os
from typing import Any, cast

import pytest

from openai import OpenAI, AsyncOpenAI
from tests.utils import assert_matches_type
from openai.types import Batch
from openai.pagination import SyncCursorPage, AsyncCursorPage

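# Tests run against a locally running mock server by default; set TEST_API_BASE_URL to override.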
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")


class TestBatches:
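    # Each test runs twice: once against a "loose" and once against a "strict" client; the
    # boolean parameter is forwarded to the `client` fixture via indirect parametrization.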
    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

    @parametrize
    def test_method_create(self, client: OpenAI) -> None:
        batch = client.batches.create(
            completion_window="24h",
            endpoint="/v1/responses",
            input_file_id="string",
        )
        assert_matches_type(Batch, batch, path=["response"])

    @parametrize
    def test_method_create_with_all_params(self, client: OpenAI) -> None:
        batch = client.batches.create(
            completion_window="24h",
            endpoint="/v1/responses",
            input_file_id="string",
            metadata={"foo": "string"},
            output_expires_after={
                "anchor": "created_at",
                "seconds": 3600,
            },
        )
        assert_matches_type(Batch, batch, path=["response"])

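    # The *_raw_response tests use `.with_raw_response`, which exposes the underlying HTTP
    # response (status, headers) and returns the typed model from `.parse()`.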
    @parametrize
    def test_raw_response_create(self, client: OpenAI) -> None:
        response = client.batches.with_raw_response.create(
            completion_window="24h",
            endpoint="/v1/responses",
            input_file_id="string",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        batch = response.parse()
        assert_matches_type(Batch, batch, path=["response"])

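    # The *_streaming_response tests use `.with_streaming_response`, which defers reading the
    # body: the response stays open inside the `with` block and is closed on exit.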
    @parametrize
    def test_streaming_response_create(self, client: OpenAI) -> None:
        with client.batches.with_streaming_response.create(
            completion_window="24h",
            endpoint="/v1/responses",
            input_file_id="string",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            batch = response.parse()
            assert_matches_type(Batch, batch, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_method_retrieve(self, client: OpenAI) -> None:
        batch = client.batches.retrieve(
            "string",
        )
        assert_matches_type(Batch, batch, path=["response"])

    @parametrize
    def test_raw_response_retrieve(self, client: OpenAI) -> None:
        response = client.batches.with_raw_response.retrieve(
            "string",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        batch = response.parse()
        assert_matches_type(Batch, batch, path=["response"])

    @parametrize
    def test_streaming_response_retrieve(self, client: OpenAI) -> None:
        with client.batches.with_streaming_response.retrieve(
            "string",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            batch = response.parse()
            assert_matches_type(Batch, batch, path=["response"])

        assert cast(Any, response.is_closed) is True

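    # An empty `batch_id` must be rejected client-side with a ValueError before any request is sent.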
    @parametrize
    def test_path_params_retrieve(self, client: OpenAI) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `batch_id` but received ''"):
            client.batches.with_raw_response.retrieve(
                "",
            )

    @parametrize
    def test_method_list(self, client: OpenAI) -> None:
        batch = client.batches.list()
        assert_matches_type(SyncCursorPage[Batch], batch, path=["response"])

    @parametrize
    def test_method_list_with_all_params(self, client: OpenAI) -> None:
        batch = client.batches.list(
            after="string",
            limit=0,
        )
        assert_matches_type(SyncCursorPage[Batch], batch, path=["response"])

    @parametrize
    def test_raw_response_list(self, client: OpenAI) -> None:
        response = client.batches.with_raw_response.list()

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        batch = response.parse()
        assert_matches_type(SyncCursorPage[Batch], batch, path=["response"])

    @parametrize
    def test_streaming_response_list(self, client: OpenAI) -> None:
        with client.batches.with_streaming_response.list() as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            batch = response.parse()
            assert_matches_type(SyncCursorPage[Batch], batch, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_method_cancel(self, client: OpenAI) -> None:
        batch = client.batches.cancel(
            "string",
        )
        assert_matches_type(Batch, batch, path=["response"])

    @parametrize
    def test_raw_response_cancel(self, client: OpenAI) -> None:
        response = client.batches.with_raw_response.cancel(
            "string",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        batch = response.parse()
        assert_matches_type(Batch, batch, path=["response"])

    @parametrize
    def test_streaming_response_cancel(self, client: OpenAI) -> None:
        with client.batches.with_streaming_response.cancel(
            "string",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            batch = response.parse()
            assert_matches_type(Batch, batch, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_path_params_cancel(self, client: OpenAI) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `batch_id` but received ''"):
            client.batches.with_raw_response.cancel(
                "",
            )


class TestAsyncBatches:
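    # Async tests run against "loose" and "strict" clients and, additionally, an aiohttp-based
    # HTTP client; parameters are forwarded to the `async_client` fixture via indirect parametrization.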
    parametrize = pytest.mark.parametrize(
        "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
    )

    @parametrize
    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
        batch = await async_client.batches.create(
            completion_window="24h",
            endpoint="/v1/responses",
            input_file_id="string",
        )
        assert_matches_type(Batch, batch, path=["response"])

    @parametrize
    async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
        batch = await async_client.batches.create(
            completion_window="24h",
            endpoint="/v1/responses",
            input_file_id="string",
            metadata={"foo": "string"},
            output_expires_after={
                "anchor": "created_at",
                "seconds": 3600,
            },
        )
        assert_matches_type(Batch, batch, path=["response"])

    @parametrize
    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.batches.with_raw_response.create(
            completion_window="24h",
            endpoint="/v1/responses",
            input_file_id="string",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        batch = response.parse()
        assert_matches_type(Batch, batch, path=["response"])

    @parametrize
    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
        async with async_client.batches.with_streaming_response.create(
            completion_window="24h",
            endpoint="/v1/responses",
            input_file_id="string",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            batch = await response.parse()
            assert_matches_type(Batch, batch, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None:
        batch = await async_client.batches.retrieve(
            "string",
        )
        assert_matches_type(Batch, batch, path=["response"])

    @parametrize
    async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.batches.with_raw_response.retrieve(
            "string",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        batch = response.parse()
        assert_matches_type(Batch, batch, path=["response"])

    @parametrize
    async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None:
        async with async_client.batches.with_streaming_response.retrieve(
            "string",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            batch = await response.parse()
            assert_matches_type(Batch, batch, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `batch_id` but received ''"):
            await async_client.batches.with_raw_response.retrieve(
                "",
            )

    @parametrize
    async def test_method_list(self, async_client: AsyncOpenAI) -> None:
        batch = await async_client.batches.list()
        assert_matches_type(AsyncCursorPage[Batch], batch, path=["response"])

    @parametrize
    async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None:
        batch = await async_client.batches.list(
            after="string",
            limit=0,
        )
        assert_matches_type(AsyncCursorPage[Batch], batch, path=["response"])

    @parametrize
    async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.batches.with_raw_response.list()

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        batch = response.parse()
        assert_matches_type(AsyncCursorPage[Batch], batch, path=["response"])

    @parametrize
    async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None:
        async with async_client.batches.with_streaming_response.list() as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            batch = await response.parse()
            assert_matches_type(AsyncCursorPage[Batch], batch, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    async def test_method_cancel(self, async_client: AsyncOpenAI) -> None:
        batch = await async_client.batches.cancel(
            "string",
        )
        assert_matches_type(Batch, batch, path=["response"])

    @parametrize
    async def test_raw_response_cancel(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.batches.with_raw_response.cancel(
            "string",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        batch = response.parse()
        assert_matches_type(Batch, batch, path=["response"])

    @parametrize
    async def test_streaming_response_cancel(self, async_client: AsyncOpenAI) -> None:
        async with async_client.batches.with_streaming_response.cancel(
            "string",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            batch = await response.parse()
            assert_matches_type(Batch, batch, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    async def test_path_params_cancel(self, async_client: AsyncOpenAI) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `batch_id` but received ''"):
            await async_client.batches.with_raw_response.cancel(
                "",
            )