# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

import os
from typing import Any, cast

import pytest

from openai import OpenAI, AsyncOpenAI
from tests.utils import assert_matches_type
from openai.types.audio import TranscriptionCreateResponse

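# Tests run against a local mock server by default (4010 is the Prism CLI's default port);
# set TEST_API_BASE_URL to point the suite elsewhere.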
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")


class TestTranscriptions:
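    # The client fixture is parametrized indirectly; the "loose"/"strict" ids suggest the
    # boolean toggles strict response validation on the constructed OpenAI client.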
    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

    @parametrize
    def test_method_create_overload_1(self, client: OpenAI) -> None:
        transcription = client.audio.transcriptions.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
        )
        assert_matches_type(TranscriptionCreateResponse, transcription, path=["response"])

    @parametrize
    def test_method_create_with_all_params_overload_1(self, client: OpenAI) -> None:
        transcription = client.audio.transcriptions.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
            chunking_strategy="auto",
            include=["logprobs"],
            known_speaker_names=["string"],
            known_speaker_references=["string"],
            language="language",
            prompt="prompt",
            response_format="json",
            stream=False,
            temperature=0,
            timestamp_granularities=["word"],
        )
        assert_matches_type(TranscriptionCreateResponse, transcription, path=["response"])

    @parametrize
    def test_raw_response_create_overload_1(self, client: OpenAI) -> None:
        response = client.audio.transcriptions.with_raw_response.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        transcription = response.parse()
        assert_matches_type(TranscriptionCreateResponse, transcription, path=["response"])

    @parametrize
    def test_streaming_response_create_overload_1(self, client: OpenAI) -> None:
        with client.audio.transcriptions.with_streaming_response.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            transcription = response.parse()
            assert_matches_type(TranscriptionCreateResponse, transcription, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    def test_method_create_overload_2(self, client: OpenAI) -> None:
        transcription_stream = client.audio.transcriptions.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
            stream=True,
        )
        transcription_stream.response.close()

    @parametrize
    def test_method_create_with_all_params_overload_2(self, client: OpenAI) -> None:
        transcription_stream = client.audio.transcriptions.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
            stream=True,
            chunking_strategy="auto",
            include=["logprobs"],
            known_speaker_names=["string"],
            known_speaker_references=["string"],
            language="language",
            prompt="prompt",
            response_format="json",
            temperature=0,
            timestamp_granularities=["word"],
        )
        transcription_stream.response.close()

    @parametrize
    def test_raw_response_create_overload_2(self, client: OpenAI) -> None:
        response = client.audio.transcriptions.with_raw_response.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
            stream=True,
        )

        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        stream = response.parse()
        stream.close()

    @parametrize
    def test_streaming_response_create_overload_2(self, client: OpenAI) -> None:
        with client.audio.transcriptions.with_streaming_response.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
            stream=True,
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            stream = response.parse()
            stream.close()

        assert cast(Any, response.is_closed) is True


class TestAsyncTranscriptions:
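    # Same matrix as the sync tests, plus an "aiohttp" case that presumably exercises the
    # async client over an aiohttp-backed HTTP transport.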
    parametrize = pytest.mark.parametrize(
        "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
    )

    @parametrize
    async def test_method_create_overload_1(self, async_client: AsyncOpenAI) -> None:
        transcription = await async_client.audio.transcriptions.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
        )
        assert_matches_type(TranscriptionCreateResponse, transcription, path=["response"])

    @parametrize
    async def test_method_create_with_all_params_overload_1(self, async_client: AsyncOpenAI) -> None:
        transcription = await async_client.audio.transcriptions.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
            chunking_strategy="auto",
            include=["logprobs"],
            known_speaker_names=["string"],
            known_speaker_references=["string"],
            language="language",
            prompt="prompt",
            response_format="json",
            stream=False,
            temperature=0,
            timestamp_granularities=["word"],
        )
        assert_matches_type(TranscriptionCreateResponse, transcription, path=["response"])

    @parametrize
    async def test_raw_response_create_overload_1(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.audio.transcriptions.with_raw_response.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
        )

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        transcription = response.parse()
        assert_matches_type(TranscriptionCreateResponse, transcription, path=["response"])

    @parametrize
    async def test_streaming_response_create_overload_1(self, async_client: AsyncOpenAI) -> None:
        async with async_client.audio.transcriptions.with_streaming_response.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            transcription = await response.parse()
            assert_matches_type(TranscriptionCreateResponse, transcription, path=["response"])

        assert cast(Any, response.is_closed) is True

    @parametrize
    async def test_method_create_overload_2(self, async_client: AsyncOpenAI) -> None:
        transcription_stream = await async_client.audio.transcriptions.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
            stream=True,
        )
        await transcription_stream.response.aclose()

    @parametrize
    async def test_method_create_with_all_params_overload_2(self, async_client: AsyncOpenAI) -> None:
        transcription_stream = await async_client.audio.transcriptions.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
            stream=True,
            chunking_strategy="auto",
            include=["logprobs"],
            known_speaker_names=["string"],
            known_speaker_references=["string"],
            language="language",
            prompt="prompt",
            response_format="json",
            temperature=0,
            timestamp_granularities=["word"],
        )
        await transcription_stream.response.aclose()

    @parametrize
    async def test_raw_response_create_overload_2(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.audio.transcriptions.with_raw_response.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
            stream=True,
        )

        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        stream = response.parse()
        await stream.close()

    @parametrize
    async def test_streaming_response_create_overload_2(self, async_client: AsyncOpenAI) -> None:
        async with async_client.audio.transcriptions.with_streaming_response.create(
            file=b"raw file contents",
            model="gpt-4o-transcribe",
            stream=True,
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            stream = await response.parse()
            await stream.close()

        assert cast(Any, response.is_closed) is True