Commit 98d779fb

Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
2024-01-19 00:41:33
chore(internal): share client instances between all tests (#1088)
1 parent c484e0c
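
This change drops the per-class OpenAI/AsyncOpenAI instances and module-level api_key from every test file and instead parametrizes shared `client` / `async_client` fixtures with `indirect=True`, so the boolean parameter (False = loose, True = strict response validation) is forwarded to the fixture rather than being a pre-built client object. The fixture definitions themselves are not part of the hunks shown here; the sketch below is only a guess at what a `tests/conftest.py` providing them could look like (fixture scope, the `strict` default, and the async fixture plugin setup are assumptions, not taken from this commit).

# tests/conftest.py -- hypothetical sketch, not part of this diff's visible hunks.
# Assumes an async-capable fixture plugin (e.g. anyio or pytest-asyncio) is configured.
import os
from typing import Iterator, AsyncIterator

import pytest

from openai import OpenAI, AsyncOpenAI

base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
api_key = "My API Key"


@pytest.fixture(scope="session")
def client(request: pytest.FixtureRequest) -> Iterator[OpenAI]:
    # With `indirect=True`, the parametrized boolean arrives as `request.param`:
    # False -> loose validation, True -> strict validation.
    strict = getattr(request, "param", True)
    with OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client:
        yield client


@pytest.fixture(scope="session")
async def async_client(request: pytest.FixtureRequest) -> AsyncIterator[AsyncOpenAI]:
    strict = getattr(request, "param", True)
    async with AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client:
        yield client

With session-scoped fixtures along these lines, pytest would build at most two sync and two async clients for the entire run (one per validation mode) and share them across all tests, instead of constructing two clients per test class as before.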
tests/api_resources/audio/test_speech.py
@@ -12,18 +12,14 @@ from respx import MockRouter
 import openai._legacy_response as _legacy_response
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
-from openai._client import OpenAI, AsyncOpenAI
 
 # pyright: reportDeprecated=false
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestSpeech:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     @pytest.mark.respx(base_url=base_url)
@@ -86,15 +82,13 @@ class TestSpeech:
 
 
 class TestAsyncSpeech:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     @pytest.mark.respx(base_url=base_url)
-    async def test_method_create(self, client: AsyncOpenAI, respx_mock: MockRouter) -> None:
+    async def test_method_create(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None:
         respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
-        speech = await client.audio.speech.create(
+        speech = await async_client.audio.speech.create(
             input="string",
             model="string",
             voice="alloy",
@@ -104,9 +98,9 @@ class TestAsyncSpeech:
 
     @parametrize
     @pytest.mark.respx(base_url=base_url)
-    async def test_method_create_with_all_params(self, client: AsyncOpenAI, respx_mock: MockRouter) -> None:
+    async def test_method_create_with_all_params(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None:
         respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
-        speech = await client.audio.speech.create(
+        speech = await async_client.audio.speech.create(
             input="string",
             model="string",
             voice="alloy",
@@ -118,10 +112,10 @@ class TestAsyncSpeech:
 
     @parametrize
     @pytest.mark.respx(base_url=base_url)
-    async def test_raw_response_create(self, client: AsyncOpenAI, respx_mock: MockRouter) -> None:
+    async def test_raw_response_create(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None:
         respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
 
-        response = await client.audio.speech.with_raw_response.create(
+        response = await async_client.audio.speech.with_raw_response.create(
             input="string",
             model="string",
             voice="alloy",
@@ -134,9 +128,9 @@ class TestAsyncSpeech:
 
     @parametrize
     @pytest.mark.respx(base_url=base_url)
-    async def test_streaming_response_create(self, client: AsyncOpenAI, respx_mock: MockRouter) -> None:
+    async def test_streaming_response_create(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None:
         respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
-        async with client.audio.speech.with_streaming_response.create(
+        async with async_client.audio.speech.with_streaming_response.create(
             input="string",
             model="string",
             voice="alloy",
tests/api_resources/audio/test_transcriptions.py
@@ -9,17 +9,13 @@ import pytest
 
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
-from openai._client import OpenAI, AsyncOpenAI
 from openai.types.audio import Transcription
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestTranscriptions:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_create(self, client: OpenAI) -> None:
@@ -69,21 +65,19 @@ class TestTranscriptions:
 
 
 class TestAsyncTranscriptions:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_create(self, client: AsyncOpenAI) -> None:
-        transcription = await client.audio.transcriptions.create(
+    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
+        transcription = await async_client.audio.transcriptions.create(
             file=b"raw file contents",
             model="whisper-1",
         )
         assert_matches_type(Transcription, transcription, path=["response"])
 
     @parametrize
-    async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None:
-        transcription = await client.audio.transcriptions.create(
+    async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        transcription = await async_client.audio.transcriptions.create(
             file=b"raw file contents",
             model="whisper-1",
             language="string",
@@ -94,8 +88,8 @@ class TestAsyncTranscriptions:
         assert_matches_type(Transcription, transcription, path=["response"])
 
     @parametrize
-    async def test_raw_response_create(self, client: AsyncOpenAI) -> None:
-        response = await client.audio.transcriptions.with_raw_response.create(
+    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.audio.transcriptions.with_raw_response.create(
             file=b"raw file contents",
             model="whisper-1",
         )
@@ -106,8 +100,8 @@ class TestAsyncTranscriptions:
         assert_matches_type(Transcription, transcription, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create(self, client: AsyncOpenAI) -> None:
-        async with client.audio.transcriptions.with_streaming_response.create(
+    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.audio.transcriptions.with_streaming_response.create(
             file=b"raw file contents",
             model="whisper-1",
         ) as response:
tests/api_resources/audio/test_translations.py
@@ -9,17 +9,13 @@ import pytest
 
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
-from openai._client import OpenAI, AsyncOpenAI
 from openai.types.audio import Translation
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestTranslations:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_create(self, client: OpenAI) -> None:
@@ -68,21 +64,19 @@ class TestTranslations:
 
 
 class TestAsyncTranslations:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_create(self, client: AsyncOpenAI) -> None:
-        translation = await client.audio.translations.create(
+    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
+        translation = await async_client.audio.translations.create(
             file=b"raw file contents",
             model="whisper-1",
         )
         assert_matches_type(Translation, translation, path=["response"])
 
     @parametrize
-    async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None:
-        translation = await client.audio.translations.create(
+    async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        translation = await async_client.audio.translations.create(
             file=b"raw file contents",
             model="whisper-1",
             prompt="string",
@@ -92,8 +86,8 @@ class TestAsyncTranslations:
         assert_matches_type(Translation, translation, path=["response"])
 
     @parametrize
-    async def test_raw_response_create(self, client: AsyncOpenAI) -> None:
-        response = await client.audio.translations.with_raw_response.create(
+    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.audio.translations.with_raw_response.create(
             file=b"raw file contents",
             model="whisper-1",
         )
@@ -104,8 +98,8 @@ class TestAsyncTranslations:
         assert_matches_type(Translation, translation, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create(self, client: AsyncOpenAI) -> None:
-        async with client.audio.translations.with_streaming_response.create(
+    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.audio.translations.with_streaming_response.create(
             file=b"raw file contents",
             model="whisper-1",
         ) as response:
tests/api_resources/beta/assistants/test_files.py
@@ -9,18 +9,14 @@ import pytest
 
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
-from openai._client import OpenAI, AsyncOpenAI
 from openai.pagination import SyncCursorPage, AsyncCursorPage
 from openai.types.beta.assistants import AssistantFile, FileDeleteResponse
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestFiles:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_create(self, client: OpenAI) -> None:
@@ -211,21 +207,19 @@ class TestFiles:
 
 
 class TestAsyncFiles:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_create(self, client: AsyncOpenAI) -> None:
-        file = await client.beta.assistants.files.create(
+    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
+        file = await async_client.beta.assistants.files.create(
             "file-abc123",
             file_id="string",
         )
         assert_matches_type(AssistantFile, file, path=["response"])
 
     @parametrize
-    async def test_raw_response_create(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.assistants.files.with_raw_response.create(
+    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.assistants.files.with_raw_response.create(
             "file-abc123",
             file_id="string",
         )
@@ -236,8 +230,8 @@ class TestAsyncFiles:
         assert_matches_type(AssistantFile, file, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create(self, client: AsyncOpenAI) -> None:
-        async with client.beta.assistants.files.with_streaming_response.create(
+    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.assistants.files.with_streaming_response.create(
             "file-abc123",
             file_id="string",
         ) as response:
@@ -250,24 +244,24 @@ class TestAsyncFiles:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_create(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_create(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"):
-            await client.beta.assistants.files.with_raw_response.create(
+            await async_client.beta.assistants.files.with_raw_response.create(
                 "",
                 file_id="string",
             )
 
     @parametrize
-    async def test_method_retrieve(self, client: AsyncOpenAI) -> None:
-        file = await client.beta.assistants.files.retrieve(
+    async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None:
+        file = await async_client.beta.assistants.files.retrieve(
             "string",
             assistant_id="string",
         )
         assert_matches_type(AssistantFile, file, path=["response"])
 
     @parametrize
-    async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.assistants.files.with_raw_response.retrieve(
+    async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.assistants.files.with_raw_response.retrieve(
             "string",
             assistant_id="string",
         )
@@ -278,8 +272,8 @@ class TestAsyncFiles:
         assert_matches_type(AssistantFile, file, path=["response"])
 
     @parametrize
-    async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None:
-        async with client.beta.assistants.files.with_streaming_response.retrieve(
+    async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.assistants.files.with_streaming_response.retrieve(
             "string",
             assistant_id="string",
         ) as response:
@@ -292,29 +286,29 @@ class TestAsyncFiles:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"):
-            await client.beta.assistants.files.with_raw_response.retrieve(
+            await async_client.beta.assistants.files.with_raw_response.retrieve(
                 "string",
                 assistant_id="",
             )
 
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
-            await client.beta.assistants.files.with_raw_response.retrieve(
+            await async_client.beta.assistants.files.with_raw_response.retrieve(
                 "",
                 assistant_id="string",
             )
 
     @parametrize
-    async def test_method_list(self, client: AsyncOpenAI) -> None:
-        file = await client.beta.assistants.files.list(
+    async def test_method_list(self, async_client: AsyncOpenAI) -> None:
+        file = await async_client.beta.assistants.files.list(
             "string",
         )
         assert_matches_type(AsyncCursorPage[AssistantFile], file, path=["response"])
 
     @parametrize
-    async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None:
-        file = await client.beta.assistants.files.list(
+    async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        file = await async_client.beta.assistants.files.list(
             "string",
             after="string",
             before="string",
@@ -324,8 +318,8 @@ class TestAsyncFiles:
         assert_matches_type(AsyncCursorPage[AssistantFile], file, path=["response"])
 
     @parametrize
-    async def test_raw_response_list(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.assistants.files.with_raw_response.list(
+    async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.assistants.files.with_raw_response.list(
             "string",
         )
 
@@ -335,8 +329,8 @@ class TestAsyncFiles:
         assert_matches_type(AsyncCursorPage[AssistantFile], file, path=["response"])
 
     @parametrize
-    async def test_streaming_response_list(self, client: AsyncOpenAI) -> None:
-        async with client.beta.assistants.files.with_streaming_response.list(
+    async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.assistants.files.with_streaming_response.list(
             "string",
         ) as response:
             assert not response.is_closed
@@ -348,23 +342,23 @@ class TestAsyncFiles:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_list(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_list(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"):
-            await client.beta.assistants.files.with_raw_response.list(
+            await async_client.beta.assistants.files.with_raw_response.list(
                 "",
             )
 
     @parametrize
-    async def test_method_delete(self, client: AsyncOpenAI) -> None:
-        file = await client.beta.assistants.files.delete(
+    async def test_method_delete(self, async_client: AsyncOpenAI) -> None:
+        file = await async_client.beta.assistants.files.delete(
             "string",
             assistant_id="string",
         )
         assert_matches_type(FileDeleteResponse, file, path=["response"])
 
     @parametrize
-    async def test_raw_response_delete(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.assistants.files.with_raw_response.delete(
+    async def test_raw_response_delete(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.assistants.files.with_raw_response.delete(
             "string",
             assistant_id="string",
         )
@@ -375,8 +369,8 @@ class TestAsyncFiles:
         assert_matches_type(FileDeleteResponse, file, path=["response"])
 
     @parametrize
-    async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None:
-        async with client.beta.assistants.files.with_streaming_response.delete(
+    async def test_streaming_response_delete(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.assistants.files.with_streaming_response.delete(
             "string",
             assistant_id="string",
         ) as response:
@@ -389,15 +383,15 @@ class TestAsyncFiles:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_delete(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_delete(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"):
-            await client.beta.assistants.files.with_raw_response.delete(
+            await async_client.beta.assistants.files.with_raw_response.delete(
                 "string",
                 assistant_id="",
             )
 
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
-            await client.beta.assistants.files.with_raw_response.delete(
+            await async_client.beta.assistants.files.with_raw_response.delete(
                 "",
                 assistant_id="string",
             )
tests/api_resources/beta/threads/messages/test_files.py
@@ -9,18 +9,14 @@ import pytest
 
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
-from openai._client import OpenAI, AsyncOpenAI
 from openai.pagination import SyncCursorPage, AsyncCursorPage
 from openai.types.beta.threads.messages import MessageFile
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestFiles:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_retrieve(self, client: OpenAI) -> None:
@@ -144,13 +140,11 @@ class TestFiles:
 
 
 class TestAsyncFiles:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_retrieve(self, client: AsyncOpenAI) -> None:
-        file = await client.beta.threads.messages.files.retrieve(
+    async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None:
+        file = await async_client.beta.threads.messages.files.retrieve(
             "file-abc123",
             thread_id="thread_abc123",
             message_id="msg_abc123",
@@ -158,8 +152,8 @@ class TestAsyncFiles:
         assert_matches_type(MessageFile, file, path=["response"])
 
     @parametrize
-    async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.messages.files.with_raw_response.retrieve(
+    async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.messages.files.with_raw_response.retrieve(
             "file-abc123",
             thread_id="thread_abc123",
             message_id="msg_abc123",
@@ -171,8 +165,8 @@ class TestAsyncFiles:
         assert_matches_type(MessageFile, file, path=["response"])
 
     @parametrize
-    async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.messages.files.with_streaming_response.retrieve(
+    async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.messages.files.with_streaming_response.retrieve(
             "file-abc123",
             thread_id="thread_abc123",
             message_id="msg_abc123",
@@ -186,39 +180,39 @@ class TestAsyncFiles:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.messages.files.with_raw_response.retrieve(
+            await async_client.beta.threads.messages.files.with_raw_response.retrieve(
                 "file-abc123",
                 thread_id="",
                 message_id="msg_abc123",
             )
 
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_id` but received ''"):
-            await client.beta.threads.messages.files.with_raw_response.retrieve(
+            await async_client.beta.threads.messages.files.with_raw_response.retrieve(
                 "file-abc123",
                 thread_id="thread_abc123",
                 message_id="",
             )
 
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
-            await client.beta.threads.messages.files.with_raw_response.retrieve(
+            await async_client.beta.threads.messages.files.with_raw_response.retrieve(
                 "",
                 thread_id="thread_abc123",
                 message_id="msg_abc123",
             )
 
     @parametrize
-    async def test_method_list(self, client: AsyncOpenAI) -> None:
-        file = await client.beta.threads.messages.files.list(
+    async def test_method_list(self, async_client: AsyncOpenAI) -> None:
+        file = await async_client.beta.threads.messages.files.list(
             "string",
             thread_id="string",
         )
         assert_matches_type(AsyncCursorPage[MessageFile], file, path=["response"])
 
     @parametrize
-    async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None:
-        file = await client.beta.threads.messages.files.list(
+    async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        file = await async_client.beta.threads.messages.files.list(
             "string",
             thread_id="string",
             after="string",
@@ -229,8 +223,8 @@ class TestAsyncFiles:
         assert_matches_type(AsyncCursorPage[MessageFile], file, path=["response"])
 
     @parametrize
-    async def test_raw_response_list(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.messages.files.with_raw_response.list(
+    async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.messages.files.with_raw_response.list(
             "string",
             thread_id="string",
         )
@@ -241,8 +235,8 @@ class TestAsyncFiles:
         assert_matches_type(AsyncCursorPage[MessageFile], file, path=["response"])
 
     @parametrize
-    async def test_streaming_response_list(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.messages.files.with_streaming_response.list(
+    async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.messages.files.with_streaming_response.list(
             "string",
             thread_id="string",
         ) as response:
@@ -255,15 +249,15 @@ class TestAsyncFiles:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_list(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_list(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.messages.files.with_raw_response.list(
+            await async_client.beta.threads.messages.files.with_raw_response.list(
                 "string",
                 thread_id="",
             )
 
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_id` but received ''"):
-            await client.beta.threads.messages.files.with_raw_response.list(
+            await async_client.beta.threads.messages.files.with_raw_response.list(
                 "",
                 thread_id="string",
             )
tests/api_resources/beta/threads/runs/test_steps.py
@@ -9,18 +9,14 @@ import pytest
 
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
-from openai._client import OpenAI, AsyncOpenAI
 from openai.pagination import SyncCursorPage, AsyncCursorPage
 from openai.types.beta.threads.runs import RunStep
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestSteps:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_retrieve(self, client: OpenAI) -> None:
@@ -144,13 +140,11 @@ class TestSteps:
 
 
 class TestAsyncSteps:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_retrieve(self, client: AsyncOpenAI) -> None:
-        step = await client.beta.threads.runs.steps.retrieve(
+    async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None:
+        step = await async_client.beta.threads.runs.steps.retrieve(
             "string",
             thread_id="string",
             run_id="string",
@@ -158,8 +152,8 @@ class TestAsyncSteps:
         assert_matches_type(RunStep, step, path=["response"])
 
     @parametrize
-    async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.runs.steps.with_raw_response.retrieve(
+    async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.runs.steps.with_raw_response.retrieve(
             "string",
             thread_id="string",
             run_id="string",
@@ -171,8 +165,8 @@ class TestAsyncSteps:
         assert_matches_type(RunStep, step, path=["response"])
 
     @parametrize
-    async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.runs.steps.with_streaming_response.retrieve(
+    async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.runs.steps.with_streaming_response.retrieve(
             "string",
             thread_id="string",
             run_id="string",
@@ -186,39 +180,39 @@ class TestAsyncSteps:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.runs.steps.with_raw_response.retrieve(
+            await async_client.beta.threads.runs.steps.with_raw_response.retrieve(
                 "string",
                 thread_id="",
                 run_id="string",
             )
 
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"):
-            await client.beta.threads.runs.steps.with_raw_response.retrieve(
+            await async_client.beta.threads.runs.steps.with_raw_response.retrieve(
                 "string",
                 thread_id="string",
                 run_id="",
             )
 
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `step_id` but received ''"):
-            await client.beta.threads.runs.steps.with_raw_response.retrieve(
+            await async_client.beta.threads.runs.steps.with_raw_response.retrieve(
                 "",
                 thread_id="string",
                 run_id="string",
             )
 
     @parametrize
-    async def test_method_list(self, client: AsyncOpenAI) -> None:
-        step = await client.beta.threads.runs.steps.list(
+    async def test_method_list(self, async_client: AsyncOpenAI) -> None:
+        step = await async_client.beta.threads.runs.steps.list(
             "string",
             thread_id="string",
         )
         assert_matches_type(AsyncCursorPage[RunStep], step, path=["response"])
 
     @parametrize
-    async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None:
-        step = await client.beta.threads.runs.steps.list(
+    async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        step = await async_client.beta.threads.runs.steps.list(
             "string",
             thread_id="string",
             after="string",
@@ -229,8 +223,8 @@ class TestAsyncSteps:
         assert_matches_type(AsyncCursorPage[RunStep], step, path=["response"])
 
     @parametrize
-    async def test_raw_response_list(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.runs.steps.with_raw_response.list(
+    async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.runs.steps.with_raw_response.list(
             "string",
             thread_id="string",
         )
@@ -241,8 +235,8 @@ class TestAsyncSteps:
         assert_matches_type(AsyncCursorPage[RunStep], step, path=["response"])
 
     @parametrize
-    async def test_streaming_response_list(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.runs.steps.with_streaming_response.list(
+    async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.runs.steps.with_streaming_response.list(
             "string",
             thread_id="string",
         ) as response:
@@ -255,15 +249,15 @@ class TestAsyncSteps:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_list(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_list(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.runs.steps.with_raw_response.list(
+            await async_client.beta.threads.runs.steps.with_raw_response.list(
                 "string",
                 thread_id="",
             )
 
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"):
-            await client.beta.threads.runs.steps.with_raw_response.list(
+            await async_client.beta.threads.runs.steps.with_raw_response.list(
                 "",
                 thread_id="string",
             )
tests/api_resources/beta/threads/test_messages.py
@@ -9,18 +9,14 @@ import pytest
 
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
-from openai._client import OpenAI, AsyncOpenAI
 from openai.pagination import SyncCursorPage, AsyncCursorPage
 from openai.types.beta.threads import ThreadMessage
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestMessages:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_create(self, client: OpenAI) -> None:
@@ -235,13 +231,11 @@ class TestMessages:
 
 
 class TestAsyncMessages:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_create(self, client: AsyncOpenAI) -> None:
-        message = await client.beta.threads.messages.create(
+    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
+        message = await async_client.beta.threads.messages.create(
             "string",
             content="x",
             role="user",
@@ -249,8 +243,8 @@ class TestAsyncMessages:
         assert_matches_type(ThreadMessage, message, path=["response"])
 
     @parametrize
-    async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None:
-        message = await client.beta.threads.messages.create(
+    async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        message = await async_client.beta.threads.messages.create(
             "string",
             content="x",
             role="user",
@@ -260,8 +254,8 @@ class TestAsyncMessages:
         assert_matches_type(ThreadMessage, message, path=["response"])
 
     @parametrize
-    async def test_raw_response_create(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.messages.with_raw_response.create(
+    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.messages.with_raw_response.create(
             "string",
             content="x",
             role="user",
@@ -273,8 +267,8 @@ class TestAsyncMessages:
         assert_matches_type(ThreadMessage, message, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.messages.with_streaming_response.create(
+    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.messages.with_streaming_response.create(
             "string",
             content="x",
             role="user",
@@ -288,25 +282,25 @@ class TestAsyncMessages:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_create(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_create(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.messages.with_raw_response.create(
+            await async_client.beta.threads.messages.with_raw_response.create(
                 "",
                 content="x",
                 role="user",
             )
 
     @parametrize
-    async def test_method_retrieve(self, client: AsyncOpenAI) -> None:
-        message = await client.beta.threads.messages.retrieve(
+    async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None:
+        message = await async_client.beta.threads.messages.retrieve(
             "string",
             thread_id="string",
         )
         assert_matches_type(ThreadMessage, message, path=["response"])
 
     @parametrize
-    async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.messages.with_raw_response.retrieve(
+    async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.messages.with_raw_response.retrieve(
             "string",
             thread_id="string",
         )
@@ -317,8 +311,8 @@ class TestAsyncMessages:
         assert_matches_type(ThreadMessage, message, path=["response"])
 
     @parametrize
-    async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.messages.with_streaming_response.retrieve(
+    async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.messages.with_streaming_response.retrieve(
             "string",
             thread_id="string",
         ) as response:
@@ -331,30 +325,30 @@ class TestAsyncMessages:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.messages.with_raw_response.retrieve(
+            await async_client.beta.threads.messages.with_raw_response.retrieve(
                 "string",
                 thread_id="",
             )
 
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_id` but received ''"):
-            await client.beta.threads.messages.with_raw_response.retrieve(
+            await async_client.beta.threads.messages.with_raw_response.retrieve(
                 "",
                 thread_id="string",
             )
 
     @parametrize
-    async def test_method_update(self, client: AsyncOpenAI) -> None:
-        message = await client.beta.threads.messages.update(
+    async def test_method_update(self, async_client: AsyncOpenAI) -> None:
+        message = await async_client.beta.threads.messages.update(
             "string",
             thread_id="string",
         )
         assert_matches_type(ThreadMessage, message, path=["response"])
 
     @parametrize
-    async def test_method_update_with_all_params(self, client: AsyncOpenAI) -> None:
-        message = await client.beta.threads.messages.update(
+    async def test_method_update_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        message = await async_client.beta.threads.messages.update(
             "string",
             thread_id="string",
             metadata={},
@@ -362,8 +356,8 @@ class TestAsyncMessages:
         assert_matches_type(ThreadMessage, message, path=["response"])
 
     @parametrize
-    async def test_raw_response_update(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.messages.with_raw_response.update(
+    async def test_raw_response_update(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.messages.with_raw_response.update(
             "string",
             thread_id="string",
         )
@@ -374,8 +368,8 @@ class TestAsyncMessages:
         assert_matches_type(ThreadMessage, message, path=["response"])
 
     @parametrize
-    async def test_streaming_response_update(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.messages.with_streaming_response.update(
+    async def test_streaming_response_update(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.messages.with_streaming_response.update(
             "string",
             thread_id="string",
         ) as response:
@@ -388,29 +382,29 @@ class TestAsyncMessages:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_update(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_update(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.messages.with_raw_response.update(
+            await async_client.beta.threads.messages.with_raw_response.update(
                 "string",
                 thread_id="",
             )
 
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_id` but received ''"):
-            await client.beta.threads.messages.with_raw_response.update(
+            await async_client.beta.threads.messages.with_raw_response.update(
                 "",
                 thread_id="string",
             )
 
     @parametrize
-    async def test_method_list(self, client: AsyncOpenAI) -> None:
-        message = await client.beta.threads.messages.list(
+    async def test_method_list(self, async_client: AsyncOpenAI) -> None:
+        message = await async_client.beta.threads.messages.list(
             "string",
         )
         assert_matches_type(AsyncCursorPage[ThreadMessage], message, path=["response"])
 
     @parametrize
-    async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None:
-        message = await client.beta.threads.messages.list(
+    async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        message = await async_client.beta.threads.messages.list(
             "string",
             after="string",
             before="string",
@@ -420,8 +414,8 @@ class TestAsyncMessages:
         assert_matches_type(AsyncCursorPage[ThreadMessage], message, path=["response"])
 
     @parametrize
-    async def test_raw_response_list(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.messages.with_raw_response.list(
+    async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.messages.with_raw_response.list(
             "string",
         )
 
@@ -431,8 +425,8 @@ class TestAsyncMessages:
         assert_matches_type(AsyncCursorPage[ThreadMessage], message, path=["response"])
 
     @parametrize
-    async def test_streaming_response_list(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.messages.with_streaming_response.list(
+    async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.messages.with_streaming_response.list(
             "string",
         ) as response:
             assert not response.is_closed
@@ -444,8 +438,8 @@ class TestAsyncMessages:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_list(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_list(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.messages.with_raw_response.list(
+            await async_client.beta.threads.messages.with_raw_response.list(
                 "",
             )
tests/api_resources/beta/threads/test_runs.py
@@ -9,20 +9,16 @@ import pytest
 
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
-from openai._client import OpenAI, AsyncOpenAI
 from openai.pagination import SyncCursorPage, AsyncCursorPage
 from openai.types.beta.threads import (
     Run,
 )
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestRuns:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_create(self, client: OpenAI) -> None:
@@ -336,21 +332,19 @@ class TestRuns:
 
 
 class TestAsyncRuns:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_create(self, client: AsyncOpenAI) -> None:
-        run = await client.beta.threads.runs.create(
+    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
+        run = await async_client.beta.threads.runs.create(
             "string",
             assistant_id="string",
         )
         assert_matches_type(Run, run, path=["response"])
 
     @parametrize
-    async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None:
-        run = await client.beta.threads.runs.create(
+    async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        run = await async_client.beta.threads.runs.create(
             "string",
             assistant_id="string",
             additional_instructions="string",
@@ -362,8 +356,8 @@ class TestAsyncRuns:
         assert_matches_type(Run, run, path=["response"])
 
     @parametrize
-    async def test_raw_response_create(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.runs.with_raw_response.create(
+    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.runs.with_raw_response.create(
             "string",
             assistant_id="string",
         )
@@ -374,8 +368,8 @@ class TestAsyncRuns:
         assert_matches_type(Run, run, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.runs.with_streaming_response.create(
+    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.runs.with_streaming_response.create(
             "string",
             assistant_id="string",
         ) as response:
@@ -388,24 +382,24 @@ class TestAsyncRuns:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_create(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_create(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.runs.with_raw_response.create(
+            await async_client.beta.threads.runs.with_raw_response.create(
                 "",
                 assistant_id="string",
             )
 
     @parametrize
-    async def test_method_retrieve(self, client: AsyncOpenAI) -> None:
-        run = await client.beta.threads.runs.retrieve(
+    async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None:
+        run = await async_client.beta.threads.runs.retrieve(
             "string",
             thread_id="string",
         )
         assert_matches_type(Run, run, path=["response"])
 
     @parametrize
-    async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.runs.with_raw_response.retrieve(
+    async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.runs.with_raw_response.retrieve(
             "string",
             thread_id="string",
         )
@@ -416,8 +410,8 @@ class TestAsyncRuns:
         assert_matches_type(Run, run, path=["response"])
 
     @parametrize
-    async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.runs.with_streaming_response.retrieve(
+    async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.runs.with_streaming_response.retrieve(
             "string",
             thread_id="string",
         ) as response:
@@ -430,30 +424,30 @@ class TestAsyncRuns:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.runs.with_raw_response.retrieve(
+            await async_client.beta.threads.runs.with_raw_response.retrieve(
                 "string",
                 thread_id="",
             )
 
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"):
-            await client.beta.threads.runs.with_raw_response.retrieve(
+            await async_client.beta.threads.runs.with_raw_response.retrieve(
                 "",
                 thread_id="string",
             )
 
     @parametrize
-    async def test_method_update(self, client: AsyncOpenAI) -> None:
-        run = await client.beta.threads.runs.update(
+    async def test_method_update(self, async_client: AsyncOpenAI) -> None:
+        run = await async_client.beta.threads.runs.update(
             "string",
             thread_id="string",
         )
         assert_matches_type(Run, run, path=["response"])
 
     @parametrize
-    async def test_method_update_with_all_params(self, client: AsyncOpenAI) -> None:
-        run = await client.beta.threads.runs.update(
+    async def test_method_update_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        run = await async_client.beta.threads.runs.update(
             "string",
             thread_id="string",
             metadata={},
@@ -461,8 +455,8 @@ class TestAsyncRuns:
         assert_matches_type(Run, run, path=["response"])
 
     @parametrize
-    async def test_raw_response_update(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.runs.with_raw_response.update(
+    async def test_raw_response_update(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.runs.with_raw_response.update(
             "string",
             thread_id="string",
         )
@@ -473,8 +467,8 @@ class TestAsyncRuns:
         assert_matches_type(Run, run, path=["response"])
 
     @parametrize
-    async def test_streaming_response_update(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.runs.with_streaming_response.update(
+    async def test_streaming_response_update(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.runs.with_streaming_response.update(
             "string",
             thread_id="string",
         ) as response:
@@ -487,29 +481,29 @@ class TestAsyncRuns:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_update(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_update(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.runs.with_raw_response.update(
+            await async_client.beta.threads.runs.with_raw_response.update(
                 "string",
                 thread_id="",
             )
 
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"):
-            await client.beta.threads.runs.with_raw_response.update(
+            await async_client.beta.threads.runs.with_raw_response.update(
                 "",
                 thread_id="string",
             )
 
     @parametrize
-    async def test_method_list(self, client: AsyncOpenAI) -> None:
-        run = await client.beta.threads.runs.list(
+    async def test_method_list(self, async_client: AsyncOpenAI) -> None:
+        run = await async_client.beta.threads.runs.list(
             "string",
         )
         assert_matches_type(AsyncCursorPage[Run], run, path=["response"])
 
     @parametrize
-    async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None:
-        run = await client.beta.threads.runs.list(
+    async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        run = await async_client.beta.threads.runs.list(
             "string",
             after="string",
             before="string",
@@ -519,8 +513,8 @@ class TestAsyncRuns:
         assert_matches_type(AsyncCursorPage[Run], run, path=["response"])
 
     @parametrize
-    async def test_raw_response_list(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.runs.with_raw_response.list(
+    async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.runs.with_raw_response.list(
             "string",
         )
 
@@ -530,8 +524,8 @@ class TestAsyncRuns:
         assert_matches_type(AsyncCursorPage[Run], run, path=["response"])
 
     @parametrize
-    async def test_streaming_response_list(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.runs.with_streaming_response.list(
+    async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.runs.with_streaming_response.list(
             "string",
         ) as response:
             assert not response.is_closed
@@ -543,23 +537,23 @@ class TestAsyncRuns:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_list(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_list(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.runs.with_raw_response.list(
+            await async_client.beta.threads.runs.with_raw_response.list(
                 "",
             )
 
     @parametrize
-    async def test_method_cancel(self, client: AsyncOpenAI) -> None:
-        run = await client.beta.threads.runs.cancel(
+    async def test_method_cancel(self, async_client: AsyncOpenAI) -> None:
+        run = await async_client.beta.threads.runs.cancel(
             "string",
             thread_id="string",
         )
         assert_matches_type(Run, run, path=["response"])
 
     @parametrize
-    async def test_raw_response_cancel(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.runs.with_raw_response.cancel(
+    async def test_raw_response_cancel(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.runs.with_raw_response.cancel(
             "string",
             thread_id="string",
         )
@@ -570,8 +564,8 @@ class TestAsyncRuns:
         assert_matches_type(Run, run, path=["response"])
 
     @parametrize
-    async def test_streaming_response_cancel(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.runs.with_streaming_response.cancel(
+    async def test_streaming_response_cancel(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.runs.with_streaming_response.cancel(
             "string",
             thread_id="string",
         ) as response:
@@ -584,22 +578,22 @@ class TestAsyncRuns:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_cancel(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_cancel(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.runs.with_raw_response.cancel(
+            await async_client.beta.threads.runs.with_raw_response.cancel(
                 "string",
                 thread_id="",
             )
 
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"):
-            await client.beta.threads.runs.with_raw_response.cancel(
+            await async_client.beta.threads.runs.with_raw_response.cancel(
                 "",
                 thread_id="string",
             )
 
     @parametrize
-    async def test_method_submit_tool_outputs(self, client: AsyncOpenAI) -> None:
-        run = await client.beta.threads.runs.submit_tool_outputs(
+    async def test_method_submit_tool_outputs(self, async_client: AsyncOpenAI) -> None:
+        run = await async_client.beta.threads.runs.submit_tool_outputs(
             "string",
             thread_id="string",
             tool_outputs=[{}, {}, {}],
@@ -607,8 +601,8 @@ class TestAsyncRuns:
         assert_matches_type(Run, run, path=["response"])
 
     @parametrize
-    async def test_raw_response_submit_tool_outputs(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.runs.with_raw_response.submit_tool_outputs(
+    async def test_raw_response_submit_tool_outputs(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.runs.with_raw_response.submit_tool_outputs(
             "string",
             thread_id="string",
             tool_outputs=[{}, {}, {}],
@@ -620,8 +614,8 @@ class TestAsyncRuns:
         assert_matches_type(Run, run, path=["response"])
 
     @parametrize
-    async def test_streaming_response_submit_tool_outputs(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.runs.with_streaming_response.submit_tool_outputs(
+    async def test_streaming_response_submit_tool_outputs(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.runs.with_streaming_response.submit_tool_outputs(
             "string",
             thread_id="string",
             tool_outputs=[{}, {}, {}],
@@ -635,16 +629,16 @@ class TestAsyncRuns:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_submit_tool_outputs(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_submit_tool_outputs(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.runs.with_raw_response.submit_tool_outputs(
+            await async_client.beta.threads.runs.with_raw_response.submit_tool_outputs(
                 "string",
                 thread_id="",
                 tool_outputs=[{}, {}, {}],
             )
 
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"):
-            await client.beta.threads.runs.with_raw_response.submit_tool_outputs(
+            await async_client.beta.threads.runs.with_raw_response.submit_tool_outputs(
                 "",
                 thread_id="string",
                 tool_outputs=[{}, {}, {}],
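
Every file below follows the same pattern: the per-class strict_client/loose_client instances are removed, and the booleans in the parametrize call are routed to shared client / async_client fixtures via indirect=True. The fixture definitions themselves are not part of this diff (they presumably live in tests/conftest.py), so the following is only a rough sketch, under assumed names, scope, and flags, of what such shared fixtures could look like:

# Hypothetical sketch of the shared fixtures that indirect=True targets.
# The real definitions are not shown in this diff; the fixture names, the
# session scope, and the _strict_response_validation flag are assumptions
# inferred from the test changes. An async fixture plugin (e.g. pytest-asyncio)
# is assumed to be configured for the async_client fixture.
import os

import pytest

from openai import OpenAI, AsyncOpenAI

base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
api_key = "My API Key"


@pytest.fixture(scope="session")
def client(request: pytest.FixtureRequest) -> OpenAI:
    # The parametrized boolean (False => "loose", True => "strict") arrives
    # here as request.param because the tests use indirect=True.
    strict = getattr(request, "param", True)
    return OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=strict)


@pytest.fixture(scope="session")
async def async_client(request: pytest.FixtureRequest) -> AsyncOpenAI:
    strict = getattr(request, "param", True)
    return AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=strict)

Because such fixtures would be session-scoped and cached per parameter value, each of the four clients (sync/async, loose/strict) would be constructed once and reused by every test that requests it, which is what sharing client instances between tests amounts to.
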
tests/api_resources/beta/test_assistants.py
@@ -9,7 +9,6 @@ import pytest
 
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
-from openai._client import OpenAI, AsyncOpenAI
 from openai.pagination import SyncCursorPage, AsyncCursorPage
 from openai.types.beta import (
     Assistant,
@@ -17,13 +16,10 @@ from openai.types.beta import (
 )
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestAssistants:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_create(self, client: OpenAI) -> None:
@@ -234,20 +230,18 @@ class TestAssistants:
 
 
 class TestAsyncAssistants:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_create(self, client: AsyncOpenAI) -> None:
-        assistant = await client.beta.assistants.create(
+    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
+        assistant = await async_client.beta.assistants.create(
             model="string",
         )
         assert_matches_type(Assistant, assistant, path=["response"])
 
     @parametrize
-    async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None:
-        assistant = await client.beta.assistants.create(
+    async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        assistant = await async_client.beta.assistants.create(
             model="string",
             description="string",
             file_ids=["string", "string", "string"],
@@ -259,8 +253,8 @@ class TestAsyncAssistants:
         assert_matches_type(Assistant, assistant, path=["response"])
 
     @parametrize
-    async def test_raw_response_create(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.assistants.with_raw_response.create(
+    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.assistants.with_raw_response.create(
             model="string",
         )
 
@@ -270,8 +264,8 @@ class TestAsyncAssistants:
         assert_matches_type(Assistant, assistant, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create(self, client: AsyncOpenAI) -> None:
-        async with client.beta.assistants.with_streaming_response.create(
+    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.assistants.with_streaming_response.create(
             model="string",
         ) as response:
             assert not response.is_closed
@@ -283,15 +277,15 @@ class TestAsyncAssistants:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_method_retrieve(self, client: AsyncOpenAI) -> None:
-        assistant = await client.beta.assistants.retrieve(
+    async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None:
+        assistant = await async_client.beta.assistants.retrieve(
             "string",
         )
         assert_matches_type(Assistant, assistant, path=["response"])
 
     @parametrize
-    async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.assistants.with_raw_response.retrieve(
+    async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.assistants.with_raw_response.retrieve(
             "string",
         )
 
@@ -301,8 +295,8 @@ class TestAsyncAssistants:
         assert_matches_type(Assistant, assistant, path=["response"])
 
     @parametrize
-    async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None:
-        async with client.beta.assistants.with_streaming_response.retrieve(
+    async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.assistants.with_streaming_response.retrieve(
             "string",
         ) as response:
             assert not response.is_closed
@@ -314,22 +308,22 @@ class TestAsyncAssistants:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"):
-            await client.beta.assistants.with_raw_response.retrieve(
+            await async_client.beta.assistants.with_raw_response.retrieve(
                 "",
             )
 
     @parametrize
-    async def test_method_update(self, client: AsyncOpenAI) -> None:
-        assistant = await client.beta.assistants.update(
+    async def test_method_update(self, async_client: AsyncOpenAI) -> None:
+        assistant = await async_client.beta.assistants.update(
             "string",
         )
         assert_matches_type(Assistant, assistant, path=["response"])
 
     @parametrize
-    async def test_method_update_with_all_params(self, client: AsyncOpenAI) -> None:
-        assistant = await client.beta.assistants.update(
+    async def test_method_update_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        assistant = await async_client.beta.assistants.update(
             "string",
             description="string",
             file_ids=["string", "string", "string"],
@@ -342,8 +336,8 @@ class TestAsyncAssistants:
         assert_matches_type(Assistant, assistant, path=["response"])
 
     @parametrize
-    async def test_raw_response_update(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.assistants.with_raw_response.update(
+    async def test_raw_response_update(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.assistants.with_raw_response.update(
             "string",
         )
 
@@ -353,8 +347,8 @@ class TestAsyncAssistants:
         assert_matches_type(Assistant, assistant, path=["response"])
 
     @parametrize
-    async def test_streaming_response_update(self, client: AsyncOpenAI) -> None:
-        async with client.beta.assistants.with_streaming_response.update(
+    async def test_streaming_response_update(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.assistants.with_streaming_response.update(
             "string",
         ) as response:
             assert not response.is_closed
@@ -366,20 +360,20 @@ class TestAsyncAssistants:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_update(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_update(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"):
-            await client.beta.assistants.with_raw_response.update(
+            await async_client.beta.assistants.with_raw_response.update(
                 "",
             )
 
     @parametrize
-    async def test_method_list(self, client: AsyncOpenAI) -> None:
-        assistant = await client.beta.assistants.list()
+    async def test_method_list(self, async_client: AsyncOpenAI) -> None:
+        assistant = await async_client.beta.assistants.list()
         assert_matches_type(AsyncCursorPage[Assistant], assistant, path=["response"])
 
     @parametrize
-    async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None:
-        assistant = await client.beta.assistants.list(
+    async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        assistant = await async_client.beta.assistants.list(
             after="string",
             before="string",
             limit=0,
@@ -388,8 +382,8 @@ class TestAsyncAssistants:
         assert_matches_type(AsyncCursorPage[Assistant], assistant, path=["response"])
 
     @parametrize
-    async def test_raw_response_list(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.assistants.with_raw_response.list()
+    async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.assistants.with_raw_response.list()
 
         assert response.is_closed is True
         assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -397,8 +391,8 @@ class TestAsyncAssistants:
         assert_matches_type(AsyncCursorPage[Assistant], assistant, path=["response"])
 
     @parametrize
-    async def test_streaming_response_list(self, client: AsyncOpenAI) -> None:
-        async with client.beta.assistants.with_streaming_response.list() as response:
+    async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.assistants.with_streaming_response.list() as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
 
@@ -408,15 +402,15 @@ class TestAsyncAssistants:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_method_delete(self, client: AsyncOpenAI) -> None:
-        assistant = await client.beta.assistants.delete(
+    async def test_method_delete(self, async_client: AsyncOpenAI) -> None:
+        assistant = await async_client.beta.assistants.delete(
             "string",
         )
         assert_matches_type(AssistantDeleted, assistant, path=["response"])
 
     @parametrize
-    async def test_raw_response_delete(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.assistants.with_raw_response.delete(
+    async def test_raw_response_delete(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.assistants.with_raw_response.delete(
             "string",
         )
 
@@ -426,8 +420,8 @@ class TestAsyncAssistants:
         assert_matches_type(AssistantDeleted, assistant, path=["response"])
 
     @parametrize
-    async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None:
-        async with client.beta.assistants.with_streaming_response.delete(
+    async def test_streaming_response_delete(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.assistants.with_streaming_response.delete(
             "string",
         ) as response:
             assert not response.is_closed
@@ -439,8 +433,8 @@ class TestAsyncAssistants:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_delete(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_delete(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"):
-            await client.beta.assistants.with_raw_response.delete(
+            await async_client.beta.assistants.with_raw_response.delete(
                 "",
             )
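
For reference, indirect parametrization itself is standard pytest behavior: the values listed in parametrize are delivered to the named fixture as request.param rather than being passed straight into the test, and ids label the generated variants. A minimal, self-contained example (unrelated to this repository, with made-up fixture and test names) looks like this:

# Standalone illustration of pytest's indirect parametrization.
import pytest


@pytest.fixture
def connection(request: pytest.FixtureRequest) -> str:
    strict = request.param  # False or True, supplied by the parametrize call below
    return "strict-connection" if strict else "loose-connection"


# Generates two tests: test_uses_connection[loose] and test_uses_connection[strict]
@pytest.mark.parametrize("connection", [False, True], indirect=True, ids=["loose", "strict"])
def test_uses_connection(connection: str) -> None:
    assert connection.endswith("-connection")

Running pytest -k strict against such a module selects only the [strict] variants, since -k matches the generated test ids.
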
tests/api_resources/beta/test_threads.py
@@ -9,7 +9,6 @@ import pytest
 
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
-from openai._client import OpenAI, AsyncOpenAI
 from openai.types.beta import (
     Thread,
     ThreadDeleted,
@@ -17,13 +16,10 @@ from openai.types.beta import (
 from openai.types.beta.threads import Run
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestThreads:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_create(self, client: OpenAI) -> None:
@@ -266,18 +262,16 @@ class TestThreads:
 
 
 class TestAsyncThreads:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_create(self, client: AsyncOpenAI) -> None:
-        thread = await client.beta.threads.create()
+    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
+        thread = await async_client.beta.threads.create()
         assert_matches_type(Thread, thread, path=["response"])
 
     @parametrize
-    async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None:
-        thread = await client.beta.threads.create(
+    async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        thread = await async_client.beta.threads.create(
             messages=[
                 {
                     "role": "user",
@@ -303,8 +297,8 @@ class TestAsyncThreads:
         assert_matches_type(Thread, thread, path=["response"])
 
     @parametrize
-    async def test_raw_response_create(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.with_raw_response.create()
+    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.with_raw_response.create()
 
         assert response.is_closed is True
         assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -312,8 +306,8 @@ class TestAsyncThreads:
         assert_matches_type(Thread, thread, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.with_streaming_response.create() as response:
+    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.with_streaming_response.create() as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
 
@@ -323,15 +317,15 @@ class TestAsyncThreads:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_method_retrieve(self, client: AsyncOpenAI) -> None:
-        thread = await client.beta.threads.retrieve(
+    async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None:
+        thread = await async_client.beta.threads.retrieve(
             "string",
         )
         assert_matches_type(Thread, thread, path=["response"])
 
     @parametrize
-    async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.with_raw_response.retrieve(
+    async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.with_raw_response.retrieve(
             "string",
         )
 
@@ -341,8 +335,8 @@ class TestAsyncThreads:
         assert_matches_type(Thread, thread, path=["response"])
 
     @parametrize
-    async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.with_streaming_response.retrieve(
+    async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.with_streaming_response.retrieve(
             "string",
         ) as response:
             assert not response.is_closed
@@ -354,30 +348,30 @@ class TestAsyncThreads:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.with_raw_response.retrieve(
+            await async_client.beta.threads.with_raw_response.retrieve(
                 "",
             )
 
     @parametrize
-    async def test_method_update(self, client: AsyncOpenAI) -> None:
-        thread = await client.beta.threads.update(
+    async def test_method_update(self, async_client: AsyncOpenAI) -> None:
+        thread = await async_client.beta.threads.update(
             "string",
         )
         assert_matches_type(Thread, thread, path=["response"])
 
     @parametrize
-    async def test_method_update_with_all_params(self, client: AsyncOpenAI) -> None:
-        thread = await client.beta.threads.update(
+    async def test_method_update_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        thread = await async_client.beta.threads.update(
             "string",
             metadata={},
         )
         assert_matches_type(Thread, thread, path=["response"])
 
     @parametrize
-    async def test_raw_response_update(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.with_raw_response.update(
+    async def test_raw_response_update(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.with_raw_response.update(
             "string",
         )
 
@@ -387,8 +381,8 @@ class TestAsyncThreads:
         assert_matches_type(Thread, thread, path=["response"])
 
     @parametrize
-    async def test_streaming_response_update(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.with_streaming_response.update(
+    async def test_streaming_response_update(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.with_streaming_response.update(
             "string",
         ) as response:
             assert not response.is_closed
@@ -400,22 +394,22 @@ class TestAsyncThreads:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_update(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_update(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.with_raw_response.update(
+            await async_client.beta.threads.with_raw_response.update(
                 "",
             )
 
     @parametrize
-    async def test_method_delete(self, client: AsyncOpenAI) -> None:
-        thread = await client.beta.threads.delete(
+    async def test_method_delete(self, async_client: AsyncOpenAI) -> None:
+        thread = await async_client.beta.threads.delete(
             "string",
         )
         assert_matches_type(ThreadDeleted, thread, path=["response"])
 
     @parametrize
-    async def test_raw_response_delete(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.with_raw_response.delete(
+    async def test_raw_response_delete(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.with_raw_response.delete(
             "string",
         )
 
@@ -425,8 +419,8 @@ class TestAsyncThreads:
         assert_matches_type(ThreadDeleted, thread, path=["response"])
 
     @parametrize
-    async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.with_streaming_response.delete(
+    async def test_streaming_response_delete(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.with_streaming_response.delete(
             "string",
         ) as response:
             assert not response.is_closed
@@ -438,22 +432,22 @@ class TestAsyncThreads:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_delete(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_delete(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"):
-            await client.beta.threads.with_raw_response.delete(
+            await async_client.beta.threads.with_raw_response.delete(
                 "",
             )
 
     @parametrize
-    async def test_method_create_and_run(self, client: AsyncOpenAI) -> None:
-        thread = await client.beta.threads.create_and_run(
+    async def test_method_create_and_run(self, async_client: AsyncOpenAI) -> None:
+        thread = await async_client.beta.threads.create_and_run(
             assistant_id="string",
         )
         assert_matches_type(Run, thread, path=["response"])
 
     @parametrize
-    async def test_method_create_and_run_with_all_params(self, client: AsyncOpenAI) -> None:
-        thread = await client.beta.threads.create_and_run(
+    async def test_method_create_and_run_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        thread = await async_client.beta.threads.create_and_run(
             assistant_id="string",
             instructions="string",
             metadata={},
@@ -486,8 +480,8 @@ class TestAsyncThreads:
         assert_matches_type(Run, thread, path=["response"])
 
     @parametrize
-    async def test_raw_response_create_and_run(self, client: AsyncOpenAI) -> None:
-        response = await client.beta.threads.with_raw_response.create_and_run(
+    async def test_raw_response_create_and_run(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.beta.threads.with_raw_response.create_and_run(
             assistant_id="string",
         )
 
@@ -497,8 +491,8 @@ class TestAsyncThreads:
         assert_matches_type(Run, thread, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create_and_run(self, client: AsyncOpenAI) -> None:
-        async with client.beta.threads.with_streaming_response.create_and_run(
+    async def test_streaming_response_create_and_run(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.beta.threads.with_streaming_response.create_and_run(
             assistant_id="string",
         ) as response:
             assert not response.is_closed
tests/api_resources/chat/test_completions.py
@@ -9,17 +9,13 @@ import pytest
 
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
-from openai._client import OpenAI, AsyncOpenAI
 from openai.types.chat import ChatCompletion
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestCompletions:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_create_overload_1(self, client: OpenAI) -> None:
@@ -249,13 +245,11 @@ class TestCompletions:
 
 
 class TestAsyncCompletions:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_create_overload_1(self, client: AsyncOpenAI) -> None:
-        completion = await client.chat.completions.create(
+    async def test_method_create_overload_1(self, async_client: AsyncOpenAI) -> None:
+        completion = await async_client.chat.completions.create(
             messages=[
                 {
                     "content": "string",
@@ -267,8 +261,8 @@ class TestAsyncCompletions:
         assert_matches_type(ChatCompletion, completion, path=["response"])
 
     @parametrize
-    async def test_method_create_with_all_params_overload_1(self, client: AsyncOpenAI) -> None:
-        completion = await client.chat.completions.create(
+    async def test_method_create_with_all_params_overload_1(self, async_client: AsyncOpenAI) -> None:
+        completion = await async_client.chat.completions.create(
             messages=[
                 {
                     "content": "string",
@@ -330,8 +324,8 @@ class TestAsyncCompletions:
         assert_matches_type(ChatCompletion, completion, path=["response"])
 
     @parametrize
-    async def test_raw_response_create_overload_1(self, client: AsyncOpenAI) -> None:
-        response = await client.chat.completions.with_raw_response.create(
+    async def test_raw_response_create_overload_1(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.chat.completions.with_raw_response.create(
             messages=[
                 {
                     "content": "string",
@@ -347,8 +341,8 @@ class TestAsyncCompletions:
         assert_matches_type(ChatCompletion, completion, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create_overload_1(self, client: AsyncOpenAI) -> None:
-        async with client.chat.completions.with_streaming_response.create(
+    async def test_streaming_response_create_overload_1(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.chat.completions.with_streaming_response.create(
             messages=[
                 {
                     "content": "string",
@@ -366,8 +360,8 @@ class TestAsyncCompletions:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_method_create_overload_2(self, client: AsyncOpenAI) -> None:
-        completion_stream = await client.chat.completions.create(
+    async def test_method_create_overload_2(self, async_client: AsyncOpenAI) -> None:
+        completion_stream = await async_client.chat.completions.create(
             messages=[
                 {
                     "content": "string",
@@ -380,8 +374,8 @@ class TestAsyncCompletions:
         await completion_stream.response.aclose()
 
     @parametrize
-    async def test_method_create_with_all_params_overload_2(self, client: AsyncOpenAI) -> None:
-        completion_stream = await client.chat.completions.create(
+    async def test_method_create_with_all_params_overload_2(self, async_client: AsyncOpenAI) -> None:
+        completion_stream = await async_client.chat.completions.create(
             messages=[
                 {
                     "content": "string",
@@ -443,8 +437,8 @@ class TestAsyncCompletions:
         await completion_stream.response.aclose()
 
     @parametrize
-    async def test_raw_response_create_overload_2(self, client: AsyncOpenAI) -> None:
-        response = await client.chat.completions.with_raw_response.create(
+    async def test_raw_response_create_overload_2(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.chat.completions.with_raw_response.create(
             messages=[
                 {
                     "content": "string",
@@ -460,8 +454,8 @@ class TestAsyncCompletions:
         await stream.close()
 
     @parametrize
-    async def test_streaming_response_create_overload_2(self, client: AsyncOpenAI) -> None:
-        async with client.chat.completions.with_streaming_response.create(
+    async def test_streaming_response_create_overload_2(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.chat.completions.with_streaming_response.create(
             messages=[
                 {
                     "content": "string",
tests/api_resources/fine_tuning/test_jobs.py
@@ -9,7 +9,6 @@ import pytest
 
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
-from openai._client import OpenAI, AsyncOpenAI
 from openai.pagination import SyncCursorPage, AsyncCursorPage
 from openai.types.fine_tuning import (
     FineTuningJob,
@@ -17,13 +16,10 @@ from openai.types.fine_tuning import (
 )
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestJobs:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_create(self, client: OpenAI) -> None:
@@ -232,21 +228,19 @@ class TestJobs:
 
 
 class TestAsyncJobs:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_create(self, client: AsyncOpenAI) -> None:
-        job = await client.fine_tuning.jobs.create(
+    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
+        job = await async_client.fine_tuning.jobs.create(
             model="gpt-3.5-turbo",
             training_file="file-abc123",
         )
         assert_matches_type(FineTuningJob, job, path=["response"])
 
     @parametrize
-    async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None:
-        job = await client.fine_tuning.jobs.create(
+    async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        job = await async_client.fine_tuning.jobs.create(
             model="gpt-3.5-turbo",
             training_file="file-abc123",
             hyperparameters={
@@ -260,8 +254,8 @@ class TestAsyncJobs:
         assert_matches_type(FineTuningJob, job, path=["response"])
 
     @parametrize
-    async def test_raw_response_create(self, client: AsyncOpenAI) -> None:
-        response = await client.fine_tuning.jobs.with_raw_response.create(
+    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.fine_tuning.jobs.with_raw_response.create(
             model="gpt-3.5-turbo",
             training_file="file-abc123",
         )
@@ -272,8 +266,8 @@ class TestAsyncJobs:
         assert_matches_type(FineTuningJob, job, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create(self, client: AsyncOpenAI) -> None:
-        async with client.fine_tuning.jobs.with_streaming_response.create(
+    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.fine_tuning.jobs.with_streaming_response.create(
             model="gpt-3.5-turbo",
             training_file="file-abc123",
         ) as response:
@@ -286,15 +280,15 @@ class TestAsyncJobs:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_method_retrieve(self, client: AsyncOpenAI) -> None:
-        job = await client.fine_tuning.jobs.retrieve(
+    async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None:
+        job = await async_client.fine_tuning.jobs.retrieve(
             "ft-AF1WoRqd3aJAHsqc9NY7iL8F",
         )
         assert_matches_type(FineTuningJob, job, path=["response"])
 
     @parametrize
-    async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None:
-        response = await client.fine_tuning.jobs.with_raw_response.retrieve(
+    async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.fine_tuning.jobs.with_raw_response.retrieve(
             "ft-AF1WoRqd3aJAHsqc9NY7iL8F",
         )
 
@@ -304,8 +298,8 @@ class TestAsyncJobs:
         assert_matches_type(FineTuningJob, job, path=["response"])
 
     @parametrize
-    async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None:
-        async with client.fine_tuning.jobs.with_streaming_response.retrieve(
+    async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.fine_tuning.jobs.with_streaming_response.retrieve(
             "ft-AF1WoRqd3aJAHsqc9NY7iL8F",
         ) as response:
             assert not response.is_closed
@@ -317,28 +311,28 @@ class TestAsyncJobs:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `fine_tuning_job_id` but received ''"):
-            await client.fine_tuning.jobs.with_raw_response.retrieve(
+            await async_client.fine_tuning.jobs.with_raw_response.retrieve(
                 "",
             )
 
     @parametrize
-    async def test_method_list(self, client: AsyncOpenAI) -> None:
-        job = await client.fine_tuning.jobs.list()
+    async def test_method_list(self, async_client: AsyncOpenAI) -> None:
+        job = await async_client.fine_tuning.jobs.list()
         assert_matches_type(AsyncCursorPage[FineTuningJob], job, path=["response"])
 
     @parametrize
-    async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None:
-        job = await client.fine_tuning.jobs.list(
+    async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        job = await async_client.fine_tuning.jobs.list(
             after="string",
             limit=0,
         )
         assert_matches_type(AsyncCursorPage[FineTuningJob], job, path=["response"])
 
     @parametrize
-    async def test_raw_response_list(self, client: AsyncOpenAI) -> None:
-        response = await client.fine_tuning.jobs.with_raw_response.list()
+    async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.fine_tuning.jobs.with_raw_response.list()
 
         assert response.is_closed is True
         assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -346,8 +340,8 @@ class TestAsyncJobs:
         assert_matches_type(AsyncCursorPage[FineTuningJob], job, path=["response"])
 
     @parametrize
-    async def test_streaming_response_list(self, client: AsyncOpenAI) -> None:
-        async with client.fine_tuning.jobs.with_streaming_response.list() as response:
+    async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.fine_tuning.jobs.with_streaming_response.list() as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
 
@@ -357,15 +351,15 @@ class TestAsyncJobs:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_method_cancel(self, client: AsyncOpenAI) -> None:
-        job = await client.fine_tuning.jobs.cancel(
+    async def test_method_cancel(self, async_client: AsyncOpenAI) -> None:
+        job = await async_client.fine_tuning.jobs.cancel(
             "ft-AF1WoRqd3aJAHsqc9NY7iL8F",
         )
         assert_matches_type(FineTuningJob, job, path=["response"])
 
     @parametrize
-    async def test_raw_response_cancel(self, client: AsyncOpenAI) -> None:
-        response = await client.fine_tuning.jobs.with_raw_response.cancel(
+    async def test_raw_response_cancel(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.fine_tuning.jobs.with_raw_response.cancel(
             "ft-AF1WoRqd3aJAHsqc9NY7iL8F",
         )
 
@@ -375,8 +369,8 @@ class TestAsyncJobs:
         assert_matches_type(FineTuningJob, job, path=["response"])
 
     @parametrize
-    async def test_streaming_response_cancel(self, client: AsyncOpenAI) -> None:
-        async with client.fine_tuning.jobs.with_streaming_response.cancel(
+    async def test_streaming_response_cancel(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.fine_tuning.jobs.with_streaming_response.cancel(
             "ft-AF1WoRqd3aJAHsqc9NY7iL8F",
         ) as response:
             assert not response.is_closed
@@ -388,22 +382,22 @@ class TestAsyncJobs:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_cancel(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_cancel(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `fine_tuning_job_id` but received ''"):
-            await client.fine_tuning.jobs.with_raw_response.cancel(
+            await async_client.fine_tuning.jobs.with_raw_response.cancel(
                 "",
             )
 
     @parametrize
-    async def test_method_list_events(self, client: AsyncOpenAI) -> None:
-        job = await client.fine_tuning.jobs.list_events(
+    async def test_method_list_events(self, async_client: AsyncOpenAI) -> None:
+        job = await async_client.fine_tuning.jobs.list_events(
             "ft-AF1WoRqd3aJAHsqc9NY7iL8F",
         )
         assert_matches_type(AsyncCursorPage[FineTuningJobEvent], job, path=["response"])
 
     @parametrize
-    async def test_method_list_events_with_all_params(self, client: AsyncOpenAI) -> None:
-        job = await client.fine_tuning.jobs.list_events(
+    async def test_method_list_events_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        job = await async_client.fine_tuning.jobs.list_events(
             "ft-AF1WoRqd3aJAHsqc9NY7iL8F",
             after="string",
             limit=0,
@@ -411,8 +405,8 @@ class TestAsyncJobs:
         assert_matches_type(AsyncCursorPage[FineTuningJobEvent], job, path=["response"])
 
     @parametrize
-    async def test_raw_response_list_events(self, client: AsyncOpenAI) -> None:
-        response = await client.fine_tuning.jobs.with_raw_response.list_events(
+    async def test_raw_response_list_events(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.fine_tuning.jobs.with_raw_response.list_events(
             "ft-AF1WoRqd3aJAHsqc9NY7iL8F",
         )
 
@@ -422,8 +416,8 @@ class TestAsyncJobs:
         assert_matches_type(AsyncCursorPage[FineTuningJobEvent], job, path=["response"])
 
     @parametrize
-    async def test_streaming_response_list_events(self, client: AsyncOpenAI) -> None:
-        async with client.fine_tuning.jobs.with_streaming_response.list_events(
+    async def test_streaming_response_list_events(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.fine_tuning.jobs.with_streaming_response.list_events(
             "ft-AF1WoRqd3aJAHsqc9NY7iL8F",
         ) as response:
             assert not response.is_closed
@@ -435,8 +429,8 @@ class TestAsyncJobs:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_list_events(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_list_events(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `fine_tuning_job_id` but received ''"):
-            await client.fine_tuning.jobs.with_raw_response.list_events(
+            await async_client.fine_tuning.jobs.with_raw_response.list_events(
                 "",
             )
tests/api_resources/test_completions.py
@@ -10,16 +10,12 @@ import pytest
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
 from openai.types import Completion
-from openai._client import OpenAI, AsyncOpenAI
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestCompletions:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_create_overload_1(self, client: OpenAI) -> None:
@@ -139,21 +135,19 @@ class TestCompletions:
 
 
 class TestAsyncCompletions:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_create_overload_1(self, client: AsyncOpenAI) -> None:
-        completion = await client.completions.create(
+    async def test_method_create_overload_1(self, async_client: AsyncOpenAI) -> None:
+        completion = await async_client.completions.create(
             model="string",
             prompt="This is a test.",
         )
         assert_matches_type(Completion, completion, path=["response"])
 
     @parametrize
-    async def test_method_create_with_all_params_overload_1(self, client: AsyncOpenAI) -> None:
-        completion = await client.completions.create(
+    async def test_method_create_with_all_params_overload_1(self, async_client: AsyncOpenAI) -> None:
+        completion = await async_client.completions.create(
             model="string",
             prompt="This is a test.",
             best_of=0,
@@ -175,8 +169,8 @@ class TestAsyncCompletions:
         assert_matches_type(Completion, completion, path=["response"])
 
     @parametrize
-    async def test_raw_response_create_overload_1(self, client: AsyncOpenAI) -> None:
-        response = await client.completions.with_raw_response.create(
+    async def test_raw_response_create_overload_1(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.completions.with_raw_response.create(
             model="string",
             prompt="This is a test.",
         )
@@ -187,8 +181,8 @@ class TestAsyncCompletions:
         assert_matches_type(Completion, completion, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create_overload_1(self, client: AsyncOpenAI) -> None:
-        async with client.completions.with_streaming_response.create(
+    async def test_streaming_response_create_overload_1(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.completions.with_streaming_response.create(
             model="string",
             prompt="This is a test.",
         ) as response:
@@ -201,8 +195,8 @@ class TestAsyncCompletions:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_method_create_overload_2(self, client: AsyncOpenAI) -> None:
-        completion_stream = await client.completions.create(
+    async def test_method_create_overload_2(self, async_client: AsyncOpenAI) -> None:
+        completion_stream = await async_client.completions.create(
             model="string",
             prompt="This is a test.",
             stream=True,
@@ -210,8 +204,8 @@ class TestAsyncCompletions:
         await completion_stream.response.aclose()
 
     @parametrize
-    async def test_method_create_with_all_params_overload_2(self, client: AsyncOpenAI) -> None:
-        completion_stream = await client.completions.create(
+    async def test_method_create_with_all_params_overload_2(self, async_client: AsyncOpenAI) -> None:
+        completion_stream = await async_client.completions.create(
             model="string",
             prompt="This is a test.",
             stream=True,
@@ -233,8 +227,8 @@ class TestAsyncCompletions:
         await completion_stream.response.aclose()
 
     @parametrize
-    async def test_raw_response_create_overload_2(self, client: AsyncOpenAI) -> None:
-        response = await client.completions.with_raw_response.create(
+    async def test_raw_response_create_overload_2(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.completions.with_raw_response.create(
             model="string",
             prompt="This is a test.",
             stream=True,
@@ -245,8 +239,8 @@ class TestAsyncCompletions:
         await stream.close()
 
     @parametrize
-    async def test_streaming_response_create_overload_2(self, client: AsyncOpenAI) -> None:
-        async with client.completions.with_streaming_response.create(
+    async def test_streaming_response_create_overload_2(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.completions.with_streaming_response.create(
             model="string",
             prompt="This is a test.",
             stream=True,
tests/api_resources/test_embeddings.py
@@ -10,16 +10,12 @@ import pytest
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
 from openai.types import CreateEmbeddingResponse
-from openai._client import OpenAI, AsyncOpenAI
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestEmbeddings:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_create(self, client: OpenAI) -> None:
@@ -67,21 +63,19 @@ class TestEmbeddings:
 
 
 class TestAsyncEmbeddings:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_create(self, client: AsyncOpenAI) -> None:
-        embedding = await client.embeddings.create(
+    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
+        embedding = await async_client.embeddings.create(
             input="The quick brown fox jumped over the lazy dog",
             model="text-embedding-ada-002",
         )
         assert_matches_type(CreateEmbeddingResponse, embedding, path=["response"])
 
     @parametrize
-    async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None:
-        embedding = await client.embeddings.create(
+    async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        embedding = await async_client.embeddings.create(
             input="The quick brown fox jumped over the lazy dog",
             model="text-embedding-ada-002",
             encoding_format="float",
@@ -90,8 +84,8 @@ class TestAsyncEmbeddings:
         assert_matches_type(CreateEmbeddingResponse, embedding, path=["response"])
 
     @parametrize
-    async def test_raw_response_create(self, client: AsyncOpenAI) -> None:
-        response = await client.embeddings.with_raw_response.create(
+    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.embeddings.with_raw_response.create(
             input="The quick brown fox jumped over the lazy dog",
             model="text-embedding-ada-002",
         )
@@ -102,8 +96,8 @@ class TestAsyncEmbeddings:
         assert_matches_type(CreateEmbeddingResponse, embedding, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create(self, client: AsyncOpenAI) -> None:
-        async with client.embeddings.with_streaming_response.create(
+    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.embeddings.with_streaming_response.create(
             input="The quick brown fox jumped over the lazy dog",
             model="text-embedding-ada-002",
         ) as response:
tests/api_resources/test_files.py
@@ -13,19 +13,15 @@ import openai._legacy_response as _legacy_response
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
 from openai.types import FileObject, FileDeleted
-from openai._client import OpenAI, AsyncOpenAI
 from openai.pagination import SyncPage, AsyncPage
 
 # pyright: reportDeprecated=false
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestFiles:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_create(self, client: OpenAI) -> None:
@@ -261,21 +257,19 @@ class TestFiles:
 
 
 class TestAsyncFiles:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_create(self, client: AsyncOpenAI) -> None:
-        file = await client.files.create(
+    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
+        file = await async_client.files.create(
             file=b"raw file contents",
             purpose="fine-tune",
         )
         assert_matches_type(FileObject, file, path=["response"])
 
     @parametrize
-    async def test_raw_response_create(self, client: AsyncOpenAI) -> None:
-        response = await client.files.with_raw_response.create(
+    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.files.with_raw_response.create(
             file=b"raw file contents",
             purpose="fine-tune",
         )
@@ -286,8 +280,8 @@ class TestAsyncFiles:
         assert_matches_type(FileObject, file, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create(self, client: AsyncOpenAI) -> None:
-        async with client.files.with_streaming_response.create(
+    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.files.with_streaming_response.create(
             file=b"raw file contents",
             purpose="fine-tune",
         ) as response:
@@ -300,15 +294,15 @@ class TestAsyncFiles:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_method_retrieve(self, client: AsyncOpenAI) -> None:
-        file = await client.files.retrieve(
+    async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None:
+        file = await async_client.files.retrieve(
             "string",
         )
         assert_matches_type(FileObject, file, path=["response"])
 
     @parametrize
-    async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None:
-        response = await client.files.with_raw_response.retrieve(
+    async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.files.with_raw_response.retrieve(
             "string",
         )
 
@@ -318,8 +312,8 @@ class TestAsyncFiles:
         assert_matches_type(FileObject, file, path=["response"])
 
     @parametrize
-    async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None:
-        async with client.files.with_streaming_response.retrieve(
+    async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.files.with_streaming_response.retrieve(
             "string",
         ) as response:
             assert not response.is_closed
@@ -331,27 +325,27 @@ class TestAsyncFiles:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
-            await client.files.with_raw_response.retrieve(
+            await async_client.files.with_raw_response.retrieve(
                 "",
             )
 
     @parametrize
-    async def test_method_list(self, client: AsyncOpenAI) -> None:
-        file = await client.files.list()
+    async def test_method_list(self, async_client: AsyncOpenAI) -> None:
+        file = await async_client.files.list()
         assert_matches_type(AsyncPage[FileObject], file, path=["response"])
 
     @parametrize
-    async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None:
-        file = await client.files.list(
+    async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        file = await async_client.files.list(
             purpose="string",
         )
         assert_matches_type(AsyncPage[FileObject], file, path=["response"])
 
     @parametrize
-    async def test_raw_response_list(self, client: AsyncOpenAI) -> None:
-        response = await client.files.with_raw_response.list()
+    async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.files.with_raw_response.list()
 
         assert response.is_closed is True
         assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -359,8 +353,8 @@ class TestAsyncFiles:
         assert_matches_type(AsyncPage[FileObject], file, path=["response"])
 
     @parametrize
-    async def test_streaming_response_list(self, client: AsyncOpenAI) -> None:
-        async with client.files.with_streaming_response.list() as response:
+    async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.files.with_streaming_response.list() as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
 
@@ -370,15 +364,15 @@ class TestAsyncFiles:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_method_delete(self, client: AsyncOpenAI) -> None:
-        file = await client.files.delete(
+    async def test_method_delete(self, async_client: AsyncOpenAI) -> None:
+        file = await async_client.files.delete(
             "string",
         )
         assert_matches_type(FileDeleted, file, path=["response"])
 
     @parametrize
-    async def test_raw_response_delete(self, client: AsyncOpenAI) -> None:
-        response = await client.files.with_raw_response.delete(
+    async def test_raw_response_delete(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.files.with_raw_response.delete(
             "string",
         )
 
@@ -388,8 +382,8 @@ class TestAsyncFiles:
         assert_matches_type(FileDeleted, file, path=["response"])
 
     @parametrize
-    async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None:
-        async with client.files.with_streaming_response.delete(
+    async def test_streaming_response_delete(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.files.with_streaming_response.delete(
             "string",
         ) as response:
             assert not response.is_closed
@@ -401,17 +395,17 @@ class TestAsyncFiles:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_delete(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_delete(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
-            await client.files.with_raw_response.delete(
+            await async_client.files.with_raw_response.delete(
                 "",
             )
 
     @parametrize
     @pytest.mark.respx(base_url=base_url)
-    async def test_method_content(self, client: AsyncOpenAI, respx_mock: MockRouter) -> None:
+    async def test_method_content(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None:
         respx_mock.get("/files/string/content").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
-        file = await client.files.content(
+        file = await async_client.files.content(
             "string",
         )
         assert isinstance(file, _legacy_response.HttpxBinaryResponseContent)
@@ -419,10 +413,10 @@ class TestAsyncFiles:
 
     @parametrize
     @pytest.mark.respx(base_url=base_url)
-    async def test_raw_response_content(self, client: AsyncOpenAI, respx_mock: MockRouter) -> None:
+    async def test_raw_response_content(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None:
         respx_mock.get("/files/string/content").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
 
-        response = await client.files.with_raw_response.content(
+        response = await async_client.files.with_raw_response.content(
             "string",
         )
 
@@ -433,9 +427,9 @@ class TestAsyncFiles:
 
     @parametrize
     @pytest.mark.respx(base_url=base_url)
-    async def test_streaming_response_content(self, client: AsyncOpenAI, respx_mock: MockRouter) -> None:
+    async def test_streaming_response_content(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None:
         respx_mock.get("/files/string/content").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
-        async with client.files.with_streaming_response.content(
+        async with async_client.files.with_streaming_response.content(
             "string",
         ) as response:
             assert not response.is_closed
@@ -448,25 +442,25 @@ class TestAsyncFiles:
 
     @parametrize
     @pytest.mark.respx(base_url=base_url)
-    async def test_path_params_content(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_content(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
-            await client.files.with_raw_response.content(
+            await async_client.files.with_raw_response.content(
                 "",
             )
 
     @parametrize
-    async def test_method_retrieve_content(self, client: AsyncOpenAI) -> None:
+    async def test_method_retrieve_content(self, async_client: AsyncOpenAI) -> None:
         with pytest.warns(DeprecationWarning):
-            file = await client.files.retrieve_content(
+            file = await async_client.files.retrieve_content(
                 "string",
             )
 
         assert_matches_type(str, file, path=["response"])
 
     @parametrize
-    async def test_raw_response_retrieve_content(self, client: AsyncOpenAI) -> None:
+    async def test_raw_response_retrieve_content(self, async_client: AsyncOpenAI) -> None:
         with pytest.warns(DeprecationWarning):
-            response = await client.files.with_raw_response.retrieve_content(
+            response = await async_client.files.with_raw_response.retrieve_content(
                 "string",
             )
 
@@ -476,9 +470,9 @@ class TestAsyncFiles:
         assert_matches_type(str, file, path=["response"])
 
     @parametrize
-    async def test_streaming_response_retrieve_content(self, client: AsyncOpenAI) -> None:
+    async def test_streaming_response_retrieve_content(self, async_client: AsyncOpenAI) -> None:
         with pytest.warns(DeprecationWarning):
-            async with client.files.with_streaming_response.retrieve_content(
+            async with async_client.files.with_streaming_response.retrieve_content(
                 "string",
             ) as response:
                 assert not response.is_closed
@@ -490,9 +484,9 @@ class TestAsyncFiles:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_retrieve_content(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_retrieve_content(self, async_client: AsyncOpenAI) -> None:
         with pytest.warns(DeprecationWarning):
             with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
-                await client.files.with_raw_response.retrieve_content(
+                await async_client.files.with_raw_response.retrieve_content(
                     "",
                 )
tests/api_resources/test_images.py
@@ -10,16 +10,12 @@ import pytest
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
 from openai.types import ImagesResponse
-from openai._client import OpenAI, AsyncOpenAI
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestImages:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_create_variation(self, client: OpenAI) -> None:
@@ -159,20 +155,18 @@ class TestImages:
 
 
 class TestAsyncImages:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_create_variation(self, client: AsyncOpenAI) -> None:
-        image = await client.images.create_variation(
+    async def test_method_create_variation(self, async_client: AsyncOpenAI) -> None:
+        image = await async_client.images.create_variation(
             image=b"raw file contents",
         )
         assert_matches_type(ImagesResponse, image, path=["response"])
 
     @parametrize
-    async def test_method_create_variation_with_all_params(self, client: AsyncOpenAI) -> None:
-        image = await client.images.create_variation(
+    async def test_method_create_variation_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        image = await async_client.images.create_variation(
             image=b"raw file contents",
             model="dall-e-2",
             n=1,
@@ -183,8 +177,8 @@ class TestAsyncImages:
         assert_matches_type(ImagesResponse, image, path=["response"])
 
     @parametrize
-    async def test_raw_response_create_variation(self, client: AsyncOpenAI) -> None:
-        response = await client.images.with_raw_response.create_variation(
+    async def test_raw_response_create_variation(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.images.with_raw_response.create_variation(
             image=b"raw file contents",
         )
 
@@ -194,8 +188,8 @@ class TestAsyncImages:
         assert_matches_type(ImagesResponse, image, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create_variation(self, client: AsyncOpenAI) -> None:
-        async with client.images.with_streaming_response.create_variation(
+    async def test_streaming_response_create_variation(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.images.with_streaming_response.create_variation(
             image=b"raw file contents",
         ) as response:
             assert not response.is_closed
@@ -207,16 +201,16 @@ class TestAsyncImages:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_method_edit(self, client: AsyncOpenAI) -> None:
-        image = await client.images.edit(
+    async def test_method_edit(self, async_client: AsyncOpenAI) -> None:
+        image = await async_client.images.edit(
             image=b"raw file contents",
             prompt="A cute baby sea otter wearing a beret",
         )
         assert_matches_type(ImagesResponse, image, path=["response"])
 
     @parametrize
-    async def test_method_edit_with_all_params(self, client: AsyncOpenAI) -> None:
-        image = await client.images.edit(
+    async def test_method_edit_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        image = await async_client.images.edit(
             image=b"raw file contents",
             prompt="A cute baby sea otter wearing a beret",
             mask=b"raw file contents",
@@ -229,8 +223,8 @@ class TestAsyncImages:
         assert_matches_type(ImagesResponse, image, path=["response"])
 
     @parametrize
-    async def test_raw_response_edit(self, client: AsyncOpenAI) -> None:
-        response = await client.images.with_raw_response.edit(
+    async def test_raw_response_edit(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.images.with_raw_response.edit(
             image=b"raw file contents",
             prompt="A cute baby sea otter wearing a beret",
         )
@@ -241,8 +235,8 @@ class TestAsyncImages:
         assert_matches_type(ImagesResponse, image, path=["response"])
 
     @parametrize
-    async def test_streaming_response_edit(self, client: AsyncOpenAI) -> None:
-        async with client.images.with_streaming_response.edit(
+    async def test_streaming_response_edit(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.images.with_streaming_response.edit(
             image=b"raw file contents",
             prompt="A cute baby sea otter wearing a beret",
         ) as response:
@@ -255,15 +249,15 @@ class TestAsyncImages:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_method_generate(self, client: AsyncOpenAI) -> None:
-        image = await client.images.generate(
+    async def test_method_generate(self, async_client: AsyncOpenAI) -> None:
+        image = await async_client.images.generate(
             prompt="A cute baby sea otter",
         )
         assert_matches_type(ImagesResponse, image, path=["response"])
 
     @parametrize
-    async def test_method_generate_with_all_params(self, client: AsyncOpenAI) -> None:
-        image = await client.images.generate(
+    async def test_method_generate_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        image = await async_client.images.generate(
             prompt="A cute baby sea otter",
             model="dall-e-3",
             n=1,
@@ -276,8 +270,8 @@ class TestAsyncImages:
         assert_matches_type(ImagesResponse, image, path=["response"])
 
     @parametrize
-    async def test_raw_response_generate(self, client: AsyncOpenAI) -> None:
-        response = await client.images.with_raw_response.generate(
+    async def test_raw_response_generate(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.images.with_raw_response.generate(
             prompt="A cute baby sea otter",
         )
 
@@ -287,8 +281,8 @@ class TestAsyncImages:
         assert_matches_type(ImagesResponse, image, path=["response"])
 
     @parametrize
-    async def test_streaming_response_generate(self, client: AsyncOpenAI) -> None:
-        async with client.images.with_streaming_response.generate(
+    async def test_streaming_response_generate(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.images.with_streaming_response.generate(
             prompt="A cute baby sea otter",
         ) as response:
             assert not response.is_closed
tests/api_resources/test_models.py
@@ -10,17 +10,13 @@ import pytest
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
 from openai.types import Model, ModelDeleted
-from openai._client import OpenAI, AsyncOpenAI
 from openai.pagination import SyncPage, AsyncPage
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestModels:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_retrieve(self, client: OpenAI) -> None:
@@ -125,20 +121,18 @@ class TestModels:
 
 
 class TestAsyncModels:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_retrieve(self, client: AsyncOpenAI) -> None:
-        model = await client.models.retrieve(
+    async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None:
+        model = await async_client.models.retrieve(
             "gpt-3.5-turbo",
         )
         assert_matches_type(Model, model, path=["response"])
 
     @parametrize
-    async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None:
-        response = await client.models.with_raw_response.retrieve(
+    async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.models.with_raw_response.retrieve(
             "gpt-3.5-turbo",
         )
 
@@ -148,8 +142,8 @@ class TestAsyncModels:
         assert_matches_type(Model, model, path=["response"])
 
     @parametrize
-    async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None:
-        async with client.models.with_streaming_response.retrieve(
+    async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.models.with_streaming_response.retrieve(
             "gpt-3.5-turbo",
         ) as response:
             assert not response.is_closed
@@ -161,20 +155,20 @@ class TestAsyncModels:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `model` but received ''"):
-            await client.models.with_raw_response.retrieve(
+            await async_client.models.with_raw_response.retrieve(
                 "",
             )
 
     @parametrize
-    async def test_method_list(self, client: AsyncOpenAI) -> None:
-        model = await client.models.list()
+    async def test_method_list(self, async_client: AsyncOpenAI) -> None:
+        model = await async_client.models.list()
         assert_matches_type(AsyncPage[Model], model, path=["response"])
 
     @parametrize
-    async def test_raw_response_list(self, client: AsyncOpenAI) -> None:
-        response = await client.models.with_raw_response.list()
+    async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.models.with_raw_response.list()
 
         assert response.is_closed is True
         assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -182,8 +176,8 @@ class TestAsyncModels:
         assert_matches_type(AsyncPage[Model], model, path=["response"])
 
     @parametrize
-    async def test_streaming_response_list(self, client: AsyncOpenAI) -> None:
-        async with client.models.with_streaming_response.list() as response:
+    async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.models.with_streaming_response.list() as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
 
@@ -193,15 +187,15 @@ class TestAsyncModels:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_method_delete(self, client: AsyncOpenAI) -> None:
-        model = await client.models.delete(
+    async def test_method_delete(self, async_client: AsyncOpenAI) -> None:
+        model = await async_client.models.delete(
             "ft:gpt-3.5-turbo:acemeco:suffix:abc123",
         )
         assert_matches_type(ModelDeleted, model, path=["response"])
 
     @parametrize
-    async def test_raw_response_delete(self, client: AsyncOpenAI) -> None:
-        response = await client.models.with_raw_response.delete(
+    async def test_raw_response_delete(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.models.with_raw_response.delete(
             "ft:gpt-3.5-turbo:acemeco:suffix:abc123",
         )
 
@@ -211,8 +205,8 @@ class TestAsyncModels:
         assert_matches_type(ModelDeleted, model, path=["response"])
 
     @parametrize
-    async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None:
-        async with client.models.with_streaming_response.delete(
+    async def test_streaming_response_delete(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.models.with_streaming_response.delete(
             "ft:gpt-3.5-turbo:acemeco:suffix:abc123",
         ) as response:
             assert not response.is_closed
@@ -224,8 +218,8 @@ class TestAsyncModels:
         assert cast(Any, response.is_closed) is True
 
     @parametrize
-    async def test_path_params_delete(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_delete(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `model` but received ''"):
-            await client.models.with_raw_response.delete(
+            await async_client.models.with_raw_response.delete(
                 "",
             )
tests/api_resources/test_moderations.py
@@ -10,16 +10,12 @@ import pytest
 from openai import OpenAI, AsyncOpenAI
 from tests.utils import assert_matches_type
 from openai.types import ModerationCreateResponse
-from openai._client import OpenAI, AsyncOpenAI
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
 
 
 class TestModerations:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
     def test_method_create(self, client: OpenAI) -> None:
@@ -62,28 +58,26 @@ class TestModerations:
 
 
 class TestAsyncModerations:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @parametrize
-    async def test_method_create(self, client: AsyncOpenAI) -> None:
-        moderation = await client.moderations.create(
+    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
+        moderation = await async_client.moderations.create(
             input="I want to kill them.",
         )
         assert_matches_type(ModerationCreateResponse, moderation, path=["response"])
 
     @parametrize
-    async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None:
-        moderation = await client.moderations.create(
+    async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        moderation = await async_client.moderations.create(
             input="I want to kill them.",
             model="text-moderation-stable",
         )
         assert_matches_type(ModerationCreateResponse, moderation, path=["response"])
 
     @parametrize
-    async def test_raw_response_create(self, client: AsyncOpenAI) -> None:
-        response = await client.moderations.with_raw_response.create(
+    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.moderations.with_raw_response.create(
             input="I want to kill them.",
         )
 
@@ -93,8 +87,8 @@ class TestAsyncModerations:
         assert_matches_type(ModerationCreateResponse, moderation, path=["response"])
 
     @parametrize
-    async def test_streaming_response_create(self, client: AsyncOpenAI) -> None:
-        async with client.moderations.with_streaming_response.create(
+    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.moderations.with_streaming_response.create(
             input="I want to kill them.",
         ) as response:
             assert not response.is_closed
tests/conftest.py
@@ -1,9 +1,17 @@
+from __future__ import annotations
+
+import os
 import asyncio
 import logging
-from typing import Iterator
+from typing import TYPE_CHECKING, Iterator, AsyncIterator
 
 import pytest
 
+from openai import OpenAI, AsyncOpenAI
+
+if TYPE_CHECKING:
+    from _pytest.fixtures import FixtureRequest
+
 pytest.register_assert_rewrite("tests.utils")
 
 logging.getLogger("openai").setLevel(logging.DEBUG)
@@ -14,3 +22,28 @@ def event_loop() -> Iterator[asyncio.AbstractEventLoop]:
     loop = asyncio.new_event_loop()
     yield loop
     loop.close()
+
+
+base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
+
+api_key = "My API Key"
+
+
+@pytest.fixture(scope="session")
+def client(request: FixtureRequest) -> Iterator[OpenAI]:
+    strict = getattr(request, "param", True)
+    if not isinstance(strict, bool):
+        raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}")
+
+    with OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client:
+        yield client
+
+
+@pytest.fixture(scope="session")
+async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncOpenAI]:
+    strict = getattr(request, "param", True)
+    if not isinstance(strict, bool):
+        raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}")
+
+    async with AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client:
+        yield client
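
Note (editorial, not part of the diff): the rewritten parametrize lines above use pytest's indirect=True, so the False/True values are delivered to the new session-scoped client / async_client fixtures as request.param rather than being passed straight into each test. Below is a minimal, self-contained sketch of that mechanism under illustrative names (widget, test_widget are hypothetical and not taken from the repository).

    # Sketch: indirect parametrization feeding a session-scoped fixture (illustrative names only).
    import pytest

    @pytest.fixture(scope="session")
    def widget(request) -> str:
        # With indirect=True, each parametrized value arrives here as request.param;
        # a test that does not parametrize this fixture falls back to the default.
        strict = getattr(request, "param", True)
        return "strict-widget" if strict else "loose-widget"

    @pytest.mark.parametrize("widget", [False, True], indirect=True, ids=["loose", "strict"])
    def test_widget(widget: str) -> None:
        # pytest builds one fixture instance per unique parameter value for the whole
        # session and shares it across every test that requests that parameter,
        # instead of each test class constructing its own client objects.
        assert widget in ("loose-widget", "strict-widget")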