# NOTE(review): stray "main" token (likely a VCS-merge or paste artifact) — commented out so the module stays importable
1# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2
3from __future__ import annotations
4
5import os
6from typing import Any, cast
7
8import pytest
9
10from openai import OpenAI, AsyncOpenAI
11from tests.utils import assert_matches_type
12from openai.types import Completion
13
14base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
15
16
class TestCompletions:
    """Smoke tests for the synchronous ``client.completions.create`` API.

    "Overload 1" is the non-streaming call (returns a ``Completion``);
    "overload 2" passes ``stream=True`` and returns a completion stream.
    Requests are sent to the mock server configured via ``base_url``.
    """

    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

    @parametrize
    def test_method_create_overload_1(self, client: OpenAI) -> None:
        # Required arguments only.
        result = client.completions.create(
            model="string",
            prompt="This is a test.",
        )
        assert_matches_type(Completion, result, path=["response"])

    @parametrize
    def test_method_create_with_all_params_overload_1(self, client: OpenAI) -> None:
        # Every optional parameter supplied alongside the required ones.
        result = client.completions.create(
            model="string", prompt="This is a test.",
            best_of=0, echo=True, frequency_penalty=-2,
            logit_bias={"foo": 0}, logprobs=0, max_tokens=16,
            n=1, presence_penalty=-2, seed=0, stop="\n",
            stream=False,
            stream_options={"include_obfuscation": True, "include_usage": True},
            suffix="test.", temperature=1, top_p=1, user="user-1234",
        )
        assert_matches_type(Completion, result, path=["response"])

    @parametrize
    def test_raw_response_create_overload_1(self, client: OpenAI) -> None:
        # Non-streaming raw responses are fully read, so they arrive closed.
        raw = client.completions.with_raw_response.create(
            model="string",
            prompt="This is a test.",
        )

        assert raw.is_closed is True
        assert raw.http_request.headers.get("X-Stainless-Lang") == "python"
        parsed = raw.parse()
        assert_matches_type(Completion, parsed, path=["response"])

    @parametrize
    def test_streaming_response_create_overload_1(self, client: OpenAI) -> None:
        # Inside the context manager the response is still open; exiting closes it.
        with client.completions.with_streaming_response.create(
            model="string",
            prompt="This is a test.",
        ) as streamed:
            assert not streamed.is_closed
            assert streamed.http_request.headers.get("X-Stainless-Lang") == "python"

            parsed = streamed.parse()
            assert_matches_type(Completion, parsed, path=["response"])

        assert cast(Any, streamed.is_closed) is True

    @parametrize
    def test_method_create_overload_2(self, client: OpenAI) -> None:
        # Streaming variant: close the underlying response without consuming it.
        stream_result = client.completions.create(
            model="string",
            prompt="This is a test.",
            stream=True,
        )
        stream_result.response.close()

    @parametrize
    def test_method_create_with_all_params_overload_2(self, client: OpenAI) -> None:
        # Streaming variant with every optional parameter supplied.
        stream_result = client.completions.create(
            model="string", prompt="This is a test.", stream=True,
            best_of=0, echo=True, frequency_penalty=-2,
            logit_bias={"foo": 0}, logprobs=0, max_tokens=16,
            n=1, presence_penalty=-2, seed=0, stop="\n",
            stream_options={"include_obfuscation": True, "include_usage": True},
            suffix="test.", temperature=1, top_p=1, user="user-1234",
        )
        stream_result.response.close()

    @parametrize
    def test_raw_response_create_overload_2(self, client: OpenAI) -> None:
        # Raw streaming responses stay open until the parsed stream is closed.
        raw = client.completions.with_raw_response.create(
            model="string",
            prompt="This is a test.",
            stream=True,
        )

        assert raw.http_request.headers.get("X-Stainless-Lang") == "python"
        parsed_stream = raw.parse()
        parsed_stream.close()

    @parametrize
    def test_streaming_response_create_overload_2(self, client: OpenAI) -> None:
        with client.completions.with_streaming_response.create(
            model="string",
            prompt="This is a test.",
            stream=True,
        ) as streamed:
            assert not streamed.is_closed
            assert streamed.http_request.headers.get("X-Stainless-Lang") == "python"

            parsed_stream = streamed.parse()
            parsed_stream.close()

        assert cast(Any, streamed.is_closed) is True
143
144
class TestAsyncCompletions:
    """Async twin of ``TestCompletions``, exercised through ``AsyncOpenAI``.

    An extra ``aiohttp`` parametrization runs the same coverage against the
    aiohttp-backed client in addition to the loose/strict variants.
    """

    parametrize = pytest.mark.parametrize(
        "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
    )

    @parametrize
    async def test_method_create_overload_1(self, async_client: AsyncOpenAI) -> None:
        # Required arguments only.
        result = await async_client.completions.create(
            model="string",
            prompt="This is a test.",
        )
        assert_matches_type(Completion, result, path=["response"])

    @parametrize
    async def test_method_create_with_all_params_overload_1(self, async_client: AsyncOpenAI) -> None:
        # Every optional parameter supplied alongside the required ones.
        result = await async_client.completions.create(
            model="string", prompt="This is a test.",
            best_of=0, echo=True, frequency_penalty=-2,
            logit_bias={"foo": 0}, logprobs=0, max_tokens=16,
            n=1, presence_penalty=-2, seed=0, stop="\n",
            stream=False,
            stream_options={"include_obfuscation": True, "include_usage": True},
            suffix="test.", temperature=1, top_p=1, user="user-1234",
        )
        assert_matches_type(Completion, result, path=["response"])

    @parametrize
    async def test_raw_response_create_overload_1(self, async_client: AsyncOpenAI) -> None:
        # Non-streaming raw responses are fully read, so they arrive closed.
        raw = await async_client.completions.with_raw_response.create(
            model="string",
            prompt="This is a test.",
        )

        assert raw.is_closed is True
        assert raw.http_request.headers.get("X-Stainless-Lang") == "python"
        parsed = raw.parse()
        assert_matches_type(Completion, parsed, path=["response"])

    @parametrize
    async def test_streaming_response_create_overload_1(self, async_client: AsyncOpenAI) -> None:
        # Inside the context manager the response is still open; exiting closes it.
        async with async_client.completions.with_streaming_response.create(
            model="string",
            prompt="This is a test.",
        ) as streamed:
            assert not streamed.is_closed
            assert streamed.http_request.headers.get("X-Stainless-Lang") == "python"

            parsed = await streamed.parse()
            assert_matches_type(Completion, parsed, path=["response"])

        assert cast(Any, streamed.is_closed) is True

    @parametrize
    async def test_method_create_overload_2(self, async_client: AsyncOpenAI) -> None:
        # Streaming variant: close the underlying response without consuming it.
        stream_result = await async_client.completions.create(
            model="string",
            prompt="This is a test.",
            stream=True,
        )
        await stream_result.response.aclose()

    @parametrize
    async def test_method_create_with_all_params_overload_2(self, async_client: AsyncOpenAI) -> None:
        # Streaming variant with every optional parameter supplied.
        stream_result = await async_client.completions.create(
            model="string", prompt="This is a test.", stream=True,
            best_of=0, echo=True, frequency_penalty=-2,
            logit_bias={"foo": 0}, logprobs=0, max_tokens=16,
            n=1, presence_penalty=-2, seed=0, stop="\n",
            stream_options={"include_obfuscation": True, "include_usage": True},
            suffix="test.", temperature=1, top_p=1, user="user-1234",
        )
        await stream_result.response.aclose()

    @parametrize
    async def test_raw_response_create_overload_2(self, async_client: AsyncOpenAI) -> None:
        # Raw streaming responses stay open until the parsed stream is closed.
        raw = await async_client.completions.with_raw_response.create(
            model="string",
            prompt="This is a test.",
            stream=True,
        )

        assert raw.http_request.headers.get("X-Stainless-Lang") == "python"
        parsed_stream = raw.parse()
        await parsed_stream.close()

    @parametrize
    async def test_streaming_response_create_overload_2(self, async_client: AsyncOpenAI) -> None:
        async with async_client.completions.with_streaming_response.create(
            model="string",
            prompt="This is a test.",
            stream=True,
        ) as streamed:
            assert not streamed.is_closed
            assert streamed.http_request.headers.get("X-Stainless-Lang") == "python"

            parsed_stream = await streamed.parse()
            await parsed_stream.close()

        assert cast(Any, streamed.is_closed) is True