main
1# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2
3from __future__ import annotations
4
5import os
6from typing import Any, cast
7
8import pytest
9
10from openai import OpenAI, AsyncOpenAI
11from tests.utils import assert_matches_type
12from openai.types import ModerationCreateResponse
13
14base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
15
16
class TestModerations:
    """Contract tests for the synchronous `client.moderations.create` endpoint."""

    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

    @parametrize
    def test_method_create(self, client: OpenAI) -> None:
        """A minimal create call yields a parsed ModerationCreateResponse."""
        result = client.moderations.create(input="I want to kill them.")
        assert_matches_type(ModerationCreateResponse, result, path=["response"])

    @parametrize
    def test_method_create_with_all_params(self, client: OpenAI) -> None:
        """Supplying every optional parameter still yields the same response type."""
        result = client.moderations.create(input="I want to kill them.", model="string")
        assert_matches_type(ModerationCreateResponse, result, path=["response"])

    @parametrize
    def test_raw_response_create(self, client: OpenAI) -> None:
        """The raw-response wrapper exposes request headers and parses to the same type."""
        http_response = client.moderations.with_raw_response.create(input="I want to kill them.")

        assert http_response.is_closed is True
        assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
        parsed = http_response.parse()
        assert_matches_type(ModerationCreateResponse, parsed, path=["response"])

    @parametrize
    def test_streaming_response_create(self, client: OpenAI) -> None:
        """The streaming wrapper stays open inside the context and closes on exit."""
        with client.moderations.with_streaming_response.create(input="I want to kill them.") as stream:
            assert not stream.is_closed
            assert stream.http_request.headers.get("X-Stainless-Lang") == "python"

            parsed = stream.parse()
            assert_matches_type(ModerationCreateResponse, parsed, path=["response"])

        assert cast(Any, stream.is_closed) is True
59
class TestAsyncModerations:
    """Contract tests for the asynchronous `async_client.moderations.create` endpoint."""

    parametrize = pytest.mark.parametrize(
        "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
    )

    @parametrize
    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
        """A minimal create call yields a parsed ModerationCreateResponse."""
        result = await async_client.moderations.create(input="I want to kill them.")
        assert_matches_type(ModerationCreateResponse, result, path=["response"])

    @parametrize
    async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
        """Supplying every optional parameter still yields the same response type."""
        result = await async_client.moderations.create(input="I want to kill them.", model="string")
        assert_matches_type(ModerationCreateResponse, result, path=["response"])

    @parametrize
    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
        """The raw-response wrapper exposes request headers and parses to the same type.

        Note: `parse()` on the legacy raw response is synchronous, so it is not awaited.
        """
        http_response = await async_client.moderations.with_raw_response.create(input="I want to kill them.")

        assert http_response.is_closed is True
        assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
        parsed = http_response.parse()
        assert_matches_type(ModerationCreateResponse, parsed, path=["response"])

    @parametrize
    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
        """The streaming wrapper stays open inside the context and closes on exit."""
        async with async_client.moderations.with_streaming_response.create(input="I want to kill them.") as stream:
            assert not stream.is_closed
            assert stream.http_request.headers.get("X-Stainless-Lang") == "python"

            parsed = await stream.parse()
            assert_matches_type(ModerationCreateResponse, parsed, path=["response"])

        assert cast(Any, stream.is_closed) is True