Commit 674c6eeb
Changed files (4)
src
openai
src/openai/_utils/_utils.py
@@ -395,5 +395,7 @@ def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]:
"""A version of functools.lru_cache that retains the type signature
for the wrapped function arguments.
"""
- wrapper = functools.lru_cache(maxsize=maxsize)
+ wrapper = functools.lru_cache( # noqa: TID251
+ maxsize=maxsize,
+ )
return cast(Any, wrapper) # type: ignore[no-any-return]
src/openai/_base_client.py
@@ -29,7 +29,6 @@ from typing import (
cast,
overload,
)
-from functools import lru_cache
from typing_extensions import Literal, override, get_origin
import anyio
@@ -61,7 +60,7 @@ from ._types import (
RequestOptions,
ModelBuilderProtocol,
)
-from ._utils import is_dict, is_list, is_given, is_mapping
+from ._utils import is_dict, is_list, is_given, lru_cache, is_mapping
from ._compat import model_copy, model_dump
from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type
from ._response import (
src/openai/_models.py
@@ -4,7 +4,6 @@ import os
import inspect
from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, cast
from datetime import date, datetime
-from functools import lru_cache
from typing_extensions import (
Unpack,
Literal,
@@ -37,6 +36,7 @@ from ._utils import (
PropertyInfo,
is_list,
is_given,
+ lru_cache,
is_mapping,
parse_date,
coerce_boolean,
pyproject.toml
@@ -167,7 +167,9 @@ select = [
"T201",
"T203",
# misuse of typing.TYPE_CHECKING
- "TCH004"
+ "TCH004",
+ # import rules
+ "TID251",
]
ignore = [
# mutable defaults
@@ -183,6 +185,9 @@ ignore-init-module-imports = true
[tool.ruff.format]
docstring-code-format = true
+[tool.ruff.lint.flake8-tidy-imports.banned-api]
+"functools.lru_cache".msg = "This function does not retain type information for the wrapped function's arguments; the `lru_cache` function from `_utils` should be used instead."
+
[tool.ruff.lint.isort]
length-sort = true
length-sort-straight = true