From a2e5b7854edce5b40d23bc3493d47f579bdcddef Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Mon, 20 Nov 2023 21:57:27 +1000 Subject: [PATCH 01/45] fix: typing of datastructures.URL (#2723) This PR fixes an issue where mypy would infer the type of `datastructures.URL` to be `Any`. This was caused by the use of the `@lrucache` decorator on `URL.__new__()`. We had ignored the error, however mypy would report: > litestar/datastructures/url.py:85: error: Unsupported decorated constructor type [misc] The fix adds a new `@classmethod`, `URL._new()` which is cached and called from `URL.__new__()`. --- litestar/datastructures/url.py | 29 +++++++++++++++++------------ 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/litestar/datastructures/url.py b/litestar/datastructures/url.py index 871917fadc..2a887a9997 100644 --- a/litestar/datastructures/url.py +++ b/litestar/datastructures/url.py @@ -8,12 +8,13 @@ from litestar.datastructures import MultiDict from litestar.types import Empty -__all__ = ("Address", "URL") - - if TYPE_CHECKING: + from typing_extensions import Self + from litestar.types import EmptyType, Scope +__all__ = ("Address", "URL") + _DEFAULT_SCHEME_PORTS = {"http": 80, "https": 443, "ftp": 21, "ws": 80, "wss": 443} @@ -81,13 +82,17 @@ class URL: hostname: str | None """Hostname if specified.""" - @lru_cache # type: ignore[misc] # noqa: B019 def __new__(cls, url: str | SplitResult) -> URL: """Create a new instance. Args: url: url string or split result to represent. """ + return cls._new(url=url) + + @classmethod + @lru_cache + def _new(cls, url: str | SplitResult) -> URL: instance = super().__new__(cls) instance._parsed_url = None @@ -135,7 +140,7 @@ def from_components( path: str = "", fragment: str = "", query: str = "", - ) -> URL: + ) -> Self: """Create a new URL from components. 
Args: @@ -148,7 +153,7 @@ def from_components( Returns: A new URL with the given components """ - return cls( # type: ignore[no-any-return] + return cls( SplitResult( scheme=scheme, netloc=netloc, @@ -159,7 +164,7 @@ def from_components( ) @classmethod - def from_scope(cls, scope: Scope) -> URL: + def from_scope(cls, scope: Scope) -> Self: """Construct a URL from a :class:`Scope <.types.Scope>` Args: @@ -202,7 +207,7 @@ def with_replacements( path: str = "", query: str | MultiDict | None | EmptyType = Empty, fragment: str = "", - ) -> URL: + ) -> Self: """Create a new URL, replacing the given components. Args: @@ -217,13 +222,13 @@ def with_replacements( """ if isinstance(query, MultiDict): query = urlencode(query=query) - query = (query if query is not Empty else self.query) or "" + query_str = cast("str", (query if query is not Empty else self.query) or "") - return URL.from_components( # type: ignore[no-any-return] + return type(self).from_components( scheme=scheme or self.scheme, netloc=netloc or self.netloc, path=path or self.path, - query=query, + query=query_str, fragment=fragment or self.fragment, ) @@ -250,7 +255,7 @@ def __str__(self) -> str: def __eq__(self, other: Any) -> bool: if isinstance(other, (str, URL)): return str(self) == str(other) - return NotImplemented # type: ignore[unreachable] # pragma: no cover + return NotImplemented # pragma: no cover def __repr__(self) -> str: return f"{type(self).__name__}({self._url!r})" From 8e4b50ceb79de292410d8d9600ed15000191ecb7 Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Wed, 22 Nov 2023 07:46:17 +1000 Subject: [PATCH 02/45] fix: app `template_config` parameter type (#2732) This PR modifies the annotation to use a bound generic instead of directly typing the template engine type as `TemplateEngineProtocol`. 
--- litestar/app.py | 6 ++---- litestar/config/app.py | 5 ++--- litestar/template/config.py | 17 +++++++++-------- litestar/types/internal_types.py | 3 +++ 4 files changed, 16 insertions(+), 15 deletions(-) diff --git a/litestar/app.py b/litestar/app.py index 80c2a0f970..24eb0d3afd 100644 --- a/litestar/app.py +++ b/litestar/app.py @@ -47,7 +47,7 @@ from litestar.static_files.base import StaticFiles from litestar.stores.registry import StoreRegistry from litestar.types import Empty, TypeDecodersSequence -from litestar.types.internal_types import PathParameterDefinition +from litestar.types.internal_types import PathParameterDefinition, TemplateConfigType from litestar.utils import deprecated, ensure_async_callable, join_paths, unique from litestar.utils.dataclass import extract_dataclass_items from litestar.utils.predicates import is_async_callable @@ -68,8 +68,6 @@ from litestar.openapi.spec.open_api import OpenAPI from litestar.static_files.config import StaticFilesConfig from litestar.stores.base import Store - from litestar.template import TemplateEngineProtocol - from litestar.template.config import TemplateConfig from litestar.types import ( AfterExceptionHookHandler, AfterRequestHookHandler, @@ -217,7 +215,7 @@ def __init__( static_files_config: Sequence[StaticFilesConfig] | None = None, stores: StoreRegistry | dict[str, Store] | None = None, tags: Sequence[str] | None = None, - template_config: TemplateConfig[TemplateEngineProtocol] | None = None, + template_config: TemplateConfigType | None = None, type_encoders: TypeEncodersMap | None = None, type_decoders: TypeDecodersSequence | None = None, websocket_class: type[WebSocket] | None = None, diff --git a/litestar/config/app.py b/litestar/config/app.py index 2cf513cdd0..ff12a05d97 100644 --- a/litestar/config/app.py +++ b/litestar/config/app.py @@ -30,8 +30,6 @@ from litestar.static_files.config import StaticFilesConfig from litestar.stores.base import Store from litestar.stores.registry import StoreRegistry 
- from litestar.template import TemplateEngineProtocol - from litestar.template.config import TemplateConfig from litestar.types import ( AfterExceptionHookHandler, AfterRequestHookHandler, @@ -51,6 +49,7 @@ from litestar.types.callable_types import LifespanHook from litestar.types.composite_types import TypeDecodersSequence from litestar.types.empty import EmptyType + from litestar.types.internal_types import TemplateConfigType __all__ = ( @@ -197,7 +196,7 @@ class AppConfig: """ tags: list[str] = field(default_factory=list) """A list of string tags that will be appended to the schema of all route handlers under the application.""" - template_config: TemplateConfig[TemplateEngineProtocol] | None = field(default=None) + template_config: TemplateConfigType | None = field(default=None) """An instance of :class:`TemplateConfig <.template.TemplateConfig>`.""" type_encoders: TypeEncodersMap | None = field(default=None) """A mapping of types to callables that transform them into types supported for serialization.""" diff --git a/litestar/template/config.py b/litestar/template/config.py index 9f331560c8..d2aa87c302 100644 --- a/litestar/template/config.py +++ b/litestar/template/config.py @@ -13,24 +13,24 @@ if TYPE_CHECKING: from litestar.types import PathType -T = TypeVar("T", bound=TemplateEngineProtocol) +EngineType = TypeVar("EngineType", bound=TemplateEngineProtocol) @dataclass -class TemplateConfig(Generic[T]): +class TemplateConfig(Generic[EngineType]): """Configuration for Templating. To enable templating, pass an instance of this class to the :class:`Litestar ` constructor using the 'template_config' key. 
""" - engine: type[T] | T | None = field(default=None) + engine: type[EngineType] | EngineType | None = field(default=None) """A template engine adhering to the :class:`TemplateEngineProtocol `.""" directory: PathType | list[PathType] | None = field(default=None) """A directory or list of directories from which to serve templates.""" - engine_callback: Callable[[T], None] | None = field(default=None) + engine_callback: Callable[[EngineType], None] | None = field(default=None) """A callback function that allows modifying the instantiated templating protocol.""" - instance: T | None = field(default=None) + instance: EngineType | None = field(default=None) """An instance of the templating protocol.""" def __post_init__(self) -> None: @@ -41,16 +41,17 @@ def __post_init__(self) -> None: if self.instance is not None and self.directory is not None: raise ImproperlyConfiguredException("directory cannot be set if instance is") - def to_engine(self) -> T: + def to_engine(self) -> EngineType: """Instantiate the template engine.""" template_engine = cast( - "T", self.engine(directory=self.directory, engine_instance=None) if isclass(self.engine) else self.engine + "EngineType", + self.engine(directory=self.directory, engine_instance=None) if isclass(self.engine) else self.engine, ) if callable(self.engine_callback): self.engine_callback(template_engine) return template_engine @cached_property - def engine_instance(self) -> T: + def engine_instance(self) -> EngineType: """Return the template engine instance.""" return self.to_engine() if self.instance is None else self.instance diff --git a/litestar/types/internal_types.py b/litestar/types/internal_types.py index 963467424c..499de1b6fc 100644 --- a/litestar/types/internal_types.py +++ b/litestar/types/internal_types.py @@ -24,6 +24,8 @@ from litestar.handlers.websocket_handlers import WebsocketRouteHandler from litestar.response import Response from litestar.router import Router + from litestar.template import TemplateConfig + 
from litestar.template.config import EngineType from litestar.types import Method ReservedKwargs: TypeAlias = Literal["request", "socket", "headers", "query", "cookies", "state", "data"] @@ -31,6 +33,7 @@ ResponseType: TypeAlias = "type[Response]" ControllerRouterHandler: TypeAlias = "type[Controller] | RouteHandlerType | Router | Callable[..., Any]" RouteHandlerMapItem: TypeAlias = 'dict[Method | Literal["websocket", "asgi"], RouteHandlerType]' +TemplateConfigType: TypeAlias = "TemplateConfig[EngineType]" # deprecated _LitestarType: TypeAlias = "Litestar" From 0d8bdc92c97a12e8b56804965c1e6d549786aca4 Mon Sep 17 00:00:00 2001 From: Cody Fincher <204685+cofin@users.noreply.github.com> Date: Tue, 21 Nov 2023 20:18:35 -0600 Subject: [PATCH 03/45] fix: set PDM settings to `--local` (#2733) --- .gitignore | 1 + Makefile | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 1a8709c880..5bf2443ccb 100644 --- a/.gitignore +++ b/.gitignore @@ -43,3 +43,4 @@ __pypackages__/ # test certificates certs/ +pdm.toml diff --git a/Makefile b/Makefile index 29d1d9b5ab..e2df0a6027 100644 --- a/Makefile +++ b/Makefile @@ -42,7 +42,7 @@ install: clean ## Install the project, dependencies, and pre-commit for @if [ "$(VENV_EXISTS)" ]; then echo "=> Removing existing virtual environment"; fi if [ "$(VENV_EXISTS)" ]; then $(MAKE) destroy; fi if [ "$(VENV_EXISTS)" ]; then $(MAKE) clean; fi - @if [ "$(USING_PDM)" ]; then $(PDM) config venv.in_project true && python3 -m venv --copies .venv && . $(ENV_PREFIX)/activate && $(ENV_PREFIX)/pip install --quiet -U wheel setuptools cython mypy pip; fi + @if [ "$(USING_PDM)" ]; then $(PDM) config --local venv.in_project true && python3 -m venv --copies .venv && . $(ENV_PREFIX)/activate && $(ENV_PREFIX)/pip install --quiet -U wheel setuptools cython mypy pip; fi @if [ "$(USING_PDM)" ]; then $(PDM) install -dG:all; fi @echo "=> Install complete! 
Note: If you want to re-install re-run 'make install'" From c68a549e5994c724a70ff7d41d59b33691b63fc5 Mon Sep 17 00:00:00 2001 From: Cody Fincher <204685+cofin@users.noreply.github.com> Date: Tue, 21 Nov 2023 20:23:23 -0600 Subject: [PATCH 04/45] feat: adds a deprecation warning for `litestar.contrib.piccolo` (#2704) * feat: adds a deprecation warning for `litestar.contrib.piccolo` * Update litestar/contrib/piccolo.py Co-authored-by: Jacob Coffee * fix: correct deprecation warning --------- Co-authored-by: Jacob Coffee --- litestar/contrib/piccolo.py | 20 ++++++++++++++----- .../test_piccolo_orm/test_piccolo_orm_dto.py | 12 +++++++++++ 2 files changed, 27 insertions(+), 5 deletions(-) diff --git a/litestar/contrib/piccolo.py b/litestar/contrib/piccolo.py index 972c104ed1..297cd7a2bb 100644 --- a/litestar/contrib/piccolo.py +++ b/litestar/contrib/piccolo.py @@ -11,22 +11,32 @@ from litestar.dto.data_structures import DTOFieldDefinition from litestar.exceptions import MissingDependencyException from litestar.types import Empty +from litestar.typing import FieldDefinition +from litestar.utils import warn_deprecation try: - import piccolo # noqa: F401 + from piccolo.columns import Column, column_types + from piccolo.table import Table except ImportError as e: raise MissingDependencyException("piccolo") from e -from piccolo.columns import Column, column_types -from piccolo.table import Table - -from litestar.typing import FieldDefinition T = TypeVar("T", bound=Table) __all__ = ("PiccoloDTO",) +def __getattr__(name: str) -> Any: + warn_deprecation( + deprecated_name=f"litestar.contrib.piccolo.{name}", + version="2.3.2", + kind="import", + removal_in="3.0.0", + info="importing from 'litestar.contrib.piccolo' is deprecated and will be removed in 3.0, please import from 'litestar_piccolo' package directly instead", + ) + return getattr(name, name) + + def _parse_piccolo_type(column: Column, extra: dict[str, Any]) -> FieldDefinition: if isinstance(column, 
(column_types.Decimal, column_types.Numeric)): column_type: Any = Decimal diff --git a/tests/unit/test_contrib/test_piccolo_orm/test_piccolo_orm_dto.py b/tests/unit/test_contrib/test_piccolo_orm/test_piccolo_orm_dto.py index b7a221c077..013a65a624 100644 --- a/tests/unit/test_contrib/test_piccolo_orm/test_piccolo_orm_dto.py +++ b/tests/unit/test_contrib/test_piccolo_orm/test_piccolo_orm_dto.py @@ -16,7 +16,9 @@ except ImportError: pytest.skip("Piccolo not installed", allow_module_level=True) +import pytest from piccolo.columns import Column, column_types +from piccolo.columns.column_types import Varchar from piccolo.conf.apps import Finder from piccolo.table import Table, create_db_tables, drop_db_tables @@ -26,6 +28,16 @@ from .tables import RecordingStudio, Venue +def test_dto_deprecation() -> None: + class Manager(Table): + name = Varchar(length=50) + + with pytest.deprecated_call(): + from litestar.contrib.piccolo import PiccoloDTO + + _ = PiccoloDTO[Manager] + + @pytest.fixture(autouse=True) async def scaffold_piccolo() -> AsyncGenerator: """Scaffolds Piccolo ORM and performs cleanup.""" From 444663b208bdfe8e4c8692e4bf7dc177cfd83bf3 Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Wed, 22 Nov 2023 21:30:24 +1000 Subject: [PATCH 05/45] refactor: use scope state for caching connection data (#2725) * refactor: use scope state for caching connection data WIP * refactor: `connection.Request` cached attributes. * refactor: use type: ignore instead of cast() on hot path * refactor: simplify `Request.form()` caching. 
* docs: document why we wrap asgi app in test client --- litestar/_kwargs/extractors.py | 8 +- litestar/app.py | 3 +- litestar/connection/base.py | 60 +++++---- litestar/connection/request.py | 76 +++++++----- litestar/constants.py | 25 +++- litestar/datastructures/headers.py | 5 +- litestar/testing/client/base.py | 60 +++++++-- litestar/testing/request_factory.py | 8 +- litestar/utils/scope.py | 7 -- tests/unit/test_connection/test_base.py | 19 +-- tests/unit/test_connection/test_request.py | 116 ++++++++++-------- .../unit/test_testing/test_request_factory.py | 2 +- 12 files changed, 237 insertions(+), 152 deletions(-) diff --git a/litestar/_kwargs/extractors.py b/litestar/_kwargs/extractors.py index df0845ce9f..28825aa106 100644 --- a/litestar/_kwargs/extractors.py +++ b/litestar/_kwargs/extractors.py @@ -9,12 +9,14 @@ parse_query_string, parse_url_encoded_form_data, ) +from litestar.constants import SCOPE_STATE_PARSED_QUERY_KEY from litestar.datastructures import Headers from litestar.datastructures.upload_file import UploadFile from litestar.enums import ParamType, RequestEncodingType from litestar.exceptions import ValidationException from litestar.params import BodyKwarg from litestar.types import Empty +from litestar.utils.scope import set_litestar_scope_state if TYPE_CHECKING: from litestar._kwargs import KwargsModel @@ -145,13 +147,15 @@ def parse_connection_query_params(connection: ASGIConnection, kwargs_model: Kwar Returns: A dictionary of parsed values. 
""" - parsed_query = connection.scope["_parsed_query"] = ( # type: ignore + parsed_query = ( connection._parsed_query if connection._parsed_query is not Empty else parse_query_string(connection.scope.get("query_string", b"")) ) + set_litestar_scope_state(connection.scope, SCOPE_STATE_PARSED_QUERY_KEY, parsed_query) return create_query_default_dict( - parsed_query=parsed_query, sequence_query_parameter_names=kwargs_model.sequence_query_parameter_names + parsed_query=parsed_query, # type: ignore[arg-type] + sequence_query_parameter_names=kwargs_model.sequence_query_parameter_names, ) diff --git a/litestar/app.py b/litestar/app.py index 24eb0d3afd..856fb3856f 100644 --- a/litestar/app.py +++ b/litestar/app.py @@ -532,10 +532,11 @@ async def __call__( Returns: None """ - scope["app"] = self if scope["type"] == "lifespan": await self.asgi_router.lifespan(receive=receive, send=send) # type: ignore[arg-type] return + + scope["app"] = self scope["state"] = {} await self.asgi_handler(scope, receive, self._wrap_send(send=send, scope=scope)) # type: ignore[arg-type] diff --git a/litestar/connection/base.py b/litestar/connection/base.py index 0d9abdd21a..4eb3842215 100644 --- a/litestar/connection/base.py +++ b/litestar/connection/base.py @@ -3,15 +3,19 @@ from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast from litestar._parsers import parse_cookie_string, parse_query_string +from litestar.constants import ( + SCOPE_STATE_BASE_URL_KEY, + SCOPE_STATE_COOKIES_KEY, + SCOPE_STATE_PARSED_QUERY_KEY, + SCOPE_STATE_URL_KEY, +) from litestar.datastructures.headers import Headers from litestar.datastructures.multi_dicts import MultiDict from litestar.datastructures.state import State from litestar.datastructures.url import URL, Address, make_absolute_url from litestar.exceptions import ImproperlyConfiguredException from litestar.types.empty import Empty - -__all__ = ("ASGIConnection", "empty_receive", "empty_send") - +from litestar.utils.scope import get_litestar_scope_state, 
set_litestar_scope_state if TYPE_CHECKING: from typing import NoReturn @@ -21,6 +25,8 @@ from litestar.types.asgi_types import Message, Receive, Scope, Send from litestar.types.protocols import Logger +__all__ = ("ASGIConnection", "empty_receive", "empty_send") + UserT = TypeVar("UserT") AuthT = TypeVar("AuthT") HandlerT = TypeVar("HandlerT") @@ -75,10 +81,15 @@ def __init__(self, scope: Scope, receive: Receive = empty_receive, send: Send = self.scope = scope self.receive = receive self.send = send - self._base_url: Any = scope.get("_base_url", Empty) - self._url: Any = scope.get("_url", Empty) - self._parsed_query: Any = scope.get("_parsed_query", Empty) - self._cookies: Any = scope.get("_cookies", Empty) + self._base_url = cast("URL | EmptyType", get_litestar_scope_state(scope, SCOPE_STATE_BASE_URL_KEY, Empty)) + self._url = cast("URL | EmptyType", get_litestar_scope_state(scope, SCOPE_STATE_URL_KEY, Empty)) + self._parsed_query = cast( + "tuple[tuple[str, str], ...] | EmptyType", + get_litestar_scope_state(scope, SCOPE_STATE_PARSED_QUERY_KEY, Empty), + ) + self._cookies = cast( + "dict[str, str] | EmptyType", get_litestar_scope_state(scope, SCOPE_STATE_COOKIES_KEY, Empty) + ) @property def app(self) -> Litestar: @@ -115,7 +126,8 @@ def url(self) -> URL: A URL instance constructed from the request's scope. """ if self._url is Empty: - self._url = self.scope["_url"] = URL.from_scope(self.scope) # type: ignore[typeddict-unknown-key] + self._url = URL.from_scope(self.scope) + set_litestar_scope_state(self.scope, SCOPE_STATE_URL_KEY, self._url) return cast("URL", self._url) @@ -128,14 +140,17 @@ def base_url(self) -> URL: (host + domain + prefix) of the request. 
""" if self._base_url is Empty: - scope = { - **self.scope, - "path": "/", - "query_string": b"", - "root_path": self.scope.get("app_root_path") or self.scope.get("root_path", ""), - } - self._base_url = self.scope["_base_url"] = URL.from_scope(cast("Scope", scope)) # type: ignore[typeddict-unknown-key] - + scope = cast( + "Scope", + { + **self.scope, + "path": "/", + "query_string": b"", + "root_path": self.scope.get("app_root_path") or self.scope.get("root_path", ""), + }, + ) + self._base_url = URL.from_scope(scope) + set_litestar_scope_state(self.scope, SCOPE_STATE_BASE_URL_KEY, self._base_url) return cast("URL", self._base_url) @property @@ -155,9 +170,9 @@ def query_params(self) -> MultiDict[Any]: A normalized dict of query parameters. Multiple values for the same key are returned as a list. """ if self._parsed_query is Empty: - self._parsed_query = self.scope["_parsed_query"] = parse_query_string(self.scope.get("query_string", b"")) # type: ignore - - return MultiDict(self._parsed_query) + self._parsed_query = parse_query_string(self.scope.get("query_string", b"")) + set_litestar_scope_state(self.scope, SCOPE_STATE_PARSED_QUERY_KEY, self._parsed_query) + return MultiDict(cast("tuple[tuple[str, str], ...]", self._parsed_query)) @property def path_params(self) -> dict[str, Any]: @@ -176,11 +191,8 @@ def cookies(self) -> dict[str, str]: Returns any cookies stored in the header as a parsed dictionary. 
""" if self._cookies is Empty: - cookies: dict[str, str] = {} - if cookie_header := self.headers.get("cookie"): - cookies = parse_cookie_string(cookie_header) - - self._cookies = self.scope["_cookies"] = cookies # type: ignore[typeddict-unknown-key] + self._cookies = parse_cookie_string(cookie_header) if (cookie_header := self.headers.get("cookie")) else {} + set_litestar_scope_state(self.scope, SCOPE_STATE_COOKIES_KEY, self._cookies) return cast("dict[str, str]", self._cookies) diff --git a/litestar/connection/request.py b/litestar/connection/request.py index 6f22b8d342..2372aa5ae8 100644 --- a/litestar/connection/request.py +++ b/litestar/connection/request.py @@ -12,12 +12,21 @@ empty_receive, empty_send, ) +from litestar.constants import ( + SCOPE_STATE_ACCEPT_KEY, + SCOPE_STATE_BODY_KEY, + SCOPE_STATE_CONTENT_TYPE_KEY, + SCOPE_STATE_FORM_KEY, + SCOPE_STATE_JSON_KEY, + SCOPE_STATE_MSGPACK_KEY, +) from litestar.datastructures.headers import Accept from litestar.datastructures.multi_dicts import FormMultiDict from litestar.enums import RequestEncodingType from litestar.exceptions import InternalServerException from litestar.serialization import decode_json, decode_msgpack from litestar.types import Empty +from litestar.utils.scope import get_litestar_scope_state, set_litestar_scope_state __all__ = ("Request",) @@ -58,12 +67,12 @@ def __init__(self, scope: Scope, receive: Receive = empty_receive, send: Send = """ super().__init__(scope, receive, send) self.is_connected: bool = True - self._body: Any = scope.get("_body", Empty) - self._form: Any = scope.get("_form", Empty) - self._json: Any = scope.get("_json", Empty) - self._msgpack: Any = scope.get("_msgpack", Empty) - self._content_type: Any = scope.get("_content_type", Empty) - self._accept: Any = scope.get("_accept", Empty) + self._body = get_litestar_scope_state(scope, SCOPE_STATE_BODY_KEY, Empty) + self._form = get_litestar_scope_state(scope, SCOPE_STATE_FORM_KEY, Empty) + self._json = 
get_litestar_scope_state(scope, SCOPE_STATE_JSON_KEY, Empty) + self._msgpack = get_litestar_scope_state(scope, SCOPE_STATE_MSGPACK_KEY, Empty) + self._content_type = get_litestar_scope_state(scope, SCOPE_STATE_CONTENT_TYPE_KEY, Empty) + self._accept = get_litestar_scope_state(scope, SCOPE_STATE_ACCEPT_KEY, Empty) @property def method(self) -> Method: @@ -82,9 +91,8 @@ def content_type(self) -> tuple[str, dict[str, str]]: A tuple with the parsed value and a dictionary containing any options send in it. """ if self._content_type is Empty: - self._content_type = self.scope["_content_type"] = parse_content_header( # type: ignore[typeddict-unknown-key] - self.headers.get("Content-Type", "") - ) + self._content_type = parse_content_header(self.headers.get("Content-Type", "")) + set_litestar_scope_state(self.scope, SCOPE_STATE_CONTENT_TYPE_KEY, self._content_type) return cast("tuple[str, dict[str, str]]", self._content_type) @property @@ -95,7 +103,8 @@ def accept(self) -> Accept: An :class:`Accept ` instance, representing the list of acceptable media types. 
""" if self._accept is Empty: - self._accept = self.scope["_accept"] = Accept(self.headers.get("Accept", "*/*")) # type: ignore[typeddict-unknown-key] + self._accept = Accept(self.headers.get("Accept", "*/*")) + set_litestar_scope_state(self.scope, SCOPE_STATE_ACCEPT_KEY, self._accept) return cast("Accept", self._accept) async def json(self) -> Any: @@ -106,9 +115,8 @@ async def json(self) -> Any: """ if self._json is Empty: body = await self.body() - self._json = self.scope["_json"] = decode_json( # type: ignore[typeddict-unknown-key] - body or b"null", type_decoders=self.route_handler.resolve_type_decoders() - ) + self._json = decode_json(body or b"null", type_decoders=self.route_handler.resolve_type_decoders()) + set_litestar_scope_state(self.scope, SCOPE_STATE_JSON_KEY, self._json) return self._json async def msgpack(self) -> Any: @@ -119,9 +127,8 @@ async def msgpack(self) -> Any: """ if self._msgpack is Empty: body = await self.body() - self._msgpack = self.scope["_msgpack"] = decode_msgpack( # type: ignore[typeddict-unknown-key] - body or b"\xc0", type_decoders=self.route_handler.resolve_type_decoders() - ) + self._msgpack = decode_msgpack(body or b"\xc0", type_decoders=self.route_handler.resolve_type_decoders()) + set_litestar_scope_state(self.scope, SCOPE_STATE_MSGPACK_KEY, self._msgpack) return self._msgpack async def stream(self) -> AsyncGenerator[bytes, None]: @@ -162,7 +169,8 @@ async def body(self) -> bytes: A byte-string representing the body of the request. 
""" if self._body is Empty: - self._body = self.scope["_body"] = b"".join([c async for c in self.stream()]) # type: ignore[typeddict-unknown-key] + self._body = b"".join([c async for c in self.stream()]) + set_litestar_scope_state(self.scope, SCOPE_STATE_BODY_KEY, self._body) return cast("bytes", self._body) async def form(self) -> FormMultiDict: @@ -173,22 +181,24 @@ async def form(self) -> FormMultiDict: Returns: A FormMultiDict instance """ - if self._form is not Empty: - return FormMultiDict(self._form) - content_type, options = self.content_type - if content_type == RequestEncodingType.MULTI_PART: - self._form = self.scope["_form"] = form_values = parse_multipart_form( # type: ignore[typeddict-unknown-key] - body=await self.body(), - boundary=options.get("boundary", "").encode(), - multipart_form_part_limit=self.app.multipart_form_part_limit, - ) - return FormMultiDict(form_values) - if content_type == RequestEncodingType.URL_ENCODED: - self._form = self.scope["_form"] = form_values = parse_url_encoded_form_data( # type: ignore[typeddict-unknown-key] - await self.body(), - ) - return FormMultiDict(form_values) - return FormMultiDict() + if self._form is Empty: + content_type, options = self.content_type + if content_type == RequestEncodingType.MULTI_PART: + self._form = parse_multipart_form( + body=await self.body(), + boundary=options.get("boundary", "").encode(), + multipart_form_part_limit=self.app.multipart_form_part_limit, + ) + elif content_type == RequestEncodingType.URL_ENCODED: + self._form = parse_url_encoded_form_data( + await self.body(), + ) + else: + self._form = {} + + set_litestar_scope_state(self.scope, SCOPE_STATE_FORM_KEY, self._form) + + return FormMultiDict(self._form) async def send_push_promise(self, path: str) -> None: """Send a push promise. 
diff --git a/litestar/constants.py b/litestar/constants.py index 68bea65a10..59a6b0d69e 100644 --- a/litestar/constants.py +++ b/litestar/constants.py @@ -17,13 +17,26 @@ REDIRECT_STATUS_CODES: Final = {301, 302, 303, 307, 308} REDIRECT_ALLOWED_MEDIA_TYPES: Final = {MediaType.TEXT, MediaType.HTML, MediaType.JSON} RESERVED_KWARGS: Final = {"state", "headers", "cookies", "request", "socket", "data", "query", "scope", "body"} -SCOPE_STATE_CSRF_TOKEN_KEY = "csrf_token" # noqa: S105 # possible hardcoded password -SCOPE_STATE_DEPENDENCY_CACHE: Final = "dependency_cache" -SCOPE_STATE_NAMESPACE: Final = "__litestar__" -SCOPE_STATE_RESPONSE_COMPRESSED: Final = "response_compressed" -SCOPE_STATE_DO_CACHE: Final = "do_cache" -SCOPE_STATE_IS_CACHED: Final = "is_cached" SKIP_VALIDATION_NAMES: Final = {"request", "socket", "scope", "receive", "send"} UNDEFINED_SENTINELS: Final = {Signature.empty, Empty, Ellipsis, MISSING, UnsetType} WEBSOCKET_CLOSE: Final = "websocket.close" WEBSOCKET_DISCONNECT: Final = "websocket.disconnect" + +# keys for internal stuff that we store in the "__litestar__" namespace of the scope state +SCOPE_STATE_NAMESPACE: Final = "__litestar__" + +SCOPE_STATE_ACCEPT_KEY: Final = "accept" +SCOPE_STATE_BASE_URL_KEY: Final = "base_url" +SCOPE_STATE_BODY_KEY: Final = "body" +SCOPE_STATE_CONTENT_TYPE_KEY: Final = "content_type" +SCOPE_STATE_COOKIES_KEY: Final = "cookies" +SCOPE_STATE_CSRF_TOKEN_KEY: Final = "csrf_token" # possible hardcoded password +SCOPE_STATE_DEPENDENCY_CACHE: Final = "dependency_cache" +SCOPE_STATE_DO_CACHE: Final = "do_cache" +SCOPE_STATE_FORM_KEY: Final = "form" +SCOPE_STATE_IS_CACHED: Final = "is_cached" +SCOPE_STATE_JSON_KEY: Final = "json" +SCOPE_STATE_MSGPACK_KEY: Final = "msgpack" +SCOPE_STATE_PARSED_QUERY_KEY: Final = "parsed_query" +SCOPE_STATE_RESPONSE_COMPRESSED: Final = "response_compressed" +SCOPE_STATE_URL_KEY: Final = "url" diff --git a/litestar/datastructures/headers.py b/litestar/datastructures/headers.py index 
c87223da3e..89081aef77 100644 --- a/litestar/datastructures/headers.py +++ b/litestar/datastructures/headers.py @@ -27,9 +27,6 @@ from litestar.datastructures.multi_dicts import MultiMixin from litestar.dto.base_dto import AbstractDTO from litestar.exceptions import ImproperlyConfiguredException, ValidationException - -__all__ = ("Accept", "CacheControlHeader", "ETag", "Header", "Headers", "MutableScopeHeaders") - from litestar.typing import FieldDefinition from litestar.utils.dataclass import simple_asdict @@ -41,6 +38,8 @@ RawHeadersList, ) +__all__ = ("Accept", "CacheControlHeader", "ETag", "Header", "Headers", "MutableScopeHeaders") + ETAG_RE = re.compile(r'([Ww]/)?"(.+)"') PRINTABLE_ASCII_RE: Pattern[str] = re.compile(r"^[ -~]+$") diff --git a/litestar/testing/client/base.py b/litestar/testing/client/base.py index adb73d19ad..428c548f3a 100644 --- a/litestar/testing/client/base.py +++ b/litestar/testing/client/base.py @@ -6,20 +6,25 @@ from warnings import warn from anyio.from_thread import BlockingPortal, start_blocking_portal +from httpx import Cookies, Request, Response +from litestar import Litestar from litestar.connection import ASGIConnection +from litestar.constants import SCOPE_STATE_COOKIES_KEY from litestar.datastructures import MutableScopeHeaders +from litestar.enums import ScopeType from litestar.exceptions import ( ImproperlyConfiguredException, ) from litestar.types import AnyIOBackend, ASGIApp, HTTPResponseStartEvent +from litestar.utils.scope import set_litestar_scope_state if TYPE_CHECKING: from httpx._types import CookieTypes from litestar.middleware.session.base import BaseBackendConfig, BaseSessionBackend from litestar.middleware.session.client_side import ClientSideSessionBackend -from httpx import Cookies, Request, Response + from litestar.types.asgi_types import HTTPScope, Receive, Scope, Send T = TypeVar("T", bound=ASGIApp) @@ -30,8 +35,8 @@ def fake_http_send_message(headers: MutableScopeHeaders) -> HTTPResponseStartEve def 
fake_asgi_connection(app: ASGIApp, cookies: dict[str, str]) -> ASGIConnection[Any, Any, Any, Any]: - scope = { - "type": "http", + scope: HTTPScope = { + "type": ScopeType.HTTP, "path": "/", "raw_path": b"/", "root_path": "", @@ -39,18 +44,47 @@ def fake_asgi_connection(app: ASGIApp, cookies: dict[str, str]) -> ASGIConnectio "query_string": b"", "client": ("testclient", 50000), "server": ("testserver", 80), + "headers": [], "method": "GET", "http_version": "1.1", "extensions": {"http.response.template": {}}, - "app": app, + "app": app, # type: ignore[typeddict-item] "state": {}, "path_params": {}, - "route_handler": None, - "_cookies": cookies, + "route_handler": None, # type: ignore[typeddict-item] + "asgi": {"version": "3.0", "spec_version": "2.1"}, + "auth": None, + "session": None, + "user": None, } - return ASGIConnection[Any, Any, Any, Any]( - scope=scope, # type: ignore[arg-type] - ) + set_litestar_scope_state(scope, SCOPE_STATE_COOKIES_KEY, cookies) + return ASGIConnection[Any, Any, Any, Any](scope=scope) + + +def _wrap_app_to_add_state(app: ASGIApp) -> ASGIApp: + """Wrap an ASGI app to add state to the scope. + + Litestar depends on `state` being present in the ASGI connection scope. Scope state is optional in the ASGI spec, + however, the Litestar app always ensures it is present so that it can be depended on internally. + + When the ASGI app that is passed to the test client is _not_ a Litestar app, we need to add + state to the scope, because httpx does not do this for us. + + This assists us in testing Litestar components that rely on state being present in the scope, without having + to create a Litestar app for every test case. + + Args: + app: The ASGI app to wrap. + + Returns: + The wrapped ASGI app. 
+ """ + + async def wrapped(scope: Scope, receive: Receive, send: Send) -> None: + scope["state"] = {} + await app(scope, receive, send) + + return wrapped class BaseTestClient(Generic[T]): @@ -83,10 +117,16 @@ def __init__( UserWarning, stacklevel=1, ) + self._session_backend: BaseSessionBackend | None = None if session_config: self._session_backend = session_config._backend_class(config=session_config) - self.app = app + + if not isinstance(app, Litestar): + app = _wrap_app_to_add_state(app) # type: ignore[assignment] + + self.app = cast("T", app) # type: ignore[redundant-cast] # pyright needs this + self.base_url = base_url self.backend = backend self.backend_options = backend_options diff --git a/litestar/testing/request_factory.py b/litestar/testing/request_factory.py index 54f8233ddd..de60b7719a 100644 --- a/litestar/testing/request_factory.py +++ b/litestar/testing/request_factory.py @@ -11,12 +11,14 @@ from litestar import delete, patch, post, put from litestar.app import Litestar from litestar.connection import Request +from litestar.constants import SCOPE_STATE_BODY_KEY from litestar.enums import HttpMethod, ParamType, RequestEncodingType, ScopeType from litestar.handlers.http_handlers import get from litestar.serialization import decode_json, default_serializer, encode_json from litestar.types import DataContainerType, HTTPScope, RouteHandlerType from litestar.types.asgi_types import ASGIVersion from litestar.utils import get_serializer_from_scope +from litestar.utils.scope import set_litestar_scope_state if TYPE_CHECKING: from httpx._types import FileTypes @@ -285,6 +287,7 @@ def _create_request_with_data( ) headers = headers or {} + body = b"" if data: data = json.loads(encode_json(data, serializer=get_serializer_from_scope(scope))) @@ -297,12 +300,9 @@ def _create_request_with_data( else: encoding_headers, stream = encode_urlencoded_data(decode_json(value=encode_json(data))) headers.update(encoding_headers) - body = b"" for chunk in stream: body += 
chunk - scope["_body"] = body # type: ignore[typeddict-unknown-key] - else: - scope["_body"] = b"" # type: ignore[typeddict-unknown-key] + set_litestar_scope_state(scope, SCOPE_STATE_BODY_KEY, body) self._create_cookie_header(headers, cookies) scope["headers"] = self._build_headers(headers) return Request(scope=scope) diff --git a/litestar/utils/scope.py b/litestar/utils/scope.py index b60f089a28..d7b5b53e71 100644 --- a/litestar/utils/scope.py +++ b/litestar/utils/scope.py @@ -46,13 +46,6 @@ def get_serializer_from_scope(scope: Scope) -> Serializer: def get_litestar_scope_state(scope: Scope, key: str, default: Any = None, pop: bool = False) -> Any: """Get an internal value from connection scope state. - Note: - If called with a default value, this method behaves like to `dict.set_default()`, both setting the key in the - namespace to the default value, and returning it. - - If called without a default value, the method behaves like `dict.get()`, returning ``None`` if the key does not - exist. - Args: scope: The connection scope. key: Key to get from internal namespace in scope state. 
diff --git a/tests/unit/test_connection/test_base.py b/tests/unit/test_connection/test_base.py index edc6cce376..71c52235c1 100644 --- a/tests/unit/test_connection/test_base.py +++ b/tests/unit/test_connection/test_base.py @@ -1,9 +1,10 @@ from typing import Any -from litestar import Litestar, get +from litestar import Litestar, constants, get from litestar.connection import ASGIConnection from litestar.logging.config import LoggingConfig from litestar.testing import RequestFactory +from litestar.utils.scope import get_litestar_scope_state def test_connection_base_properties() -> None: @@ -22,21 +23,21 @@ def handler() -> None: assert connection.app is app assert connection.route_handler is handler assert connection.state is not None - assert not scope.get("_url") + assert not get_litestar_scope_state(scope, constants.SCOPE_STATE_URL_KEY) assert connection.url - assert scope.get("_url") - assert not scope.get("_base_url") + assert get_litestar_scope_state(scope, constants.SCOPE_STATE_URL_KEY) + assert not get_litestar_scope_state(scope, constants.SCOPE_STATE_BASE_URL_KEY) assert connection.base_url - assert scope.get("_base_url") + assert get_litestar_scope_state(scope, constants.SCOPE_STATE_BASE_URL_KEY) assert not scope.get("_headers") assert connection.headers is not None assert scope.get("_headers") is not None - assert not scope.get("_parsed_query") + assert not get_litestar_scope_state(scope, constants.SCOPE_STATE_PARSED_QUERY_KEY) assert connection.query_params is not None - assert scope.get("_parsed_query") is not None - assert not scope.get("_cookies") + assert get_litestar_scope_state(scope, constants.SCOPE_STATE_PARSED_QUERY_KEY) is not None + assert not get_litestar_scope_state(scope, constants.SCOPE_STATE_COOKIES_KEY) assert connection.cookies is not None - assert scope.get("_cookies") is not None + assert get_litestar_scope_state(scope, constants.SCOPE_STATE_COOKIES_KEY) is not None assert connection.client assert connection.user is user assert 
connection.auth is auth diff --git a/tests/unit/test_connection/test_request.py b/tests/unit/test_connection/test_request.py index 35f34d0d26..6242e223ef 100644 --- a/tests/unit/test_connection/test_request.py +++ b/tests/unit/test_connection/test_request.py @@ -3,8 +3,9 @@ https://github.com/encode/starlette/blob/master/tests/test_requests.py. And are meant to ensure our compatibility with their API. """ +from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, Generator, Optional +from typing import TYPE_CHECKING, Any, Callable, Dict, Generator from unittest.mock import patch import pytest @@ -30,42 +31,47 @@ def _route_handler() -> None: pass -async def test_request_empty_body_to_json(anyio_backend: str) -> None: +@pytest.fixture(name="scope") +def scope_fixture(create_scope: Callable[..., Scope]) -> Scope: + return create_scope(type="http", route_handler=_route_handler) + + +async def test_request_empty_body_to_json(anyio_backend: str, scope: Scope) -> None: with patch.object(Request, "body", return_value=b""): - request_empty_payload: Request = Request(scope={"type": "http", "route_handler": _route_handler}) # type: ignore + request_empty_payload: Request = Request(scope=scope) request_json = await request_empty_payload.json() assert request_json is None -async def test_request_invalid_body_to_json(anyio_backend: str) -> None: +async def test_request_invalid_body_to_json(anyio_backend: str, scope: Scope) -> None: with patch.object(Request, "body", return_value=b"invalid"), pytest.raises(SerializationException): - request_empty_payload: Request = Request(scope={"type": "http", "route_handler": _route_handler}) # type: ignore + request_empty_payload: Request = Request(scope=scope) await request_empty_payload.json() -async def test_request_valid_body_to_json(anyio_backend: str) -> None: +async def test_request_valid_body_to_json(anyio_backend: str, scope: Scope) -> None: with patch.object(Request, "body", return_value=b'{"test": 
"valid"}'): - request_empty_payload: Request = Request(scope={"type": "http", "route_handler": _route_handler}) # type: ignore + request_empty_payload: Request = Request(scope=scope) request_json = await request_empty_payload.json() assert request_json == {"test": "valid"} -async def test_request_empty_body_to_msgpack(anyio_backend: str) -> None: +async def test_request_empty_body_to_msgpack(anyio_backend: str, scope: Scope) -> None: with patch.object(Request, "body", return_value=b""): - request_empty_payload: Request = Request(scope={"type": "http", "route_handler": _route_handler}) # type: ignore + request_empty_payload: Request = Request(scope=scope) request_msgpack = await request_empty_payload.msgpack() assert request_msgpack is None -async def test_request_invalid_body_to_msgpack(anyio_backend: str) -> None: +async def test_request_invalid_body_to_msgpack(anyio_backend: str, scope: Scope) -> None: with patch.object(Request, "body", return_value=b"invalid"), pytest.raises(SerializationException): - request_empty_payload: Request = Request(scope={"type": "http", "route_handler": _route_handler}) # type: ignore + request_empty_payload: Request = Request(scope=scope) await request_empty_payload.msgpack() -async def test_request_valid_body_to_msgpack(anyio_backend: str) -> None: +async def test_request_valid_body_to_msgpack(anyio_backend: str, scope: Scope) -> None: with patch.object(Request, "body", return_value=encode_msgpack({"test": "valid"})): - request_empty_payload: Request = Request(scope={"type": "http", "route_handler": _route_handler}) # type: ignore + request_empty_payload: Request = Request(scope=scope) request_msgpack = await request_empty_payload.msgpack() assert request_msgpack == {"test": "valid"} @@ -75,12 +81,12 @@ def test_request_url_for() -> None: def proxy() -> None: pass - @get(path="/test") - def root(request: Request) -> Dict[str, str]: + @get(path="/test", signature_namespace={"dict": Dict}) + def root(request: Request) -> dict[str, 
str]: return {"url": request.url_for("proxy")} - @get(path="/test-none") - def test_none(request: Request) -> Dict[str, str]: + @get(path="/test-none", signature_namespace={"dict": Dict}) + def test_none(request: Request) -> dict[str, str]: return {"url": request.url_for("none")} with create_test_client(route_handlers=[proxy, root, test_none]) as client: @@ -91,13 +97,13 @@ def test_none(request: Request) -> Dict[str, str]: assert response.status_code == 500 -def test_request_asset_url(tmp_path: "Path") -> None: - @get(path="/resolver") - def resolver(request: Request) -> Dict[str, str]: +def test_request_asset_url(tmp_path: Path) -> None: + @get(path="/resolver", signature_namespace={"dict": Dict}) + def resolver(request: Request) -> dict[str, str]: return {"url": request.url_for_static_asset("js", "main.js")} - @get(path="/resolver-none") - def resolver_none(request: Request) -> Dict[str, str]: + @get(path="/resolver-none", signature_namespace={"dict": Dict}) + def resolver_none(request: Request) -> dict[str, str]: return {"url": request.url_for_static_asset("none", "main.js")} with create_test_client( @@ -131,7 +137,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) self.scope["called"] = True # type: ignore - @get("/") + @get("/", signature_types=[MyRequest]) def handler(request: MyRequest) -> None: value["called"] = request.scope.get("called") @@ -141,7 +147,7 @@ def handler(request: MyRequest) -> None: def test_request_url() -> None: - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope, receive) data = {"method": request.method, "url": str(request.url)} response = ASGIResponse(body=encode_json(data)) @@ -156,7 +162,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: def test_request_query_params() -> None: - async def app(scope: "Scope", receive: "Receive", send: 
"Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope, receive) params = dict(request.query_params) response = ASGIResponse(body=encode_json({"params": params})) @@ -168,7 +174,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: def test_request_headers() -> None: - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope, receive) headers = dict(request.headers) response = ASGIResponse(body=encode_json({"headers": headers})) @@ -188,7 +194,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: def test_request_accept_header() -> None: - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope, receive) response = ASGIResponse(body=encode_json({"accepted_types": list(request.accept)})) await response(scope, receive, send) @@ -199,20 +205,26 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: @pytest.mark.parametrize( - "scope,expected_client", + "scope_values,expected_client", ( ({"type": "http", "route_handler": _route_handler, "client": ["client", 42]}, Address("client", 42)), ({"type": "http", "route_handler": _route_handler, "client": None}, None), ({"type": "http", "route_handler": _route_handler}, None), ), ) -def test_request_client(scope: "Scope", expected_client: Optional[Address]) -> None: +def test_request_client( + scope_values: dict[str, Any], expected_client: Address | None, create_scope: Callable[..., Scope] +) -> None: + scope = create_scope() + scope.update(scope_values) # type: ignore[typeddict-item] + if "client" not in scope_values: + del scope["client"] # type: ignore[misc] client = Request[Any, Any, Any](scope).client assert client == expected_client def 
test_request_body() -> None: - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope, receive) body = await request.body() response = ASGIResponse(body=encode_json({"body": body.decode()})) @@ -231,7 +243,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: def test_request_stream() -> None: - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope, receive) body = b"" async for chunk in request.stream(): @@ -252,7 +264,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: def test_request_form_urlencoded() -> None: - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope, receive) form = await request.form() response = ASGIResponse(body=encode_json({"form": dict(form)})) @@ -265,7 +277,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: def test_request_body_then_stream() -> None: - async def app(scope: "Any", receive: "Receive", send: "Send") -> None: + async def app(scope: Any, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope, receive) body = await request.body() chunks = b"" @@ -281,7 +293,7 @@ async def app(scope: "Any", receive: "Receive", send: "Send") -> None: def test_request_stream_then_body() -> None: - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope, receive) chunks = b"" async for chunk in request.stream(): @@ -301,7 +313,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: def test_request_json() -> None: @asgi("/") - async 
def handler(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def handler(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope, receive) data = await request.json() response = ASGIResponse(body=encode_json({"json": data})) @@ -313,7 +325,7 @@ async def handler(scope: "Scope", receive: "Receive", send: "Send") -> None: def test_request_raw_path() -> None: - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope, receive) path = str(request.scope["path"]) raw_path = str(request.scope["raw_path"]) @@ -328,7 +340,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: def test_request_without_setting_receive() -> None: """If Request is instantiated without the 'receive' channel, then .body() is not available.""" - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope) try: data = await request.json() @@ -342,10 +354,10 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: assert response.json() == {"json": "Receive channel not available"} -async def test_request_disconnect() -> None: +async def test_request_disconnect(create_scope: Callable[..., Scope]) -> None: """If a client disconnect occurs while reading request body then InternalServerException should be raised.""" - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope, receive) await request.body() @@ -354,26 +366,26 @@ async def receiver() -> dict: with pytest.raises(InternalServerException): await app( - {"type": "http", "route_handler": _route_handler, "method": "POST", "path": "/"}, # type: ignore[arg-type] + create_scope(type="http", 
route_handler=_route_handler, method="POST", path="/"), receiver, # type: ignore[arg-type] empty_send, ) def test_request_state() -> None: - @get("/") - def handler(request: Request[Any, Any, Any]) -> Dict[Any, Any]: + @get("/", signature_namespace={"dict": Dict}) + def handler(request: Request[Any, Any, Any]) -> dict[Any, Any]: request.state.test = 1 assert request.state.test == 1 return request.state.dict() # type: ignore with create_test_client(handler) as client: response = client.get("/") - assert response.json() == {"test": 1} + assert response.json()["test"] == 1 def test_request_cookies() -> None: - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope, receive) mycookie = request.cookies.get("mycookie") if mycookie: @@ -395,7 +407,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: def test_chunked_encoding() -> None: - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope, receive) body = await request.body() response = ASGIResponse(body=encode_json({"body": body.decode()})) @@ -412,7 +424,7 @@ def post_body() -> Generator[bytes, None, None]: def test_request_send_push_promise() -> None: - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: # the server is push-enabled scope["extensions"]["http.response.push"] = {} # type: ignore @@ -433,7 +445,7 @@ def test_request_send_push_promise_without_push_extension() -> None: .send_push_promise() does nothing. 
""" - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope) await request.send_push_promise("/style.css") @@ -451,7 +463,7 @@ def test_request_send_push_promise_without_setting_send() -> None: .send_push_promise() is not available. """ - async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: + async def app(scope: Scope, receive: Receive, send: Send) -> None: # the server is push-enabled scope["extensions"]["http.response.push"] = {} # type: ignore @@ -470,10 +482,10 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None: class BeforeRequestMiddleWare(MiddlewareProtocol): - def __init__(self, app: "ASGIApp") -> None: + def __init__(self, app: ASGIApp) -> None: self.app = app - async def __call__(self, scope: "Scope", receive: "Receive", send: "Send") -> None: + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: scope["state"]["main"] = 1 await self.app(scope, receive, send) @@ -483,8 +495,8 @@ def before_request(request: Request) -> None: assert request.state.main == 1 request.state.main = 2 - @get(path="/") - async def get_state(request: Request) -> Dict[str, str]: + @get(path="/", signature_namespace={"dict": Dict}) + async def get_state(request: Request) -> dict[str, str]: return {"state": request.state.main} with create_test_client( diff --git a/tests/unit/test_testing/test_request_factory.py b/tests/unit/test_testing/test_request_factory.py index fa2c148130..0171e2f91a 100644 --- a/tests/unit/test_testing/test_request_factory.py +++ b/tests/unit/test_testing/test_request_factory.py @@ -119,8 +119,8 @@ def test_request_factory_create_with_default_params() -> None: assert isinstance(request.app, Litestar) assert request.url == request.base_url == _DEFAULT_REQUEST_FACTORY_URL assert request.method == HttpMethod.GET + assert request.state.keys() == {"__litestar__"} assert 
not request.query_params - assert not request.state assert not request.path_params assert request.route_handler assert request.scope["http_version"] == "1.1" From a781226127d37d0a830957bc39f771c6d93e5e2b Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Wed, 22 Nov 2023 21:43:52 +1000 Subject: [PATCH 06/45] refactor: change type of `Empty` and `EmptyType` (#2734) * refactor: use scope state for caching connection data WIP * refactor: use type: ignore instead of cast() on hot path * refactor: change type of `Empty` and `EmptyType` This PR changes the type of `Empty` and `EmptyType` in `litestar.utils.empty` to types that better support narrowing. There is no change to the API of empty, and identity checks against `Empty` will still work as they always have. Use of `EmptyType` as a type in annotations will continue to work like it always has. Type checking of `Empty` will improve, as evidenced by the bug that this PR exposed in our use of `ParameterDefinition.default_factory`. It is typed `Callable | None` and in multiple places we've been passing `Empty` to it which was passing type checks because the type `Empty` _is_ callable. As a single enum variant, this type of error should no longer be possible. Where downstream users are importing and using `Empty` as one would expect, i.e., `if thing is Empty: ...`, this PR should only improve their typing experience. An issue that may arise is if `Empty` is being used in an `issubclass` check, e.g., `issubclass(thing, Empty)`. Under this PR, this would now be a runtime error. However, given that the previous implementation of `Empty` was marked as `@final`, it cannot have been subclassed in user-code, and so I think safe to assume that this scenario would be rare, if not non-existent. * refactor: improve exception wording Fix wording of exception raised where a user might return the empty sentinel from a handler. 
--- litestar/_kwargs/extractors.py | 2 +- litestar/app.py | 2 +- litestar/connection/base.py | 8 ++--- litestar/contrib/piccolo.py | 2 +- .../contrib/pydantic/pydantic_dto_factory.py | 4 +-- litestar/contrib/pydantic/utils.py | 35 +++++++++++-------- litestar/datastructures/url.py | 9 ++--- litestar/dto/msgspec_dto.py | 5 ++- litestar/handlers/base.py | 18 +++++----- litestar/handlers/http_handlers/base.py | 2 +- .../handlers/websocket_handlers/listener.py | 6 ++-- litestar/response/base.py | 4 +++ litestar/stores/redis.py | 2 +- litestar/types/empty.py | 14 ++++---- litestar/utils/compat.py | 2 +- tests/unit/test_contrib/conftest.py | 9 +++-- 16 files changed, 68 insertions(+), 56 deletions(-) diff --git a/litestar/_kwargs/extractors.py b/litestar/_kwargs/extractors.py index 28825aa106..b289511d83 100644 --- a/litestar/_kwargs/extractors.py +++ b/litestar/_kwargs/extractors.py @@ -154,7 +154,7 @@ def parse_connection_query_params(connection: ASGIConnection, kwargs_model: Kwar ) set_litestar_scope_state(connection.scope, SCOPE_STATE_PARSED_QUERY_KEY, parsed_query) return create_query_default_dict( - parsed_query=parsed_query, # type: ignore[arg-type] + parsed_query=parsed_query, sequence_query_parameter_names=kwargs_model.sequence_query_parameter_names, ) diff --git a/litestar/app.py b/litestar/app.py index 856fb3856f..73101998e2 100644 --- a/litestar/app.py +++ b/litestar/app.py @@ -345,7 +345,7 @@ def __init__( include_in_schema=include_in_schema, lifespan=list(lifespan or []), listeners=list(listeners or []), - logging_config=cast("BaseLoggingConfig | None", logging_config), + logging_config=logging_config, middleware=list(middleware or []), multipart_form_part_limit=multipart_form_part_limit, on_shutdown=list(on_shutdown or []), diff --git a/litestar/connection/base.py b/litestar/connection/base.py index 4eb3842215..2ac9e61c3b 100644 --- a/litestar/connection/base.py +++ b/litestar/connection/base.py @@ -129,7 +129,7 @@ def url(self) -> URL: self._url = 
URL.from_scope(self.scope) set_litestar_scope_state(self.scope, SCOPE_STATE_URL_KEY, self._url) - return cast("URL", self._url) + return self._url @property def base_url(self) -> URL: @@ -151,7 +151,7 @@ def base_url(self) -> URL: ) self._base_url = URL.from_scope(scope) set_litestar_scope_state(self.scope, SCOPE_STATE_BASE_URL_KEY, self._base_url) - return cast("URL", self._base_url) + return self._base_url @property def headers(self) -> Headers: @@ -172,7 +172,7 @@ def query_params(self) -> MultiDict[Any]: if self._parsed_query is Empty: self._parsed_query = parse_query_string(self.scope.get("query_string", b"")) set_litestar_scope_state(self.scope, SCOPE_STATE_PARSED_QUERY_KEY, self._parsed_query) - return MultiDict(cast("tuple[tuple[str, str], ...]", self._parsed_query)) + return MultiDict(self._parsed_query) @property def path_params(self) -> dict[str, Any]: @@ -194,7 +194,7 @@ def cookies(self) -> dict[str, str]: self._cookies = parse_cookie_string(cookie_header) if (cookie_header := self.headers.get("cookie")) else {} set_litestar_scope_state(self.scope, SCOPE_STATE_COOKIES_KEY, self._cookies) - return cast("dict[str, str]", self._cookies) + return self._cookies @property def client(self) -> Address | None: diff --git a/litestar/contrib/piccolo.py b/litestar/contrib/piccolo.py index 297cd7a2bb..314eb328e2 100644 --- a/litestar/contrib/piccolo.py +++ b/litestar/contrib/piccolo.py @@ -84,7 +84,7 @@ def generate_field_definitions(cls, model_type: type[Table]) -> Generator[DTOFie field_definition=_parse_piccolo_type(column, _create_column_extra(column)), dto_field=DTOField(mark=Mark.READ_ONLY if column._meta.primary_key else None), model_name=model_type.__name__, - default_factory=Empty, + default_factory=None, ), default=Empty if column._meta.required else None, name=column._meta.name, diff --git a/litestar/contrib/pydantic/pydantic_dto_factory.py b/litestar/contrib/pydantic/pydantic_dto_factory.py index 72cca5baef..d61f95d671 100644 --- 
a/litestar/contrib/pydantic/pydantic_dto_factory.py +++ b/litestar/contrib/pydantic/pydantic_dto_factory.py @@ -97,7 +97,7 @@ def generate_field_definitions( model_name=model_type.__name__, default_factory=field_info.default_factory if field_info.default_factory and not is_pydantic_undefined(field_info.default_factory) - else Empty, + else None, ), default=default, name=field_name, @@ -107,4 +107,4 @@ def generate_field_definitions( def detect_nested_field(cls, field_definition: FieldDefinition) -> bool: if pydantic_v2 is not Empty: # type: ignore[comparison-overlap] return field_definition.is_subclass_of((pydantic_v1.BaseModel, pydantic_v2.BaseModel)) - return field_definition.is_subclass_of(pydantic_v1.BaseModel) + return field_definition.is_subclass_of(pydantic_v1.BaseModel) # type: ignore[unreachable] diff --git a/litestar/contrib/pydantic/utils.py b/litestar/contrib/pydantic/utils.py index 69948decba..2de448677f 100644 --- a/litestar/contrib/pydantic/utils.py +++ b/litestar/contrib/pydantic/utils.py @@ -1,3 +1,4 @@ +# mypy: strict-equality=False from __future__ import annotations from typing import TYPE_CHECKING, Any @@ -52,13 +53,15 @@ def is_pydantic_model_class( Returns: A typeguard determining whether the type is :data:`BaseModel pydantic.BaseModel>`. 
""" - if pydantic_v1 is Empty: # type: ignore[comparison-overlap] # pragma: no cover - return False + tests: list[bool] = [] - if pydantic_v2 is Empty: # type: ignore[comparison-overlap] # pragma: no cover - return is_class_and_subclass(annotation, pydantic_v1.BaseModel) + if pydantic_v1 is not Empty: # pragma: no cover + tests.append(is_class_and_subclass(annotation, pydantic_v1.BaseModel)) - return is_class_and_subclass(annotation, (pydantic_v1.BaseModel, pydantic_v2.BaseModel)) + if pydantic_v2 is not Empty: # pragma: no cover + tests.append(is_class_and_subclass(annotation, pydantic_v2.BaseModel)) + + return any(tests) def is_pydantic_model_instance( @@ -72,13 +75,15 @@ def is_pydantic_model_instance( Returns: A typeguard determining whether the type is :data:`BaseModel pydantic.BaseModel>`. """ - if pydantic_v1 is Empty: # type: ignore[comparison-overlap] # pragma: no cover - return False + tests: list[bool] = [] + + if pydantic_v1 is not Empty: # pragma: no cover + tests.append(isinstance(annotation, pydantic_v1.BaseModel)) - if pydantic_v2 is Empty: # type: ignore[comparison-overlap] # pragma: no cover - return isinstance(annotation, pydantic_v1.BaseModel) + if pydantic_v2 is not Empty: # pragma: no cover + tests.append(isinstance(annotation, pydantic_v2.BaseModel)) - return isinstance(annotation, (pydantic_v1.BaseModel, pydantic_v2.BaseModel)) + return any(tests) def is_pydantic_constrained_field(annotation: Any) -> bool: @@ -90,8 +95,8 @@ def is_pydantic_constrained_field(annotation: Any) -> bool: Returns: True if pydantic is installed and the type is a constrained type, otherwise False. 
""" - if pydantic_v1 is Empty: # type: ignore[comparison-overlap] # pragma: no cover - return False + if pydantic_v1 is Empty: # pragma: no cover + return False # type: ignore[unreachable] return any( is_class_and_subclass(annotation, constrained_type) # pyright: ignore @@ -110,7 +115,7 @@ def is_pydantic_constrained_field(annotation: Any) -> bool: def pydantic_unwrap_and_get_origin(annotation: Any) -> Any | None: - if pydantic_v2 is Empty or is_class_and_subclass(annotation, pydantic_v1.BaseModel): # type: ignore[comparison-overlap] + if pydantic_v2 is Empty or (pydantic_v1 is not Empty and is_class_and_subclass(annotation, pydantic_v1.BaseModel)): return get_origin_or_inner_type(annotation) origin = annotation.__pydantic_generic_metadata__["origin"] @@ -123,7 +128,7 @@ def pydantic_get_type_hints_with_generics_resolved( localns: dict[str, Any] | None = None, include_extras: bool = False, ) -> dict[str, Any]: - if pydantic_v2 is Empty or is_class_and_subclass(annotation, pydantic_v1.BaseModel): # type: ignore[comparison-overlap] + if pydantic_v2 is Empty or (pydantic_v1 is not Empty and is_class_and_subclass(annotation, pydantic_v1.BaseModel)): return get_type_hints_with_generics_resolved(annotation) origin = pydantic_unwrap_and_get_origin(annotation) @@ -158,7 +163,7 @@ def pydantic_get_unwrapped_annotation_and_type_hints(annotation: Any) -> tuple[A def is_pydantic_2_model( obj: type[pydantic_v1.BaseModel | pydantic_v2.BaseModel], # pyright: ignore ) -> TypeGuard[pydantic_v2.BaseModel]: # pyright: ignore - return pydantic_v2 is not Empty and issubclass(obj, pydantic_v2.BaseModel) # type: ignore[comparison-overlap] + return pydantic_v2 is not Empty and issubclass(obj, pydantic_v2.BaseModel) def is_pydantic_undefined(value: Any) -> bool: diff --git a/litestar/datastructures/url.py b/litestar/datastructures/url.py index 2a887a9997..f3441d06ef 100644 --- a/litestar/datastructures/url.py +++ b/litestar/datastructures/url.py @@ -1,7 +1,7 @@ from __future__ import 
annotations from functools import lru_cache -from typing import TYPE_CHECKING, Any, NamedTuple, cast +from typing import TYPE_CHECKING, Any, NamedTuple from urllib.parse import SplitResult, urlencode, urlsplit, urlunsplit from litestar._parsers import parse_query_string @@ -222,13 +222,14 @@ def with_replacements( """ if isinstance(query, MultiDict): query = urlencode(query=query) - query_str = cast("str", (query if query is not Empty else self.query) or "") + + query = (query if query is not Empty else self.query) or "" return type(self).from_components( scheme=scheme or self.scheme, netloc=netloc or self.netloc, path=path or self.path, - query=query_str, + query=query, fragment=fragment or self.fragment, ) @@ -247,7 +248,7 @@ def query_params(self) -> MultiDict: """ if self._query_params is Empty: self._query_params = MultiDict(parse_query_string(query_string=self.query.encode())) - return cast("MultiDict", self._query_params) + return self._query_params def __str__(self) -> str: return self._url diff --git a/litestar/dto/msgspec_dto.py b/litestar/dto/msgspec_dto.py index 21cd2e3860..826a1d274f 100644 --- a/litestar/dto/msgspec_dto.py +++ b/litestar/dto/msgspec_dto.py @@ -31,6 +31,9 @@ def generate_field_definitions(cls, model_type: type[Struct]) -> Generator[DTOFi def default_or_empty(value: Any) -> Any: return Empty if value is NODEFAULT else value + def default_or_none(value: Any) -> Any: + return None if value is NODEFAULT else value + for key, field_definition in cls.get_model_type_hints(model_type).items(): msgspec_field = msgspec_fields[key] dto_field = (field_definition.extra or {}).pop(DTO_FIELD_META_KEY, DTOField()) @@ -40,7 +43,7 @@ def default_or_empty(value: Any) -> Any: field_definition=field_definition, dto_field=dto_field, model_name=model_type.__name__, - default_factory=default_or_empty(msgspec_field.default_factory), + default_factory=default_or_none(msgspec_field.default_factory), ), default=default_or_empty(msgspec_field.default), name=key, 
diff --git a/litestar/handlers/base.py b/litestar/handlers/base.py index 2ba9d422a6..61be4f2f3b 100644 --- a/litestar/handlers/base.py +++ b/litestar/handlers/base.py @@ -196,7 +196,7 @@ def signature_model(self) -> type[SignatureModel]: data_dto=self.resolve_data_dto(), type_decoders=self.resolve_type_decoders(), ) - return cast("type[SignatureModel]", self._signature_model) + return self._signature_model @property def fn(self) -> AsyncAnyCallable: @@ -226,19 +226,19 @@ def parsed_fn_signature(self) -> ParsedSignature: unwrap_partial(self.fn), self.resolve_signature_namespace() ) - return cast("ParsedSignature", self._parsed_fn_signature) + return self._parsed_fn_signature @property def parsed_return_field(self) -> FieldDefinition: if self._parsed_return_field is Empty: self._parsed_return_field = self.parsed_fn_signature.return_type - return cast("FieldDefinition", self._parsed_return_field) + return self._parsed_return_field @property def parsed_data_field(self) -> FieldDefinition | None: if self._parsed_data_field is Empty: self._parsed_data_field = self.parsed_fn_signature.parameters.get("data") - return cast("FieldDefinition | None", self._parsed_data_field) + return self._parsed_data_field @property def handler_name(self) -> str: @@ -322,7 +322,7 @@ def resolve_layered_parameters(self) -> dict[str, FieldDefinition]: for key, parameter in parameter_kwargs.items() } - return cast("dict[str, FieldDefinition]", self._resolved_layered_parameters) + return self._resolved_layered_parameters def resolve_guards(self) -> list[Guard]: """Return all guards in the handlers scope, starting from highest to current layer.""" @@ -336,7 +336,7 @@ def resolve_guards(self) -> list[Guard]: "list[Guard]", [ensure_async_callable(guard) for guard in self._resolved_guards] ) - return self._resolved_guards # type:ignore + return self._resolved_guards def resolve_dependencies(self) -> dict[str, Provide]: """Return all dependencies correlating to handler function's kwargs that exist in 
the handler's scope.""" @@ -364,7 +364,7 @@ def resolve_dependencies(self) -> dict[str, Provide]: ) self._resolved_dependencies[key] = provider - return cast("dict[str, Provide]", self._resolved_dependencies) + return self._resolved_dependencies def resolve_middleware(self) -> list[Middleware]: """Build the middleware stack for the RouteHandler and return it. @@ -455,7 +455,7 @@ def resolve_data_dto(self) -> type[AbstractDTO] | None: self._resolved_data_dto = data_dto - return cast("type[AbstractDTO] | None", self._resolved_data_dto) + return self._resolved_data_dto def resolve_return_dto(self) -> type[AbstractDTO] | None: """Resolve the return_dto by starting from the route handler and moving up. @@ -490,7 +490,7 @@ def resolve_return_dto(self) -> type[AbstractDTO] | None: else: self._resolved_return_dto = None - return cast("type[AbstractDTO] | None", self._resolved_return_dto) + return self._resolved_return_dto async def authorize_connection(self, connection: ASGIConnection) -> None: """Ensure the connection is authorized by running all the route guards in scope.""" diff --git a/litestar/handlers/http_handlers/base.py b/litestar/handlers/http_handlers/base.py index aa7cf5bd6c..547a83f680 100644 --- a/litestar/handlers/http_handlers/base.py +++ b/litestar/handlers/http_handlers/base.py @@ -404,7 +404,7 @@ def resolve_include_in_schema(self) -> bool: ] self._resolved_include_in_schema = include_in_schemas[-1] if include_in_schemas else True - return cast(bool, self._resolved_include_in_schema) + return self._resolved_include_in_schema def get_response_handler(self, is_response_type_data: bool = False) -> Callable[[Any], Awaitable[ASGIApp]]: """Resolve the response_handler function for the route handler. 
diff --git a/litestar/handlers/websocket_handlers/listener.py b/litestar/handlers/websocket_handlers/listener.py index 6363f4335f..57d3270adc 100644 --- a/litestar/handlers/websocket_handlers/listener.py +++ b/litestar/handlers/websocket_handlers/listener.py @@ -259,7 +259,7 @@ def signature_model(self) -> type[SignatureModel]: parsed_signature=self.parsed_fn_signature, type_decoders=self.resolve_type_decoders(), ) - return cast("type[SignatureModel]", self._signature_model) + return self._signature_model @asynccontextmanager async def default_connection_lifespan( @@ -297,12 +297,12 @@ async def default_connection_lifespan( def resolve_receive_handler(self) -> Callable[[WebSocket], Any]: if self._receive_handler is Empty: self._receive_handler = create_handle_receive(self) - return cast("Callable[[WebSocket], Any]", self._receive_handler) + return self._receive_handler def resolve_send_handler(self) -> Callable[[WebSocket, Any], Coroutine[None, None, None]]: if self._send_handler is Empty: self._send_handler = create_handle_send(self) - return cast("Callable[[WebSocket, Any], Coroutine[None, None, None]]", self._send_handler) + return self._send_handler websocket_listener = WebsocketListenerRouteHandler diff --git a/litestar/response/base.py b/litestar/response/base.py index 7823902615..7f16cbe011 100644 --- a/litestar/response/base.py +++ b/litestar/response/base.py @@ -9,6 +9,7 @@ from litestar.exceptions import ImproperlyConfiguredException from litestar.serialization import default_serializer, encode_json, encode_msgpack, get_serializer from litestar.status_codes import HTTP_200_OK, HTTP_204_NO_CONTENT, HTTP_304_NOT_MODIFIED +from litestar.types.empty import Empty from litestar.utils.deprecation import deprecated, warn_deprecation from litestar.utils.helpers import get_enum_string_value @@ -371,6 +372,9 @@ def render(self, content: Any, media_type: str, enc_hook: Serializer = default_s if isinstance(content, bytes): return content + if content is Empty: + raise 
RuntimeError("The `Empty` sentinel cannot be used as response content") + try: if media_type.startswith("text/") and not content: return b"" diff --git a/litestar/stores/redis.py b/litestar/stores/redis.py index 63af08ef17..bcb08cdb10 100644 --- a/litestar/stores/redis.py +++ b/litestar/stores/redis.py @@ -29,7 +29,7 @@ def __init__(self, redis: Redis, namespace: str | None | EmptyType = Empty) -> N ``None``. This will make :meth:`.delete_all` unavailable. """ self._redis = redis - self.namespace: str | None = "LITESTAR" if namespace is Empty else namespace # type: ignore[assignment] + self.namespace: str | None = "LITESTAR" if namespace is Empty else namespace # script to get and renew a key in one atomic step self._get_and_renew_script = self._redis.register_script( diff --git a/litestar/types/empty.py b/litestar/types/empty.py index ae55870249..dee9bc192e 100644 --- a/litestar/types/empty.py +++ b/litestar/types/empty.py @@ -2,15 +2,15 @@ __all__ = ("Empty", "EmptyType") -from typing import TYPE_CHECKING, Type, final +from enum import Enum +from typing import Final, Literal -if TYPE_CHECKING: - from typing_extensions import TypeAlias +class _EmptyEnum(Enum): + """A sentinel enum used as placeholder.""" -@final -class Empty: - """A sentinel class used as placeholder.""" + EMPTY = 0 -EmptyType: TypeAlias = Type[Empty] +EmptyType = Literal[_EmptyEnum.EMPTY] +Empty: Final = _EmptyEnum.EMPTY diff --git a/litestar/utils/compat.py b/litestar/utils/compat.py index ffd2784569..384db76f8b 100644 --- a/litestar/utils/compat.py +++ b/litestar/utils/compat.py @@ -23,5 +23,5 @@ async def async_next(gen: AsyncGenerator[T, Any], default: D | EmptyType = Empty return await gen.__anext__() except StopAsyncIteration as exc: if default is not Empty: - return default # type: ignore[return-value] + return default raise exc diff --git a/tests/unit/test_contrib/conftest.py b/tests/unit/test_contrib/conftest.py index 62346cb3b8..5d849f75d0 100644 --- 
a/tests/unit/test_contrib/conftest.py +++ b/tests/unit/test_contrib/conftest.py @@ -8,7 +8,6 @@ from litestar.dto import DTOField, Mark from litestar.dto.data_structures import DTOFieldDefinition -from litestar.types.empty import Empty from litestar.typing import FieldDefinition if TYPE_CHECKING: @@ -29,7 +28,7 @@ def expected_field_defs(int_factory: Callable[[], int]) -> list[DTOFieldDefiniti name="a", ), model_name=ANY, - default_factory=Empty, + default_factory=None, dto_field=DTOField(), ), replace( @@ -39,7 +38,7 @@ def expected_field_defs(int_factory: Callable[[], int]) -> list[DTOFieldDefiniti name="b", ), model_name=ANY, - default_factory=Empty, + default_factory=None, dto_field=DTOField(mark=Mark.READ_ONLY), ), metadata=ANY, @@ -54,7 +53,7 @@ def expected_field_defs(int_factory: Callable[[], int]) -> list[DTOFieldDefiniti name="c", ), model_name=ANY, - default_factory=Empty, + default_factory=None, dto_field=DTOField(), ), metadata=ANY, @@ -70,7 +69,7 @@ def expected_field_defs(int_factory: Callable[[], int]) -> list[DTOFieldDefiniti default=1, ), model_name=ANY, - default_factory=Empty, + default_factory=None, dto_field=DTOField(), ), metadata=ANY, From ac20595b167b6fd329a76a663d786dc57b5fcf51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janek=20Nouvertn=C3=A9?= Date: Wed, 22 Nov 2023 15:08:40 +0100 Subject: [PATCH 07/45] test(typing): Improve `litestar.typing` coverage (#2712) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Improve typing coverage --------- Signed-off-by: Janek Nouvertné <25355197+provinzkraut@users.noreply.github.com> --- litestar/typing.py | 21 +++-- tests/unit/test_typing.py | 176 +++++++++++++++++++++++++++++++++++++- 2 files changed, 183 insertions(+), 14 deletions(-) diff --git a/litestar/typing.py b/litestar/typing.py index f446a5a588..1048296592 100644 --- a/litestar/typing.py +++ b/litestar/typing.py @@ -21,6 +21,7 @@ is_generic, is_non_string_iterable, is_non_string_sequence, + is_union, 
) from litestar.utils.typing import ( get_instantiable_origin, @@ -125,7 +126,7 @@ def _traverse_metadata( Args: metadata: A list of metadata values from annotation, namely anything stored under Annotated[x, metadata...] - is_sequence_container: Whether or not the container is a sequence container (list, tuple etc...) + is_sequence_container: Whether the container is a sequence container (list, tuple etc...) extra: Extra key values to parse. Returns: @@ -233,8 +234,6 @@ def __hash__(self) -> int: def _extract_metadata( cls, annotation: Any, name: str | None, default: Any, metadata: tuple[Any, ...], extra: dict[str, Any] | None ) -> tuple[KwargDefinition | None, dict[str, Any]]: - from litestar.dto.base_dto import AbstractDTO - model = BodyKwarg if name == "data" else ParameterKwarg for extractor in _KWARG_META_EXTRACTORS: @@ -246,9 +245,6 @@ def _extract_metadata( extra=extra, ) - if isinstance(annotation, AbstractDTO): - return _create_metadata_from_type(metadata=[annotation], model=model, annotation=annotation, extra=extra) - if any(isinstance(arg, KwargDefinition) for arg in get_args(annotation)): return next(arg for arg in get_args(annotation) if isinstance(arg, KwargDefinition)), extra or {} @@ -392,8 +388,8 @@ def is_non_string_collection(self) -> bool: def bound_types(self) -> tuple[FieldDefinition, ...] 
| None: """A tuple of bound types - if the annotation is a TypeVar with bound types, otherwise None.""" if self.is_type_var and (bound := getattr(self.annotation, "__bound__", None)): - if is_non_string_sequence(bound): - return tuple(FieldDefinition.from_annotation(t) for t in bound) + if is_union(bound): + return tuple(FieldDefinition.from_annotation(t) for t in get_args(bound)) return (FieldDefinition.from_annotation(bound),) return None @@ -505,8 +501,11 @@ def from_annotation(cls, annotation: Any, **kwargs: Any) -> FieldDefinition: if isinstance(kwargs.get("default"), (KwargDefinition, DependencyKwarg)): kwargs["kwarg_definition"] = kwargs.pop("default") elif any(isinstance(v, (KwargDefinition, DependencyKwarg)) for v in metadata): - kwargs["kwarg_definition"] = next( - v for v in metadata if isinstance(v, (KwargDefinition, DependencyKwarg)) + kwargs["kwarg_definition"] = next( # pragma: no cover + # see https://github.com/nedbat/coveragepy/issues/475 + v + for v in metadata + if isinstance(v, (KwargDefinition, DependencyKwarg)) ) metadata = tuple(v for v in metadata if not isinstance(v, (KwargDefinition, DependencyKwarg))) elif (extra := kwargs.get("extra", {})) and "kwarg_definition" in extra: @@ -517,7 +516,7 @@ def from_annotation(cls, annotation: Any, **kwargs: Any) -> FieldDefinition: name=kwargs.get("name", ""), default=kwargs.get("default", Empty), metadata=metadata, - extra=kwargs.get("extra", {}), + extra=kwargs.get("extra"), ) kwargs.setdefault("annotation", unwrapped) diff --git a/tests/unit/test_typing.py b/tests/unit/test_typing.py index a2a3b9ff3d..ffd520958b 100644 --- a/tests/unit/test_typing.py +++ b/tests/unit/test_typing.py @@ -2,12 +2,15 @@ import sys from dataclasses import dataclass -from typing import Any, ForwardRef, Generic, List, Optional, Tuple, Union +from typing import Any, ForwardRef, Generic, List, Optional, Tuple, TypeVar, Union +import annotated_types +import msgspec import pytest -from typing_extensions import Annotated, 
TypedDict +from typing_extensions import Annotated, NotRequired, Required, TypedDict, get_type_hints -from litestar.typing import FieldDefinition +from litestar.params import DependencyKwarg, KwargDefinition, ParameterKwarg +from litestar.typing import FieldDefinition, _unpack_predicate from .test_utils.test_signature import T, _check_field_definition, field_definition_int, test_type_hints @@ -142,6 +145,25 @@ def test_field_definition_from_annotation(annotation: Any, expected: dict[str, A _check_field_definition(FieldDefinition.from_annotation(annotation), expected) +def test_field_definition_kwarg_definition_from_extras() -> None: + kwarg_definition = KwargDefinition() + assert ( + FieldDefinition.from_annotation(int, extra={"kwarg_definition": kwarg_definition}).kwarg_definition + is kwarg_definition + ) + + +@pytest.mark.parametrize("kwarg_definition", [KwargDefinition(), DependencyKwarg()]) +def test_field_definition_kwarg_definition_from_kwargs(kwarg_definition: KwargDefinition | DependencyKwarg) -> None: + assert FieldDefinition.from_annotation(int, kwarg_definition=kwarg_definition).kwarg_definition is kwarg_definition + + +def test_field_definition_with_annotated_kwarg_definition() -> None: + kwarg_definition = KwargDefinition() + fd = FieldDefinition.from_annotation(Annotated[str, kwarg_definition]) + assert fd.kwarg_definition is kwarg_definition + + def test_field_definition_from_union_annotation() -> None: """Test FieldDefinition.from_annotation for Union.""" annotation = Union[int, List[int]] @@ -267,6 +289,127 @@ def test_field_definition_equality() -> None: assert FieldDefinition.from_annotation(Optional[str]) == FieldDefinition.from_annotation(Union[str, None]) +def test_field_definition_hash() -> None: + assert hash(FieldDefinition.from_annotation(int)) == hash(FieldDefinition.from_annotation(int)) + assert hash(FieldDefinition.from_annotation(Annotated[int, False])) == hash( + FieldDefinition.from_annotation(Annotated[int, False]) + ) + assert 
hash(FieldDefinition.from_annotation(Annotated[int, False])) != hash( + FieldDefinition.from_annotation(Annotated[int, True]) + ) + assert hash(FieldDefinition.from_annotation(Union[str, int])) != hash( + FieldDefinition.from_annotation(Union[int, str]) + ) + + +def test_is_required() -> None: + class Foo(TypedDict): + required: Required[str] + not_required: NotRequired[str] + + class Bar(msgspec.Struct): + unset: Union[str, msgspec.UnsetType] = msgspec.UNSET # noqa: UP007 + with_default: str = "" + with_none_default: Union[str, None] = None # noqa: UP007 + + assert FieldDefinition.from_annotation(get_type_hints(Foo, include_extras=True)["required"]).is_required is True + assert ( + FieldDefinition.from_annotation(get_type_hints(Foo, include_extras=True)["not_required"]).is_required is False + ) + assert FieldDefinition.from_annotation(get_type_hints(Bar, include_extras=True)["unset"]).is_required is False + + assert ( + FieldDefinition.from_kwarg( + name="foo", kwarg_definition=ParameterKwarg(required=False), annotation=str + ).is_required + is False + ) + assert ( + FieldDefinition.from_kwarg( + name="foo", kwarg_definition=ParameterKwarg(required=True), annotation=str + ).is_required + is True + ) + assert ( + FieldDefinition.from_kwarg( + name="foo", kwarg_definition=ParameterKwarg(required=None, default=""), annotation=str + ).is_required + is False + ) + assert ( + FieldDefinition.from_kwarg( + name="foo", kwarg_definition=ParameterKwarg(required=None), annotation=str + ).is_required + is True + ) + + assert FieldDefinition.from_annotation(Optional[str]).is_required is False + assert FieldDefinition.from_annotation(str).is_required is True + + assert FieldDefinition.from_annotation(Any).is_required is False + + assert FieldDefinition.from_annotation(get_type_hints(Bar)["with_default"]).is_required is True + assert FieldDefinition.from_annotation(get_type_hints(Bar)["with_none_default"]).is_required is False + + +def test_field_definition_bound_type() -> None: 
+ class Foo: + pass + + class Bar: + pass + + bound = TypeVar("bound", bound=Foo) + multiple_bounds = TypeVar("multiple_bounds", bound=Union[Foo, Bar]) + + assert FieldDefinition.from_annotation(str).bound_types is None + assert FieldDefinition.from_annotation(T).bound_types is None + + bound_types = FieldDefinition.from_annotation(bound).bound_types + + assert bound_types + assert len(bound_types) == 1 + assert isinstance(bound_types[0], FieldDefinition) + assert bound_types[0].raw is Foo + + bound_types_union = FieldDefinition.from_annotation(multiple_bounds).bound_types + assert bound_types_union + assert len(bound_types_union) == 2 + assert bound_types_union[0].raw is Foo + assert bound_types_union[1].raw is Bar + + +def test_nested_generic_types() -> None: + V = TypeVar("V") + + class Foo(Generic[T]): + pass + + class Bar(Generic[T, V]): + pass + + class Baz(Generic[T], Bar[T, str]): + pass + + fd_simple = FieldDefinition.from_annotation(Foo) + assert fd_simple.generic_types + assert len(fd_simple.generic_types) == 1 + assert fd_simple.generic_types[0].raw == T + + fd_union = FieldDefinition.from_annotation(Bar) + assert fd_union.generic_types + assert len(fd_union.generic_types) == 2 + assert fd_union.generic_types[0].raw == T + assert fd_union.generic_types[1].raw == V + + fd_nested = FieldDefinition.from_annotation(Baz) + assert fd_nested.generic_types + assert len(fd_nested.generic_types) == 3 + assert fd_nested.generic_types[0].raw == T + assert fd_nested.generic_types[1].raw == T + assert fd_nested.generic_types[2].raw == str + + @dataclass class GenericDataclass(Generic[T]): foo: T @@ -286,3 +429,30 @@ def test_field_definition_get_type_hints(annotation: Any, expected_type_hints: d FieldDefinition.from_annotation(annotation).get_type_hints(include_extras=True, resolve_generics=True) == expected_type_hints ) + + +@pytest.mark.parametrize( + ("annotation", "expected_type_hints"), + ((GenericDataclass[str], {"foo": T}), (GenericDataclass, {"foo": T}), 
(NormalDataclass, {"foo": int})), +) +def test_field_definition_get_type_hints_dont_resolve_generics( + annotation: Any, expected_type_hints: dict[str, Any] +) -> None: + assert ( + FieldDefinition.from_annotation(annotation).get_type_hints(include_extras=True, resolve_generics=False) + == expected_type_hints + ) + + +@pytest.mark.parametrize( + "predicate, expected_meta", + [ + (annotated_types.LowerCase.__metadata__[0], {"lower_case": True}), # pyright: ignore + (annotated_types.UpperCase.__metadata__[0], {"upper_case": True}), # pyright: ignore + (annotated_types.IsAscii.__metadata__[0], {"pattern": "[[:ascii:]]"}), # pyright: ignore + (annotated_types.IsDigits.__metadata__[0], {"pattern": "[[:digit:]]"}), # pyright: ignore + (object(), {}), + ], +) +def test_unpack_predicate(predicate: Any, expected_meta: dict[str, Any]) -> None: + assert _unpack_predicate(predicate) == expected_meta From 27875eb58eb24a1430c529eca26278028216ab98 Mon Sep 17 00:00:00 2001 From: Cody Fincher <204685+cofin@users.noreply.github.com> Date: Wed, 22 Nov 2023 10:06:52 -0600 Subject: [PATCH 08/45] feat: set app env on startup (#2735) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: set app env on startup * Update litestar/cli/_utils.py Co-authored-by: Janek Nouvertné * fix: cast to set narrow type * fix: revert logic adjustment --------- Co-authored-by: Janek Nouvertné --- litestar/cli/_utils.py | 6 ++++++ tests/unit/test_cli/conftest.py | 5 +++++ tests/unit/test_cli/test_core_commands.py | 2 +- 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/litestar/cli/_utils.py b/litestar/cli/_utils.py index 6e338f661f..d2452e0132 100644 --- a/litestar/cli/_utils.py +++ b/litestar/cli/_utils.py @@ -3,6 +3,7 @@ import contextlib import importlib import inspect +import os import sys from dataclasses import dataclass from datetime import datetime, timedelta, timezone @@ -120,6 +121,8 @@ def from_env(cls, app_path: str | None, app_dir: Path | 
None = None) -> Litestar dotenv.load_dotenv() app_path = app_path or getenv("LITESTAR_APP") + if app_path and getenv("LITESTAR_APP") is None: + os.environ["LITESTAR_APP"] = app_path if app_path: console.print(f"Using Litestar app from env: [bright_blue]{app_path!r}") loaded_app = _load_app_from_path(app_path) @@ -345,11 +348,13 @@ def _autodiscover_app(cwd: Path) -> LoadedApp: ): if isinstance(value, Litestar): app_string = f"{import_path}:{attr}" + os.environ["LITESTAR_APP"] = app_string console.print(f"Using Litestar app from [bright_blue]{app_string}") return LoadedApp(app=value, app_path=app_string, is_factory=False) if hasattr(module, "create_app"): app_string = f"{import_path}:create_app" + os.environ["LITESTAR_APP"] = app_string console.print(f"Using Litestar factory [bright_blue]{app_string}") return LoadedApp(app=module.create_app(), app_path=app_string, is_factory=True) @@ -363,6 +368,7 @@ def _autodiscover_app(cwd: Path) -> LoadedApp: continue if return_annotation in ("Litestar", Litestar): app_string = f"{import_path}:{attr}" + os.environ["LITESTAR_APP"] = app_string console.print(f"Using Litestar factory [bright_blue]{app_string}") return LoadedApp(app=value(), app_path=f"{app_string}", is_factory=True) diff --git a/tests/unit/test_cli/conftest.py b/tests/unit/test_cli/conftest.py index 5db2c055ee..326a890997 100644 --- a/tests/unit/test_cli/conftest.py +++ b/tests/unit/test_cli/conftest.py @@ -28,6 +28,11 @@ from litestar.cli._utils import LitestarGroup +@pytest.fixture(autouse=True) +def reset_litestar_app_env(monkeypatch: MonkeyPatch) -> None: + monkeypatch.delenv("LITESTAR_APP", raising=False) + + @pytest.fixture() def root_command() -> LitestarGroup: import litestar.cli.main diff --git a/tests/unit/test_cli/test_core_commands.py b/tests/unit/test_cli/test_core_commands.py index 47ad066ba7..57be86a5c1 100644 --- a/tests/unit/test_cli/test_core_commands.py +++ b/tests/unit/test_cli/test_core_commands.py @@ -67,7 +67,7 @@ def test_run_command( 
mock_subprocess_run: MagicMock, mock_uvicorn_run: MagicMock, tmp_project_dir: Path, -) -> None: +) -> None: # sourcery skip: low-code-quality args = [] if custom_app_file: args.extend(["--app", f"{custom_app_file.stem}:app"]) From a92324a84af94bb455a7e466aa71520963bc3a53 Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Thu, 23 Nov 2023 16:17:55 +1000 Subject: [PATCH 09/45] fix: connection cache access (#2736) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: connection cache access It is currently possible to make handling of an HTTP request hang indefinitely by first creating two connection objects, and then accessing the connection data on each one. E.g., if we await `request_1.json()`, we'll receive the data, and then if we await `request_2.json()` it will not identify that the data has been cached and will indefinitely await on `receive()` for a message that will never arrive. This PR resolves this issue by moving the check for cached data from the connection's `__init__()` method, to within the data access methods. Closes #2727 * Update litestar/testing/client/base.py Co-authored-by: Janek Nouvertné * Add tests. 
--------- Co-authored-by: Janek Nouvertné --- litestar/connection/base.py | 61 +++--- litestar/connection/request.py | 91 ++++---- litestar/middleware/logging.py | 2 +- .../test_connection_caching.py | 196 ++++++++++++++++++ 4 files changed, 288 insertions(+), 62 deletions(-) create mode 100644 tests/unit/test_connection/test_connection_caching.py diff --git a/litestar/connection/base.py b/litestar/connection/base.py index 2ac9e61c3b..60cc95a666 100644 --- a/litestar/connection/base.py +++ b/litestar/connection/base.py @@ -81,15 +81,10 @@ def __init__(self, scope: Scope, receive: Receive = empty_receive, send: Send = self.scope = scope self.receive = receive self.send = send - self._base_url = cast("URL | EmptyType", get_litestar_scope_state(scope, SCOPE_STATE_BASE_URL_KEY, Empty)) - self._url = cast("URL | EmptyType", get_litestar_scope_state(scope, SCOPE_STATE_URL_KEY, Empty)) - self._parsed_query = cast( - "tuple[tuple[str, str], ...] | EmptyType", - get_litestar_scope_state(scope, SCOPE_STATE_PARSED_QUERY_KEY, Empty), - ) - self._cookies = cast( - "dict[str, str] | EmptyType", get_litestar_scope_state(scope, SCOPE_STATE_COOKIES_KEY, Empty) - ) + self._base_url: URL | EmptyType = Empty + self._url: URL | EmptyType = Empty + self._parsed_query: tuple[tuple[str, str], ...] | EmptyType = Empty + self._cookies: dict[str, str] | EmptyType = Empty @property def app(self) -> Litestar: @@ -126,8 +121,11 @@ def url(self) -> URL: A URL instance constructed from the request's scope. """ if self._url is Empty: - self._url = URL.from_scope(self.scope) - set_litestar_scope_state(self.scope, SCOPE_STATE_URL_KEY, self._url) + if url := get_litestar_scope_state(self.scope, SCOPE_STATE_URL_KEY): + self._url = cast("URL", url) + else: + self._url = URL.from_scope(self.scope) + set_litestar_scope_state(self.scope, SCOPE_STATE_URL_KEY, self._url) return self._url @@ -140,17 +138,20 @@ def base_url(self) -> URL: (host + domain + prefix) of the request. 
""" if self._base_url is Empty: - scope = cast( - "Scope", - { - **self.scope, - "path": "/", - "query_string": b"", - "root_path": self.scope.get("app_root_path") or self.scope.get("root_path", ""), - }, - ) - self._base_url = URL.from_scope(scope) - set_litestar_scope_state(self.scope, SCOPE_STATE_BASE_URL_KEY, self._base_url) + if base_url := get_litestar_scope_state(self.scope, SCOPE_STATE_BASE_URL_KEY): + self._base_url = cast("URL", base_url) + else: + scope = cast( + "Scope", + { + **self.scope, + "path": "/", + "query_string": b"", + "root_path": self.scope.get("app_root_path") or self.scope.get("root_path", ""), + }, + ) + self._base_url = URL.from_scope(scope) + set_litestar_scope_state(self.scope, SCOPE_STATE_BASE_URL_KEY, self._base_url) return self._base_url @property @@ -170,8 +171,11 @@ def query_params(self) -> MultiDict[Any]: A normalized dict of query parameters. Multiple values for the same key are returned as a list. """ if self._parsed_query is Empty: - self._parsed_query = parse_query_string(self.scope.get("query_string", b"")) - set_litestar_scope_state(self.scope, SCOPE_STATE_PARSED_QUERY_KEY, self._parsed_query) + if (parsed_query := get_litestar_scope_state(self.scope, SCOPE_STATE_PARSED_QUERY_KEY, Empty)) is not Empty: + self._parsed_query = cast("tuple[tuple[str, str], ...]", parsed_query) + else: + self._parsed_query = parse_query_string(self.scope.get("query_string", b"")) + set_litestar_scope_state(self.scope, SCOPE_STATE_PARSED_QUERY_KEY, self._parsed_query) return MultiDict(self._parsed_query) @property @@ -191,8 +195,13 @@ def cookies(self) -> dict[str, str]: Returns any cookies stored in the header as a parsed dictionary. 
""" if self._cookies is Empty: - self._cookies = parse_cookie_string(cookie_header) if (cookie_header := self.headers.get("cookie")) else {} - set_litestar_scope_state(self.scope, SCOPE_STATE_COOKIES_KEY, self._cookies) + if (cookies := get_litestar_scope_state(self.scope, SCOPE_STATE_COOKIES_KEY, Empty)) is not Empty: + self._cookies = cast("dict[str, str]", cookies) + else: + self._cookies = ( + parse_cookie_string(cookie_header) if (cookie_header := self.headers.get("cookie")) else {} + ) + set_litestar_scope_state(self.scope, SCOPE_STATE_COOKIES_KEY, self._cookies) return self._cookies diff --git a/litestar/connection/request.py b/litestar/connection/request.py index 2372aa5ae8..bc3e663bc9 100644 --- a/litestar/connection/request.py +++ b/litestar/connection/request.py @@ -34,6 +34,7 @@ if TYPE_CHECKING: from litestar.handlers.http_handlers import HTTPRouteHandler # noqa: F401 from litestar.types.asgi_types import HTTPScope, Method, Receive, Scope, Send + from litestar.types.empty import EmptyType SERVER_PUSH_HEADERS = { @@ -67,12 +68,12 @@ def __init__(self, scope: Scope, receive: Receive = empty_receive, send: Send = """ super().__init__(scope, receive, send) self.is_connected: bool = True - self._body = get_litestar_scope_state(scope, SCOPE_STATE_BODY_KEY, Empty) - self._form = get_litestar_scope_state(scope, SCOPE_STATE_FORM_KEY, Empty) - self._json = get_litestar_scope_state(scope, SCOPE_STATE_JSON_KEY, Empty) - self._msgpack = get_litestar_scope_state(scope, SCOPE_STATE_MSGPACK_KEY, Empty) - self._content_type = get_litestar_scope_state(scope, SCOPE_STATE_CONTENT_TYPE_KEY, Empty) - self._accept = get_litestar_scope_state(scope, SCOPE_STATE_ACCEPT_KEY, Empty) + self._body: bytes | EmptyType = Empty + self._form: dict[str, str | list[str]] | EmptyType = Empty + self._json: Any = Empty + self._msgpack: Any = Empty + self._content_type: tuple[str, dict[str, str]] | EmptyType = Empty + self._accept: Accept | EmptyType = Empty @property def method(self) -> 
Method: @@ -91,9 +92,12 @@ def content_type(self) -> tuple[str, dict[str, str]]: A tuple with the parsed value and a dictionary containing any options send in it. """ if self._content_type is Empty: - self._content_type = parse_content_header(self.headers.get("Content-Type", "")) - set_litestar_scope_state(self.scope, SCOPE_STATE_CONTENT_TYPE_KEY, self._content_type) - return cast("tuple[str, dict[str, str]]", self._content_type) + if (content_type := get_litestar_scope_state(self.scope, SCOPE_STATE_CONTENT_TYPE_KEY, Empty)) is not Empty: + self._content_type = cast("tuple[str, dict[str, str]]", content_type) + else: + self._content_type = parse_content_header(self.headers.get("Content-Type", "")) + set_litestar_scope_state(self.scope, SCOPE_STATE_CONTENT_TYPE_KEY, self._content_type) + return self._content_type @property def accept(self) -> Accept: @@ -103,9 +107,12 @@ def accept(self) -> Accept: An :class:`Accept ` instance, representing the list of acceptable media types. """ if self._accept is Empty: - self._accept = Accept(self.headers.get("Accept", "*/*")) - set_litestar_scope_state(self.scope, SCOPE_STATE_ACCEPT_KEY, self._accept) - return cast("Accept", self._accept) + if accept := get_litestar_scope_state(self.scope, SCOPE_STATE_ACCEPT_KEY): + self._accept = cast("Accept", accept) + else: + self._accept = Accept(self.headers.get("Accept", "*/*")) + set_litestar_scope_state(self.scope, SCOPE_STATE_ACCEPT_KEY, self._accept) + return self._accept async def json(self) -> Any: """Retrieve the json request body from the request. 
@@ -114,9 +121,12 @@ async def json(self) -> Any: An arbitrary value """ if self._json is Empty: - body = await self.body() - self._json = decode_json(body or b"null", type_decoders=self.route_handler.resolve_type_decoders()) - set_litestar_scope_state(self.scope, SCOPE_STATE_JSON_KEY, self._json) + if (json_ := get_litestar_scope_state(self.scope, SCOPE_STATE_JSON_KEY, Empty)) is not Empty: + self._json = json_ + else: + body = await self.body() + self._json = decode_json(body or b"null", type_decoders=self.route_handler.resolve_type_decoders()) + set_litestar_scope_state(self.scope, SCOPE_STATE_JSON_KEY, self._json) return self._json async def msgpack(self) -> Any: @@ -126,9 +136,14 @@ async def msgpack(self) -> Any: An arbitrary value """ if self._msgpack is Empty: - body = await self.body() - self._msgpack = decode_msgpack(body or b"\xc0", type_decoders=self.route_handler.resolve_type_decoders()) - set_litestar_scope_state(self.scope, SCOPE_STATE_MSGPACK_KEY, self._msgpack) + if (msgpack := get_litestar_scope_state(self.scope, SCOPE_STATE_MSGPACK_KEY, Empty)) is not Empty: + self._msgpack = msgpack + else: + body = await self.body() + self._msgpack = decode_msgpack( + body or b"\xc0", type_decoders=self.route_handler.resolve_type_decoders() + ) + set_litestar_scope_state(self.scope, SCOPE_STATE_MSGPACK_KEY, self._msgpack) return self._msgpack async def stream(self) -> AsyncGenerator[bytes, None]: @@ -169,9 +184,12 @@ async def body(self) -> bytes: A byte-string representing the body of the request. 
""" if self._body is Empty: - self._body = b"".join([c async for c in self.stream()]) - set_litestar_scope_state(self.scope, SCOPE_STATE_BODY_KEY, self._body) - return cast("bytes", self._body) + if (body := get_litestar_scope_state(self.scope, SCOPE_STATE_BODY_KEY)) is not None: + self._body = cast("bytes", body) + else: + self._body = b"".join([c async for c in self.stream()]) + set_litestar_scope_state(self.scope, SCOPE_STATE_BODY_KEY, self._body) + return self._body async def form(self) -> FormMultiDict: """Retrieve form data from the request. If the request is either a 'multipart/form-data' or an @@ -182,21 +200,24 @@ async def form(self) -> FormMultiDict: A FormMultiDict instance """ if self._form is Empty: - content_type, options = self.content_type - if content_type == RequestEncodingType.MULTI_PART: - self._form = parse_multipart_form( - body=await self.body(), - boundary=options.get("boundary", "").encode(), - multipart_form_part_limit=self.app.multipart_form_part_limit, - ) - elif content_type == RequestEncodingType.URL_ENCODED: - self._form = parse_url_encoded_form_data( - await self.body(), - ) + if (form := get_litestar_scope_state(self.scope, SCOPE_STATE_FORM_KEY, Empty)) is not Empty: + self._form = cast("dict[str, str | list[str]]", form) else: - self._form = {} - - set_litestar_scope_state(self.scope, SCOPE_STATE_FORM_KEY, self._form) + content_type, options = self.content_type + if content_type == RequestEncodingType.MULTI_PART: + self._form = parse_multipart_form( + body=await self.body(), + boundary=options.get("boundary", "").encode(), + multipart_form_part_limit=self.app.multipart_form_part_limit, + ) + elif content_type == RequestEncodingType.URL_ENCODED: + self._form = parse_url_encoded_form_data( + await self.body(), + ) + else: + self._form = {} + + set_litestar_scope_state(self.scope, SCOPE_STATE_FORM_KEY, self._form) return FormMultiDict(self._form) diff --git a/litestar/middleware/logging.py b/litestar/middleware/logging.py index 
e43f872b96..bb4d9f537c 100644 --- a/litestar/middleware/logging.py +++ b/litestar/middleware/logging.py @@ -126,7 +126,7 @@ async def log_request(self, scope: Scope, receive: Receive) -> None: Returns: None """ - extracted_data = await self.extract_request_data(request=scope["app"].request_class(scope, receive=receive)) + extracted_data = await self.extract_request_data(request=scope["app"].request_class(scope, receive)) self.log_message(values=extracted_data) def log_response(self, scope: Scope) -> None: diff --git a/tests/unit/test_connection/test_connection_caching.py b/tests/unit/test_connection/test_connection_caching.py new file mode 100644 index 0000000000..a5abe8375e --- /dev/null +++ b/tests/unit/test_connection/test_connection_caching.py @@ -0,0 +1,196 @@ +from __future__ import annotations + +from typing import Any, Awaitable, Callable +from unittest.mock import ANY, MagicMock, call + +import pytest + +from litestar import Request, constants +from litestar.testing import RequestFactory +from litestar.types import Empty, HTTPReceiveMessage, Scope +from litestar.utils import get_litestar_scope_state, set_litestar_scope_state + + +async def test_multiple_request_object_data_caching(create_scope: Callable[..., Scope], mock: MagicMock) -> None: + """Test that accessing the request data on multiple request objects only attempts to await `receive()` once. 
+ + https://github.com/litestar-org/litestar/issues/2727 + """ + + async def test_receive() -> HTTPReceiveMessage: + mock() + return {"type": "http.request", "body": b"abc", "more_body": False} + + scope = create_scope() + request_1 = Request[Any, Any, Any](scope, test_receive) + request_2 = Request[Any, Any, Any](scope, test_receive) + assert (await request_1.body()) == b"abc" + assert (await request_2.body()) == b"abc" + assert mock.call_count == 1 + + +@pytest.fixture(name="get_mock") +def get_mock_fixture() -> MagicMock: + return MagicMock() + + +@pytest.fixture(name="set_mock") +def set_mock_fixture() -> MagicMock: + return MagicMock() + + +@pytest.fixture(name="create_connection") +def create_connection_fixture( + get_mock: MagicMock, set_mock: MagicMock, monkeypatch: pytest.MonkeyPatch +) -> Callable[..., Request]: + def create_connection(body_type: str = "json") -> Request: + def wrapped_get_litestar_scope_state(scope_: Scope, key: str, default: Any = None) -> Any: + get_mock(key) + return get_litestar_scope_state(scope_, key, default) + + def wrapped_set_litestar_scope_state(scope_: Scope, key: str, value: Any) -> None: + set_mock(key, value) + set_litestar_scope_state(scope_, key, value) + + monkeypatch.setattr("litestar.connection.base.get_litestar_scope_state", wrapped_get_litestar_scope_state) + monkeypatch.setattr("litestar.connection.base.set_litestar_scope_state", wrapped_set_litestar_scope_state) + monkeypatch.setattr("litestar.connection.request.get_litestar_scope_state", wrapped_get_litestar_scope_state) + monkeypatch.setattr("litestar.connection.request.set_litestar_scope_state", wrapped_set_litestar_scope_state) + + connection = RequestFactory().get() + + async def fake_receive() -> HTTPReceiveMessage: + if body_type == "msgpack": + return {"type": "http.request", "body": b"\x81\xa3abc\xa3def", "more_body": False} + return {"type": "http.request", "body": b'{"abc":"def"}', "more_body": False} + + monkeypatch.setattr(connection, "receive", 
fake_receive) + + return connection + + return create_connection + + +@pytest.fixture(name="get_value") +def get_value_fixture() -> Callable[[Request, str, bool], Awaitable[Any]]: + """Fixture to get the value of a connection cached property. + + Returns: + A function to get the value of a connection cached property. + """ + + async def get_value_(connection: Request, prop_name: str, is_coro: bool) -> Any: + """Helper to get the value of the tested cached property.""" + value = getattr(connection, prop_name) + if is_coro: + return await value() + return value + + return get_value_ + + +caching_tests = [ + (constants.SCOPE_STATE_URL_KEY, "url", "_url", False), + (constants.SCOPE_STATE_BASE_URL_KEY, "base_url", "_base_url", False), + ( + constants.SCOPE_STATE_PARSED_QUERY_KEY, + "query_params", + "_parsed_query", + False, + ), + (constants.SCOPE_STATE_COOKIES_KEY, "cookies", "_cookies", False), + (constants.SCOPE_STATE_BODY_KEY, "body", "_body", True), + (constants.SCOPE_STATE_FORM_KEY, "form", "_form", True), + (constants.SCOPE_STATE_MSGPACK_KEY, "msgpack", "_msgpack", True), + (constants.SCOPE_STATE_JSON_KEY, "json", "_json", True), + (constants.SCOPE_STATE_ACCEPT_KEY, "accept", "_accept", False), + (constants.SCOPE_STATE_CONTENT_TYPE_KEY, "content_type", "_content_type", False), +] + + +@pytest.mark.parametrize(("state_key", "prop_name", "cache_attr_name", "is_coro"), caching_tests) +async def test_connection_cached_properties_no_scope_or_connection_caching( + state_key: str, + prop_name: str, + cache_attr_name: str, + is_coro: bool, + create_connection: Callable[..., Request], + get_mock: MagicMock, + set_mock: MagicMock, + get_value: Callable[[Request, str, bool], Awaitable[Any]], +) -> None: + def check_get_mock() -> None: + """Helper to check the get mock. + + For certain properties, we call `get_litestar_scope_state()` twice, once for the property and once for the + body. For these cases, we check that the mock was called twice. 
+ """ + if state_key in ("json", "msgpack"): + get_mock.assert_has_calls([call(state_key), call("body")]) + elif state_key == "form": + get_mock.assert_has_calls([call(state_key), call("content_type")]) + else: + get_mock.assert_called_once_with(state_key) + + def check_set_mock() -> None: + """Helper to check the set mock. + + For certain properties, we call `set_litestar_scope_state()` twice, once for the property and once for the + body. For these cases, we check that the mock was called twice. + """ + if state_key in ("json", "msgpack"): + set_mock.assert_has_calls([call("body", ANY), call(state_key, ANY)]) + elif state_key == "form": + set_mock.assert_has_calls([call("content_type", ANY), call("form", ANY)]) + else: + set_mock.assert_called_once_with(state_key, ANY) + + connection = create_connection("msgpack" if state_key == "msgpack" else "json") + + assert get_litestar_scope_state(connection.scope, state_key, Empty) is Empty + setattr(connection, cache_attr_name, Empty) + + await get_value(connection, prop_name, is_coro) + check_get_mock() + check_set_mock() + + +@pytest.mark.parametrize(("state_key", "prop_name", "cache_attr_name", "is_coro"), caching_tests) +async def test_connection_cached_properties_cached_in_scope( + state_key: str, + prop_name: str, + cache_attr_name: str, + is_coro: bool, + create_connection: Callable[..., Request], + get_mock: MagicMock, + set_mock: MagicMock, + get_value: Callable[[Request, str, bool], Awaitable[Any]], +) -> None: + # set the value in the scope and ensure empty on connection + connection = create_connection() + + set_litestar_scope_state(connection.scope, state_key, {"a": "b"}) + setattr(connection, cache_attr_name, Empty) + + await get_value(connection, prop_name, is_coro) + get_mock.assert_called_once_with(state_key) + set_mock.assert_not_called() + + +@pytest.mark.parametrize(("state_key", "prop_name", "cache_attr_name", "is_coro"), caching_tests) +async def 
test_connection_cached_properties_cached_on_connection( + state_key: str, + prop_name: str, + cache_attr_name: str, + is_coro: bool, + create_connection: Callable[..., Request], + get_mock: MagicMock, + set_mock: MagicMock, + get_value: Callable[[Request, str, bool], Awaitable[Any]], +) -> None: + connection = create_connection() + # set the value on the connection + setattr(connection, cache_attr_name, {"a": "b"}) + await get_value(connection, prop_name, is_coro) + get_mock.assert_not_called() + set_mock.assert_not_called() From 992ccdd9a049618f6627460c38f09fe4abc7ab6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janek=20Nouvertn=C3=A9?= Date: Thu, 23 Nov 2023 17:31:45 +0100 Subject: [PATCH 10/45] test(plugins): Test advanced-alchemy re-exports (#2742) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Test advanced-alchemy re-exports Signed-off-by: Janek Nouvertné <25355197+provinzkraut@users.noreply.github.com> --- tests/unit/test_plugins/__init__.py | 0 .../test_base.py} | 0 tests/unit/test_plugins/test_sqlalchemy.py | 35 +++++++++++++++++++ 3 files changed, 35 insertions(+) create mode 100644 tests/unit/test_plugins/__init__.py rename tests/unit/{test_plugins.py => test_plugins/test_base.py} (100%) create mode 100644 tests/unit/test_plugins/test_sqlalchemy.py diff --git a/tests/unit/test_plugins/__init__.py b/tests/unit/test_plugins/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/test_plugins.py b/tests/unit/test_plugins/test_base.py similarity index 100% rename from tests/unit/test_plugins.py rename to tests/unit/test_plugins/test_base.py diff --git a/tests/unit/test_plugins/test_sqlalchemy.py b/tests/unit/test_plugins/test_sqlalchemy.py new file mode 100644 index 0000000000..437ba3acd6 --- /dev/null +++ b/tests/unit/test_plugins/test_sqlalchemy.py @@ -0,0 +1,35 @@ +import advanced_alchemy +from advanced_alchemy import base as sa_base +from advanced_alchemy import types as sa_types +from 
advanced_alchemy.extensions import litestar as sa_litestar + +from litestar.plugins import sqlalchemy + + +def test_re_exports() -> None: + assert sqlalchemy.filters is advanced_alchemy.filters + assert sqlalchemy.types is sa_types + + assert sqlalchemy.AuditColumns is sa_base.AuditColumns + assert sqlalchemy.BigIntAuditBase is sa_base.BigIntAuditBase + assert sqlalchemy.BigIntBase is sa_base.BigIntBase + assert sqlalchemy.BigIntPrimaryKey is sa_base.BigIntPrimaryKey + assert sqlalchemy.CommonTableAttributes is sa_base.CommonTableAttributes + assert sqlalchemy.UUIDAuditBase is sa_base.UUIDAuditBase + assert sqlalchemy.UUIDBase is sa_base.UUIDBase + assert sqlalchemy.UUIDPrimaryKey is sa_base.UUIDPrimaryKey + assert sqlalchemy.orm_registry is sa_base.orm_registry + + assert sqlalchemy.AlembicAsyncConfig is sa_litestar.AlembicAsyncConfig + assert sqlalchemy.AlembicCommands is sa_litestar.AlembicCommands + assert sqlalchemy.AlembicSyncConfig is sa_litestar.AlembicSyncConfig + assert sqlalchemy.AsyncSessionConfig is sa_litestar.AsyncSessionConfig + assert sqlalchemy.EngineConfig is sa_litestar.EngineConfig + assert sqlalchemy.SQLAlchemyAsyncConfig is sa_litestar.SQLAlchemyAsyncConfig + assert sqlalchemy.SQLAlchemyDTO is sa_litestar.SQLAlchemyDTO + assert sqlalchemy.SQLAlchemyDTOConfig is sa_litestar.SQLAlchemyDTOConfig + assert sqlalchemy.SQLAlchemyInitPlugin is sa_litestar.SQLAlchemyInitPlugin + assert sqlalchemy.SQLAlchemyPlugin is sa_litestar.SQLAlchemyPlugin + assert sqlalchemy.SQLAlchemySerializationPlugin is sa_litestar.SQLAlchemySerializationPlugin + assert sqlalchemy.SQLAlchemySyncConfig is sa_litestar.SQLAlchemySyncConfig + assert sqlalchemy.SyncSessionConfig is sa_litestar.SyncSessionConfig From 7f898eaccfdab7103dd4fe5a84450ef08213f02a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janek=20Nouvertn=C3=A9?= Date: Thu, 23 Nov 2023 17:35:58 +0100 Subject: [PATCH 11/45] fix: _get_normalized_schema_key regexes (#2740) MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit Fix _get_normalized_schema_key regexes Signed-off-by: Janek Nouvertné <25355197+provinzkraut@users.noreply.github.com> --- litestar/_openapi/schema_generation/utils.py | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/litestar/_openapi/schema_generation/utils.py b/litestar/_openapi/schema_generation/utils.py index d3be118ee7..cccdb58fc9 100644 --- a/litestar/_openapi/schema_generation/utils.py +++ b/litestar/_openapi/schema_generation/utils.py @@ -85,7 +85,16 @@ def _should_create_literal_schema(field_definition: FieldDefinition) -> bool: TYPE_NAME_NORMALIZATION_SUB_REGEX = re.compile(r"[^a-zA-Z0-9]+") -TYPE_NAME_NORMALIZATION_TRIM_REGEX = re.compile(r"^_+(class_+)?|_+$") +TYPE_NAME_EXTRACTION_REGEX = re.compile(r"<\w+ '(.+)'") + + +def _replace_non_alphanumeric_match(match: re.Match) -> str: + # we don't want to introduce leading or trailing underscores, so we only replace a + # char with an underscore if we're not at the beginning or at the end of the + # matchable string + if match.start() == 0 or match.end() == match.endpos: + return "" + return "_" def _get_normalized_schema_key(type_annotation_str: str) -> str: @@ -99,8 +108,11 @@ def _get_normalized_schema_key(type_annotation_str: str) -> str: Returns: A normalized version of the input string """ - # Use a regular expression to replace non-alphanumeric characters with underscores - return TYPE_NAME_NORMALIZATION_TRIM_REGEX.sub("", TYPE_NAME_NORMALIZATION_SUB_REGEX.sub("_", type_annotation_str)) + # extract names from repr() style annotations like + normalized_name = TYPE_NAME_EXTRACTION_REGEX.sub(r"\g<1>", type_annotation_str) + # replace all non-alphanumeric characters with underscores, ensuring no leading or + # trailing underscores + return TYPE_NAME_NORMALIZATION_SUB_REGEX.sub(_replace_non_alphanumeric_match, normalized_name) def get_formatted_examples(field_definition: FieldDefinition, examples: Sequence[Example]) -> 
Mapping[str, Example]: From b5a134153ff010cbcd1e73723f9e0e80148ca30c Mon Sep 17 00:00:00 2001 From: Cesar Giulietti <38872121+cesarmg1980@users.noreply.github.com> Date: Sat, 25 Nov 2023 00:29:06 -0300 Subject: [PATCH 12/45] docs: Removed extra sentence in 'Rate-Limit Middleware' Section (#2749) Before: ``` ...and a value for the request quota (integer). For the other configuration options. ``` After: ``` ...and a value for the request quota (integer). ``` --- docs/usage/middleware/builtin-middleware.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/usage/middleware/builtin-middleware.rst b/docs/usage/middleware/builtin-middleware.rst index 3a903ccf39..656b02c4c9 100644 --- a/docs/usage/middleware/builtin-middleware.rst +++ b/docs/usage/middleware/builtin-middleware.rst @@ -186,7 +186,7 @@ To use the rate limit middleware, use the :class:`RateLimitConfig Date: Sat, 25 Nov 2023 08:59:34 +0530 Subject: [PATCH 13/45] docs: add cesarmg1980 as a contributor for doc (#2750) * docs: update README.md [skip ci] * docs: update .all-contributorsrc [skip ci] --------- Co-authored-by: allcontributors[bot] <46447321+allcontributors[bot]@users.noreply.github.com> Co-authored-by: guacs <126393040+guacs@users.noreply.github.com> --- .all-contributorsrc | 9 +++++++++ README.md | 1 + 2 files changed, 10 insertions(+) diff --git a/.all-contributorsrc b/.all-contributorsrc index 1db7f8e35e..56d96d3eb7 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -1451,6 +1451,15 @@ "contributions": [ "doc" ] + }, + { + "login": "cesarmg1980", + "name": "Cesar Giulietti", + "avatar_url": "https://avatars.githubusercontent.com/u/38872121?v=4", + "profile": "https://github.com/cesarmg1980", + "contributions": [ + "doc" + ] } ], "contributorsPerLine": 7, diff --git a/README.md b/README.md index 5daed531cd..c71870d2e3 100644 --- a/README.md +++ b/README.md @@ -506,6 +506,7 @@ see [the contribution guide](CONTRIBUTING.rst). L. Bao
L. Bao

📖 Jarred Glaser
Jarred Glaser

📖 Hunter Boyd
Hunter Boyd

📖 + Cesar Giulietti
Cesar Giulietti

📖 From 8b621ac13820840f2c48123b603340ec09ff1380 Mon Sep 17 00:00:00 2001 From: guacs <126393040+guacs@users.noreply.github.com> Date: Sat, 25 Nov 2023 11:40:52 +0530 Subject: [PATCH 14/45] fix: correctly handle multiple file upload (#2753) * test: add failing tests for multiple file upload * fix: correctly handle multiple file uploads * refactor: use a single conditional * refactor: move the multiple file upload test to test_kwargs --- litestar/_kwargs/extractors.py | 7 ++++++- .../unit/test_datastructures/test_upload_file.py | 2 +- tests/unit/test_kwargs/test_multipart_data.py | 16 ++++++++++++++++ 3 files changed, 23 insertions(+), 2 deletions(-) diff --git a/litestar/_kwargs/extractors.py b/litestar/_kwargs/extractors.py index b289511d83..66d7f31901 100644 --- a/litestar/_kwargs/extractors.py +++ b/litestar/_kwargs/extractors.py @@ -351,7 +351,12 @@ async def extract_multipart( ) if field_definition.is_non_string_sequence: - return list(form_values.values()) + values = list(form_values.values()) + if field_definition.inner_types[0].annotation is UploadFile and isinstance(values[0], list): + return values[0] + + return values + if field_definition.is_simple_type and field_definition.annotation is UploadFile and form_values: return next(v for v in form_values.values() if isinstance(v, UploadFile)) diff --git a/tests/unit/test_datastructures/test_upload_file.py b/tests/unit/test_datastructures/test_upload_file.py index c762127a6f..ae657344a3 100644 --- a/tests/unit/test_datastructures/test_upload_file.py +++ b/tests/unit/test_datastructures/test_upload_file.py @@ -44,7 +44,7 @@ def test_cleanup_is_being_performed(tmpdir: Path) -> None: upload_file: Optional[UploadFile] = None - @post("/form") + @post("/form", sync_to_thread=False) def handler(data: UploadFile = Body(media_type=RequestEncodingType.MULTI_PART)) -> None: nonlocal upload_file upload_file = data diff --git a/tests/unit/test_kwargs/test_multipart_data.py b/tests/unit/test_kwargs/test_multipart_data.py 
index 87c048b465..2cfbc5962e 100644 --- a/tests/unit/test_kwargs/test_multipart_data.py +++ b/tests/unit/test_kwargs/test_multipart_data.py @@ -394,6 +394,22 @@ async def hello_world(data: UploadFile = Body(media_type=RequestEncodingType.MUL assert response.status_code == HTTP_201_CREATED +@pytest.mark.parametrize("file_count", (1, 2)) +def test_upload_multiple_files(file_count: int) -> None: + @post("/") + async def handler(data: List[UploadFile] = Body(media_type=RequestEncodingType.MULTI_PART)) -> None: + assert len(data) == file_count + + for file in data: + assert await file.read() == b"1" + + with create_test_client([handler]) as client: + files_to_upload = [("file", b"1") for _ in range(file_count)] + response = client.post("/", files=files_to_upload) + + assert response.status_code == HTTP_201_CREATED + + def test_optional_formdata() -> None: @post("/", signature_types=[UploadFile]) async def hello_world(data: Optional[UploadFile] = Body(media_type=RequestEncodingType.MULTI_PART)) -> None: From 4ac93447f1c6859b9404e975028ef539f0776f79 Mon Sep 17 00:00:00 2001 From: "allcontributors[bot]" <46447321+allcontributors[bot]@users.noreply.github.com> Date: Sun, 26 Nov 2023 11:01:09 +1000 Subject: [PATCH 15/45] docs: add marcuslimdw as a contributor for doc (#2758) * docs: update README.md [skip ci] * docs: update .all-contributorsrc [skip ci] --------- Co-authored-by: allcontributors[bot] <46447321+allcontributors[bot]@users.noreply.github.com> --- .all-contributorsrc | 9 +++++++++ README.md | 1 + 2 files changed, 10 insertions(+) diff --git a/.all-contributorsrc b/.all-contributorsrc index 56d96d3eb7..25a50a50c8 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -1460,6 +1460,15 @@ "contributions": [ "doc" ] + }, + { + "login": "marcuslimdw", + "name": "Marcus Lim", + "avatar_url": "https://avatars.githubusercontent.com/u/42759889?v=4", + "profile": "https://gitlab.com/marcuslimdw/", + "contributions": [ + "doc" + ] } ], "contributorsPerLine": 7, diff 
--git a/README.md b/README.md index c71870d2e3..50e772ada1 100644 --- a/README.md +++ b/README.md @@ -507,6 +507,7 @@ see [the contribution guide](CONTRIBUTING.rst). Jarred Glaser
Jarred Glaser

📖 Hunter Boyd
Hunter Boyd

📖 Cesar Giulietti
Cesar Giulietti

📖 + Marcus Lim
Marcus Lim

📖 From b760f1f4426f1fbd86b9a2f80dd7b30ad4653474 Mon Sep 17 00:00:00 2001 From: Marcus Lim <42759889+marcuslimdw@users.noreply.github.com> Date: Sun, 26 Nov 2023 09:13:40 +0800 Subject: [PATCH 16/45] docs: Fix type checking command in CONTRIBUTING.rst (#2757) Fix type checking command in CONTRIBUTING.rst --- CONTRIBUTING.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 5b17951d4f..676bd4689e 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -107,7 +107,7 @@ enforce type safety. You can run them with: - ``make mypy`` - ``make pyright`` -- ``make typecheck`` to run both +- ``make type-check`` to run both - ``make lint`` to run pre-commit hooks and type checkers. Our type checkers are run on Python 3.8 in CI, so you should make sure to run them on the same version locally as well. From 7414f7fd7d4782223502895e6a23b77ed635cd2d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janek=20Nouvertn=C3=A9?= Date: Sun, 26 Nov 2023 08:39:40 +0100 Subject: [PATCH 17/45] feat: Improve ASGI extension interface (#2756) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Improve asgi extension handling Signed-off-by: Janek Nouvertné <25355197+provinzkraut@users.noreply.github.com> --------- Signed-off-by: Janek Nouvertné <25355197+provinzkraut@users.noreply.github.com> --- litestar/connection/base.py | 4 +- litestar/connection/request.py | 55 +++++++++++++++++----- litestar/enums.py | 12 +++++ tests/unit/test_connection/test_request.py | 29 +++++++++++- 4 files changed, 85 insertions(+), 15 deletions(-) diff --git a/litestar/connection/base.py b/litestar/connection/base.py index 60cc95a666..e8e8ff83b7 100644 --- a/litestar/connection/base.py +++ b/litestar/connection/base.py @@ -25,6 +25,7 @@ from litestar.types.asgi_types import Message, Receive, Scope, Send from litestar.types.protocols import Logger + __all__ = ("ASGIConnection", "empty_receive", "empty_send") UserT = 
TypeVar("UserT") @@ -61,7 +62,7 @@ async def empty_send(_: Message) -> NoReturn: # pragma: no cover class ASGIConnection(Generic[HandlerT, UserT, AuthT, StateT]): """The base ASGI connection container.""" - __slots__ = ("scope", "receive", "send", "_base_url", "_url", "_parsed_query", "_cookies") + __slots__ = ("scope", "receive", "send", "_base_url", "_url", "_parsed_query", "_cookies", "_server_extensions") scope: Scope """The ASGI scope attached to the connection.""" @@ -85,6 +86,7 @@ def __init__(self, scope: Scope, receive: Receive = empty_receive, send: Send = self._url: URL | EmptyType = Empty self._parsed_query: tuple[tuple[str, str], ...] | EmptyType = Empty self._cookies: dict[str, str] | EmptyType = Empty + self._server_extensions = scope.get("extensions") or {} # extensions may be None @property def app(self) -> Litestar: diff --git a/litestar/connection/request.py b/litestar/connection/request.py index bc3e663bc9..a8d6bb5a12 100644 --- a/litestar/connection/request.py +++ b/litestar/connection/request.py @@ -1,5 +1,6 @@ from __future__ import annotations +import warnings from typing import TYPE_CHECKING, Any, AsyncGenerator, Generic, cast from litestar._multipart import parse_content_header, parse_multipart_form @@ -22,8 +23,12 @@ ) from litestar.datastructures.headers import Accept from litestar.datastructures.multi_dicts import FormMultiDict -from litestar.enums import RequestEncodingType -from litestar.exceptions import InternalServerException +from litestar.enums import ASGIExtension, RequestEncodingType +from litestar.exceptions import ( + InternalServerException, + LitestarException, + LitestarWarning, +) from litestar.serialization import decode_json, decode_msgpack from litestar.types import Empty from litestar.utils.scope import get_litestar_scope_state, set_litestar_scope_state @@ -49,7 +54,16 @@ class Request(Generic[UserT, AuthT, StateT], ASGIConnection["HTTPRouteHandler", UserT, AuthT, StateT]): """The Litestar Request class.""" - 
__slots__ = ("_json", "_form", "_body", "_msgpack", "_content_type", "_accept", "is_connected") + __slots__ = ( + "_json", + "_form", + "_body", + "_msgpack", + "_content_type", + "_accept", + "is_connected", + "supports_push_promise", + ) scope: HTTPScope """The ASGI scope attached to the connection.""" @@ -74,6 +88,7 @@ def __init__(self, scope: Scope, receive: Receive = empty_receive, send: Send = self._msgpack: Any = Empty self._content_type: tuple[str, dict[str, str]] | EmptyType = Empty self._accept: Accept | EmptyType = Empty + self.supports_push_promise = ASGIExtension.SERVER_PUSH in self._server_extensions @property def method(self) -> Method: @@ -221,22 +236,38 @@ async def form(self) -> FormMultiDict: return FormMultiDict(self._form) - async def send_push_promise(self, path: str) -> None: + async def send_push_promise(self, path: str, raise_if_unavailable: bool = False) -> None: """Send a push promise. This method requires the `http.response.push` extension to be sent from the ASGI server. Args: path: Path to send the promise to. + raise_if_unavailable: Raise an exception if server push is not supported by + the server Returns: None """ - extensions: dict[str, dict[Any, Any]] = self.scope.get("extensions") or {} - if "http.response.push" in extensions: - raw_headers: list[tuple[bytes, bytes]] = [] - for name in SERVER_PUSH_HEADERS: - raw_headers.extend( - (name.encode("latin-1"), value.encode("latin-1")) for value in self.headers.getall(name, []) - ) - await self.send({"type": "http.response.push", "path": path, "headers": raw_headers}) + if not self.supports_push_promise: + if raise_if_unavailable: + raise LitestarException("Attempted to send a push promise but the server does not support it") + + warnings.warn( + "Attempted to send a push promise but the server does not support it. In a future version, this will " + "raise an exception. To enable this behaviour in the current version, set raise_if_unavailable=True. 
" + "To prevent this behaviour, make sure that the server you are using supports the 'http.response.push' " + "ASGI extension, or check this dynamically via " + ":attr:`~litestar.connection.Request.supports_push_promise`", + stacklevel=2, + category=LitestarWarning, + ) + + return + + raw_headers = [ + (header_name.encode("latin-1"), value.encode("latin-1")) + for header_name in (self.headers.keys() & SERVER_PUSH_HEADERS) + for value in self.headers.getall(header_name, []) + ] + await self.send({"type": "http.response.push", "path": path, "headers": raw_headers}) diff --git a/litestar/enums.py b/litestar/enums.py index 14ea2e37d2..a660228c9d 100644 --- a/litestar/enums.py +++ b/litestar/enums.py @@ -76,3 +76,15 @@ class CompressionEncoding(str, Enum): GZIP = "gzip" BROTLI = "br" + + +class ASGIExtension(str, Enum): + """ASGI extension keys: https://asgi.readthedocs.io/en/latest/extensions.html""" + + WS_DENIAL = "websocket.http.response" + SERVER_PUSH = "http.response.push" + ZERO_COPY_SEND_EXTENSION = "http.response.zerocopysend" + PATH_SEND = "http.response.pathsend" + TLS = "tls" + EARLY_HINTS = "http.response.early_hint" + HTTP_TRAILERS = "http.response.trailers" diff --git a/tests/unit/test_connection/test_request.py b/tests/unit/test_connection/test_request.py index 6242e223ef..0bff39624f 100644 --- a/tests/unit/test_connection/test_request.py +++ b/tests/unit/test_connection/test_request.py @@ -13,7 +13,12 @@ from litestar import MediaType, Request, asgi, get from litestar.connection.base import empty_send from litestar.datastructures import Address, Cookie -from litestar.exceptions import InternalServerException, SerializationException +from litestar.exceptions import ( + InternalServerException, + LitestarException, + LitestarWarning, + SerializationException, +) from litestar.middleware import MiddlewareProtocol from litestar.response.base import ASGIResponse from litestar.serialization import encode_json, encode_msgpack @@ -447,7 +452,9 @@ def 
test_request_send_push_promise_without_push_extension() -> None: async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request[Any, Any, Any](scope) - await request.send_push_promise("/style.css") + + with pytest.warns(LitestarWarning, match="Attempted to send a push promise"): + await request.send_push_promise("/style.css") response = ASGIResponse(body=encode_json({"json": "OK"})) await response(scope, receive, send) @@ -457,6 +464,24 @@ async def app(scope: Scope, receive: Receive, send: Send) -> None: assert response.json() == {"json": "OK"} +def test_request_send_push_promise_without_push_extension_raises() -> None: + """If server does not support the `http.response.push` extension, + + .send_push_promise() does nothing. + """ + + async def app(scope: Scope, receive: Receive, send: Send) -> None: + request = Request[Any, Any, Any](scope) + + with pytest.raises(LitestarException, match="Attempted to send a push promise"): + await request.send_push_promise("/style.css", raise_if_unavailable=True) + + response = ASGIResponse(body=encode_json({"json": "OK"})) + await response(scope, receive, send) + + TestClient(app).get("/") + + def test_request_send_push_promise_without_setting_send() -> None: """If Request is instantiated without the send channel, then. 
From dfe872c6379142d6c5a266a6e5a19fa48dacddb8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janek=20Nouvertn=C3=A9?= Date: Mon, 27 Nov 2023 05:30:53 +0100 Subject: [PATCH 18/45] fix(dto): nested field renaming (#2721) (#2764) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix nested field renaming Signed-off-by: Janek Nouvertné <25355197+provinzkraut@users.noreply.github.com> * formatting Signed-off-by: Janek Nouvertné <25355197+provinzkraut@users.noreply.github.com> * Test collection renaming Signed-off-by: Janek Nouvertné <25355197+provinzkraut@users.noreply.github.com> --------- Signed-off-by: Janek Nouvertné <25355197+provinzkraut@users.noreply.github.com> --- litestar/dto/_backend.py | 82 ++++++++++++++++--- .../test_backends/test_base_dto.py | 9 +- .../test_dto/test_factory/test_integration.py | 39 ++++++++- 3 files changed, 118 insertions(+), 12 deletions(-) diff --git a/litestar/dto/_backend.py b/litestar/dto/_backend.py index c3de87e9e1..e1a2622f79 100644 --- a/litestar/dto/_backend.py +++ b/litestar/dto/_backend.py @@ -4,7 +4,18 @@ from __future__ import annotations from dataclasses import replace -from typing import TYPE_CHECKING, AbstractSet, Any, Callable, ClassVar, Collection, Final, Mapping, Union, cast +from typing import ( + TYPE_CHECKING, + AbstractSet, + Any, + ClassVar, + Collection, + Final, + Mapping, + Protocol, + Union, + cast, +) from msgspec import UNSET, Struct, UnsetType, convert, defstruct, field from typing_extensions import get_origin @@ -37,6 +48,19 @@ __all__ = ("DTOBackend",) +class CompositeTypeHandler(Protocol): + def __call__( + self, + field_definition: FieldDefinition, + exclude: AbstractSet[str], + include: AbstractSet[str], + rename_fields: dict[str, str], + unique_name: str, + nested_depth: int, + ) -> CompositeType: + ... + + class DTOBackend: __slots__ = ( "annotation", @@ -70,7 +94,7 @@ def __init__( dto_factory: The DTO factory class calling this backend. 
field_definition: Parsed type. handler_id: The name of the handler that this backend is for. - is_data_field: Whether or not the field is a subclass of DTOData. + is_data_field: Whether the field is a subclass of DTOData. model_type: Model type. wrapper_attribute_name: If the data that DTO should operate upon is wrapped in a generic datastructure, this is the name of the attribute that the data is stored in. """ @@ -82,7 +106,10 @@ def __init__( self.wrapper_attribute_name: Final[str | None] = wrapper_attribute_name self.parsed_field_definitions = self.parse_model( - model_type=model_type, exclude=self.dto_factory.config.exclude, include=self.dto_factory.config.include + model_type=model_type, + exclude=self.dto_factory.config.exclude, + include=self.dto_factory.config.include, + rename_fields=self.dto_factory.config.rename_fields, ) self.transfer_model_type = self.create_transfer_model_type( model_name=model_type.__name__, field_definitions=self.parsed_field_definitions @@ -99,7 +126,12 @@ def __init__( self.annotation = _maybe_wrap_in_generic_annotation(annotation, self.transfer_model_type) def parse_model( - self, model_type: Any, exclude: AbstractSet[str], include: AbstractSet[str], nested_depth: int = 0 + self, + model_type: Any, + exclude: AbstractSet[str], + include: AbstractSet[str], + rename_fields: dict[str, str], + nested_depth: int = 0, ) -> tuple[TransferDTOFieldDefinition, ...]: """Reduce :attr:`model_type` to a tuple :class:`TransferDTOFieldDefinition` instances. 
@@ -123,6 +155,7 @@ def parse_model( field_definition=field_definition, exclude=exclude, include=include, + rename_fields=rename_fields, field_name=field_definition.name, unique_name=field_definition.model_name, nested_depth=nested_depth, @@ -130,7 +163,7 @@ def parse_model( except RecursionError: continue - if rename := self.dto_factory.config.rename_fields.get(field_definition.name): + if rename := rename_fields.get(field_definition.name): serialization_name = rename elif self.dto_factory.config.rename_strategy: serialization_name = _rename_field( @@ -342,9 +375,7 @@ def encode_data(self, data: Any) -> LitestarEncodableType: ), ) - def _get_handler_for_field_definition( - self, field_definition: FieldDefinition - ) -> Callable[[FieldDefinition, AbstractSet[str], AbstractSet[str], str, int], CompositeType] | None: + def _get_handler_for_field_definition(self, field_definition: FieldDefinition) -> CompositeTypeHandler | None: if field_definition.is_union: return self._create_union_type @@ -365,15 +396,24 @@ def _create_transfer_type( field_definition: FieldDefinition, exclude: AbstractSet[str], include: AbstractSet[str], + rename_fields: dict[str, str], field_name: str, unique_name: str, nested_depth: int, ) -> CompositeType | SimpleType: exclude = _filter_nested_field(exclude, field_name) include = _filter_nested_field(include, field_name) + rename_fields = _filter_nested_field_mapping(rename_fields, field_name) if composite_type_handler := self._get_handler_for_field_definition(field_definition): - return composite_type_handler(field_definition, exclude, include, unique_name, nested_depth) + return composite_type_handler( + field_definition=field_definition, + exclude=exclude, + include=include, + rename_fields=rename_fields, + unique_name=unique_name, + nested_depth=nested_depth, + ) transfer_model: NestedFieldInfo | None = None @@ -382,7 +422,11 @@ def _create_transfer_type( raise RecursionError nested_field_definitions = self.parse_model( - 
model_type=field_definition.annotation, exclude=exclude, include=include, nested_depth=nested_depth + 1 + model_type=field_definition.annotation, + exclude=exclude, + include=include, + rename_fields=rename_fields, + nested_depth=nested_depth + 1, ) transfer_model = NestedFieldInfo( @@ -397,6 +441,7 @@ def _create_collection_type( field_definition: FieldDefinition, exclude: AbstractSet[str], include: AbstractSet[str], + rename_fields: dict[str, str], unique_name: str, nested_depth: int, ) -> CollectionType: @@ -408,6 +453,7 @@ def _create_collection_type( field_name="0", unique_name=f"{unique_name}_0", nested_depth=nested_depth, + rename_fields=rename_fields, ) return CollectionType( field_definition=field_definition, inner_type=inner_type, has_nested=inner_type.has_nested @@ -418,6 +464,7 @@ def _create_mapping_type( field_definition: FieldDefinition, exclude: AbstractSet[str], include: AbstractSet[str], + rename_fields: dict[str, str], unique_name: str, nested_depth: int, ) -> MappingType: @@ -429,6 +476,7 @@ def _create_mapping_type( field_name="0", unique_name=f"{unique_name}_0", nested_depth=nested_depth, + rename_fields=rename_fields, ) value_type = self._create_transfer_type( field_definition=inner_types[1] if inner_types else FieldDefinition.from_annotation(Any), @@ -437,6 +485,7 @@ def _create_mapping_type( field_name="1", unique_name=f"{unique_name}_1", nested_depth=nested_depth, + rename_fields=rename_fields, ) return MappingType( field_definition=field_definition, @@ -450,6 +499,7 @@ def _create_tuple_type( field_definition: FieldDefinition, exclude: AbstractSet[str], include: AbstractSet[str], + rename_fields: dict[str, str], unique_name: str, nested_depth: int, ) -> TupleType: @@ -461,6 +511,7 @@ def _create_tuple_type( field_name=str(i), unique_name=f"{unique_name}_{i}", nested_depth=nested_depth, + rename_fields=rename_fields, ) for i, inner_type in enumerate(field_definition.inner_types) ) @@ -475,6 +526,7 @@ def _create_union_type( 
field_definition: FieldDefinition, exclude: AbstractSet[str], include: AbstractSet[str], + rename_fields: dict[str, str], unique_name: str, nested_depth: int, ) -> UnionType: @@ -486,6 +538,7 @@ def _create_union_type( field_name=str(i), unique_name=f"{unique_name}_{i}", nested_depth=nested_depth, + rename_fields=rename_fields, ) for i, inner_type in enumerate(field_definition.inner_types) ) @@ -521,6 +574,15 @@ def _filter_nested_field(field_name_set: AbstractSet[str], field_name: str) -> A return {split[1] for s in field_name_set if (split := s.split(".", 1))[0] == field_name and len(split) > 1} +def _filter_nested_field_mapping(field_name_mapping: Mapping[str, str], field_name: str) -> dict[str, str]: + """Filter a nested field name.""" + return { + split[1]: v + for s, v in field_name_mapping.items() + if (split := s.split(".", 1))[0] == field_name and len(split) > 1 + } + + def _transfer_data( destination_type: type[Any], source_data: Any | Collection[Any], diff --git a/tests/unit/test_dto/test_factory/test_backends/test_base_dto.py b/tests/unit/test_dto/test_factory/test_backends/test_base_dto.py index a0aef33553..83b27d6bd4 100644 --- a/tests/unit/test_dto/test_factory/test_backends/test_base_dto.py +++ b/tests/unit/test_dto/test_factory/test_backends/test_base_dto.py @@ -146,9 +146,16 @@ def create_transfer_type( field_name: str = "name", unique_name: str = "some_module.SomeModel.name", nested_depth: int = 0, + rename_fields: dict[str, str] | None = None, ) -> TransferType: return backend._create_transfer_type( - field_definition, exclude or set(), include or set(), field_name, unique_name, nested_depth + field_definition=field_definition, + exclude=exclude or set(), + include=include or set(), + field_name=field_name, + unique_name=unique_name, + nested_depth=nested_depth, + rename_fields=rename_fields or {}, ) diff --git a/tests/unit/test_dto/test_factory/test_integration.py b/tests/unit/test_dto/test_factory/test_integration.py index 
46f65f4da8..f1d7c29e98 100644 --- a/tests/unit/test_dto/test_factory/test_integration.py +++ b/tests/unit/test_dto/test_factory/test_integration.py @@ -2,7 +2,8 @@ from __future__ import annotations from dataclasses import dataclass, field -from typing import TYPE_CHECKING, Dict, Generic, List, Optional, Sequence, TypeVar, cast +from types import ModuleType +from typing import TYPE_CHECKING, Callable, Dict, Generic, List, Optional, Sequence, TypeVar, cast from unittest.mock import MagicMock from uuid import UUID @@ -91,6 +92,42 @@ def handler(data: Foo) -> Foo: assert response.json() == {"baz": "hello"} +def test_renamed_field_nested(use_experimental_dto_backend: bool, create_module: Callable[[str], ModuleType]) -> None: + # https://github.com/litestar-org/litestar/issues/2721 + module = create_module( + """ +from dataclasses import dataclass +from typing import List + +@dataclass +class Bar: + id: str + +@dataclass +class Foo: + id: str + bar: Bar + bars: List[Bar] +""" + ) + + Foo = module.Foo + + config = DTOConfig( + rename_fields={"id": "foo_id", "bar.id": "bar_id", "bars.0.id": "bars_id"}, + experimental_codegen_backend=use_experimental_dto_backend, + ) + dto = DataclassDTO[Annotated[Foo, config]] # type: ignore[valid-type] + + @post(dto=dto, signature_types=[Foo]) + def handler(data: Foo) -> Foo: # type: ignore[valid-type] + return data + + with create_test_client(route_handlers=[handler]) as client: + response = client.post("/", json={"foo_id": "1", "bar": {"bar_id": "2"}, "bars": [{"bars_id": "3"}]}) + assert response.json() == {"foo_id": "1", "bar": {"bar_id": "2"}, "bars": [{"bars_id": "3"}]} + + @dataclass class Spam: main_id: str = "spam-id" From 389c7d732a4f8df22abc4672012351f2175d34ee Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Mon, 27 Nov 2023 22:34:24 +1000 Subject: [PATCH 19/45] feat: ConnectionState object to replace litestar scope state. (#2751) * feat: stronger typing for scope state utils. 
We use connection scope state to store a lot of different things for caching, auth, logging etc. This PR improves the type safety of writing to, and retrieving from the scope state via the scope state utils. The PR removes the "pop" behavior from `get_litestar_scope_state()` in favor of adding a new utility `pop_litestar_scope_state()` so that both the get and pop utils share the same semantics as `dict.get()` and `dict.pop()`. * feat: stronger typing for scope state utils. We use connection scope state to store a lot of different things for caching, auth, logging etc. This PR improves the type safety of writing to, and retrieving from the scope state via the scope state utils. The PR removes the "pop" behavior from `get_litestar_scope_state()` in favor of adding a new utility `pop_litestar_scope_state()` so that both the get and pop utils share the same semantics as `dict.get()` and `dict.pop()`. * feat: stronger typing for scope state utils. (Sourcery refactored) (#2747) 'Refactored by Sourcery' Co-authored-by: Sourcery AI <> * feat: ConnectionState object to replace litestar scope state. (Sourcery refactored) (#2752) 'Refactored by Sourcery' Co-authored-by: Sourcery AI <> * Deprecate `SCOPE_STATE_*` names. * Move `ConnectionState` into `utils.scope.state` namespace. * Rename `ConnectionState` to `ScopeState`. * Rename `not_empty()` to `value_or_default()` * Deprecate scope state utils. * Removes unused utility. * Removes unused utility. * Fix type errors. * feat: ConnectionState object to replace litestar scope state. (Sourcery refactored) (#2759) 'Refactored by Sourcery' Co-authored-by: Sourcery AI <> * Tests for deprecations * Make `ScopeState` a `Struct`. * Ignore code-cov for module `__getattr__` raising `AttributeError` * Revert "Make `ScopeState` a `Struct`." This reverts commit 8795d7d31e1d77d355f64539265149252a95bfc5. * Fix rebase issues. * Replace `or` expression with `type: ignore[assignment]` * Remove unused utilities. 
* Modify `Headers.from_scope()` to accept `Scope` type. * Move connection state namespace key to private. * Extend tests for utility compat. --------- Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com> --- litestar/_kwargs/extractors.py | 5 +- litestar/connection/base.py | 53 +++--- litestar/connection/request.py | 52 +++--- litestar/constants.py | 53 +++--- litestar/datastructures/headers.py | 13 +- litestar/middleware/compression.py | 17 +- litestar/middleware/csrf.py | 10 +- litestar/middleware/logging.py | 21 ++- litestar/middleware/response_cache.py | 16 +- litestar/response/template.py | 6 +- litestar/routes/http.py | 6 +- litestar/template/base.py | 6 +- litestar/testing/client/base.py | 5 +- litestar/testing/request_factory.py | 5 +- litestar/utils/__init__.py | 33 +++- litestar/utils/empty.py | 26 +++ litestar/utils/scope.py | 80 --------- litestar/utils/scope/__init__.py | 62 +++++++ litestar/utils/scope/state.py | 156 ++++++++++++++++++ tests/unit/test_connection/test_base.py | 26 +-- .../test_connection_caching.py | 74 +++++---- .../unit/test_datastructures/test_headers.py | 10 +- tests/unit/test_deprecations.py | 23 +++ tests/unit/test_template/test_csrf_token.py | 7 +- .../unit/test_testing/test_request_factory.py | 2 +- tests/unit/test_utils/test_scope.py | 76 ++++++--- 26 files changed, 545 insertions(+), 298 deletions(-) create mode 100644 litestar/utils/empty.py delete mode 100644 litestar/utils/scope.py create mode 100644 litestar/utils/scope/__init__.py create mode 100644 litestar/utils/scope/state.py diff --git a/litestar/_kwargs/extractors.py b/litestar/_kwargs/extractors.py index 66d7f31901..f9be0c937a 100644 --- a/litestar/_kwargs/extractors.py +++ b/litestar/_kwargs/extractors.py @@ -9,14 +9,13 @@ parse_query_string, parse_url_encoded_form_data, ) -from litestar.constants import SCOPE_STATE_PARSED_QUERY_KEY from litestar.datastructures import Headers from litestar.datastructures.upload_file import 
UploadFile from litestar.enums import ParamType, RequestEncodingType from litestar.exceptions import ValidationException from litestar.params import BodyKwarg from litestar.types import Empty -from litestar.utils.scope import set_litestar_scope_state +from litestar.utils.scope.state import ScopeState if TYPE_CHECKING: from litestar._kwargs import KwargsModel @@ -152,7 +151,7 @@ def parse_connection_query_params(connection: ASGIConnection, kwargs_model: Kwar if connection._parsed_query is not Empty else parse_query_string(connection.scope.get("query_string", b"")) ) - set_litestar_scope_state(connection.scope, SCOPE_STATE_PARSED_QUERY_KEY, parsed_query) + ScopeState.from_scope(connection.scope).parsed_query = parsed_query return create_query_default_dict( parsed_query=parsed_query, sequence_query_parameter_names=kwargs_model.sequence_query_parameter_names, diff --git a/litestar/connection/base.py b/litestar/connection/base.py index e8e8ff83b7..ada24f1588 100644 --- a/litestar/connection/base.py +++ b/litestar/connection/base.py @@ -3,19 +3,13 @@ from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast from litestar._parsers import parse_cookie_string, parse_query_string -from litestar.constants import ( - SCOPE_STATE_BASE_URL_KEY, - SCOPE_STATE_COOKIES_KEY, - SCOPE_STATE_PARSED_QUERY_KEY, - SCOPE_STATE_URL_KEY, -) from litestar.datastructures.headers import Headers from litestar.datastructures.multi_dicts import MultiDict from litestar.datastructures.state import State from litestar.datastructures.url import URL, Address, make_absolute_url from litestar.exceptions import ImproperlyConfiguredException from litestar.types.empty import Empty -from litestar.utils.scope import get_litestar_scope_state, set_litestar_scope_state +from litestar.utils.scope.state import ScopeState if TYPE_CHECKING: from typing import NoReturn @@ -25,7 +19,6 @@ from litestar.types.asgi_types import Message, Receive, Scope, Send from litestar.types.protocols import Logger - __all__ = 
("ASGIConnection", "empty_receive", "empty_send") UserT = TypeVar("UserT") @@ -62,7 +55,17 @@ async def empty_send(_: Message) -> NoReturn: # pragma: no cover class ASGIConnection(Generic[HandlerT, UserT, AuthT, StateT]): """The base ASGI connection container.""" - __slots__ = ("scope", "receive", "send", "_base_url", "_url", "_parsed_query", "_cookies", "_server_extensions") + __slots__ = ( + "scope", + "receive", + "send", + "_base_url", + "_url", + "_parsed_query", + "_cookies", + "_server_extensions", + "_connection_state", + ) scope: Scope """The ASGI scope attached to the connection.""" @@ -82,6 +85,7 @@ def __init__(self, scope: Scope, receive: Receive = empty_receive, send: Send = self.scope = scope self.receive = receive self.send = send + self._connection_state = ScopeState.from_scope(scope) self._base_url: URL | EmptyType = Empty self._url: URL | EmptyType = Empty self._parsed_query: tuple[tuple[str, str], ...] | EmptyType = Empty @@ -123,11 +127,10 @@ def url(self) -> URL: A URL instance constructed from the request's scope. """ if self._url is Empty: - if url := get_litestar_scope_state(self.scope, SCOPE_STATE_URL_KEY): - self._url = cast("URL", url) + if (url := self._connection_state.url) is not Empty: + self._url = url else: - self._url = URL.from_scope(self.scope) - set_litestar_scope_state(self.scope, SCOPE_STATE_URL_KEY, self._url) + self._connection_state.url = self._url = URL.from_scope(self.scope) return self._url @@ -140,8 +143,8 @@ def base_url(self) -> URL: (host + domain + prefix) of the request. 
""" if self._base_url is Empty: - if base_url := get_litestar_scope_state(self.scope, SCOPE_STATE_BASE_URL_KEY): - self._base_url = cast("URL", base_url) + if (base_url := self._connection_state.base_url) is not Empty: + self._base_url = base_url else: scope = cast( "Scope", @@ -152,8 +155,7 @@ def base_url(self) -> URL: "root_path": self.scope.get("app_root_path") or self.scope.get("root_path", ""), }, ) - self._base_url = URL.from_scope(scope) - set_litestar_scope_state(self.scope, SCOPE_STATE_BASE_URL_KEY, self._base_url) + self._connection_state.base_url = self._base_url = URL.from_scope(scope) return self._base_url @property @@ -173,11 +175,12 @@ def query_params(self) -> MultiDict[Any]: A normalized dict of query parameters. Multiple values for the same key are returned as a list. """ if self._parsed_query is Empty: - if (parsed_query := get_litestar_scope_state(self.scope, SCOPE_STATE_PARSED_QUERY_KEY, Empty)) is not Empty: - self._parsed_query = cast("tuple[tuple[str, str], ...]", parsed_query) + if (parsed_query := self._connection_state.parsed_query) is not Empty: + self._parsed_query = parsed_query else: - self._parsed_query = parse_query_string(self.scope.get("query_string", b"")) - set_litestar_scope_state(self.scope, SCOPE_STATE_PARSED_QUERY_KEY, self._parsed_query) + self._connection_state.parsed_query = self._parsed_query = parse_query_string( + self.scope.get("query_string", b"") + ) return MultiDict(self._parsed_query) @property @@ -197,14 +200,12 @@ def cookies(self) -> dict[str, str]: Returns any cookies stored in the header as a parsed dictionary. 
""" if self._cookies is Empty: - if (cookies := get_litestar_scope_state(self.scope, SCOPE_STATE_COOKIES_KEY, Empty)) is not Empty: - self._cookies = cast("dict[str, str]", cookies) + if (cookies := self._connection_state.cookies) is not Empty: + self._cookies = cookies else: - self._cookies = ( + self._connection_state.cookies = self._cookies = ( parse_cookie_string(cookie_header) if (cookie_header := self.headers.get("cookie")) else {} ) - set_litestar_scope_state(self.scope, SCOPE_STATE_COOKIES_KEY, self._cookies) - return self._cookies @property diff --git a/litestar/connection/request.py b/litestar/connection/request.py index a8d6bb5a12..8006d8a43d 100644 --- a/litestar/connection/request.py +++ b/litestar/connection/request.py @@ -1,7 +1,7 @@ from __future__ import annotations import warnings -from typing import TYPE_CHECKING, Any, AsyncGenerator, Generic, cast +from typing import TYPE_CHECKING, Any, AsyncGenerator, Generic from litestar._multipart import parse_content_header, parse_multipart_form from litestar._parsers import parse_url_encoded_form_data @@ -13,14 +13,6 @@ empty_receive, empty_send, ) -from litestar.constants import ( - SCOPE_STATE_ACCEPT_KEY, - SCOPE_STATE_BODY_KEY, - SCOPE_STATE_CONTENT_TYPE_KEY, - SCOPE_STATE_FORM_KEY, - SCOPE_STATE_JSON_KEY, - SCOPE_STATE_MSGPACK_KEY, -) from litestar.datastructures.headers import Accept from litestar.datastructures.multi_dicts import FormMultiDict from litestar.enums import ASGIExtension, RequestEncodingType @@ -31,7 +23,6 @@ ) from litestar.serialization import decode_json, decode_msgpack from litestar.types import Empty -from litestar.utils.scope import get_litestar_scope_state, set_litestar_scope_state __all__ = ("Request",) @@ -107,11 +98,12 @@ def content_type(self) -> tuple[str, dict[str, str]]: A tuple with the parsed value and a dictionary containing any options send in it. 
""" if self._content_type is Empty: - if (content_type := get_litestar_scope_state(self.scope, SCOPE_STATE_CONTENT_TYPE_KEY, Empty)) is not Empty: - self._content_type = cast("tuple[str, dict[str, str]]", content_type) + if (content_type := self._connection_state.content_type) is not Empty: + self._content_type = content_type else: - self._content_type = parse_content_header(self.headers.get("Content-Type", "")) - set_litestar_scope_state(self.scope, SCOPE_STATE_CONTENT_TYPE_KEY, self._content_type) + self._content_type = self._connection_state.content_type = parse_content_header( + self.headers.get("Content-Type", "") + ) return self._content_type @property @@ -122,11 +114,10 @@ def accept(self) -> Accept: An :class:`Accept ` instance, representing the list of acceptable media types. """ if self._accept is Empty: - if accept := get_litestar_scope_state(self.scope, SCOPE_STATE_ACCEPT_KEY): - self._accept = cast("Accept", accept) + if (accept := self._connection_state.accept) is not Empty: + self._accept = accept else: - self._accept = Accept(self.headers.get("Accept", "*/*")) - set_litestar_scope_state(self.scope, SCOPE_STATE_ACCEPT_KEY, self._accept) + self._accept = self._connection_state.accept = Accept(self.headers.get("Accept", "*/*")) return self._accept async def json(self) -> Any: @@ -136,12 +127,13 @@ async def json(self) -> Any: An arbitrary value """ if self._json is Empty: - if (json_ := get_litestar_scope_state(self.scope, SCOPE_STATE_JSON_KEY, Empty)) is not Empty: + if (json_ := self._connection_state.json) is not Empty: self._json = json_ else: body = await self.body() - self._json = decode_json(body or b"null", type_decoders=self.route_handler.resolve_type_decoders()) - set_litestar_scope_state(self.scope, SCOPE_STATE_JSON_KEY, self._json) + self._json = self._connection_state.json = decode_json( + body or b"null", type_decoders=self.route_handler.resolve_type_decoders() + ) return self._json async def msgpack(self) -> Any: @@ -151,14 +143,13 @@ 
async def msgpack(self) -> Any: An arbitrary value """ if self._msgpack is Empty: - if (msgpack := get_litestar_scope_state(self.scope, SCOPE_STATE_MSGPACK_KEY, Empty)) is not Empty: + if (msgpack := self._connection_state.msgpack) is not Empty: self._msgpack = msgpack else: body = await self.body() - self._msgpack = decode_msgpack( + self._msgpack = self._connection_state.msgpack = decode_msgpack( body or b"\xc0", type_decoders=self.route_handler.resolve_type_decoders() ) - set_litestar_scope_state(self.scope, SCOPE_STATE_MSGPACK_KEY, self._msgpack) return self._msgpack async def stream(self) -> AsyncGenerator[bytes, None]: @@ -199,11 +190,10 @@ async def body(self) -> bytes: A byte-string representing the body of the request. """ if self._body is Empty: - if (body := get_litestar_scope_state(self.scope, SCOPE_STATE_BODY_KEY)) is not None: - self._body = cast("bytes", body) + if (body := self._connection_state.body) is not Empty: + self._body = body else: - self._body = b"".join([c async for c in self.stream()]) - set_litestar_scope_state(self.scope, SCOPE_STATE_BODY_KEY, self._body) + self._body = self._connection_state.body = b"".join([c async for c in self.stream()]) return self._body async def form(self) -> FormMultiDict: @@ -215,8 +205,8 @@ async def form(self) -> FormMultiDict: A FormMultiDict instance """ if self._form is Empty: - if (form := get_litestar_scope_state(self.scope, SCOPE_STATE_FORM_KEY, Empty)) is not Empty: - self._form = cast("dict[str, str | list[str]]", form) + if (form := self._connection_state.form) is not Empty: + self._form = form else: content_type, options = self.content_type if content_type == RequestEncodingType.MULTI_PART: @@ -232,7 +222,7 @@ async def form(self) -> FormMultiDict: else: self._form = {} - set_litestar_scope_state(self.scope, SCOPE_STATE_FORM_KEY, self._form) + self._connection_state.form = self._form return FormMultiDict(self._form) diff --git a/litestar/constants.py b/litestar/constants.py index 
59a6b0d69e..930296c4be 100644 --- a/litestar/constants.py +++ b/litestar/constants.py @@ -1,11 +1,12 @@ from dataclasses import MISSING from inspect import Signature -from typing import Final +from typing import Any, Final from msgspec import UnsetType from litestar.enums import MediaType from litestar.types import Empty +from litestar.utils.deprecation import warn_deprecation DEFAULT_ALLOWED_CORS_HEADERS: Final = {"Accept", "Accept-Language", "Content-Language", "Content-Type"} DEFAULT_CHUNK_SIZE: Final = 1024 * 128 # 128KB @@ -22,21 +23,35 @@ WEBSOCKET_CLOSE: Final = "websocket.close" WEBSOCKET_DISCONNECT: Final = "websocket.disconnect" -# keys for internal stuff that we store in the "__litestar__" namespace of the scope state -SCOPE_STATE_NAMESPACE: Final = "__litestar__" - -SCOPE_STATE_ACCEPT_KEY: Final = "accept" -SCOPE_STATE_BASE_URL_KEY: Final = "base_url" -SCOPE_STATE_BODY_KEY: Final = "body" -SCOPE_STATE_CONTENT_TYPE_KEY: Final = "content_type" -SCOPE_STATE_COOKIES_KEY: Final = "cookies" -SCOPE_STATE_CSRF_TOKEN_KEY: Final = "csrf_token" # possible hardcoded password -SCOPE_STATE_DEPENDENCY_CACHE: Final = "dependency_cache" -SCOPE_STATE_DO_CACHE: Final = "do_cache" -SCOPE_STATE_FORM_KEY: Final = "form" -SCOPE_STATE_IS_CACHED: Final = "is_cached" -SCOPE_STATE_JSON_KEY: Final = "json" -SCOPE_STATE_MSGPACK_KEY: Final = "msgpack" -SCOPE_STATE_PARSED_QUERY_KEY: Final = "parsed_query" -SCOPE_STATE_RESPONSE_COMPRESSED: Final = "response_compressed" -SCOPE_STATE_URL_KEY: Final = "url" + +# deprecated constants +_SCOPE_STATE_CSRF_TOKEN_KEY = "csrf_token" # noqa: S105 # possible hardcoded password +_SCOPE_STATE_DEPENDENCY_CACHE: Final = "dependency_cache" +_SCOPE_STATE_NAMESPACE: Final = "__litestar__" +_SCOPE_STATE_RESPONSE_COMPRESSED: Final = "response_compressed" +_SCOPE_STATE_DO_CACHE: Final = "do_cache" +_SCOPE_STATE_IS_CACHED: Final = "is_cached" + +_deprecated_names = { + "SCOPE_STATE_CSRF_TOKEN_KEY": _SCOPE_STATE_CSRF_TOKEN_KEY, + 
"SCOPE_STATE_DEPENDENCY_CACHE": _SCOPE_STATE_DEPENDENCY_CACHE, + "SCOPE_STATE_NAMESPACE": _SCOPE_STATE_NAMESPACE, + "SCOPE_STATE_RESPONSE_COMPRESSED": _SCOPE_STATE_RESPONSE_COMPRESSED, + "SCOPE_STATE_DO_CACHE": _SCOPE_STATE_DO_CACHE, + "SCOPE_STATE_IS_CACHED": _SCOPE_STATE_IS_CACHED, +} + + +def __getattr__(name: str) -> Any: + if name in _deprecated_names: + warn_deprecation( + deprecated_name=f"litestar.constants.{name}", + version="2.4", + kind="import", + removal_in="3.0", + info=f"'{name}' from 'litestar.constants' is deprecated and will be removed in 3.0. " + "Direct access to Litestar scope state is not recommended.", + ) + + return globals()["_deprecated_names"][name] + raise AttributeError(f"module {__name__} has no attribute {name}") # pragma: no cover diff --git a/litestar/datastructures/headers.py b/litestar/datastructures/headers.py index 89081aef77..f3e9bd7c21 100644 --- a/litestar/datastructures/headers.py +++ b/litestar/datastructures/headers.py @@ -27,8 +27,10 @@ from litestar.datastructures.multi_dicts import MultiMixin from litestar.dto.base_dto import AbstractDTO from litestar.exceptions import ImproperlyConfiguredException, ValidationException +from litestar.types.empty import Empty from litestar.typing import FieldDefinition from litestar.utils.dataclass import simple_asdict +from litestar.utils.scope.state import ScopeState if TYPE_CHECKING: from litestar.types.asgi_types import ( @@ -36,6 +38,7 @@ Message, RawHeaders, RawHeadersList, + Scope, ) __all__ = ("Accept", "CacheControlHeader", "ETag", "Header", "Headers", "MutableScopeHeaders") @@ -71,7 +74,7 @@ def __init__(self, headers: Optional[Union[Mapping[str, str], "RawHeaders", Mult self._header_list: Optional[RawHeadersList] = None @classmethod - def from_scope(cls, scope: "HeaderScope") -> "Headers": + def from_scope(cls, scope: "Scope") -> "Headers": """Create headers from a send-message. 
Args: @@ -83,10 +86,10 @@ def from_scope(cls, scope: "HeaderScope") -> "Headers": Raises: ValueError: If the message does not have a ``headers`` key """ - if (headers := scope.get("_headers")) is None: - headers = scope["_headers"] = cls(scope["headers"]) # type: ignore[typeddict-unknown-key] - - return cast("Headers", headers) + connection_state = ScopeState.from_scope(scope) + if (headers := connection_state.headers) is Empty: + headers = connection_state.headers = cls(scope["headers"]) + return headers def to_header_list(self) -> "RawHeadersList": """Raw header value. diff --git a/litestar/middleware/compression.py b/litestar/middleware/compression.py index 760b8a174f..c5cd860dda 100644 --- a/litestar/middleware/compression.py +++ b/litestar/middleware/compression.py @@ -4,15 +4,12 @@ from io import BytesIO from typing import TYPE_CHECKING, Any, Literal -from litestar.constants import SCOPE_STATE_IS_CACHED, SCOPE_STATE_RESPONSE_COMPRESSED from litestar.datastructures import Headers, MutableScopeHeaders from litestar.enums import CompressionEncoding, ScopeType from litestar.exceptions import MissingDependencyException from litestar.middleware.base import AbstractMiddleware -from litestar.utils import get_litestar_scope_state, set_litestar_scope_state - -__all__ = ("CompressionFacade", "CompressionMiddleware") - +from litestar.utils.empty import value_or_default +from litestar.utils.scope.state import ScopeState if TYPE_CHECKING: from litestar.config.compression import CompressionConfig @@ -30,6 +27,8 @@ except ImportError: Compressor = Any +__all__ = ("CompressionFacade", "CompressionMiddleware") + class CompressionFacade: """A unified facade offering a uniform interface for different compression libraries.""" @@ -178,6 +177,8 @@ def create_compression_send_wrapper( _own_encoding = compression_encoding.encode("latin-1") + connection_state = ScopeState.from_scope(scope) + async def send_wrapper(message: Message) -> None: """Handle and compresses the HTTP Message 
with brotli. @@ -191,7 +192,7 @@ async def send_wrapper(message: Message) -> None: initial_message = message return - if initial_message and get_litestar_scope_state(scope, SCOPE_STATE_IS_CACHED): + if initial_message is not None and value_or_default(connection_state.is_cached, False): await send(initial_message) await send(message) return @@ -207,7 +208,7 @@ async def send_wrapper(message: Message) -> None: headers["Content-Encoding"] = compression_encoding headers.extend_header_value("vary", "Accept-Encoding") del headers["Content-Length"] - set_litestar_scope_state(scope, SCOPE_STATE_RESPONSE_COMPRESSED, True) + connection_state.response_compressed = True facade.write(body) @@ -227,7 +228,7 @@ async def send_wrapper(message: Message) -> None: headers["Content-Length"] = str(len(body)) headers.extend_header_value("vary", "Accept-Encoding") message["body"] = body - set_litestar_scope_state(scope, SCOPE_STATE_RESPONSE_COMPRESSED, True) + connection_state.response_compressed = True await send(initial_message) await send(message) diff --git a/litestar/middleware/csrf.py b/litestar/middleware/csrf.py index 14592236cf..58c020960f 100644 --- a/litestar/middleware/csrf.py +++ b/litestar/middleware/csrf.py @@ -6,7 +6,6 @@ from secrets import compare_digest from typing import TYPE_CHECKING, Any -from litestar.constants import SCOPE_STATE_CSRF_TOKEN_KEY from litestar.datastructures import MutableScopeHeaders from litestar.datastructures.cookie import Cookie from litestar.enums import RequestEncodingType, ScopeType @@ -16,7 +15,7 @@ should_bypass_middleware, ) from litestar.middleware.base import MiddlewareProtocol -from litestar.utils import set_litestar_scope_state +from litestar.utils.scope.state import ScopeState if TYPE_CHECKING: from litestar.config.csrf import CSRFConfig @@ -111,17 +110,18 @@ async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: form = await request.form() existing_csrf_token = form.get("_csrf_token", None) + connection_state = 
ScopeState.from_scope(scope) if request.method in self.config.safe_methods or should_bypass_middleware( scope=scope, scopes=self.scopes, exclude_opt_key=self.config.exclude_from_csrf_key, exclude_path_pattern=self.exclude, ): - token = csrf_cookie or generate_csrf_token(secret=self.config.secret) - set_litestar_scope_state(scope=scope, key=SCOPE_STATE_CSRF_TOKEN_KEY, value=token) + token = connection_state.csrf_token = csrf_cookie or generate_csrf_token(secret=self.config.secret) await self.app(scope, receive, self.create_send_wrapper(send=send, csrf_cookie=csrf_cookie, token=token)) elif self._csrf_tokens_match(existing_csrf_token, csrf_cookie): - set_litestar_scope_state(scope=scope, key=SCOPE_STATE_CSRF_TOKEN_KEY, value=existing_csrf_token) + # we haven't properly narrowed the type of `existing_csrf_token` to be non-None, but we know it is + connection_state.csrf_token = existing_csrf_token # type: ignore[assignment] await self.app(scope, receive, send) else: raise PermissionDeniedException("CSRF token verification failed") diff --git a/litestar/middleware/logging.py b/litestar/middleware/logging.py index bb4d9f537c..300fbbb05d 100644 --- a/litestar/middleware/logging.py +++ b/litestar/middleware/logging.py @@ -7,7 +7,6 @@ from litestar.constants import ( HTTP_RESPONSE_BODY, HTTP_RESPONSE_START, - SCOPE_STATE_RESPONSE_COMPRESSED, ) from litestar.data_extractors import ( ConnectionDataExtractor, @@ -19,11 +18,9 @@ from litestar.exceptions import ImproperlyConfiguredException from litestar.middleware.base import AbstractMiddleware, DefineMiddleware from litestar.serialization import encode_json -from litestar.utils import ( - get_litestar_scope_state, - get_serializer_from_scope, - set_litestar_scope_state, -) +from litestar.utils.empty import value_or_default +from litestar.utils.scope import get_serializer_from_scope +from litestar.utils.scope.state import ScopeState __all__ = ("LoggingMiddleware", "LoggingMiddlewareConfig") @@ -194,13 +191,14 @@ def 
extract_response_data(self, scope: Scope) -> dict[str, Any]: """ data: dict[str, Any] = {"message": self.config.response_log_message} serializer = get_serializer_from_scope(scope) + connection_state = ScopeState.from_scope(scope) extracted_data = self.response_extractor( messages=( - get_litestar_scope_state(scope, HTTP_RESPONSE_START, pop=True), - get_litestar_scope_state(scope, HTTP_RESPONSE_BODY, pop=True), + connection_state.log_context.pop(HTTP_RESPONSE_START), + connection_state.log_context.pop(HTTP_RESPONSE_BODY), ), ) - response_body_compressed = get_litestar_scope_state(scope, SCOPE_STATE_RESPONSE_COMPRESSED, default=False) + response_body_compressed = value_or_default(connection_state.response_compressed, False) for key in self.config.response_log_fields: value: Any value = extracted_data.get(key) @@ -221,12 +219,13 @@ def create_send_wrapper(self, scope: Scope, send: Send) -> Send: Returns: An ASGI send function. """ + connection_state = ScopeState.from_scope(scope) async def send_wrapper(message: Message) -> None: if message["type"] == HTTP_RESPONSE_START: - set_litestar_scope_state(scope, HTTP_RESPONSE_START, message) + connection_state.log_context[HTTP_RESPONSE_START] = message elif message["type"] == HTTP_RESPONSE_BODY: - set_litestar_scope_state(scope, HTTP_RESPONSE_BODY, message) + connection_state.log_context[HTTP_RESPONSE_BODY] = message self.log_response(scope=scope) await send(message) diff --git a/litestar/middleware/response_cache.py b/litestar/middleware/response_cache.py index a80aab739b..62dcde6e23 100644 --- a/litestar/middleware/response_cache.py +++ b/litestar/middleware/response_cache.py @@ -5,9 +5,10 @@ from msgspec.msgpack import encode as encode_msgpack from litestar import Request -from litestar.constants import HTTP_RESPONSE_BODY, HTTP_RESPONSE_START, SCOPE_STATE_DO_CACHE, SCOPE_STATE_IS_CACHED +from litestar.constants import HTTP_RESPONSE_BODY, HTTP_RESPONSE_START from litestar.enums import ScopeType -from litestar.utils import 
get_litestar_scope_state, set_litestar_scope_state +from litestar.utils.empty import value_or_default +from litestar.utils.scope.state import ScopeState from .base import AbstractMiddleware @@ -33,16 +34,19 @@ async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: elif route_handler.cache is not False and isinstance(route_handler.cache, int): expires_in = route_handler.cache + connection_state = ScopeState.from_scope(scope) + messages: list[Message] = [] async def wrapped_send(message: Message) -> None: - if not get_litestar_scope_state(scope, SCOPE_STATE_IS_CACHED): + if not value_or_default(connection_state.is_cached, False): if message["type"] == HTTP_RESPONSE_START: - do_cache = self.config.cache_response_filter(cast("HTTPScope", scope), message["status"]) - set_litestar_scope_state(scope, SCOPE_STATE_DO_CACHE, do_cache) + do_cache = connection_state.do_cache = self.config.cache_response_filter( + cast("HTTPScope", scope), message["status"] + ) if do_cache: messages.append(message) - elif get_litestar_scope_state(scope, SCOPE_STATE_DO_CACHE): + elif value_or_default(connection_state.do_cache, False): messages.append(message) if messages and message["type"] == HTTP_RESPONSE_BODY and not message["more_body"]: diff --git a/litestar/response/template.py b/litestar/response/template.py index 5c8cbd3717..6499aaec01 100644 --- a/litestar/response/template.py +++ b/litestar/response/template.py @@ -5,13 +5,13 @@ from pathlib import PurePath from typing import TYPE_CHECKING, Any, Iterable, cast -from litestar.constants import SCOPE_STATE_CSRF_TOKEN_KEY from litestar.enums import MediaType from litestar.exceptions import ImproperlyConfiguredException from litestar.response.base import ASGIResponse, Response from litestar.status_codes import HTTP_200_OK -from litestar.utils import get_litestar_scope_state from litestar.utils.deprecation import warn_deprecation +from litestar.utils.empty import value_or_default +from litestar.utils.scope.state import 
ScopeState if TYPE_CHECKING: from litestar.app import Litestar @@ -90,7 +90,7 @@ def create_template_context(self, request: Request) -> dict[str, Any]: Returns: A dictionary holding the template context """ - csrf_token = get_litestar_scope_state(scope=request.scope, key=SCOPE_STATE_CSRF_TOKEN_KEY, default="") + csrf_token = value_or_default(ScopeState.from_scope(request.scope).csrf_token, "") return { **self.context, "request": request, diff --git a/litestar/routes/http.py b/litestar/routes/http.py index d0286ddad7..d288b96881 100644 --- a/litestar/routes/http.py +++ b/litestar/routes/http.py @@ -5,7 +5,7 @@ from msgspec.msgpack import decode as _decode_msgpack_plain -from litestar.constants import DEFAULT_ALLOWED_CORS_HEADERS, SCOPE_STATE_IS_CACHED +from litestar.constants import DEFAULT_ALLOWED_CORS_HEADERS from litestar.datastructures.headers import Headers from litestar.datastructures.upload_file import UploadFile from litestar.enums import HttpMethod, MediaType, ScopeType @@ -14,7 +14,7 @@ from litestar.response import Response from litestar.routes.base import BaseRoute from litestar.status_codes import HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST -from litestar.utils import set_litestar_scope_state +from litestar.utils.scope.state import ScopeState if TYPE_CHECKING: from litestar._kwargs import KwargsModel @@ -226,7 +226,7 @@ async def _get_cached_response(request: Request, route_handler: HTTPRouteHandler messages = _decode_msgpack_plain(cached_response_data) async def cached_response(scope: Scope, receive: Receive, send: Send) -> None: - set_litestar_scope_state(scope, SCOPE_STATE_IS_CACHED, True) + ScopeState.from_scope(scope).is_cached = True for message in messages: await send(message) diff --git a/litestar/template/base.py b/litestar/template/base.py index a9711880f2..3474717ed4 100644 --- a/litestar/template/base.py +++ b/litestar/template/base.py @@ -4,9 +4,9 @@ from typing_extensions import Concatenate, ParamSpec, TypeAlias -from litestar.constants 
import SCOPE_STATE_CSRF_TOKEN_KEY -from litestar.utils import get_litestar_scope_state from litestar.utils.deprecation import warn_deprecation +from litestar.utils.empty import value_or_default +from litestar.utils.scope.state import ScopeState if TYPE_CHECKING: from pathlib import Path @@ -67,7 +67,7 @@ def csrf_token(context: Mapping[str, Any], /) -> str: A CSRF token if the app level ``csrf_config`` is set, otherwise an empty string. """ scope = _get_request_from_context(context).scope - return cast("str", get_litestar_scope_state(scope=scope, key=SCOPE_STATE_CSRF_TOKEN_KEY, default="")) + return value_or_default(ScopeState.from_scope(scope).csrf_token, "") def url_for_static_asset(context: Mapping[str, Any], /, name: str, file_path: str) -> str: diff --git a/litestar/testing/client/base.py b/litestar/testing/client/base.py index 428c548f3a..93f1082a97 100644 --- a/litestar/testing/client/base.py +++ b/litestar/testing/client/base.py @@ -10,14 +10,13 @@ from litestar import Litestar from litestar.connection import ASGIConnection -from litestar.constants import SCOPE_STATE_COOKIES_KEY from litestar.datastructures import MutableScopeHeaders from litestar.enums import ScopeType from litestar.exceptions import ( ImproperlyConfiguredException, ) from litestar.types import AnyIOBackend, ASGIApp, HTTPResponseStartEvent -from litestar.utils.scope import set_litestar_scope_state +from litestar.utils.scope.state import ScopeState if TYPE_CHECKING: from httpx._types import CookieTypes @@ -57,7 +56,7 @@ def fake_asgi_connection(app: ASGIApp, cookies: dict[str, str]) -> ASGIConnectio "session": None, "user": None, } - set_litestar_scope_state(scope, SCOPE_STATE_COOKIES_KEY, cookies) + ScopeState.from_scope(scope).cookies = cookies return ASGIConnection[Any, Any, Any, Any](scope=scope) diff --git a/litestar/testing/request_factory.py b/litestar/testing/request_factory.py index de60b7719a..af6d26fbe6 100644 --- a/litestar/testing/request_factory.py +++ 
b/litestar/testing/request_factory.py @@ -11,14 +11,13 @@ from litestar import delete, patch, post, put from litestar.app import Litestar from litestar.connection import Request -from litestar.constants import SCOPE_STATE_BODY_KEY from litestar.enums import HttpMethod, ParamType, RequestEncodingType, ScopeType from litestar.handlers.http_handlers import get from litestar.serialization import decode_json, default_serializer, encode_json from litestar.types import DataContainerType, HTTPScope, RouteHandlerType from litestar.types.asgi_types import ASGIVersion from litestar.utils import get_serializer_from_scope -from litestar.utils.scope import set_litestar_scope_state +from litestar.utils.scope.state import ScopeState if TYPE_CHECKING: from httpx._types import FileTypes @@ -302,7 +301,7 @@ def _create_request_with_data( headers.update(encoding_headers) for chunk in stream: body += chunk - set_litestar_scope_state(scope, SCOPE_STATE_BODY_KEY, body) + ScopeState.from_scope(scope).body = body self._create_cookie_header(headers, cookies) scope["headers"] = self._build_headers(headers) return Request(scope=scope) diff --git a/litestar/utils/__init__.py b/litestar/utils/__init__.py index 6eca50ea57..d0901a2d12 100644 --- a/litestar/utils/__init__.py +++ b/litestar/utils/__init__.py @@ -1,3 +1,5 @@ +from typing import Any + from litestar.utils.deprecation import deprecated, warn_deprecation from .helpers import get_enum_string_value, get_name, unique_name_for_scope, url_quote @@ -20,11 +22,11 @@ is_undefined_sentinel, is_union, ) -from .scope import ( - delete_litestar_scope_state, - get_litestar_scope_state, +from .scope import ( # type: ignore[attr-defined] + _delete_litestar_scope_state, + _get_litestar_scope_state, + _set_litestar_scope_state, get_serializer_from_scope, - set_litestar_scope_state, ) from .sequence import find_index, unique from .sync import AsyncIteratorWrapper, ensure_async_callable @@ -33,11 +35,9 @@ __all__ = ( "ensure_async_callable", 
"AsyncIteratorWrapper", - "delete_litestar_scope_state", "deprecated", "find_index", "get_enum_string_value", - "get_litestar_scope_state", "get_name", "get_origin_or_inner_type", "get_serializer_from_scope", @@ -60,9 +60,28 @@ "join_paths", "make_non_optional_union", "normalize_path", - "set_litestar_scope_state", "unique", "unique_name_for_scope", "url_quote", "warn_deprecation", ) + +_deprecated_names = { + "get_litestar_scope_state": _get_litestar_scope_state, + "set_litestar_scope_state": _set_litestar_scope_state, + "delete_litestar_scope_state": _delete_litestar_scope_state, +} + + +def __getattr__(name: str) -> Any: + if name in _deprecated_names: + warn_deprecation( + deprecated_name=f"litestar.utils.{name}", + version="2.4", + kind="import", + removal_in="3.0", + info=f"'litestar.utils.{name}' is deprecated. The Litestar scope state is private and should not be used." + "Plugin authors should maintain their own scope state namespace.", + ) + return globals()["_deprecated_names"][name] + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") # pragma: no cover diff --git a/litestar/utils/empty.py b/litestar/utils/empty.py new file mode 100644 index 0000000000..cdde871d71 --- /dev/null +++ b/litestar/utils/empty.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, TypeVar + +from litestar.types.empty import Empty + +if TYPE_CHECKING: + from litestar.types.empty import EmptyType + +ValueT = TypeVar("ValueT") +DefaultT = TypeVar("DefaultT") + + +def value_or_default(value: ValueT | EmptyType, default: DefaultT) -> ValueT | DefaultT: + """Return `value` handling the case where it is empty. + + If `value` is `Empty`, `default` is returned. + + Args: + value: The value to check. + default: The default value to return if `value` is `Empty`. + + Returns: + The value or default value. 
+ """ + return default if value is Empty else value diff --git a/litestar/utils/scope.py b/litestar/utils/scope.py deleted file mode 100644 index d7b5b53e71..0000000000 --- a/litestar/utils/scope.py +++ /dev/null @@ -1,80 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Any - -from litestar.constants import SCOPE_STATE_NAMESPACE -from litestar.serialization import get_serializer - -if TYPE_CHECKING: - from litestar.types import Scope, Serializer - -__all__ = ( - "delete_litestar_scope_state", - "get_serializer_from_scope", - "get_litestar_scope_state", - "set_litestar_scope_state", -) - - -def get_serializer_from_scope(scope: Scope) -> Serializer: - """Return a serializer given a scope object. - - Args: - scope: The ASGI connection scope. - - Returns: - A serializer function - """ - route_handler = scope["route_handler"] - app = scope["app"] - - if hasattr(route_handler, "resolve_type_encoders"): - type_encoders = route_handler.resolve_type_encoders() - else: - type_encoders = app.type_encoders or {} - - if response_class := ( - route_handler.resolve_response_class() # pyright: ignore - if hasattr(route_handler, "resolve_response_class") - else app.response_class - ): - type_encoders = {**type_encoders, **(response_class.type_encoders or {})} - - return get_serializer(type_encoders) - - -def get_litestar_scope_state(scope: Scope, key: str, default: Any = None, pop: bool = False) -> Any: - """Get an internal value from connection scope state. - - Args: - scope: The connection scope. - key: Key to get from internal namespace in scope state. - default: Default value to return. - pop: Boolean flag dictating whether the value should be deleted from the state. - - Returns: - Value mapped to ``key`` in internal connection scope namespace. 
- """ - namespace = scope["state"].setdefault(SCOPE_STATE_NAMESPACE, {}) - return namespace.pop(key, default) if pop else namespace.get(key, default) - - -def set_litestar_scope_state(scope: Scope, key: str, value: Any) -> None: - """Set an internal value in connection scope state. - - Args: - scope: The connection scope. - key: Key to set under internal namespace in scope state. - value: Value for key. - """ - scope["state"].setdefault(SCOPE_STATE_NAMESPACE, {})[key] = value - - -def delete_litestar_scope_state(scope: Scope, key: str) -> None: - """Delete an internal value from connection scope state. - - Args: - scope: The connection scope. - key: Key to set under internal namespace in scope state. - """ - del scope["state"][SCOPE_STATE_NAMESPACE][key] diff --git a/litestar/utils/scope/__init__.py b/litestar/utils/scope/__init__.py new file mode 100644 index 0000000000..44895ebc26 --- /dev/null +++ b/litestar/utils/scope/__init__.py @@ -0,0 +1,62 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from litestar.serialization import get_serializer +from litestar.utils.deprecation import warn_deprecation +from litestar.utils.scope.state import delete_litestar_scope_state as _delete_litestar_scope_state +from litestar.utils.scope.state import get_litestar_scope_state as _get_litestar_scope_state +from litestar.utils.scope.state import set_litestar_scope_state as _set_litestar_scope_state + +if TYPE_CHECKING: + from litestar.types import Scope, Serializer + +__all__ = ("get_serializer_from_scope",) + + +def get_serializer_from_scope(scope: Scope) -> Serializer: + """Return a serializer given a scope object. + + Args: + scope: The ASGI connection scope. 
+ + Returns: + A serializer function + """ + route_handler = scope["route_handler"] + app = scope["app"] + + if hasattr(route_handler, "resolve_type_encoders"): + type_encoders = route_handler.resolve_type_encoders() + else: + type_encoders = app.type_encoders or {} + + if response_class := ( + route_handler.resolve_response_class() # pyright: ignore + if hasattr(route_handler, "resolve_response_class") + else app.response_class + ): + type_encoders = {**type_encoders, **(response_class.type_encoders or {})} + + return get_serializer(type_encoders) + + +_deprecated_names = { + "get_litestar_scope_state": _get_litestar_scope_state, + "set_litestar_scope_state": _set_litestar_scope_state, + "delete_litestar_scope_state": _delete_litestar_scope_state, +} + + +def __getattr__(name: str) -> Any: + if name in _deprecated_names: + warn_deprecation( + deprecated_name=f"litestar.utils.scope.{name}", + version="2.4", + kind="import", + removal_in="3.0", + info=f"'litestar.utils.scope.{name}' is deprecated. The Litestar scope state is private and should not be used." + "Plugin authors should maintain their own scope state namespace.", + ) + return globals()["_deprecated_names"][name] + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") # pragma: no cover diff --git a/litestar/utils/scope/state.py b/litestar/utils/scope/state.py new file mode 100644 index 0000000000..bd6829d387 --- /dev/null +++ b/litestar/utils/scope/state.py @@ -0,0 +1,156 @@ +from __future__ import annotations + +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any, Final + +from litestar.types import Empty, EmptyType +from litestar.utils.empty import value_or_default + +if TYPE_CHECKING: + from typing_extensions import Self + + from litestar.datastructures import URL, Accept, Headers + from litestar.types.asgi_types import Scope + +CONNECTION_STATE_KEY: Final = "_ls_connection_state" + + +@dataclass +class ScopeState: + """An object for storing connection state. 
+
+    This is an internal API, and subject to change without notice.
+
+    All types are a union with `EmptyType` and are seeded with the `Empty` value.
+    """
+
+    __slots__ = (
+        "accept",
+        "base_url",
+        "body",
+        "content_type",
+        "cookies",
+        "csrf_token",
+        "dependency_cache",
+        "do_cache",
+        "form",
+        "headers",
+        "is_cached",
+        "json",
+        "log_context",
+        "msgpack",
+        "parsed_query",
+        "response_compressed",
+        "url",
+        "_compat_ns",
+    )
+
+    def __init__(self) -> None:
+        self.accept = Empty
+        self.base_url = Empty
+        self.body = Empty
+        self.content_type = Empty
+        self.cookies = Empty
+        self.csrf_token = Empty
+        self.dependency_cache = Empty
+        self.do_cache = Empty
+        self.form = Empty
+        self.headers = Empty
+        self.is_cached = Empty
+        self.json = Empty
+        self.log_context: dict[str, Any] = {}
+        self.msgpack = Empty
+        self.parsed_query = Empty
+        self.response_compressed = Empty
+        self.url = Empty
+        self._compat_ns: dict[str, Any] = {}
+
+    accept: Accept | EmptyType
+    base_url: URL | EmptyType
+    body: bytes | EmptyType
+    content_type: tuple[str, dict[str, str]] | EmptyType
+    cookies: dict[str, str] | EmptyType
+    csrf_token: str | EmptyType
+    dependency_cache: dict[str, Any] | EmptyType
+    do_cache: bool | EmptyType
+    form: dict[str, str | list[str]] | EmptyType
+    headers: Headers | EmptyType
+    is_cached: bool | EmptyType
+    json: Any | EmptyType
+    log_context: dict[str, Any]
+    msgpack: Any | EmptyType
+    parsed_query: tuple[tuple[str, str], ...] | EmptyType
+    response_compressed: bool | EmptyType
+    url: URL | EmptyType
+    _compat_ns: dict[str, Any]
+
+    @classmethod
+    def from_scope(cls, scope: Scope) -> Self:
+        """Create a new `ScopeState` object from a scope.
+
+        Object is cached in the scope's state under the `CONNECTION_STATE_KEY` key.
+
+        Args:
+            scope: The ASGI connection scope.
+
+        Returns:
+            A `ScopeState` object. 
+ """ + if state := scope["state"].get(CONNECTION_STATE_KEY): + return state # type: ignore[no-any-return] + state = scope["state"][CONNECTION_STATE_KEY] = cls() + scope["state"][CONNECTION_STATE_KEY] = state + return state + + +def get_litestar_scope_state(scope: Scope, key: str, default: Any = None, pop: bool = False) -> Any: + """Get an internal value from connection scope state. + + Args: + scope: The connection scope. + key: Key to get from internal namespace in scope state. + default: Default value to return. + pop: Boolean flag dictating whether the value should be deleted from the state. + + Returns: + Value mapped to ``key`` in internal connection scope namespace. + """ + scope_state = ScopeState.from_scope(scope) + try: + val = value_or_default(getattr(scope_state, key), default) + if pop: + setattr(scope_state, key, Empty) + return val + except AttributeError: + if pop: + return scope_state._compat_ns.pop(key, default) + return scope_state._compat_ns.get(key, default) + + +def set_litestar_scope_state(scope: Scope, key: str, value: Any) -> None: + """Set an internal value in connection scope state. + + Args: + scope: The connection scope. + key: Key to set under internal namespace in scope state. + value: Value for key. + """ + scope_state = ScopeState.from_scope(scope) + if hasattr(scope_state, key): + setattr(scope_state, key, value) + else: + scope_state._compat_ns[key] = value + + +def delete_litestar_scope_state(scope: Scope, key: str) -> None: + """Delete an internal value from connection scope state. + + Args: + scope: The connection scope. + key: Key to set under internal namespace in scope state. 
+ """ + scope_state = ScopeState.from_scope(scope) + if hasattr(scope_state, key): + setattr(scope_state, key, Empty) + else: + del scope_state._compat_ns[key] diff --git a/tests/unit/test_connection/test_base.py b/tests/unit/test_connection/test_base.py index 71c52235c1..d514aa9d62 100644 --- a/tests/unit/test_connection/test_base.py +++ b/tests/unit/test_connection/test_base.py @@ -1,10 +1,11 @@ from typing import Any -from litestar import Litestar, constants, get +from litestar import Litestar, get from litestar.connection import ASGIConnection from litestar.logging.config import LoggingConfig from litestar.testing import RequestFactory -from litestar.utils.scope import get_litestar_scope_state +from litestar.types.empty import Empty +from litestar.utils.scope.state import ScopeState def test_connection_base_properties() -> None: @@ -18,26 +19,27 @@ def handler() -> None: session = {"session": "abc"} scope = RequestFactory(app=app).get(route_handler=handler, user=user, auth=auth, session=session).scope connection = ASGIConnection[Any, Any, Any, Any](scope) + connection_state = ScopeState.from_scope(scope) assert connection.app assert connection.app is app assert connection.route_handler is handler assert connection.state is not None - assert not get_litestar_scope_state(scope, constants.SCOPE_STATE_URL_KEY) + assert connection_state.url is Empty assert connection.url - assert get_litestar_scope_state(scope, constants.SCOPE_STATE_URL_KEY) - assert not get_litestar_scope_state(scope, constants.SCOPE_STATE_BASE_URL_KEY) + assert connection_state.url is not Empty + assert connection_state.base_url is Empty # type:ignore[unreachable] assert connection.base_url - assert get_litestar_scope_state(scope, constants.SCOPE_STATE_BASE_URL_KEY) - assert not scope.get("_headers") + assert connection_state.base_url is not Empty + assert connection_state.headers is Empty assert connection.headers is not None - assert scope.get("_headers") is not None - assert not 
get_litestar_scope_state(scope, constants.SCOPE_STATE_PARSED_QUERY_KEY) + assert connection_state.headers is not Empty + assert connection_state.parsed_query is Empty assert connection.query_params is not None - assert get_litestar_scope_state(scope, constants.SCOPE_STATE_PARSED_QUERY_KEY) is not None - assert not get_litestar_scope_state(scope, constants.SCOPE_STATE_COOKIES_KEY) + assert connection_state.parsed_query is not Empty + assert connection_state.cookies is Empty assert connection.cookies is not None - assert get_litestar_scope_state(scope, constants.SCOPE_STATE_COOKIES_KEY) is not None + assert connection_state.cookies is not Empty assert connection.client assert connection.user is user assert connection.auth is auth diff --git a/tests/unit/test_connection/test_connection_caching.py b/tests/unit/test_connection/test_connection_caching.py index a5abe8375e..43c2fe9865 100644 --- a/tests/unit/test_connection/test_connection_caching.py +++ b/tests/unit/test_connection/test_connection_caching.py @@ -5,10 +5,10 @@ import pytest -from litestar import Request, constants +from litestar import Request from litestar.testing import RequestFactory from litestar.types import Empty, HTTPReceiveMessage, Scope -from litestar.utils import get_litestar_scope_state, set_litestar_scope_state +from litestar.utils.scope.state import ScopeState async def test_multiple_request_object_data_caching(create_scope: Callable[..., Scope], mock: MagicMock) -> None: @@ -43,20 +43,17 @@ def set_mock_fixture() -> MagicMock: def create_connection_fixture( get_mock: MagicMock, set_mock: MagicMock, monkeypatch: pytest.MonkeyPatch ) -> Callable[..., Request]: - def create_connection(body_type: str = "json") -> Request: - def wrapped_get_litestar_scope_state(scope_: Scope, key: str, default: Any = None) -> Any: + class MockScopeState(ScopeState): + def __getattribute__(self, key: str) -> Any: get_mock(key) - return get_litestar_scope_state(scope_, key, default) + return 
object.__getattribute__(self, key) - def wrapped_set_litestar_scope_state(scope_: Scope, key: str, value: Any) -> None: + def __setattr__(self, key: str, value: Any) -> None: set_mock(key, value) - set_litestar_scope_state(scope_, key, value) - - monkeypatch.setattr("litestar.connection.base.get_litestar_scope_state", wrapped_get_litestar_scope_state) - monkeypatch.setattr("litestar.connection.base.set_litestar_scope_state", wrapped_set_litestar_scope_state) - monkeypatch.setattr("litestar.connection.request.get_litestar_scope_state", wrapped_get_litestar_scope_state) - monkeypatch.setattr("litestar.connection.request.set_litestar_scope_state", wrapped_set_litestar_scope_state) + super().__setattr__(key, value) + def create_connection(body_type: str = "json") -> Request: + monkeypatch.setattr("litestar.connection.base.ScopeState", MockScopeState) connection = RequestFactory().get() async def fake_receive() -> HTTPReceiveMessage: @@ -82,29 +79,22 @@ def get_value_fixture() -> Callable[[Request, str, bool], Awaitable[Any]]: async def get_value_(connection: Request, prop_name: str, is_coro: bool) -> Any: """Helper to get the value of the tested cached property.""" value = getattr(connection, prop_name) - if is_coro: - return await value() - return value + return await value() if is_coro else value return get_value_ caching_tests = [ - (constants.SCOPE_STATE_URL_KEY, "url", "_url", False), - (constants.SCOPE_STATE_BASE_URL_KEY, "base_url", "_base_url", False), - ( - constants.SCOPE_STATE_PARSED_QUERY_KEY, - "query_params", - "_parsed_query", - False, - ), - (constants.SCOPE_STATE_COOKIES_KEY, "cookies", "_cookies", False), - (constants.SCOPE_STATE_BODY_KEY, "body", "_body", True), - (constants.SCOPE_STATE_FORM_KEY, "form", "_form", True), - (constants.SCOPE_STATE_MSGPACK_KEY, "msgpack", "_msgpack", True), - (constants.SCOPE_STATE_JSON_KEY, "json", "_json", True), - (constants.SCOPE_STATE_ACCEPT_KEY, "accept", "_accept", False), - 
(constants.SCOPE_STATE_CONTENT_TYPE_KEY, "content_type", "_content_type", False), + ("url", "url", "_url", False), + ("base_url", "base_url", "_base_url", False), + ("parsed_query", "query_params", "_parsed_query", False), + ("cookies", "cookies", "_cookies", False), + ("body", "body", "_body", True), + ("form", "form", "_form", True), + ("msgpack", "msgpack", "_msgpack", True), + ("json", "json", "_json", True), + ("accept", "accept", "_accept", False), + ("content_type", "content_type", "_content_type", False), ] @@ -125,8 +115,10 @@ def check_get_mock() -> None: For certain properties, we call `get_litestar_scope_state()` twice, once for the property and once for the body. For these cases, we check that the mock was called twice. """ - if state_key in ("json", "msgpack"): + if state_key in {"json", "msgpack"}: get_mock.assert_has_calls([call(state_key), call("body")]) + elif state_key in {"accept", "cookies", "content_type"}: + get_mock.assert_has_calls([call(state_key), call("headers")]) elif state_key == "form": get_mock.assert_has_calls([call(state_key), call("content_type")]) else: @@ -138,18 +130,23 @@ def check_set_mock() -> None: For certain properties, we call `set_litestar_scope_state()` twice, once for the property and once for the body. For these cases, we check that the mock was called twice. 
""" - if state_key in ("json", "msgpack"): + if state_key in {"json", "msgpack"}: set_mock.assert_has_calls([call("body", ANY), call(state_key, ANY)]) elif state_key == "form": - set_mock.assert_has_calls([call("content_type", ANY), call("form", ANY)]) + set_mock.assert_has_calls([call("content_type", ANY), call(state_key, ANY)]) + elif state_key in {"accept", "cookies", "content_type"}: + set_mock.assert_has_calls([call("headers", ANY), call(state_key, ANY)]) else: set_mock.assert_called_once_with(state_key, ANY) connection = create_connection("msgpack" if state_key == "msgpack" else "json") + connection_state = connection._connection_state - assert get_litestar_scope_state(connection.scope, state_key, Empty) is Empty + assert getattr(connection_state, state_key) is Empty setattr(connection, cache_attr_name, Empty) + get_mock.reset_mock() + set_mock.reset_mock() await get_value(connection, prop_name, is_coro) check_get_mock() check_set_mock() @@ -168,10 +165,13 @@ async def test_connection_cached_properties_cached_in_scope( ) -> None: # set the value in the scope and ensure empty on connection connection = create_connection() + connection_state = ScopeState.from_scope(connection.scope) - set_litestar_scope_state(connection.scope, state_key, {"a": "b"}) + setattr(connection_state, state_key, {"not": "empty"}) setattr(connection, cache_attr_name, Empty) + get_mock.reset_mock() + set_mock.reset_mock() await get_value(connection, prop_name, is_coro) get_mock.assert_called_once_with(state_key) set_mock.assert_not_called() @@ -190,7 +190,9 @@ async def test_connection_cached_properties_cached_on_connection( ) -> None: connection = create_connection() # set the value on the connection - setattr(connection, cache_attr_name, {"a": "b"}) + setattr(connection, cache_attr_name, {"not": "empty"}) + get_mock.reset_mock() + set_mock.reset_mock() await get_value(connection, prop_name, is_coro) get_mock.assert_not_called() set_mock.assert_not_called() diff --git 
a/tests/unit/test_datastructures/test_headers.py b/tests/unit/test_datastructures/test_headers.py index 34b5752119..355804ac99 100644 --- a/tests/unit/test_datastructures/test_headers.py +++ b/tests/unit/test_datastructures/test_headers.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import TYPE_CHECKING, List, Optional, Union +from typing import TYPE_CHECKING, Callable, List, Optional, Union import pytest from pytest import FixtureRequest @@ -18,7 +18,7 @@ from litestar.utils.dataclass import simple_asdict if TYPE_CHECKING: - from litestar.types.asgi_types import RawHeaders, RawHeadersList + from litestar.types.asgi_types import RawHeaders, RawHeadersList, Scope @pytest.fixture @@ -74,10 +74,8 @@ def test_headers_from_raw_tuple() -> None: assert headers.getall("foo") == ["bar", "baz"] -def test_headers_from_scope() -> None: - headers = Headers.from_scope( - HTTPResponseStartEvent(type="http.response.start", status=200, headers=[(b"foo", b"bar"), (b"buzz", b"bup")]) - ) +def test_headers_from_scope(create_scope: "Callable[..., Scope]") -> None: + headers = Headers.from_scope(create_scope(headers=[(b"foo", b"bar"), (b"buzz", b"bup")])) assert headers["foo"] == "bar" assert headers["buzz"] == "bup" diff --git a/tests/unit/test_deprecations.py b/tests/unit/test_deprecations.py index 15aa9a897a..9103e22042 100644 --- a/tests/unit/test_deprecations.py +++ b/tests/unit/test_deprecations.py @@ -115,3 +115,26 @@ def test_litestar_templates_template_context_deprecation() -> None: def test_minijinja_from_state_deprecation() -> None: with pytest.warns(DeprecationWarning): from litestar.contrib.minijinja import minijinja_from_state # noqa: F401 + + +def test_constants_deprecations() -> None: + with pytest.warns(DeprecationWarning): + from litestar.constants import SCOPE_STATE_NAMESPACE # noqa: F401 + + +def test_utils_deprecations() -> None: + with pytest.warns(DeprecationWarning): + from litestar.utils import ( # noqa: F401 + delete_litestar_scope_state, + 
get_litestar_scope_state, + set_litestar_scope_state, + ) + + +def test_utils_scope_deprecations() -> None: + with pytest.warns(DeprecationWarning): + from litestar.utils.scope import ( # noqa: F401 + delete_litestar_scope_state, + get_litestar_scope_state, + set_litestar_scope_state, + ) diff --git a/tests/unit/test_template/test_csrf_token.py b/tests/unit/test_template/test_csrf_token.py index da51dc36b7..f905c428a3 100644 --- a/tests/unit/test_template/test_csrf_token.py +++ b/tests/unit/test_template/test_csrf_token.py @@ -6,7 +6,6 @@ from litestar import MediaType, get from litestar.config.csrf import CSRFConfig -from litestar.constants import SCOPE_STATE_CSRF_TOKEN_KEY from litestar.contrib.jinja import JinjaTemplateEngine from litestar.contrib.mako import MakoTemplateEngine from litestar.contrib.minijinja import MiniJinjaTemplateEngine @@ -15,7 +14,8 @@ from litestar.template.config import TemplateConfig from litestar.testing import create_test_client from litestar.types import Scope -from litestar.utils import get_litestar_scope_state +from litestar.utils.empty import value_or_default +from litestar.utils.scope.state import ScopeState @pytest.mark.parametrize( @@ -61,7 +61,8 @@ def test_csrf_input(engine: Any, template: str, tmp_path: Path) -> None: @get(path="/", media_type=MediaType.HTML) def handler(scope: Scope) -> Template: - token["value"] = get_litestar_scope_state(scope, SCOPE_STATE_CSRF_TOKEN_KEY) + connection_state = ScopeState.from_scope(scope) + token["value"] = value_or_default(connection_state.csrf_token, "") return Template(template_name="abc.html") csrf_config = CSRFConfig(secret="yaba daba do") diff --git a/tests/unit/test_testing/test_request_factory.py b/tests/unit/test_testing/test_request_factory.py index 0171e2f91a..7869e83b01 100644 --- a/tests/unit/test_testing/test_request_factory.py +++ b/tests/unit/test_testing/test_request_factory.py @@ -119,7 +119,7 @@ def test_request_factory_create_with_default_params() -> None: assert 
isinstance(request.app, Litestar) assert request.url == request.base_url == _DEFAULT_REQUEST_FACTORY_URL assert request.method == HttpMethod.GET - assert request.state.keys() == {"__litestar__"} + assert request.state.keys() == {"_ls_connection_state"} assert not request.query_params assert not request.path_params assert request.route_handler diff --git a/tests/unit/test_utils/test_scope.py b/tests/unit/test_utils/test_scope.py index 7225434aea..1e4d215e48 100644 --- a/tests/unit/test_utils/test_scope.py +++ b/tests/unit/test_utils/test_scope.py @@ -1,44 +1,72 @@ -from typing import TYPE_CHECKING +from __future__ import annotations + +from typing import TYPE_CHECKING, Callable import pytest -from litestar.constants import SCOPE_STATE_NAMESPACE +from litestar.types.empty import Empty from litestar.utils import ( + delete_litestar_scope_state, get_litestar_scope_state, set_litestar_scope_state, ) +from litestar.utils.scope.state import ScopeState if TYPE_CHECKING: - from litestar.types.asgi_types import HTTPScope + from litestar.types.asgi_types import Scope @pytest.fixture() -def scope() -> "HTTPScope": - return {"state": {}} # type:ignore[typeddict-item] - - -def test_get_litestar_scope_state_without_default_does_not_set_key_in_scope_state(scope: "HTTPScope") -> None: - get_litestar_scope_state(scope, "key") - assert SCOPE_STATE_NAMESPACE in scope["state"] - assert "key" not in scope["state"][SCOPE_STATE_NAMESPACE] - +def scope(create_scope: Callable[..., Scope]) -> Scope: + return create_scope() -def test_get_litestar_scope_state_with_default_does_not_set_key_in_scope_state(scope: "HTTPScope") -> None: - value = get_litestar_scope_state(scope, "key", "value") - assert SCOPE_STATE_NAMESPACE in scope["state"] - assert value == "value" - assert "key" not in scope["state"][SCOPE_STATE_NAMESPACE] - -def test_get_litestar_scope_state_removes_value_from_state(scope: "HTTPScope") -> None: +@pytest.mark.parametrize(("pop",), [(True,), (False,)]) +def 
test_get_litestar_scope_state_arbitrary_value(pop: bool, scope: Scope) -> None: key = "test" value = {"key": "value"} - scope["state"][SCOPE_STATE_NAMESPACE] = {key: value} - retrieved_value = get_litestar_scope_state(scope, key, pop=True) + connection_state = ScopeState.from_scope(scope) + connection_state._compat_ns[key] = value + retrieved_value = get_litestar_scope_state(scope, key, pop=pop) assert retrieved_value == value - assert key not in scope["state"][SCOPE_STATE_NAMESPACE] + if pop: + assert connection_state._compat_ns.get(key) is None + else: + assert connection_state._compat_ns.get(key) == value + + +@pytest.mark.parametrize(("pop",), [(True,), (False,)]) +def test_get_litestar_scope_state_defined_value(pop: bool, scope: Scope) -> None: + connection_state = ScopeState.from_scope(scope) + connection_state.is_cached = True + assert get_litestar_scope_state(scope, "is_cached", pop=pop) is True + if pop: + assert connection_state.is_cached is Empty # type: ignore[comparison-overlap] + else: + assert connection_state.is_cached is True -def test_set_litestar_scope_state(scope: "HTTPScope") -> None: +def test_set_litestar_scope_state_arbitrary_value(scope: Scope) -> None: + connection_state = ScopeState.from_scope(scope) set_litestar_scope_state(scope, "key", "value") - assert scope["state"][SCOPE_STATE_NAMESPACE]["key"] == "value" + assert connection_state._compat_ns["key"] == "value" + + +def test_set_litestar_scope_state_defined_value(scope: Scope) -> None: + connection_state = ScopeState.from_scope(scope) + set_litestar_scope_state(scope, "is_cached", True) + assert connection_state.is_cached is True + + +def test_delete_litestar_scope_state_arbitrary_value(scope: Scope) -> None: + connection_state = ScopeState.from_scope(scope) + connection_state._compat_ns["key"] = "value" + delete_litestar_scope_state(scope, "key") + assert "key" not in connection_state._compat_ns + + +def test_delete_litestar_scope_state_defined_value(scope: Scope) -> None: + 
connection_state = ScopeState.from_scope(scope) + connection_state.is_cached = True + delete_litestar_scope_state(scope, "is_cached") + assert connection_state.is_cached is Empty # type: ignore[comparison-overlap] From ce0d971f1646f8c7912b8a3c0583b7b872a042d4 Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Mon, 27 Nov 2023 22:39:26 +1000 Subject: [PATCH 20/45] fix: dto handled objects nested in mappings (#2775) fix: dto return mapping This PR resolves an issue where we failed to properly reconstruct the annotation with the backend transfer model type for mapping annotations. It also solves a subsequent issue for transferring data to/from mappings. Closes #2737 --- litestar/dto/_backend.py | 63 ++++++++++++------- litestar/dto/_codegen_backend.py | 13 ++-- .../test_dto/test_factory/test_integration.py | 18 ++++++ tests/unit/test_openapi/conftest.py | 4 +- 4 files changed, 71 insertions(+), 27 deletions(-) diff --git a/litestar/dto/_backend.py b/litestar/dto/_backend.py index e1a2622f79..aae962f56e 100644 --- a/litestar/dto/_backend.py +++ b/litestar/dto/_backend.py @@ -18,7 +18,6 @@ ) from msgspec import UNSET, Struct, UnsetType, convert, defstruct, field -from typing_extensions import get_origin from litestar.dto._types import ( CollectionType, @@ -38,7 +37,6 @@ from litestar.types import Empty from litestar.typing import FieldDefinition from litestar.utils import unique_name_for_scope -from litestar.utils.typing import safe_generic_origin_map if TYPE_CHECKING: from litestar.connection import ASGIConnection @@ -119,11 +117,9 @@ def __init__( self.override_serialization_name: bool = False if field_definition.is_subclass_of(DTOData): self.dto_data_type = field_definition.annotation - annotation = self.field_definition.inner_types[0].annotation - else: - annotation = field_definition.annotation + field_definition = self.field_definition.inner_types[0] - self.annotation = _maybe_wrap_in_generic_annotation(annotation, self.transfer_model_type) + self.annotation = 
build_annotation_for_backend(model_type, field_definition, self.transfer_model_type) def parse_model( self, @@ -605,17 +601,32 @@ def _transfer_data( Returns: Data parsed into ``destination_type``. """ - if field_definition.is_non_string_collection and not field_definition.is_mapping: + if field_definition.is_non_string_collection: + if not field_definition.is_mapping: + return field_definition.instantiable_origin( + _transfer_data( + destination_type=destination_type, + source_data=item, + field_definitions=field_definitions, + field_definition=field_definition.inner_types[0], + is_data_field=is_data_field, + override_serialization_name=override_serialization_name, + ) + for item in source_data + ) return field_definition.instantiable_origin( - _transfer_data( - destination_type=destination_type, - source_data=item, - field_definitions=field_definitions, - field_definition=field_definition.inner_types[0], - is_data_field=is_data_field, - override_serialization_name=override_serialization_name, + ( + key, + _transfer_data( + destination_type=destination_type, + source_data=value, + field_definitions=field_definitions, + field_definition=field_definition.inner_types[1], + is_data_field=is_data_field, + override_serialization_name=override_serialization_name, + ), ) - for item in source_data + for key, value in source_data.items() # type: ignore[union-attr] ) return _transfer_instance_data( @@ -796,20 +807,30 @@ def _create_struct_for_field_definitions( return defstruct(model_name, struct_fields, frozen=True, kw_only=True) -def _maybe_wrap_in_generic_annotation(annotation: Any, model: Any) -> Any: +def build_annotation_for_backend( + model_type: type[Any], field_definition: FieldDefinition, transfer_model: type[Struct] +) -> Any: """A helper to re-build a generic outer type with new inner type. Args: - annotation: The original annotation on the handler signature - model: The data container type + model_type: The original model type. 
+ field_definition: The parsed type that represents the handler annotation for which the DTO is being applied. + transfer_model: The transfer model generated to represent the model type. Returns: Annotation with new inner type if applicable. """ - if (origin := get_origin(annotation)) and origin in safe_generic_origin_map: - return safe_generic_origin_map[origin][model] # type: ignore[index] + if not field_definition.inner_types: + if field_definition.is_subclass_of(model_type): + return transfer_model + return field_definition.annotation + + inner_types = tuple( + build_annotation_for_backend(model_type, inner_type, transfer_model) + for inner_type in field_definition.inner_types + ) - return origin[model] if (origin := get_origin(annotation)) else model + return field_definition.safe_generic_origin[inner_types] def _should_mark_private(field_definition: DTOFieldDefinition, underscore_fields_private: bool) -> bool: diff --git a/litestar/dto/_codegen_backend.py b/litestar/dto/_codegen_backend.py index 9808920322..897f760c4c 100644 --- a/litestar/dto/_codegen_backend.py +++ b/litestar/dto/_codegen_backend.py @@ -349,7 +349,7 @@ def create_transfer_data( override_serialization_name: bool, field_definition: FieldDefinition | None = None, ) -> Callable[[Any], Any]: - if field_definition and field_definition.is_non_string_collection and not field_definition.is_mapping: + if field_definition and field_definition.is_non_string_collection: factory = cls( is_data_field=is_data_field, override_serialization_name=override_serialization_name, @@ -390,9 +390,14 @@ def _create_transfer_data_body_nested( override_serialization_name=self.override_serialization_name, ) transfer_func_name = self._add_to_fn_globals("transfer_data", transfer_func) - self._add_stmt( - f"{assignment_target} = {origin_name}({transfer_func_name}(item) for item in {source_data_name})" - ) + if field_definition.is_mapping: + self._add_stmt( + f"{assignment_target} = {origin_name}((key, 
{transfer_func_name}(item)) for key, item in {source_data_name}.items())" + ) + else: + self._add_stmt( + f"{assignment_target} = {origin_name}({transfer_func_name}(item) for item in {source_data_name})" + ) def _create_transfer_instance_data( self, diff --git a/tests/unit/test_dto/test_factory/test_integration.py b/tests/unit/test_dto/test_factory/test_integration.py index f1d7c29e98..faa2d38a0a 100644 --- a/tests/unit/test_dto/test_factory/test_integration.py +++ b/tests/unit/test_dto/test_factory/test_integration.py @@ -805,3 +805,21 @@ def handler(data: Superuser) -> Superuser: headers={"Content-Type": "application/json; charset=utf-8"}, ) assert msgspec.json.decode(received.content, type=Superuser) == data + + +def test_dto_returning_mapping(use_experimental_dto_backend: bool) -> None: + @dataclass + class Lexeme: + id: int + name: str + + class LexemeDTO(DataclassDTO[Lexeme]): + config = DTOConfig(exclude={"id"}, experimental_codegen_backend=use_experimental_dto_backend) + + @get(return_dto=LexemeDTO, signature_types=[Lexeme]) + async def get_definition() -> Dict[str, Lexeme]: + return {"hello": Lexeme(id=1, name="hello"), "world": Lexeme(id=2, name="world")} + + with create_test_client(route_handlers=[get_definition]) as client: + response = client.get("/") + assert response.json() == {"hello": {"name": "hello"}, "world": {"name": "world"}} diff --git a/tests/unit/test_openapi/conftest.py b/tests/unit/test_openapi/conftest.py index 101a70a35c..966dd0bcb0 100644 --- a/tests/unit/test_openapi/conftest.py +++ b/tests/unit/test_openapi/conftest.py @@ -61,7 +61,7 @@ def create_person( @post(path="/bulk", dto=PartialDataclassPersonDTO) def bulk_create_person( - self, data: List[DTOData[DataclassPerson]], secret_header: str = Parameter(header="secret") + self, data: DTOData[List[DataclassPerson]], secret_header: str = Parameter(header="secret") ) -> List[DataclassPerson]: return [] @@ -73,7 +73,7 @@ def bulk_update_person( @patch(path="/bulk", 
dto=PartialDataclassPersonDTO) def bulk_partial_update_person( - self, data: List[DTOData[DataclassPerson]], secret_header: str = Parameter(header="secret") + self, data: DTOData[List[DataclassPerson]], secret_header: str = Parameter(header="secret") ) -> List[DataclassPerson]: return [] From 074fc9c3c04702432f6f9954c3601cba37818523 Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Mon, 27 Nov 2023 23:37:00 +1000 Subject: [PATCH 21/45] fix: type narrowing in csrf middleware (#2777) --- litestar/middleware/csrf.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/litestar/middleware/csrf.py b/litestar/middleware/csrf.py index 58c020960f..94dd422d57 100644 --- a/litestar/middleware/csrf.py +++ b/litestar/middleware/csrf.py @@ -119,9 +119,12 @@ async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: ): token = connection_state.csrf_token = csrf_cookie or generate_csrf_token(secret=self.config.secret) await self.app(scope, receive, self.create_send_wrapper(send=send, csrf_cookie=csrf_cookie, token=token)) - elif self._csrf_tokens_match(existing_csrf_token, csrf_cookie): - # we haven't properly narrowed the type of `existing_csrf_token` to be non-None, but we know it is - connection_state.csrf_token = existing_csrf_token # type: ignore[assignment] + elif ( + existing_csrf_token is not None + and csrf_cookie is not None + and self._csrf_tokens_match(existing_csrf_token, csrf_cookie) + ): + connection_state.csrf_token = existing_csrf_token await self.app(scope, receive, send) else: raise PermissionDeniedException("CSRF token verification failed") @@ -177,11 +180,8 @@ def _decode_csrf_token(self, token: str) -> str | None: expected_hash = generate_csrf_hash(token=token_secret, secret=self.config.secret) return token_secret if compare_digest(existing_hash, expected_hash) else None - def _csrf_tokens_match(self, request_csrf_token: str | None, cookie_csrf_token: str | None) -> bool: + def _csrf_tokens_match(self, 
request_csrf_token: str, cookie_csrf_token: str) -> bool: """Take the CSRF tokens from the request and the cookie and verify both are valid and identical.""" - if not (request_csrf_token and cookie_csrf_token): - return False - decoded_request_token = self._decode_csrf_token(request_csrf_token) decoded_cookie_token = self._decode_csrf_token(cookie_csrf_token) if decoded_request_token is None or decoded_cookie_token is None: From 18d5a6bfd15c31ff223a21f26feace27969c7415 Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Mon, 27 Nov 2023 23:42:47 +1000 Subject: [PATCH 22/45] fix: consistent sequence union parameter errors (#2776) --- litestar/_signature/model.py | 5 +++-- tests/unit/test_signature/test_parsing.py | 27 +++++++++++++++++++++++ 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/litestar/_signature/model.py b/litestar/_signature/model.py index 94b4c6742f..3ce04d36eb 100644 --- a/litestar/_signature/model.py +++ b/litestar/_signature/model.py @@ -291,9 +291,10 @@ def _create_annotation( type_decoders=type_decoders, meta_data=meta_data, ) - for inner_type in (t for t in field_definition.inner_types if t.annotation is not type(None)) + for inner_type in field_definition.inner_types + if not inner_type.is_none_type ] - return Optional[types[0]] if field_definition.is_optional else Union[tuple(types)] # pyright: ignore + return Optional[Union[tuple(types)]] if field_definition.is_optional else Union[tuple(types)] # pyright: ignore if decoder := _get_decoder_for_type(annotation, type_decoders=type_decoders): # FIXME: temporary (hopefully) hack, see: https://github.com/jcrist/msgspec/issues/497 diff --git a/tests/unit/test_signature/test_parsing.py b/tests/unit/test_signature/test_parsing.py index 25d47e58a7..dff80dff19 100644 --- a/tests/unit/test_signature/test_parsing.py +++ b/tests/unit/test_signature/test_parsing.py @@ -154,3 +154,30 @@ def handler(param: Annotated[Union[str, List[str]], Body(max_length=3, max_items assert response.status_code 
== 200 mock.assert_called_once_with("foo") + + +@pytest.mark.parametrize(("with_optional",), [(True,), (False,)]) +def test_collection_union_struct_fields(with_optional: bool) -> None: + """Test consistent behavior between optional and non-optional collection unions. + + Issue: https://github.com/litestar-org/litestar/issues/2600 identified that where a union + of collection types was optional, it would result in a 400 error when the handler was called, + whereas a non-optional union would result in a 500 error. + + This test ensures that both optional and non-optional unions of collection types result in + the same error. + """ + + annotation = Union[List[str], List[int]] + + if with_optional: + annotation = Optional[annotation] # type: ignore[misc] + + @get("/", signature_namespace={"annotation": annotation}) + def handler(param: annotation) -> None: # pyright: ignore + return None + + with create_test_client([handler]) as client: + response = client.get("/?param=foo¶m=bar¶m=123") + assert response.status_code == 500 + assert "TypeError: Type unions may not contain more than one array-like" in response.text From b43830aad7bf41c09e290030f3fcd0b211e941ce Mon Sep 17 00:00:00 2001 From: Pragy Agarwal Date: Mon, 27 Nov 2023 21:49:05 +0530 Subject: [PATCH 23/45] docs: Explain how to work around the limitation of reserved keyword arguments for route handlers (#2767) * Explain how to work around the limitation of reserved keyword arguments for route handlers * Change doc to ref * Remove admonition & move it before the example --- docs/usage/routing/handlers.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/usage/routing/handlers.rst b/docs/usage/routing/handlers.rst index 198f1a2614..b99282641f 100644 --- a/docs/usage/routing/handlers.rst +++ b/docs/usage/routing/handlers.rst @@ -106,6 +106,8 @@ Additionally, you can specify the following special kwargs, what's called "reser * ``state`` : injects a copy of the application :class:`State 
<.datastructures.state.State>`. * ``body`` : the raw request body. Available only for `http route handlers`_ +Note that if your parameters collide with any of the reserved keyword arguments above, you can :ref:`provide an alternative name `. + For example: .. code-block:: python From cbc60d271c929420a9d753ff4b076dc2f1aec73c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janek=20Nouvertn=C3=A9?= <25355197+provinzkraut@users.noreply.github.com> Date: Mon, 27 Nov 2023 20:15:38 +0100 Subject: [PATCH 24/45] v2.4.0 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Janek Nouvertné <25355197+provinzkraut@users.noreply.github.com> --- docs/release-notes/changelog.rst | 191 ++++++++++++++++++++++++++++++- pyproject.toml | 2 +- 2 files changed, 191 insertions(+), 2 deletions(-) diff --git a/docs/release-notes/changelog.rst b/docs/release-notes/changelog.rst index 3997fff18d..c4b7e528e3 100644 --- a/docs/release-notes/changelog.rst +++ b/docs/release-notes/changelog.rst @@ -3,8 +3,197 @@ 2.x Changelog ============= +.. changelog:: 2.4.0 + :date: 2023/11/27 + + .. change:: Fix ``HTTPException`` handling during concurrent dependency resolving + :type: bugfix + :pr: 2596 + :issue: 2594 + + An issue was fixed that would lead to :exc:`HTTPExceptions` not being re-raised + properly when they occurred within the resolution of nested dependencies during + the request lifecycle. + + .. change:: Fix OpenAPI examples format + :type: bugfix + :pr: 2660 + :issue: 2272 + + Fix the OpenAPI examples format by removing the wrapping object. + + Before the change, for a given model + + .. code-block:: python + + @dataclass + class Foo: + foo: int + + The following example would be generated: + + .. code-block:: json + + { + "description": "Example value", + "value": { + "foo": 7906 + } + } + + After the fix, this is now: + + .. code-block:: json + + { + "foo": 7906 + } + + .. 
change:: Fix CLI plugin commands not showing up in command list + :type: bugfix + :pr: 2441 + + Fix a bug where commands registered by CLI plugins were available, but would not + show up in the commands list + + .. change:: Fix missing ``write-only`` mark in ``dto_field()`` signature + :type: bugfix + :pr: 2684 + + Fix the missing ``write-only`` string literal in the ``mark`` parameter of + :func:`~litestar.dto.field.dto_field` + + .. change:: Fix OpenAPI schemas incorrectly flagged as duplicates + :type: bugfix + :pr: 2475 + :issue: 2471 + + Fix an issue that would lead to OpenAPI schemas being incorrectly considered + duplicates, resulting in an :exc:`ImproperlyConfiguredException` being raised. + + .. change:: Fix Pydantic URL type support in OpenAPI and serialization + :type: bugfix + :pr: 2701 + :issue: 2664 + + Add missing support for Pydantic's URL types (``AnyUrl`` and its descendants) + for both serialization and OpenAPI schema generation. These types were only + partially supported previously; Serialization support was lacking for v1 and v2, + and OpenAPI support was missing for v2. + + .. change:: Fix incorrect ``ValidationException`` message when multiple errors were encountered + :type: bugfix + :pr: 2716 + :issue: 2714 + + Fix a bug where :exc:`ValidationException` could contain duplicated messages in + ``extra`` field, when multiple errors were encountered during validation + + .. change:: Fix DTO renaming renames all fields of the same name in nested DTOs + :type: bugfix + :pr: 2764 + :issue: 2721 + + Fix an issue with nested field renaming in DTOs that would lead to all fields + with a given name to be renamed in a nested structure. + + In the below example, both ``Foo.id`` and ``Bar.id`` would have been renamed to + ``foo_id`` + + .. 
code-block:: python + + from dataclasses import dataclass + + + @dataclass + class Bar: + id: str + + + @dataclass + class Foo: + id: str + bar: Bar + + + FooDTO = DataclassDTO[Annotated[Foo, DTOConfig(rename_fields={"id": "foo_id"})]] + + .. change:: Fix handling of DTO objects nested in mappings + :type: bugfix + :pr: 2775 + :issue: 2737 + + Fix a bug where DTOs nested in a :class:`~typing.Mapping` type would fail to + serialize correctly. + + .. change:: Fix inconsistent sequence union parameter errors + :type: bugfix + :pr: 2776 + :issue: 2600 + + Fix a bug where unions of collection types would result in different errors + depending on whether the union included :obj:`None` or not. + + .. change:: Fix graceful handling of WebSocket disconnect in channels WebSockets handlers + :type: bugfix + :pr: 2691 + + Fix the behaviour of WebSocket disconnect handling within the WebSocket handlers + provided by :doc:`channels `, that would sometimes lead to + a ``RuntimeError: Unexpected ASGI message 'websocket.close', after sending 'websocket.close'.`` + exception being raised upon the closing of a WebSocket connection. + + + .. change:: Add ``server_lifespan`` hook + :type: feature + :pr: 2658 + + A new ``server_lifespan`` hook is now available on :class:`~litestar.app.Litestar`. + This hook works similar to the regular ``lifespan`` context manager, with the + difference being is that it is only called once for the entire server lifespan, + not for each application startup phase. Note that these only differ when running + with an ASGI server that's using multiple worker processes. + + .. change:: Allow rendering templates directly from strings + :type: feature + :pr: 2689 + :issue: 2687 + + A new ``template_string`` parameter was added to :class:`~litestar.template.Template`, + allowing to render templates directly from strings. + + .. seealso:: + :ref:`usage/templating:Template Files vs. Strings` + + .. 
change:: Support nested DTO field renaming + :type: feature + :pr: 2764 + :issue: 2721 + + Using similar semantics as for exclusion/inclusion, nested DTO fields can now + also be renamed: + + .. code-block:: python + + from dataclasses import dataclass + + + @dataclass + class Bar: + id: str + + + @dataclass + class Foo: + id: str + bars: list[Bar] + + + FooDTO = DataclassDTO[Annotated[Foo, DTOConfig(rename_fields={"bars.0.id": "bar_id"})]] + + .. changelog:: 2.3.2 - :date: 2023/11706 + :date: 2023/11/06 .. change:: Fix recursion error when re-using the path of a route handler for static files :type: bugfix diff --git a/pyproject.toml b/pyproject.toml index e12ab7a195..34effb1048 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,7 +56,7 @@ maintainers = [ name = "litestar" readme = "README.md" requires-python = ">=3.8,<4.0" -version = "2.3.2" +version = "2.4.0" [project.urls] Blog = "https://blog.litestar.dev" From d304baf61c7f0a695a6badf355563a7a553fb01d Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Tue, 28 Nov 2023 19:16:30 +1000 Subject: [PATCH 25/45] fix: remove imports from `contrib.jwt` from `security.jwt`. (#2784) `contrib.jwt` imports from ` security.jwt` which was importing from `contrib.jwt`, and so on. This PR removes the `contrib` imports from the main lib. 
Closes #2782 --- litestar/security/jwt/auth.py | 4 ++-- litestar/security/jwt/middleware.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/litestar/security/jwt/auth.py b/litestar/security/jwt/auth.py index 5dc325ac73..a23ec92347 100644 --- a/litestar/security/jwt/auth.py +++ b/litestar/security/jwt/auth.py @@ -4,13 +4,13 @@ from datetime import datetime, timedelta, timezone from typing import TYPE_CHECKING, Any, Callable, Generic, Iterable, Literal, Sequence, TypeVar, cast -from litestar.contrib.jwt.jwt_token import Token -from litestar.contrib.jwt.middleware import JWTAuthenticationMiddleware, JWTCookieAuthenticationMiddleware from litestar.datastructures import Cookie from litestar.enums import MediaType from litestar.middleware import DefineMiddleware from litestar.openapi.spec import Components, OAuthFlow, OAuthFlows, SecurityRequirement, SecurityScheme from litestar.security.base import AbstractSecurityConfig +from litestar.security.jwt.middleware import JWTAuthenticationMiddleware, JWTCookieAuthenticationMiddleware +from litestar.security.jwt.token import Token from litestar.status_codes import HTTP_201_CREATED from litestar.types import ControllerRouterHandler, Empty, Guard, Method, Scopes, SyncOrAsyncUnion, TypeEncodersMap diff --git a/litestar/security/jwt/middleware.py b/litestar/security/jwt/middleware.py index 960f8ab866..eac501415d 100644 --- a/litestar/security/jwt/middleware.py +++ b/litestar/security/jwt/middleware.py @@ -2,12 +2,12 @@ from typing import TYPE_CHECKING, Awaitable, Callable, Sequence -from litestar.contrib.jwt.jwt_token import Token from litestar.exceptions import NotAuthorizedException from litestar.middleware.authentication import ( AbstractAuthenticationMiddleware, AuthenticationResult, ) +from litestar.security.jwt.token import Token __all__ = ("JWTAuthenticationMiddleware", "JWTCookieAuthenticationMiddleware") From 2612798456dada20c15ae3c0d5676a62195abe7a Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: 
Tue, 28 Nov 2023 19:23:09 +1000 Subject: [PATCH 26/45] fix: raise config error when yield dependencies are cached (#2780) * fix: raise config error when yield dependencies are cached This PR makes it a configuration error to attempt to `use_cache=True` with generator dependencies. Prior to this implementation, this would result in a 500 error at runtime. The reason for making this a config error is that after the first use of the generator the cleanup code executes and so subsequent use of the generated value is dubious. Closes #2771 * fix: raise config error when yield dependencies are cached (Sourcery refactored) (#2781) 'Refactored by Sourcery' Co-authored-by: Sourcery AI <> --------- Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com> --- litestar/_kwargs/dependencies.py | 5 ++--- litestar/di.py | 36 ++++++++++++++++++++++---------- litestar/utils/__init__.py | 7 +++---- litestar/utils/predicates.py | 20 ++++++++++++++++-- tests/unit/test_deprecations.py | 8 +++++++ tests/unit/test_di.py | 12 +++++++++++ 6 files changed, 68 insertions(+), 20 deletions(-) diff --git a/litestar/_kwargs/dependencies.py b/litestar/_kwargs/dependencies.py index 486207a05a..88ffb07b1e 100644 --- a/litestar/_kwargs/dependencies.py +++ b/litestar/_kwargs/dependencies.py @@ -1,6 +1,5 @@ from __future__ import annotations -from inspect import isasyncgen, isgenerator from typing import TYPE_CHECKING, Any from litestar.utils.compat import async_next @@ -65,10 +64,10 @@ async def resolve_dependency( ) value = await dependency.provide(**dependency_kwargs) - if isgenerator(value): + if dependency.provide.has_sync_generator_dependency: cleanup_group.add(value) value = next(value) - elif isasyncgen(value): + elif dependency.provide.has_async_generator_dependency: cleanup_group.add(value) value = await async_next(value) diff --git a/litestar/di.py b/litestar/di.py index 9f4292d940..2d8227d496 100644 --- a/litestar/di.py +++ b/litestar/di.py @@ -1,25 +1,24 @@ 
from __future__ import annotations -from inspect import isclass +from inspect import isasyncgenfunction, isclass, isgeneratorfunction from typing import TYPE_CHECKING, Any from litestar.exceptions import ImproperlyConfiguredException from litestar.types import Empty from litestar.utils import ensure_async_callable -from litestar.utils.predicates import is_async_callable, is_sync_or_async_generator +from litestar.utils.predicates import is_async_callable from litestar.utils.warnings import ( warn_implicit_sync_to_thread, warn_sync_to_thread_with_async_callable, warn_sync_to_thread_with_generator, ) -__all__ = ("Provide",) - - if TYPE_CHECKING: from litestar._signature import SignatureModel from litestar.types import AnyCallable +__all__ = ("Provide",) + class Provide: """Wrapper class for dependency injection""" @@ -27,7 +26,8 @@ class Provide: __slots__ = ( "dependency", "has_sync_callable", - "has_class_dependency", + "has_sync_generator_dependency", + "has_async_generator_dependency", "signature_model", "sync_to_thread", "use_cache", @@ -39,7 +39,7 @@ class Provide: def __init__( self, - dependency: AnyCallable | type, + dependency: AnyCallable | type[Any], use_cache: bool = False, sync_to_thread: bool | None = None, ) -> None: @@ -53,14 +53,28 @@ def __init__( if not callable(dependency): raise ImproperlyConfiguredException("Provider dependency must a callable value") - has_sync_callable = isclass(dependency) or not is_async_callable(dependency) + is_class_dependency = isclass(dependency) + self.has_sync_generator_dependency = isgeneratorfunction( + dependency if not is_class_dependency else dependency.__call__ # type: ignore[operator] + ) + self.has_async_generator_dependency = isasyncgenfunction( + dependency if not is_class_dependency else dependency.__call__ # type: ignore[operator] + ) + has_generator_dependency = self.has_sync_generator_dependency or self.has_async_generator_dependency + + if has_generator_dependency and use_cache: + raise 
ImproperlyConfiguredException( + "Cannot cache generator dependency, consider using Lifespan Context instead." + ) + + has_sync_callable = is_class_dependency or not is_async_callable(dependency) if sync_to_thread is not None: - if is_sync_or_async_generator(dependency): - warn_sync_to_thread_with_generator(dependency, stacklevel=3) + if has_generator_dependency: + warn_sync_to_thread_with_generator(dependency, stacklevel=3) # type: ignore[arg-type] elif not has_sync_callable: warn_sync_to_thread_with_async_callable(dependency, stacklevel=3) # pyright: ignore - elif has_sync_callable and not is_sync_or_async_generator(dependency): + elif has_sync_callable and not has_generator_dependency: warn_implicit_sync_to_thread(dependency, stacklevel=3) if sync_to_thread and has_sync_callable: diff --git a/litestar/utils/__init__.py b/litestar/utils/__init__.py index d0901a2d12..3f627922ed 100644 --- a/litestar/utils/__init__.py +++ b/litestar/utils/__init__.py @@ -5,6 +5,7 @@ from .helpers import get_enum_string_value, get_name, unique_name_for_scope, url_quote from .path import join_paths, normalize_path from .predicates import ( + _is_sync_or_async_generator, is_annotated_type, is_any, is_async_callable, @@ -18,7 +19,6 @@ is_non_string_iterable, is_non_string_sequence, is_optional_union, - is_sync_or_async_generator, is_undefined_sentinel, is_union, ) @@ -54,7 +54,6 @@ "is_non_string_iterable", "is_non_string_sequence", "is_optional_union", - "is_sync_or_async_generator", "is_undefined_sentinel", "is_union", "join_paths", @@ -70,6 +69,7 @@ "get_litestar_scope_state": _get_litestar_scope_state, "set_litestar_scope_state": _set_litestar_scope_state, "delete_litestar_scope_state": _delete_litestar_scope_state, + "is_sync_or_async_generator": _is_sync_or_async_generator, } @@ -80,8 +80,7 @@ def __getattr__(name: str) -> Any: version="2.4", kind="import", removal_in="3.0", - info=f"'litestar.utils.{name}' is deprecated. 
The Litestar scope state is private and should not be used." - "Plugin authors should maintain their own scope state namespace.", + info=f"'litestar.utils.{name}' is deprecated.", ) return globals()["_deprecated_names"][name] raise AttributeError(f"module {__name__!r} has no attribute {name!r}") # pragma: no cover diff --git a/litestar/utils/predicates.py b/litestar/utils/predicates.py index cf3f8962bb..b2c4fb5d72 100644 --- a/litestar/utils/predicates.py +++ b/litestar/utils/predicates.py @@ -36,6 +36,7 @@ from litestar.constants import UNDEFINED_SENTINELS from litestar.types import Empty from litestar.types.builtin_types import NoneType, UnionTypes +from litestar.utils.deprecation import warn_deprecation from litestar.utils.helpers import unwrap_partial from litestar.utils.typing import get_origin_or_inner_type @@ -62,7 +63,6 @@ "is_non_string_iterable", "is_non_string_sequence", "is_optional_union", - "is_sync_or_async_generator", "is_undefined_sentinel", "is_union", ) @@ -287,7 +287,7 @@ def is_class_var(annotation: Any) -> bool: return annotation is ClassVar -def is_sync_or_async_generator(obj: Any) -> TypeGuard[AnyGenerator]: +def _is_sync_or_async_generator(obj: Any) -> TypeGuard[AnyGenerator]: """Check if the given annotation is a sync or async generator. Args: @@ -321,3 +321,19 @@ def is_undefined_sentinel(value: Any) -> bool: A boolean. 
""" return any(v is value for v in UNDEFINED_SENTINELS) + + +_deprecated_names = {"is_sync_or_async_generator": _is_sync_or_async_generator} + + +def __getattr__(name: str) -> Any: + if name in _deprecated_names: + warn_deprecation( + deprecated_name=f"litestar.utils.scope.{name}", + version="2.4", + kind="import", + removal_in="3.0", + info=f"'litestar.utils.predicates.{name}' is deprecated.", + ) + return globals()["_deprecated_names"][name] + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") # pragma: no cover diff --git a/tests/unit/test_deprecations.py b/tests/unit/test_deprecations.py index 9103e22042..ca3bb54779 100644 --- a/tests/unit/test_deprecations.py +++ b/tests/unit/test_deprecations.py @@ -138,3 +138,11 @@ def test_utils_scope_deprecations() -> None: get_litestar_scope_state, set_litestar_scope_state, ) + + +def test_is_sync_or_async_generator_deprecation() -> None: + with pytest.warns(DeprecationWarning): + from litestar.utils.predicates import is_sync_or_async_generator # noqa: F401 + + with pytest.warns(DeprecationWarning): + from litestar.utils import is_sync_or_async_generator as _ # noqa: F401 diff --git a/tests/unit/test_di.py b/tests/unit/test_di.py index c7e485098c..cdcccb8dcc 100644 --- a/tests/unit/test_di.py +++ b/tests/unit/test_di.py @@ -153,3 +153,15 @@ def test_dependency_has_async_callable(dep: Any, exp: bool) -> None: def test_raises_when_dependency_is_not_callable() -> None: with pytest.raises(ImproperlyConfiguredException): Provide(123) # type: ignore + + +@pytest.mark.parametrize( + ("dep",), + [ + (generator_func,), + (async_generator_func,), + ], +) +def test_raises_when_generator_dependency_is_cached(dep: Any) -> None: + with pytest.raises(ImproperlyConfiguredException): + Provide(dep, use_cache=True) From 2027019b5eba8c069241b789e6412acf74887baf Mon Sep 17 00:00:00 2001 From: guacs <126393040+guacs@users.noreply.github.com> Date: Tue, 28 Nov 2023 15:12:24 +0530 Subject: [PATCH 27/45] chore: bump version 
--- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 34effb1048..c99654c43b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,7 +56,7 @@ maintainers = [ name = "litestar" readme = "README.md" requires-python = ">=3.8,<4.0" -version = "2.4.0" +version = "2.4.1" [project.urls] Blog = "https://blog.litestar.dev" From b54f2f3d433bf0f909114064121ca203a1942470 Mon Sep 17 00:00:00 2001 From: guacs <126393040+guacs@users.noreply.github.com> Date: Tue, 28 Nov 2023 16:12:57 +0530 Subject: [PATCH 28/45] chore: add release notes for v2.4.1 (#2786) chore: add release notes --- docs/release-notes/changelog.rst | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/docs/release-notes/changelog.rst b/docs/release-notes/changelog.rst index c4b7e528e3..64d169c08b 100644 --- a/docs/release-notes/changelog.rst +++ b/docs/release-notes/changelog.rst @@ -3,6 +3,26 @@ 2.x Changelog ============= +.. changelog:: 2.4.1 + :date: 2023/11/28 + + .. change:: Fix circular import when importing from ``litestar.security.jwt`` + :type: bugfix + :pr: 2784 + :issue: 2782 + + An :exc:`ImportError` was raised when trying to import from ``litestar.security.jwt``. This was fixed + by removing the imports from the deprecated ``litestar.contrib.jwt`` within ``litesetar.security.jwt``. + + .. change:: Raise config error when generator dependencies are cached + :type: bugfix + :pr: 2780 + :issue: 2771 + + Previously, an :exc:`InternalServerError` was raised when attempting to use + `use_cache=True` with generator dependencies. This will now raise a configuration + error during application startup. + .. 
changelog:: 2.4.0 :date: 2023/11/27 From cb8afc2fa2a209ed75e65eb02a4cc7bff4529c91 Mon Sep 17 00:00:00 2001 From: guacs <126393040+guacs@users.noreply.github.com> Date: Tue, 28 Nov 2023 16:53:35 +0530 Subject: [PATCH 29/45] ci: correct command to run pydantic v1 test (#2787) --- .github/workflows/publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 577b15dd88..9f5f0665e3 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -70,7 +70,7 @@ jobs: run: echo "PYTHONPATH=$PWD" >> $GITHUB_ENV - name: Test - run: python -m unittest test_apps/pydantic_1_app.py + run: pdm run python -m unittest test_apps/pydantic_1_app.py publish-release: name: upload release to PyPI From 84710a1d657ccc95c9854eb6536eadd117642a6a Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Tue, 28 Nov 2023 23:15:47 +1000 Subject: [PATCH 30/45] fix: deduplication of openapi params (#2788) Previous behavior would consider two parameters with the same name but declared in different places (eg., header, cookie) as an error. This PR incorporates the "param_in" value when validating params for openapi spec so that it would only be an error to have multiple different parameters, of the same name declared in the same place. 
Closes #2662 --- litestar/_openapi/parameters.py | 16 +++++----- tests/unit/test_openapi/test_integration.py | 35 +++++++++++++++++++-- 2 files changed, 41 insertions(+), 10 deletions(-) diff --git a/litestar/_openapi/parameters.py b/litestar/_openapi/parameters.py index 60aef59882..08f7e9f54f 100644 --- a/litestar/_openapi/parameters.py +++ b/litestar/_openapi/parameters.py @@ -38,7 +38,7 @@ def __init__(self, route_handler: BaseRouteHandler) -> None: route_handler: Associated route handler """ self.route_handler = route_handler - self._parameters: dict[str, Parameter] = {} + self._parameters: dict[tuple[str, str], Parameter] = {} def add(self, parameter: Parameter) -> None: """Add a ``Parameter`` to the collection. @@ -50,18 +50,18 @@ def add(self, parameter: Parameter) -> None: ``ImproperlyConfiguredException``. """ - if parameter.name not in self._parameters: + if (parameter.name, parameter.param_in) not in self._parameters: # because we are defining routes as unique per path, we have to handle here a situation when there is an optional # path parameter. e.g. get(path=["/", "/{param:str}"]). When parsing the parameter for path, the route handler # would still have a kwarg called param: # def handler(param: str | None) -> ... 
if parameter.param_in != ParamType.QUERY or all( - "{" + parameter.name + ":" not in path for path in self.route_handler.paths + f"{{{parameter.name}:" not in path for path in self.route_handler.paths ): - self._parameters[parameter.name] = parameter + self._parameters[(parameter.name, parameter.param_in)] = parameter return - pre_existing = self._parameters[parameter.name] + pre_existing = self._parameters[(parameter.name, parameter.param_in)] if parameter == pre_existing: return @@ -206,13 +206,13 @@ def create_parameter_for_handler( dependency_providers = route_handler.resolve_dependencies() layered_parameters = route_handler.resolve_layered_parameters() - unique_handler_fields = tuple( + unique_handler_fields = ( (k, v) for k, v in handler_fields.items() if k not in RESERVED_KWARGS and k not in layered_parameters ) - unique_layered_fields = tuple( + unique_layered_fields = ( (k, v) for k, v in layered_parameters.items() if k not in RESERVED_KWARGS and k not in handler_fields ) - intersection_fields = tuple( + intersection_fields = ( (k, v) for k, v in handler_fields.items() if k not in RESERVED_KWARGS and k in layered_parameters ) diff --git a/tests/unit/test_openapi/test_integration.py b/tests/unit/test_openapi/test_integration.py index b0fd5fcf49..029719a7ce 100644 --- a/tests/unit/test_openapi/test_integration.py +++ b/tests/unit/test_openapi/test_integration.py @@ -1,17 +1,19 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Generic, Optional, TypeVar +from typing import Generic, Optional, TypeVar, cast import msgspec import pytest import yaml from typing_extensions import Annotated -from litestar import Controller, get, post +from litestar import Controller, Litestar, get, post from litestar.app import DEFAULT_OPENAPI_CONFIG from litestar.enums import MediaType, OpenAPIMediaType, ParamType from litestar.openapi import OpenAPIConfig, OpenAPIController +from litestar.openapi.spec import Parameter as OpenAPIParameter 
+from litestar.params import Parameter from litestar.serialization.msgspec_hooks import decode_json, encode_json, get_serializer from litestar.status_codes import HTTP_200_OK, HTTP_404_NOT_FOUND from litestar.testing import create_test_client @@ -291,3 +293,32 @@ def handler_foo_int() -> Foo[int]: } }, } + + +def test_allow_multiple_parameters_with_same_name_but_different_location() -> None: + """Test that we can support params with the same name if they are in different locations, e.g., cookie and header. + + https://github.com/litestar-org/litestar/issues/2662 + """ + + @post("/test") + async def route( + name: Annotated[Optional[str], Parameter(cookie="name")] = None, # noqa: UP007 + name_header: Annotated[Optional[str], Parameter(header="name")] = None, # noqa: UP007 + ) -> str: + return name or name_header or "" + + app = Litestar(route_handlers=[route], debug=True) + assert app.openapi_schema.paths is not None + schema = app.openapi_schema + paths = schema.paths + assert paths is not None + path = paths["/test"] + assert path.post is not None + parameters = path.post.parameters + assert parameters is not None + assert len(parameters) == 2 + assert all(isinstance(param, OpenAPIParameter) for param in parameters) + params = cast("list[OpenAPIParameter]", parameters) + assert all(param.name == "name" for param in params) + assert tuple(param.param_in for param in params) == ("cookie", "header") From af3ac4dce32cc7a743055ad1fe7321feed478c5b Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Wed, 29 Nov 2023 02:02:49 +1000 Subject: [PATCH 31/45] fix: DTOData annotation without DTO (#2789) This PR resolves an issue where a handler would be allowed to be registered with a `DTOData` annotation, but no `dto` defined. This is now a configuration error. 
Closes #2779 --- litestar/handlers/base.py | 10 ++++++++++ .../test_base_handlers/test_validations.py | 20 +++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/litestar/handlers/base.py b/litestar/handlers/base.py index 61be4f2f3b..6fe11bdf90 100644 --- a/litestar/handlers/base.py +++ b/litestar/handlers/base.py @@ -7,6 +7,7 @@ from litestar._signature import SignatureModel from litestar.config.app import ExperimentalFeatures from litestar.di import Provide +from litestar.dto import DTOData from litestar.exceptions import ImproperlyConfiguredException from litestar.serialization import default_deserializer, default_serializer from litestar.types import ( @@ -526,6 +527,15 @@ def on_registration(self, app: Litestar) -> None: def _validate_handler_function(self) -> None: """Validate the route handler function once set by inspecting its return annotations.""" + if ( + self.parsed_data_field is not None + and self.parsed_data_field.is_subclass_of(DTOData) + and not self.resolve_data_dto() + ): + raise ImproperlyConfiguredException( + f"Handler function {self.handler_name} has a data parameter that is a subclass of DTOData but no " + "DTO has been registered for it." + ) def __str__(self) -> str: """Return a unique identifier for the route handler. 
diff --git a/tests/unit/test_handlers/test_base_handlers/test_validations.py b/tests/unit/test_handlers/test_base_handlers/test_validations.py index 58f0a306be..a0b168a230 100644 --- a/tests/unit/test_handlers/test_base_handlers/test_validations.py +++ b/tests/unit/test_handlers/test_base_handlers/test_validations.py @@ -1,5 +1,9 @@ +from dataclasses import dataclass + import pytest +from litestar import Litestar, post +from litestar.dto import DTOData from litestar.exceptions import ImproperlyConfiguredException from litestar.handlers.base import BaseRouteHandler @@ -9,3 +13,19 @@ def test_raise_no_fn_validation() -> None: with pytest.raises(ImproperlyConfiguredException): handler.fn + + +def test_dto_data_annotation_with_no_resolved_dto() -> None: + @dataclass + class Model: + """Example dataclass model.""" + + hello: str + + @post("/") + async def async_hello_world(data: DTOData[Model]) -> Model: + """Route Handler that outputs hello world.""" + return data.create_instance() + + with pytest.raises(ImproperlyConfiguredException): + Litestar(route_handlers=[async_hello_world]) From 2fad9811d6b2722e7704d5b995b860a125d83396 Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Wed, 29 Nov 2023 02:07:46 +1000 Subject: [PATCH 32/45] chore: update deps & tooling (#2793) --- .pre-commit-config.yaml | 10 +- pdm.lock | 427 ++++++++++++++++++++-------------------- pyproject.toml | 1 + 3 files changed, 219 insertions(+), 219 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index dff2c48a44..96197f1823 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ default_language_version: python: "3.8" repos: - repo: https://github.com/compilerla/conventional-pre-commit - rev: v2.4.0 + rev: v3.0.0 hooks: - id: conventional-pre-commit stages: [commit-msg] @@ -23,7 +23,7 @@ repos: - id: unasyncd additional_dependencies: ["ruff"] - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: "v0.1.5" + rev: "v0.1.6" hooks: - id: ruff 
args: ["--fix"] @@ -40,12 +40,12 @@ repos: hooks: - id: blacken-docs - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v3.0.3" + rev: "v3.1.0" hooks: - id: prettier exclude: "_templates|.git|.all-contributorsrc" - repo: https://github.com/python-formate/flake8-dunder-all - rev: v0.3.0 + rev: v0.3.1 hooks: - id: ensure-dunder-all exclude: "test*|examples*|tools" @@ -56,7 +56,7 @@ repos: - id: slotscheck exclude: "test_*|docs" - repo: https://github.com/sphinx-contrib/sphinx-lint - rev: "v0.8.1" + rev: "v0.9.0" hooks: - id: sphinx-lint - repo: local diff --git a/pdm.lock b/pdm.lock index bb6e4860d8..118c916546 100644 --- a/pdm.lock +++ b/pdm.lock @@ -87,7 +87,7 @@ files = [ [[package]] name = "anyio" -version = "4.0.0" +version = "4.1.0" requires_python = ">=3.8" summary = "High level compatibility layer for multiple asynchronous event loop implementations" dependencies = [ @@ -96,8 +96,8 @@ dependencies = [ "sniffio>=1.1", ] files = [ - {file = "anyio-4.0.0-py3-none-any.whl", hash = "sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f"}, - {file = "anyio-4.0.0.tar.gz", hash = "sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a"}, + {file = "anyio-4.1.0-py3-none-any.whl", hash = "sha256:56a415fbc462291813a94528a779597226619c8e78af7de0507333f700011e5f"}, + {file = "anyio-4.1.0.tar.gz", hash = "sha256:5a0bec7085176715be77df87fc66d6c9d70626bd752fcc85f57cdbee5b3760da"}, ] [[package]] @@ -286,7 +286,7 @@ files = [ [[package]] name = "beanie" -version = "1.23.3" +version = "1.23.6" requires_python = ">=3.7,<4.0" summary = "Asynchronous Python ODM for MongoDB" dependencies = [ @@ -298,8 +298,8 @@ dependencies = [ "typing-extensions>=4.7; python_version < \"3.11\"", ] files = [ - {file = "beanie-1.23.3-py3-none-any.whl", hash = "sha256:6f58a7c6ff8e5e76036e9307ab3951221667804e84460bd00c0556b2c793d91d"}, - {file = "beanie-1.23.3.tar.gz", hash = "sha256:19471297f3ca0ffbef0a17d1c8b1bb101d0bddb0c91679b566bdfc90ef31166f"}, + 
{file = "beanie-1.23.6-py3-none-any.whl", hash = "sha256:c780e0f951ee40faa688e7b0e56dc963486087c7e3970cd5a3d99cb47703e677"}, + {file = "beanie-1.23.6.tar.gz", hash = "sha256:9a9a36936188a44dcf16aa8930b450249eaa5c7ebc0edc056ad194bdec3539ca"}, ] [[package]] @@ -791,36 +791,36 @@ files = [ [[package]] name = "cryptography" -version = "41.0.5" +version = "41.0.7" requires_python = ">=3.7" summary = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." dependencies = [ "cffi>=1.12", ] files = [ - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"}, - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"}, - {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"}, - {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", 
hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"}, - {file = "cryptography-41.0.5.tar.gz", 
hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, + {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, + {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, + {file = 
"cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, + {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, ] [[package]] @@ -952,12 +952,12 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.3" +version = "1.2.0" requires_python = ">=3.7" summary = "Backport of PEP 654 (exception groups)" files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = 
"sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [[package]] @@ -1252,19 +1252,19 @@ files = [ [[package]] name = "httpx" -version = "0.25.1" +version = "0.25.2" requires_python = ">=3.8" summary = "The next generation HTTP client." dependencies = [ "anyio", "certifi", - "httpcore", + "httpcore==1.*", "idna", "sniffio", ] files = [ - {file = "httpx-0.25.1-py3-none-any.whl", hash = "sha256:fec7d6cc5c27c578a391f7e87b9aa7d3d8fbcd034f6399f9f79b45bcc12a866a"}, - {file = "httpx-0.25.1.tar.gz", hash = "sha256:ffd96d5cf901e63863d9f1b4b6807861dbea4d301613415d9e6e57ead15fc5d0"}, + {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"}, + {file = "httpx-0.25.2.tar.gz", hash = "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8"}, ] [[package]] @@ -1279,7 +1279,7 @@ files = [ [[package]] name = "hypothesis" -version = "6.88.3" +version = "6.91.0" requires_python = ">=3.8" summary = "A library for property-based testing" dependencies = [ @@ -1288,8 +1288,8 @@ dependencies = [ "sortedcontainers<3.0.0,>=2.1.0", ] files = [ - {file = "hypothesis-6.88.3-py3-none-any.whl", hash = "sha256:781ce6fd35e11ca77ad132a20cebe66fd215f56678f8efd6b87013b14500151b"}, - {file = "hypothesis-6.88.3.tar.gz", hash = "sha256:5cfda253e34726c98ab04b9595fca15677ee9f4f6055146aea25a6278d71f6f1"}, + {file = "hypothesis-6.91.0-py3-none-any.whl", hash = "sha256:316e06d6f7d5f8ab87bcc7417fca750a2b082ed3ce902b979816b413276680b3"}, + {file = "hypothesis-6.91.0.tar.gz", hash = 
"sha256:a9f61a2bcfc342febcc1d04b80a99e789c57b700f91cbd43bbdb5d651af385cd"}, ] [[package]] @@ -1734,7 +1734,7 @@ files = [ [[package]] name = "mypy" -version = "1.7.0" +version = "1.7.1" requires_python = ">=3.8" summary = "Optional static typing for Python" dependencies = [ @@ -1743,33 +1743,33 @@ dependencies = [ "typing-extensions>=4.1.0", ] files = [ - {file = "mypy-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5da84d7bf257fd8f66b4f759a904fd2c5a765f70d8b52dde62b521972a0a2357"}, - {file = "mypy-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a3637c03f4025f6405737570d6cbfa4f1400eb3c649317634d273687a09ffc2f"}, - {file = "mypy-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b633f188fc5ae1b6edca39dae566974d7ef4e9aaaae00bc36efe1f855e5173ac"}, - {file = "mypy-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d6ed9a3997b90c6f891138e3f83fb8f475c74db4ccaa942a1c7bf99e83a989a1"}, - {file = "mypy-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:1fe46e96ae319df21359c8db77e1aecac8e5949da4773c0274c0ef3d8d1268a9"}, - {file = "mypy-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:df67fbeb666ee8828f675fee724cc2cbd2e4828cc3df56703e02fe6a421b7401"}, - {file = "mypy-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a79cdc12a02eb526d808a32a934c6fe6df07b05f3573d210e41808020aed8b5d"}, - {file = "mypy-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f65f385a6f43211effe8c682e8ec3f55d79391f70a201575def73d08db68ead1"}, - {file = "mypy-1.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e81ffd120ee24959b449b647c4b2fbfcf8acf3465e082b8d58fd6c4c2b27e46"}, - {file = "mypy-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:f29386804c3577c83d76520abf18cfcd7d68264c7e431c5907d250ab502658ee"}, - {file = "mypy-1.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:87c076c174e2c7ef8ab416c4e252d94c08cd4980a10967754f91571070bf5fbe"}, - {file = 
"mypy-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6cb8d5f6d0fcd9e708bb190b224089e45902cacef6f6915481806b0c77f7786d"}, - {file = "mypy-1.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93e76c2256aa50d9c82a88e2f569232e9862c9982095f6d54e13509f01222fc"}, - {file = "mypy-1.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cddee95dea7990e2215576fae95f6b78a8c12f4c089d7e4367564704e99118d3"}, - {file = "mypy-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:d01921dbd691c4061a3e2ecdbfbfad029410c5c2b1ee88946bf45c62c6c91210"}, - {file = "mypy-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:185cff9b9a7fec1f9f7d8352dff8a4c713b2e3eea9c6c4b5ff7f0edf46b91e41"}, - {file = "mypy-1.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7b1e399c47b18feb6f8ad4a3eef3813e28c1e871ea7d4ea5d444b2ac03c418"}, - {file = "mypy-1.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc9fe455ad58a20ec68599139ed1113b21f977b536a91b42bef3ffed5cce7391"}, - {file = "mypy-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d0fa29919d2e720c8dbaf07d5578f93d7b313c3e9954c8ec05b6d83da592e5d9"}, - {file = "mypy-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b53655a295c1ed1af9e96b462a736bf083adba7b314ae775563e3fb4e6795f5"}, - {file = "mypy-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1b06b4b109e342f7dccc9efda965fc3970a604db70f8560ddfdee7ef19afb05"}, - {file = "mypy-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bf7a2f0a6907f231d5e41adba1a82d7d88cf1f61a70335889412dec99feeb0f8"}, - {file = "mypy-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:551d4a0cdcbd1d2cccdcc7cb516bb4ae888794929f5b040bb51aae1846062901"}, - {file = "mypy-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55d28d7963bef00c330cb6461db80b0b72afe2f3c4e2963c99517cf06454e665"}, - {file = "mypy-1.7.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:870bd1ffc8a5862e593185a4c169804f2744112b4a7c55b93eb50f48e7a77010"}, - {file = "mypy-1.7.0-py3-none-any.whl", hash = "sha256:96650d9a4c651bc2a4991cf46f100973f656d69edc7faf91844e87fe627f7e96"}, - {file = "mypy-1.7.0.tar.gz", hash = "sha256:1e280b5697202efa698372d2f39e9a6713a0395a756b1c6bd48995f8d72690dc"}, + {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, + {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, + {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, + {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, + {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, + {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, + {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, + {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, + {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, + {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, + {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, + {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, + {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, + {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, + {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, + {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, + {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, + {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, + {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, + {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, + {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, + {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, + {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, + {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, + {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, + {file = "mypy-1.7.1-py3-none-any.whl", hash = 
"sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, + {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, ] [[package]] @@ -2004,7 +2004,7 @@ files = [ [[package]] name = "polyfactory" -version = "2.11.0" +version = "2.12.0" requires_python = "<4.0,>=3.8" summary = "Mock data generation factories" dependencies = [ @@ -2012,8 +2012,8 @@ dependencies = [ "typing-extensions", ] files = [ - {file = "polyfactory-2.11.0-py3-none-any.whl", hash = "sha256:f1146ee171b55575a50c93cb36d0d68c5ab69b85ab747d7791bcf986a1be1253"}, - {file = "polyfactory-2.11.0.tar.gz", hash = "sha256:b31b4997cc8128568f73f641b6858ed64be0fd3fd3627071c027b619c5b14d88"}, + {file = "polyfactory-2.12.0-py3-none-any.whl", hash = "sha256:35c170f62763ec7e64d38b0981e4a95e3dd32870f10e1251c5f97dda0525bd64"}, + {file = "polyfactory-2.12.0.tar.gz", hash = "sha256:26dc3a52baae1ebd6386708d9a99f8ea4ef57c9d45e556815ee5e44a1cd27fc0"}, ] [[package]] @@ -2035,12 +2035,12 @@ files = [ [[package]] name = "prometheus-client" -version = "0.18.0" +version = "0.19.0" requires_python = ">=3.8" summary = "Python client for the Prometheus monitoring system." 
files = [ - {file = "prometheus_client-0.18.0-py3-none-any.whl", hash = "sha256:8de3ae2755f890826f4b6479e5571d4f74ac17a81345fe69a6778fdb92579184"}, - {file = "prometheus_client-0.18.0.tar.gz", hash = "sha256:35f7a8c22139e2bb7ca5a698e92d38145bc8dc74c1c0bf56f25cca886a764e17"}, + {file = "prometheus_client-0.19.0-py3-none-any.whl", hash = "sha256:c88b1e6ecf6b41cd8fb5731c7ae919bf66df6ec6fafa555cd6c0e16ca169ae92"}, + {file = "prometheus_client-0.19.0.tar.gz", hash = "sha256:4585b0d1223148c27a225b10dbec5ae9bc4c81a99a3fa80774fa6209935324e1"}, ] [[package]] @@ -2065,122 +2065,121 @@ files = [ [[package]] name = "pydantic" -version = "2.4.2" +version = "2.5.2" requires_python = ">=3.7" summary = "Data validation using Python type hints" dependencies = [ "annotated-types>=0.4.0", - "pydantic-core==2.10.1", + "pydantic-core==2.14.5", "typing-extensions>=4.6.1", ] files = [ - {file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"}, - {file = "pydantic-2.4.2.tar.gz", hash = "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"}, + {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, + {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, ] [[package]] name = "pydantic-core" -version = "2.10.1" +version = "2.14.5" requires_python = ">=3.7" summary = "" dependencies = [ "typing-extensions!=4.7.0,>=4.6.0", ] files = [ - {file = "pydantic_core-2.10.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63"}, - {file = "pydantic_core-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6"}, - {file = "pydantic_core-2.10.1-cp310-none-win32.whl", hash = "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b"}, - {file = "pydantic_core-2.10.1-cp310-none-win_amd64.whl", hash = "sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0"}, - {file = "pydantic_core-2.10.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea"}, - {file = "pydantic_core-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4"}, - {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607"}, - {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f"}, - {file = "pydantic_core-2.10.1-cp311-none-win32.whl", hash = "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6"}, - {file = "pydantic_core-2.10.1-cp311-none-win_amd64.whl", hash = "sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27"}, - {file = "pydantic_core-2.10.1-cp311-none-win_arm64.whl", hash = "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325"}, - {file = "pydantic_core-2.10.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921"}, - {file = "pydantic_core-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118"}, - {file = 
"pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d"}, - {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f"}, - {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c"}, - {file = "pydantic_core-2.10.1-cp312-none-win32.whl", hash = "sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f"}, - {file = "pydantic_core-2.10.1-cp312-none-win_amd64.whl", hash = "sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430"}, - {file = "pydantic_core-2.10.1-cp312-none-win_arm64.whl", hash = "sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94"}, - {file = "pydantic_core-2.10.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc"}, - {file = "pydantic_core-2.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e"}, - {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561"}, - {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de"}, - {file = "pydantic_core-2.10.1-cp38-none-win32.whl", hash = "sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee"}, - {file = "pydantic_core-2.10.1-cp38-none-win_amd64.whl", hash = "sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e"}, - {file = "pydantic_core-2.10.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970"}, - {file = "pydantic_core-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b"}, - 
{file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429"}, - {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7"}, - {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595"}, - {file = "pydantic_core-2.10.1-cp39-none-win32.whl", hash = "sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a"}, - {file = "pydantic_core-2.10.1-cp39-none-win_amd64.whl", hash = "sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521"}, - {file = 
"pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776"}, - {file = "pydantic_core-2.10.1.tar.gz", hash = "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, + {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, + {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, + {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, + {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, + {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, + {file = 
"pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, + {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, + {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, + {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, + {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, + {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, + 
{file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, + {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, + {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, + {file = 
"pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, + {file = 
"pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, + {file = 
"pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, + {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, ] [[package]] @@ -2198,17 +2197,17 @@ files = [ [[package]] name = "pydantic" -version = "2.4.2" +version = "2.5.2" extras = ["email"] requires_python = ">=3.7" summary = "Data validation using Python type hints" dependencies = [ "email-validator>=2.0.0", - "pydantic==2.4.2", + "pydantic==2.5.2", ] files = [ - {file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"}, - {file = "pydantic-2.4.2.tar.gz", hash = "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"}, + {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, + {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, ] [[package]] @@ -2319,15 +2318,15 @@ files = [ [[package]] name = "pyright" -version = "1.1.335" +version = "1.1.337" requires_python = ">=3.7" summary = "Command line wrapper for pyright" dependencies = [ "nodeenv>=1.6.0", ] files = [ - {file = "pyright-1.1.335-py3-none-any.whl", hash = "sha256:1149d99d5cea3997010a5ac39611534e0426125d5090913ae5cb1e0e2c9fbca3"}, - {file = "pyright-1.1.335.tar.gz", hash = "sha256:12c09c1644b223515cc342f7d383e55eefeedd730d7875e39a2cf338c2d99be4"}, + {file = "pyright-1.1.337-py3-none-any.whl", hash = "sha256:8cbd4ef71797258f816a8393a758c9c91213479f472082d0e3a735ef7ab5f65a"}, + {file = "pyright-1.1.337.tar.gz", hash = "sha256:81d81f839d1750385390c4c4a7b84b062ece2f9a078f87055d4d2a5914ef2a08"}, ] [[package]] @@ -2402,16 +2401,16 @@ files = [ [[package]] name = "pytest-rerunfailures" -version = "12.0" +version = "13.0" requires_python = ">=3.7" summary = "pytest plugin to re-run tests to 
eliminate flaky failures" dependencies = [ "packaging>=17.1", - "pytest>=6.2", + "pytest>=7", ] files = [ - {file = "pytest-rerunfailures-12.0.tar.gz", hash = "sha256:784f462fa87fe9bdf781d0027d856b47a4bfe6c12af108f6bd887057a917b48e"}, - {file = "pytest_rerunfailures-12.0-py3-none-any.whl", hash = "sha256:9a1afd04e21b8177faf08a9bbbf44de7a0fe3fc29f8ddbe83b9684bd5f8f92a9"}, + {file = "pytest-rerunfailures-13.0.tar.gz", hash = "sha256:e132dbe420bc476f544b96e7036edd0a69707574209b6677263c950d19b09199"}, + {file = "pytest_rerunfailures-13.0-py3-none-any.whl", hash = "sha256:34919cb3fcb1f8e5d4b940aa75ccdea9661bade925091873b7c6fa5548333069"}, ] [[package]] @@ -2429,7 +2428,7 @@ files = [ [[package]] name = "pytest-xdist" -version = "3.4.0" +version = "3.5.0" requires_python = ">=3.7" summary = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" dependencies = [ @@ -2437,8 +2436,8 @@ dependencies = [ "pytest>=6.2.0", ] files = [ - {file = "pytest-xdist-3.4.0.tar.gz", hash = "sha256:3a94a931dd9e268e0b871a877d09fe2efb6175c2c23d60d56a6001359002b832"}, - {file = "pytest_xdist-3.4.0-py3-none-any.whl", hash = "sha256:e513118bf787677a427e025606f55e95937565e06dfaac8d87f55301e57ae607"}, + {file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"}, + {file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"}, ] [[package]] @@ -2579,7 +2578,7 @@ files = [ [[package]] name = "rich" -version = "13.6.0" +version = "13.7.0" requires_python = ">=3.7.0" summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" dependencies = [ @@ -2588,8 +2587,8 @@ dependencies = [ "typing-extensions<5.0,>=4.0.0; python_version < \"3.9\"", ] files = [ - {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, - {file = 
"rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, + {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, + {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, ] [[package]] @@ -2684,27 +2683,27 @@ files = [ [[package]] name = "ruff" -version = "0.1.5" +version = "0.1.6" requires_python = ">=3.7" summary = "An extremely fast Python linter and code formatter, written in Rust." files = [ - {file = "ruff-0.1.5-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:32d47fc69261c21a4c48916f16ca272bf2f273eb635d91c65d5cd548bf1f3d96"}, - {file = "ruff-0.1.5-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:171276c1df6c07fa0597fb946139ced1c2978f4f0b8254f201281729981f3c17"}, - {file = "ruff-0.1.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ef33cd0bb7316ca65649fc748acc1406dfa4da96a3d0cde6d52f2e866c7b39"}, - {file = "ruff-0.1.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b2c205827b3f8c13b4a432e9585750b93fd907986fe1aec62b2a02cf4401eee6"}, - {file = "ruff-0.1.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb408e3a2ad8f6881d0f2e7ad70cddb3ed9f200eb3517a91a245bbe27101d379"}, - {file = "ruff-0.1.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f20dc5e5905ddb407060ca27267c7174f532375c08076d1a953cf7bb016f5a24"}, - {file = "ruff-0.1.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aafb9d2b671ed934998e881e2c0f5845a4295e84e719359c71c39a5363cccc91"}, - {file = "ruff-0.1.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4894dddb476597a0ba4473d72a23151b8b3b0b5f958f2cf4d3f1c572cdb7af7"}, - {file = "ruff-0.1.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a00a7ec893f665ed60008c70fe9eeb58d210e6b4d83ec6654a9904871f982a2a"}, - {file = "ruff-0.1.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8c11206b47f283cbda399a654fd0178d7a389e631f19f51da15cbe631480c5b"}, - {file = "ruff-0.1.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fa29e67b3284b9a79b1a85ee66e293a94ac6b7bb068b307a8a373c3d343aa8ec"}, - {file = "ruff-0.1.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9b97fd6da44d6cceb188147b68db69a5741fbc736465b5cea3928fdac0bc1aeb"}, - {file = "ruff-0.1.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:721f4b9d3b4161df8dc9f09aa8562e39d14e55a4dbaa451a8e55bdc9590e20f4"}, - {file = "ruff-0.1.5-py3-none-win32.whl", hash = "sha256:f80c73bba6bc69e4fdc73b3991db0b546ce641bdcd5b07210b8ad6f64c79f1ab"}, - {file = "ruff-0.1.5-py3-none-win_amd64.whl", hash = "sha256:c21fe20ee7d76206d290a76271c1af7a5096bc4c73ab9383ed2ad35f852a0087"}, - {file = "ruff-0.1.5-py3-none-win_arm64.whl", hash = "sha256:82bfcb9927e88c1ed50f49ac6c9728dab3ea451212693fe40d08d314663e412f"}, - {file = "ruff-0.1.5.tar.gz", hash = "sha256:5cbec0ef2ae1748fb194f420fb03fb2c25c3258c86129af7172ff8f198f125ab"}, + {file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:88b8cdf6abf98130991cbc9f6438f35f6e8d41a02622cc5ee130a02a0ed28703"}, + {file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5c549ed437680b6105a1299d2cd30e4964211606eeb48a0ff7a93ef70b902248"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cf5f701062e294f2167e66d11b092bba7af6a057668ed618a9253e1e90cfd76"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:05991ee20d4ac4bb78385360c684e4b417edd971030ab12a4fbd075ff535050e"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87455a0c1f739b3c069e2f4c43b66479a54dea0276dd5d4d67b091265f6fd1dc"}, + {file = 
"ruff-0.1.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:683aa5bdda5a48cb8266fcde8eea2a6af4e5700a392c56ea5fb5f0d4bfdc0240"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:137852105586dcbf80c1717facb6781555c4e99f520c9c827bd414fac67ddfb6"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd98138a98d48a1c36c394fd6b84cd943ac92a08278aa8ac8c0fdefcf7138f35"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0cd909d25f227ac5c36d4e7e681577275fb74ba3b11d288aff7ec47e3ae745"}, + {file = "ruff-0.1.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8fd1c62a47aa88a02707b5dd20c5ff20d035d634aa74826b42a1da77861b5ff"}, + {file = "ruff-0.1.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd89b45d374935829134a082617954120d7a1470a9f0ec0e7f3ead983edc48cc"}, + {file = "ruff-0.1.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:491262006e92f825b145cd1e52948073c56560243b55fb3b4ecb142f6f0e9543"}, + {file = "ruff-0.1.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ea284789861b8b5ca9d5443591a92a397ac183d4351882ab52f6296b4fdd5462"}, + {file = "ruff-0.1.6-py3-none-win32.whl", hash = "sha256:1610e14750826dfc207ccbcdd7331b6bd285607d4181df9c1c6ae26646d6848a"}, + {file = "ruff-0.1.6-py3-none-win_amd64.whl", hash = "sha256:4558b3e178145491e9bc3b2ee3c4b42f19d19384eaa5c59d10acf6e8f8b57e33"}, + {file = "ruff-0.1.6-py3-none-win_arm64.whl", hash = "sha256:03910e81df0d8db0e30050725a5802441c2022ea3ae4fe0609b76081731accbc"}, + {file = "ruff-0.1.6.tar.gz", hash = "sha256:1b09f29b16c6ead5ea6b097ef2764b42372aebe363722f1605ecbcd2b9207184"}, ] [[package]] @@ -2849,7 +2848,7 @@ files = [ [[package]] name = "sphinx-click" -version = "5.0.1" +version = "5.1.0" requires_python = ">=3.8" summary = "Sphinx extension that automatically documents click applications" dependencies = [ @@ -2858,8 +2857,8 @@ dependencies = [ "sphinx>=2.0", ] 
files = [ - {file = "sphinx-click-5.0.1.tar.gz", hash = "sha256:fcc7df15e56e3ff17ebf446cdd316c2eb79580b37c49579fba11e5468802ef25"}, - {file = "sphinx_click-5.0.1-py3-none-any.whl", hash = "sha256:31836ca22f746d3c26cbfdfe0c58edf0bca5783731a0b2e25bb6d59800bb75a1"}, + {file = "sphinx-click-5.1.0.tar.gz", hash = "sha256:6812c2db62d3fae71a4addbe5a8a0a16c97eb491f3cd63fe34b4ed7e07236f33"}, + {file = "sphinx_click-5.1.0-py3-none-any.whl", hash = "sha256:ae97557a4e9ec646045089326c3b90e026c58a45e083b8f35f17d5d6558d08a0"}, ] [[package]] @@ -3332,7 +3331,7 @@ files = [ [[package]] name = "types-redis" -version = "4.6.0.10" +version = "4.6.0.11" requires_python = ">=3.7" summary = "Typing stubs for redis" dependencies = [ @@ -3340,8 +3339,8 @@ dependencies = [ "types-pyOpenSSL", ] files = [ - {file = "types-redis-4.6.0.10.tar.gz", hash = "sha256:aa7fb5f743534500f274ddf11ab1c910aae1020481865a36b799e1d67de2aaf3"}, - {file = "types_redis-4.6.0.10-py3-none-any.whl", hash = "sha256:00f003da884ec3d1d54633186b4cbd587b39782595c5603330cc46a51f9bcf6e"}, + {file = "types-redis-4.6.0.11.tar.gz", hash = "sha256:c8cfc84635183deca2db4a528966c5566445fd3713983f0034fb0f5a09e0890d"}, + {file = "types_redis-4.6.0.11-py3-none-any.whl", hash = "sha256:94fc61118601fb4f79206b33b9f4344acff7ca1d7bba67834987fb0efcf6a770"}, ] [[package]] diff --git a/pyproject.toml b/pyproject.toml index c99654c43b..2efff42f4c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -262,6 +262,7 @@ warn_untyped_fields = true [tool.pyright] disableBytesTypePromotions = true +reportUnnecessaryTypeIgnoreComments = true exclude = [ "test_apps", "tools", From 176af9c86d7caf926151e5d19f13359a79f475e1 Mon Sep 17 00:00:00 2001 From: Jacob Coffee Date: Tue, 28 Nov 2023 19:19:45 -0600 Subject: [PATCH 33/45] fix(docs): adjust wording for example app (#2795) * fix(docs): adjust wording for example app * fix(docs): correct link --- docs/index.rst | 10 ++++++---- docs/usage/middleware/creating-middleware.rst | 5 ++--- 2 files changed, 8 
insertions(+), 7 deletions(-) diff --git a/docs/index.rst b/docs/index.rst index 8b8ebc8997..37969d8602 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -128,6 +128,7 @@ and * ``http://localhost:8000/schema`` (for `ReDoc `_), * ``http://localhost:8000/schema/swagger`` (for `Swagger UI `_), * ``http://localhost:8000/schema/elements`` (for `Stoplight Elements `_) + * ``http://localhost:8000/schema/rapidoc`` (for `RapiDoc `_) You can check out a more in-depth tutorial in the :doc:`/tutorials/todo-app/index` section! @@ -287,12 +288,13 @@ Example Applications -------------------- -* `litestar-pg-redis-docker `_ : In addition to Litestar, this +* `litestar-pg-redis-docker `_ : In addition to Litestar, this demonstrates a pattern of application modularity, SQLAlchemy 2.0 ORM, Redis cache connectivity, and more. Like all Litestar projects, this application is open to contributions, big and small. -* `litestar-fullstack `_ : A reference application that features a - Litestar app configured with best practices, SQLAlchemy 2.0 and SAQ, a frontend integrated with Vitejs and Jinja2 - templates. Docker, and more. +* `litestar-fullstack `_ : A fully-capable, production-ready fullstack + Litestar web application configured with best practices. It includes SQLAlchemy 2.0, VueJS, `Vite `_, + :doc:`SAQ job queue `_, ``Jinja`` templates and more. + `Read more `_. * `litestar-hello-world `_: A bare-minimum application setup. Great for testing and POC work. diff --git a/docs/usage/middleware/creating-middleware.rst b/docs/usage/middleware/creating-middleware.rst index 602f6eb0eb..9b30f221f5 100644 --- a/docs/usage/middleware/creating-middleware.rst +++ b/docs/usage/middleware/creating-middleware.rst @@ -115,9 +115,8 @@ explore another example - redirecting the request to a different url from a midd else: await self.app(scope, receive, send) -As you can see in the above, given some condition (request.session being None) we create a -:class:`ASGIRedirectResponse ` and then await it. 
Otherwise, we await ``self.app`` - +As you can see in the above, given some condition (``request.session`` being None) we create a +:class:`ASGIRedirectResponse ` and then await it. Otherwise, we await ``self.app`` Modifying ASGI Requests and Responses using the MiddlewareProtocol ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From 9f6ddf43928c6deb033f18fcec5aed21772192ff Mon Sep 17 00:00:00 2001 From: Elliot Hall <38925749+elliot-hall@users.noreply.github.com> Date: Tue, 28 Nov 2023 20:26:45 -0500 Subject: [PATCH 34/45] fix(docs): Updating SQLAlchemy tutorial line emphasis (#2796) --- docs/tutorials/sqlalchemy/0-introduction.rst | 2 +- docs/tutorials/sqlalchemy/1-provide-session-with-di.rst | 2 +- docs/tutorials/sqlalchemy/2-serialization-plugin.rst | 2 +- docs/tutorials/sqlalchemy/3-init-plugin.rst | 2 +- docs/tutorials/sqlalchemy/4-final-touches-and-recap.rst | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/tutorials/sqlalchemy/0-introduction.rst b/docs/tutorials/sqlalchemy/0-introduction.rst index fb49c45425..d7ba5aea2b 100644 --- a/docs/tutorials/sqlalchemy/0-introduction.rst +++ b/docs/tutorials/sqlalchemy/0-introduction.rst @@ -104,7 +104,7 @@ serializable by Litestar. :language: python :linenos: :lines: 2-3,14-16,45-48,94-101 - :emphasize-lines: 3,4,6,7,11,16 + :emphasize-lines: 3,4,6,11 Behavior ++++++++ diff --git a/docs/tutorials/sqlalchemy/1-provide-session-with-di.rst b/docs/tutorials/sqlalchemy/1-provide-session-with-di.rst index 0408bed26d..b17b417be4 100644 --- a/docs/tutorials/sqlalchemy/1-provide-session-with-di.rst +++ b/docs/tutorials/sqlalchemy/1-provide-session-with-di.rst @@ -11,7 +11,7 @@ handlers. .. 
literalinclude:: /examples/contrib/sqlalchemy/plugins/tutorial/full_app_with_session_di.py :language: python :linenos: - :emphasize-lines: 49-58,84-86,89-91,96-97,105 + :emphasize-lines: 49-58,84-85,89-91,96-97,105 In the previous example, the database session is created within each HTTP route handler function. In this script we use dependency injection to decouple creation of the session from the route handlers. diff --git a/docs/tutorials/sqlalchemy/2-serialization-plugin.rst b/docs/tutorials/sqlalchemy/2-serialization-plugin.rst index 6096b9dbf8..0eea369259 100644 --- a/docs/tutorials/sqlalchemy/2-serialization-plugin.rst +++ b/docs/tutorials/sqlalchemy/2-serialization-plugin.rst @@ -10,7 +10,7 @@ Here's the code: .. literalinclude:: /examples/contrib/sqlalchemy/plugins/tutorial/full_app_with_serialization_plugin.py :language: python :linenos: - :emphasize-lines: 10,77-78,82,84,88,92,99 + :emphasize-lines: 11,78-79,83-84,89,91-92,100 We've simply imported the plugin and added it to our app's plugins list, and now we can receive and return our SQLAlchemy data models directly to and from our handler. diff --git a/docs/tutorials/sqlalchemy/3-init-plugin.rst b/docs/tutorials/sqlalchemy/3-init-plugin.rst index a178ece1a8..0990c1cf96 100644 --- a/docs/tutorials/sqlalchemy/3-init-plugin.rst +++ b/docs/tutorials/sqlalchemy/3-init-plugin.rst @@ -19,7 +19,7 @@ Here's the updated code: .. literalinclude:: /examples/contrib/sqlalchemy/plugins/tutorial/full_app_with_init_plugin.py :language: python :linenos: - :emphasize-lines: 10-11,29,77-79,86 + :emphasize-lines: 12,30,78-79,87 The most notable difference is that we no longer need the ``db_connection()`` lifespan context manager - the plugin handles this for us. 
It also handles the creation of the tables in our database if we supply our metadata and diff --git a/docs/tutorials/sqlalchemy/4-final-touches-and-recap.rst b/docs/tutorials/sqlalchemy/4-final-touches-and-recap.rst index 8451634565..382ee79dc1 100644 --- a/docs/tutorials/sqlalchemy/4-final-touches-and-recap.rst +++ b/docs/tutorials/sqlalchemy/4-final-touches-and-recap.rst @@ -13,7 +13,7 @@ Here is our final application: .. literalinclude:: /examples/contrib/sqlalchemy/plugins/tutorial/full_app_with_plugin.py :language: python :linenos: - :emphasize-lines: 9,80 + :emphasize-lines: 11,79 Recap ===== From 77b62a2e9a4776f9532b1e03f694790ac42da400 Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Wed, 29 Nov 2023 16:38:45 +1000 Subject: [PATCH 35/45] fix: setting props on schema using camelCase names (#2800) fix: setting schema props using camelCase names We have been setting properties on the schema object using camelCase names when they are declared with snake_case names. --- litestar/_openapi/schema_generation/schema.py | 27 ++++++++++++------- tests/unit/test_openapi/test_parameters.py | 4 +-- 2 files changed, 19 insertions(+), 12 deletions(-) diff --git a/litestar/_openapi/schema_generation/schema.py b/litestar/_openapi/schema_generation/schema.py index 4679e59b30..9812306dd4 100644 --- a/litestar/_openapi/schema_generation/schema.py +++ b/litestar/_openapi/schema_generation/schema.py @@ -77,22 +77,22 @@ from litestar.plugins import OpenAPISchemaPluginProtocol KWARG_DEFINITION_ATTRIBUTE_TO_OPENAPI_PROPERTY_MAP: dict[str, str] = { - "content_encoding": "contentEncoding", + "content_encoding": "content_encoding", "default": "default", "description": "description", "enum": "enum", "examples": "examples", - "external_docs": "externalDocs", + "external_docs": "external_docs", "format": "format", "ge": "minimum", - "gt": "exclusiveMinimum", + "gt": "exclusive_minimum", "le": "maximum", - "lt": "exclusiveMaximum", - "max_items": "maxItems", - "max_length": "maxLength", - 
"min_items": "minItems", - "min_length": "minLength", - "multiple_of": "multipleOf", + "lt": "exclusive_maximum", + "max_items": "max_items", + "max_length": "max_length", + "min_items": "min_items", + "min_length": "min_length", + "multiple_of": "multiple_of", "pattern": "pattern", "title": "title", } @@ -644,7 +644,14 @@ def process_schema_result(self, field: FieldDefinition, schema: Schema) -> Schem if schema_key == "examples": value = get_formatted_examples(field, cast("list[Example]", value)) - setattr(schema, schema_key, value) + # we only want to transfer values from the `KwargDefinition` to `Schema` if the schema object + # doesn't already have a value for that property. For example, if a field is a constrained date, + # by this point, we have already set the `exclusive_minimum` and/or `exclusive_maximum` fields + # to floating point timestamp values on the schema object. However, the original `date` objects + # that define those constraints on `KwargDefinition` are still `date` objects. We don't want to + # overwrite them here. 
+ if getattr(schema, schema_key, None) is None: + setattr(schema, schema_key, value) if not schema.examples and self.generate_examples: from litestar._openapi.schema_generation.examples import create_examples_for_field diff --git a/tests/unit/test_openapi/test_parameters.py b/tests/unit/test_openapi/test_parameters.py index 3305727450..3f64b03dac 100644 --- a/tests/unit/test_openapi/test_parameters.py +++ b/tests/unit/test_openapi/test_parameters.py @@ -290,13 +290,13 @@ def my_handler( assert router3.param_in == ParamType.HEADER assert router3.schema.type == OpenAPIType.NUMBER # type: ignore assert router3.required - assert router3.schema.multipleOf == 5.0 # type: ignore + assert router3.schema.multiple_of == 5.0 # type: ignore assert router3.schema.examples # type: ignore assert controller1.param_in == ParamType.QUERY assert controller1.schema.type == OpenAPIType.INTEGER # type: ignore assert controller1.required - assert controller1.schema.exclusiveMaximum == 100.0 # type: ignore + assert controller1.schema.exclusive_maximum == 100.0 # type: ignore assert controller1.schema.examples # type: ignore assert controller3.param_in == ParamType.QUERY From b3de259cfc755b0f7411f35fcd8a41692f7eec6e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janek=20Nouvertn=C3=A9?= Date: Thu, 30 Nov 2023 21:05:02 +0100 Subject: [PATCH 36/45] docs: Fix `contrib.jwt` references in examples (#2794) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix contrib.jwt references --------- Signed-off-by: Janek Nouvertné <25355197+provinzkraut@users.noreply.github.com> Co-authored-by: Jacob Coffee --- .../sqlalchemy/sqlalchemy_repository_extension.py | 5 +---- docs/examples/{contrib => security}/jwt/__init__.py | 0 .../{contrib => security}/jwt/using_jwt_auth.py | 2 +- .../jwt/using_jwt_cookie_auth.py | 2 +- .../jwt/using_oauth2_password_bearer.py | 2 +- docs/examples/security/using_session_auth.py | 5 +---- docs/reference/contrib/jwt.rst | 3 +-- 
docs/reference/security/index.rst | 1 + docs/reference/security/jwt.rst | 2 ++ docs/release-notes/changelog.rst | 6 +++--- docs/usage/security/jwt.rst | 12 ++++++------ sonar-project.properties | 1 + .../test_jwt => test_security}/__init__.py | 0 tests/examples/test_security/test_jwt/__init__.py | 0 .../test_jwt/test_using_jwt_auth.py | 2 +- .../test_jwt/test_using_jwt_cookie_auth.py | 2 +- .../test_jwt/test_using_oauth2_password_bearer.py | 2 +- 17 files changed, 22 insertions(+), 25 deletions(-) rename docs/examples/{contrib => security}/jwt/__init__.py (100%) rename docs/examples/{contrib => security}/jwt/using_jwt_auth.py (98%) rename docs/examples/{contrib => security}/jwt/using_jwt_cookie_auth.py (98%) rename docs/examples/{contrib => security}/jwt/using_oauth2_password_bearer.py (97%) create mode 100644 docs/reference/security/jwt.rst rename tests/examples/{test_contrib/test_jwt => test_security}/__init__.py (100%) create mode 100644 tests/examples/test_security/test_jwt/__init__.py rename tests/examples/{test_contrib => test_security}/test_jwt/test_using_jwt_auth.py (92%) rename tests/examples/{test_contrib => test_security}/test_jwt/test_using_jwt_cookie_auth.py (90%) rename tests/examples/{test_contrib => test_security}/test_jwt/test_using_oauth2_password_bearer.py (88%) diff --git a/docs/examples/contrib/sqlalchemy/sqlalchemy_repository_extension.py b/docs/examples/contrib/sqlalchemy/sqlalchemy_repository_extension.py index 9f1ba0bd72..f864bbcdd7 100644 --- a/docs/examples/contrib/sqlalchemy/sqlalchemy_repository_extension.py +++ b/docs/examples/contrib/sqlalchemy/sqlalchemy_repository_extension.py @@ -15,10 +15,7 @@ from litestar import Litestar, get, post from litestar.contrib.sqlalchemy.base import UUIDAuditBase from litestar.contrib.sqlalchemy.plugins import AsyncSessionConfig, SQLAlchemyAsyncConfig, SQLAlchemyInitPlugin -from litestar.contrib.sqlalchemy.repository import ( - ModelT, - SQLAlchemyAsyncRepository, -) +from 
litestar.contrib.sqlalchemy.repository import ModelT, SQLAlchemyAsyncRepository from litestar.di import Provide if TYPE_CHECKING: diff --git a/docs/examples/contrib/jwt/__init__.py b/docs/examples/security/jwt/__init__.py similarity index 100% rename from docs/examples/contrib/jwt/__init__.py rename to docs/examples/security/jwt/__init__.py diff --git a/docs/examples/contrib/jwt/using_jwt_auth.py b/docs/examples/security/jwt/using_jwt_auth.py similarity index 98% rename from docs/examples/contrib/jwt/using_jwt_auth.py rename to docs/examples/security/jwt/using_jwt_auth.py index bde62b2529..28420a5958 100644 --- a/docs/examples/contrib/jwt/using_jwt_auth.py +++ b/docs/examples/security/jwt/using_jwt_auth.py @@ -6,8 +6,8 @@ from litestar import Litestar, Request, Response, get, post from litestar.connection import ASGIConnection -from litestar.contrib.jwt import JWTAuth, Token from litestar.openapi.config import OpenAPIConfig +from litestar.security.jwt import JWTAuth, Token # Let's assume we have a User model that is a pydantic model. diff --git a/docs/examples/contrib/jwt/using_jwt_cookie_auth.py b/docs/examples/security/jwt/using_jwt_cookie_auth.py similarity index 98% rename from docs/examples/contrib/jwt/using_jwt_cookie_auth.py rename to docs/examples/security/jwt/using_jwt_cookie_auth.py index d9996a5952..909a38a733 100644 --- a/docs/examples/contrib/jwt/using_jwt_cookie_auth.py +++ b/docs/examples/security/jwt/using_jwt_cookie_auth.py @@ -6,8 +6,8 @@ from litestar import Litestar, Request, Response, get, post from litestar.connection import ASGIConnection -from litestar.contrib.jwt import JWTCookieAuth, Token from litestar.openapi.config import OpenAPIConfig +from litestar.security.jwt import JWTCookieAuth, Token # Let's assume we have a User model that is a pydantic model. 
diff --git a/docs/examples/contrib/jwt/using_oauth2_password_bearer.py b/docs/examples/security/jwt/using_oauth2_password_bearer.py similarity index 97% rename from docs/examples/contrib/jwt/using_oauth2_password_bearer.py rename to docs/examples/security/jwt/using_oauth2_password_bearer.py index f43c4880aa..4c77e4ce78 100644 --- a/docs/examples/contrib/jwt/using_oauth2_password_bearer.py +++ b/docs/examples/security/jwt/using_oauth2_password_bearer.py @@ -6,8 +6,8 @@ from litestar import Litestar, Request, Response, get, post from litestar.connection import ASGIConnection -from litestar.contrib.jwt import OAuth2Login, OAuth2PasswordBearerAuth, Token from litestar.openapi.config import OpenAPIConfig +from litestar.security.jwt import OAuth2Login, OAuth2PasswordBearerAuth, Token # Let's assume we have a User model that is a pydantic model. diff --git a/docs/examples/security/using_session_auth.py b/docs/examples/security/using_session_auth.py index de0a9b0f43..f9bb2dc8ca 100644 --- a/docs/examples/security/using_session_auth.py +++ b/docs/examples/security/using_session_auth.py @@ -6,10 +6,7 @@ from litestar import Litestar, Request, get, post from litestar.connection import ASGIConnection from litestar.exceptions import NotAuthorizedException -from litestar.middleware.session.server_side import ( - ServerSideSessionBackend, - ServerSideSessionConfig, -) +from litestar.middleware.session.server_side import ServerSideSessionBackend, ServerSideSessionConfig from litestar.openapi.config import OpenAPIConfig from litestar.security.session_auth import SessionAuth from litestar.stores.memory import MemoryStore diff --git a/docs/reference/contrib/jwt.rst b/docs/reference/contrib/jwt.rst index 9c4ca86d32..6b49ee3d9c 100644 --- a/docs/reference/contrib/jwt.rst +++ b/docs/reference/contrib/jwt.rst @@ -1,5 +1,4 @@ jwt === -.. 
automodule:: litestar.contrib.jwt - :members: +This page has moved to :doc:`/reference/security/jwt` diff --git a/docs/reference/security/index.rst b/docs/reference/security/index.rst index 0df8b4de3c..12f7deabb9 100644 --- a/docs/reference/security/index.rst +++ b/docs/reference/security/index.rst @@ -8,4 +8,5 @@ security .. toctree:: :maxdepth: 1 + jwt session_auth diff --git a/docs/reference/security/jwt.rst b/docs/reference/security/jwt.rst new file mode 100644 index 0000000000..62a8d09242 --- /dev/null +++ b/docs/reference/security/jwt.rst @@ -0,0 +1,2 @@ +.. automodule:: litestar.security.jwt + :members: diff --git a/docs/release-notes/changelog.rst b/docs/release-notes/changelog.rst index 64d169c08b..aafef3927c 100644 --- a/docs/release-notes/changelog.rst +++ b/docs/release-notes/changelog.rst @@ -1024,7 +1024,7 @@ :pr: 2160 Fix a regression that would make - :class:`~litestar.contrib.jwt.JWTAuthenticationMiddleware` authenticate + ``litestar.contrib.jwt.JWTAuthenticationMiddleware`` authenticate ``OPTIONS`` and ``HEAD`` requests by default. .. change:: SessionAuth | Regression: ``OPTIONS`` and ``HEAD`` being authenticated by default @@ -1591,8 +1591,8 @@ :type: feature :pr: 1695 - Add the :attr:`extras ` attribute, containing - extra attributes found on the JWT. + Add the ``litestar.contrib.jwt.Token.extras`` attribute, containing extra + attributes found on the JWT. .. change:: Add default modes for ``Websocket.iter_json`` and ``WebSocket.iter_data`` :type: feature diff --git a/docs/usage/security/jwt.rst b/docs/usage/security/jwt.rst index 33dc3b6cdd..1a069f6cbe 100644 --- a/docs/usage/security/jwt.rst +++ b/docs/usage/security/jwt.rst @@ -8,10 +8,10 @@ JWT Auth Backend ---------------- This is the base JWT Auth backend. You can read about its particular API in -the :class:`API Reference `. It sends the JWT token using a header - and it expects requests to +the :class:`API Reference `. 
It sends the JWT token using a header - and it expects requests to send the JWT token using the same header key. -.. literalinclude:: /examples/contrib/jwt/using_jwt_auth.py +.. literalinclude:: /examples/security/jwt/using_jwt_auth.py :language: python :caption: Using JWT Auth @@ -19,10 +19,10 @@ send the JWT token using the same header key. JWT Cookie Auth Backend ----------------------- -This backend inherits from the :class:`JWTAuth ` backend, with the difference being that instead +This backend inherits from the :class:`JWTAuth ` backend, with the difference being that instead of using a header for the JWT Token, it uses a cookie. -.. literalinclude:: /examples/contrib/jwt/using_jwt_cookie_auth.py +.. literalinclude:: /examples/security/jwt/using_jwt_cookie_auth.py :language: python :caption: Using JWT Cookie Auth @@ -30,9 +30,9 @@ of using a header for the JWT Token, it uses a cookie. OAuth2 Bearer Password Flow --------------------------- -This backend inherits from the :class:`JWTCookieAuth ` backend. It works similarly to +This backend inherits from the :class:`JWTCookieAuth ` backend. It works similarly to the ``JWTCookieAuth`` backend, but is meant to be used for OAUTH2 Bearer password flows. -.. literalinclude:: /examples/contrib/jwt/using_oauth2_password_bearer.py +.. 
literalinclude:: /examples/security/jwt/using_oauth2_password_bearer.py :language: python :caption: Using OAUTH2 Bearer Password diff --git a/sonar-project.properties b/sonar-project.properties index 6e4f4d7a60..46d79f9be6 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -12,6 +12,7 @@ sonar.coverage.exclusions=\ sonar.cpd.exclusions=\ litestar/connection.py, \ litestar/contrib/jwt/*, \ + litestar/security/jwt/*, \ litestar/handlers/**/*, \ litestar/middleware/session/*, \ litestar/params.py, \ diff --git a/tests/examples/test_contrib/test_jwt/__init__.py b/tests/examples/test_security/__init__.py similarity index 100% rename from tests/examples/test_contrib/test_jwt/__init__.py rename to tests/examples/test_security/__init__.py diff --git a/tests/examples/test_security/test_jwt/__init__.py b/tests/examples/test_security/test_jwt/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/examples/test_contrib/test_jwt/test_using_jwt_auth.py b/tests/examples/test_security/test_jwt/test_using_jwt_auth.py similarity index 92% rename from tests/examples/test_contrib/test_jwt/test_using_jwt_auth.py rename to tests/examples/test_security/test_jwt/test_using_jwt_auth.py index 50df09a023..07f46a3580 100644 --- a/tests/examples/test_contrib/test_jwt/test_using_jwt_auth.py +++ b/tests/examples/test_security/test_jwt/test_using_jwt_auth.py @@ -1,6 +1,6 @@ from uuid import uuid4 -from docs.examples.contrib.jwt.using_jwt_auth import app +from docs.examples.security.jwt.using_jwt_auth import app from litestar.status_codes import HTTP_200_OK, HTTP_201_CREATED, HTTP_401_UNAUTHORIZED from litestar.testing import TestClient diff --git a/tests/examples/test_contrib/test_jwt/test_using_jwt_cookie_auth.py b/tests/examples/test_security/test_jwt/test_using_jwt_cookie_auth.py similarity index 90% rename from tests/examples/test_contrib/test_jwt/test_using_jwt_cookie_auth.py rename to 
tests/examples/test_security/test_jwt/test_using_jwt_cookie_auth.py index 8ff9e8790b..26709607e2 100644 --- a/tests/examples/test_contrib/test_jwt/test_using_jwt_cookie_auth.py +++ b/tests/examples/test_security/test_jwt/test_using_jwt_cookie_auth.py @@ -1,6 +1,6 @@ from uuid import uuid4 -from docs.examples.contrib.jwt.using_jwt_cookie_auth import app +from docs.examples.security.jwt.using_jwt_cookie_auth import app from litestar.status_codes import HTTP_200_OK, HTTP_201_CREATED, HTTP_401_UNAUTHORIZED from litestar.testing import TestClient diff --git a/tests/examples/test_contrib/test_jwt/test_using_oauth2_password_bearer.py b/tests/examples/test_security/test_jwt/test_using_oauth2_password_bearer.py similarity index 88% rename from tests/examples/test_contrib/test_jwt/test_using_oauth2_password_bearer.py rename to tests/examples/test_security/test_jwt/test_using_oauth2_password_bearer.py index 9688c726ef..188bbadf58 100644 --- a/tests/examples/test_contrib/test_jwt/test_using_oauth2_password_bearer.py +++ b/tests/examples/test_security/test_jwt/test_using_oauth2_password_bearer.py @@ -1,6 +1,6 @@ from uuid import uuid4 -from docs.examples.contrib.jwt.using_oauth2_password_bearer import app +from docs.examples.security.jwt.using_oauth2_password_bearer import app from litestar.status_codes import HTTP_201_CREATED, HTTP_401_UNAUTHORIZED from litestar.testing import TestClient From 1cfde0c850046d3ea430bf0dc267343a646fc5dd Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Fri, 1 Dec 2023 14:42:17 +1000 Subject: [PATCH 37/45] fix: event handler error breaks stream (#2810) This PR prevents exceptions raised from within event handlers from closing the event receive stream, and from further propagation beyond the event emitter backend. When an exception is caught from an event handler, we log it at ERROR level. 
Closes #2809 --- litestar/events/emitter.py | 22 +++++++++++++++------- tests/unit/test_events.py | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 7 deletions(-) diff --git a/litestar/events/emitter.py b/litestar/events/emitter.py index 54c00c7e8d..0e84df8fa6 100644 --- a/litestar/events/emitter.py +++ b/litestar/events/emitter.py @@ -1,5 +1,6 @@ from __future__ import annotations +import logging import math import sys from abc import ABC, abstractmethod @@ -17,9 +18,6 @@ from litestar.exceptions import ImproperlyConfiguredException -__all__ = ("BaseEventEmitterBackend", "SimpleEventEmitter") - - if TYPE_CHECKING: from types import TracebackType @@ -27,6 +25,10 @@ from litestar.events.listener import EventListener +__all__ = ("BaseEventEmitterBackend", "SimpleEventEmitter") + +logger = logging.getLogger(__name__) + class BaseEventEmitterBackend(AsyncContextManager["BaseEventEmitterBackend"], ABC): """Abstract class used to define event emitter backends.""" @@ -77,19 +79,25 @@ def __init__(self, listeners: Sequence[EventListener]) -> None: self._send_stream: MemoryObjectSendStream | None = None self._exit_stack: AsyncExitStack | None = None - @staticmethod - async def _worker(receive_stream: MemoryObjectReceiveStream) -> None: + async def _worker(self, receive_stream: MemoryObjectReceiveStream) -> None: """Run items from ``receive_stream`` in a task group. 
Returns: None """ - async with receive_stream, anyio.create_task_group() as task_group: + async with receive_stream: async for item in receive_stream: - fn, args, kwargs = item + await self._run_listener_in_task_group(*item) + + @staticmethod + async def _run_listener_in_task_group(fn: Any, args: tuple[Any], kwargs: dict[str, Any]) -> None: + try: + async with anyio.create_task_group() as task_group: if kwargs: fn = partial(fn, **kwargs) task_group.start_soon(fn, *args) + except Exception as exc: + logger.exception("Error in event listener: %s", exc) async def __aenter__(self) -> SimpleEventEmitter: self._exit_stack = AsyncExitStack() diff --git a/tests/unit/test_events.py b/tests/unit/test_events.py index 946c2aa15f..c88611395d 100644 --- a/tests/unit/test_events.py +++ b/tests/unit/test_events.py @@ -109,3 +109,36 @@ async def test_raises_when_not_listener_are_registered_for_an_event_id(async_lis with create_test_client(route_handlers=[], listeners=[async_listener]) as client: with pytest.raises(ImproperlyConfiguredException): client.app.emit("x") + + +async def test_event_listener_raises_exception(async_listener: EventListener, mock: MagicMock) -> None: + """Test that an event listener that raises an exception does not prevent other listeners from being called. 
+ + https://github.com/litestar-org/litestar/issues/2809 + """ + + error_mock = MagicMock() + + @listener("error_event") + async def raising_listener(*args: Any, **kwargs: Any) -> None: + error_mock() + raise ValueError("test") + + @get("/error") + def route_handler_1(request: Request[Any, Any, Any]) -> None: + request.app.emit("error_event") + + @get("/no-error") + def route_handler_2(request: Request[Any, Any, Any]) -> None: + request.app.emit("test_event") + + with create_test_client( + route_handlers=[route_handler_1, route_handler_2], listeners=[async_listener, raising_listener] + ) as client: + first_response = client.get("/error") + second_response = client.get("/no-error") + assert first_response.status_code == HTTP_200_OK + assert second_response.status_code == HTTP_200_OK + + error_mock.assert_called() + mock.assert_called() From 0a5a75b45cd5703fdacfd96b154cc35dfbea8f37 Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Sat, 2 Dec 2023 00:24:20 +1000 Subject: [PATCH 38/45] fix: concurrent event listener execution (#2814) This PR corrects an implementation made in #2810 that prevented listeners from running concurrently. 
For #2809 --- litestar/events/emitter.py | 14 ++------------ litestar/events/listener.py | 35 ++++++++++++++++++++++++++++++----- 2 files changed, 32 insertions(+), 17 deletions(-) diff --git a/litestar/events/emitter.py b/litestar/events/emitter.py index 0e84df8fa6..14499741e3 100644 --- a/litestar/events/emitter.py +++ b/litestar/events/emitter.py @@ -1,6 +1,5 @@ from __future__ import annotations -import logging import math import sys from abc import ABC, abstractmethod @@ -27,8 +26,6 @@ __all__ = ("BaseEventEmitterBackend", "SimpleEventEmitter") -logger = logging.getLogger(__name__) - class BaseEventEmitterBackend(AsyncContextManager["BaseEventEmitterBackend"], ABC): """Abstract class used to define event emitter backends.""" @@ -85,19 +82,12 @@ async def _worker(self, receive_stream: MemoryObjectReceiveStream) -> None: Returns: None """ - async with receive_stream: + async with receive_stream, anyio.create_task_group() as task_group: async for item in receive_stream: - await self._run_listener_in_task_group(*item) - - @staticmethod - async def _run_listener_in_task_group(fn: Any, args: tuple[Any], kwargs: dict[str, Any]) -> None: - try: - async with anyio.create_task_group() as task_group: + fn, args, kwargs = item if kwargs: fn = partial(fn, **kwargs) task_group.start_soon(fn, *args) - except Exception as exc: - logger.exception("Error in event listener: %s", exc) async def __aenter__(self) -> SimpleEventEmitter: self._exit_stack = AsyncExitStack() diff --git a/litestar/events/listener.py b/litestar/events/listener.py index 487d237de9..63c9848df1 100644 --- a/litestar/events/listener.py +++ b/litestar/events/listener.py @@ -1,16 +1,18 @@ from __future__ import annotations -from typing import TYPE_CHECKING +import logging +from typing import TYPE_CHECKING, Any from litestar.exceptions import ImproperlyConfiguredException from litestar.utils import ensure_async_callable -__all__ = ("EventListener", "listener") - - if TYPE_CHECKING: from litestar.types import 
AnyCallable, AsyncAnyCallable +__all__ = ("EventListener", "listener") + +logger = logging.getLogger(__name__) + class EventListener: """Decorator for event listeners""" @@ -40,10 +42,33 @@ def __call__(self, fn: AnyCallable) -> EventListener: if not callable(fn): raise ImproperlyConfiguredException("EventListener instance should be called as a decorator on a callable") - self.fn = ensure_async_callable(fn) + self.fn = self.wrap_in_error_handler(ensure_async_callable(fn)) return self + @staticmethod + def wrap_in_error_handler(fn: AsyncAnyCallable) -> AsyncAnyCallable: + """Wrap a listener function to handle errors. + + Listeners are executed concurrently in a TaskGroup, so we need to ensure that exceptions do not propagate + to the task group which results in any other unfinished listeners to be cancelled, and the receive stream to + be closed. + + See https://github.com/litestar-org/litestar/issues/2809 + + Args: + fn: The listener function to wrap. + """ + + async def wrapped(*args: Any, **kwargs: Any) -> None: + """Wrap a listener function to handle errors.""" + try: + await fn(*args, **kwargs) + except Exception as exc: + logger.exception("Error while executing listener %s: %s", fn.__name__, exc) + + return wrapped + def __hash__(self) -> int: return hash(self.event_ids) + hash(self.fn) From 7c50d58e065ee9a2176653c572806c4c9e163428 Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Sat, 2 Dec 2023 05:14:22 +1000 Subject: [PATCH 39/45] fix: openapi schema for pydantic computed fields (#2797) --- litestar/_openapi/schema_generation/schema.py | 1 + .../pydantic/pydantic_schema_plugin.py | 6 +++ litestar/contrib/pydantic/utils.py | 38 ++++++++++++++++++- litestar/params.py | 2 + .../test_pydantic/test_openapi.py | 31 ++++++++++++++- 5 files changed, 76 insertions(+), 2 deletions(-) diff --git a/litestar/_openapi/schema_generation/schema.py b/litestar/_openapi/schema_generation/schema.py index 9812306dd4..811cd92a77 100644 --- 
a/litestar/_openapi/schema_generation/schema.py +++ b/litestar/_openapi/schema_generation/schema.py @@ -95,6 +95,7 @@ "multiple_of": "multiple_of", "pattern": "pattern", "title": "title", + "read_only": "read_only", } TYPE_MAP: dict[type[Any] | None | Any, Schema] = { diff --git a/litestar/contrib/pydantic/pydantic_schema_plugin.py b/litestar/contrib/pydantic/pydantic_schema_plugin.py index d1b65f5bbf..657ef147e4 100644 --- a/litestar/contrib/pydantic/pydantic_schema_plugin.py +++ b/litestar/contrib/pydantic/pydantic_schema_plugin.py @@ -7,6 +7,7 @@ from litestar._openapi.schema_generation.schema import SchemaCreator, _get_type_schema_name from litestar._openapi.schema_generation.utils import get_formatted_examples from litestar.contrib.pydantic.utils import ( + create_field_definitions_for_computed_fields, is_pydantic_2_model, is_pydantic_constrained_field, is_pydantic_model_class, @@ -275,6 +276,11 @@ def for_pydantic_model(cls, field_definition: FieldDefinition, schema_creator: S for k, f in model_fields.items() } + computed_field_definitions = create_field_definitions_for_computed_fields( + annotation, schema_creator.prefer_alias + ) + field_definitions.update(computed_field_definitions) + return Schema( required=sorted(f.name for f in field_definitions.values() if f.is_required), properties={k: schema_creator.for_field_definition(f) for k, f in field_definitions.items()}, diff --git a/litestar/contrib/pydantic/utils.py b/litestar/contrib/pydantic/utils.py index 2de448677f..f4f8d0f98a 100644 --- a/litestar/contrib/pydantic/utils.py +++ b/litestar/contrib/pydantic/utils.py @@ -3,9 +3,11 @@ from typing import TYPE_CHECKING, Any -from typing_extensions import get_type_hints +from typing_extensions import Annotated, get_type_hints +from litestar.params import KwargDefinition from litestar.types import Empty +from litestar.typing import FieldDefinition from litestar.utils import is_class_and_subclass from litestar.utils.predicates import is_generic from 
litestar.utils.typing import ( @@ -168,3 +170,37 @@ def is_pydantic_2_model( def is_pydantic_undefined(value: Any) -> bool: return any(v is value for v in PYDANTIC_UNDEFINED_SENTINELS) + + +def create_field_definitions_for_computed_fields( + model: type[pydantic_v1.BaseModel | pydantic_v2.BaseModel], # pyright: ignore + prefer_alias: bool, +) -> dict[str, FieldDefinition]: + """Create field definitions for computed fields. + + Args: + model: A pydantic model. + prefer_alias: Whether to prefer the alias or the name of the field. + + Returns: + A dictionary containing the field definitions for the computed fields. + """ + pydantic_decorators = getattr(model, "__pydantic_decorators__", None) + if pydantic_decorators is None: + return {} + + def get_name(k: str, dec: Any) -> str: + if not dec.info.alias: + return k + return dec.info.alias if prefer_alias else k # type: ignore[no-any-return] + + return { + (name := get_name(k, dec)): FieldDefinition.from_annotation( + Annotated[ + dec.info.return_type, + KwargDefinition(title=dec.info.title, description=dec.info.description, read_only=True), + ], + name=name, + ) + for k, dec in pydantic_decorators.computed_fields.items() + } diff --git a/litestar/params.py b/litestar/params.py index 2c60c7485b..bff010bb7e 100644 --- a/litestar/params.py +++ b/litestar/params.py @@ -110,6 +110,8 @@ class KwargDefinition: """Specify the format to which a string value should be converted.""" enum: Sequence[Any] | None = field(default=None) """A sequence of valid values.""" + read_only: bool | None = field(default=None) + """A boolean flag dictating whether this parameter is read only.""" @property def is_constrained(self) -> bool: diff --git a/tests/unit/test_contrib/test_pydantic/test_openapi.py b/tests/unit/test_contrib/test_pydantic/test_openapi.py index 888032d30b..dcb4400206 100644 --- a/tests/unit/test_contrib/test_pydantic/test_openapi.py +++ b/tests/unit/test_contrib/test_pydantic/test_openapi.py @@ -18,7 +18,7 @@ from 
litestar._openapi.schema_generation.utils import _get_normalized_schema_key from litestar.contrib.pydantic import PydanticPlugin, PydanticSchemaPlugin from litestar.openapi import OpenAPIConfig -from litestar.openapi.spec import Example, Schema +from litestar.openapi.spec import Example, Reference, Schema from litestar.openapi.spec.enums import OpenAPIFormat, OpenAPIType from litestar.params import KwargDefinition from litestar.status_codes import HTTP_200_OK @@ -615,3 +615,32 @@ def test_create_for_url_v2(field_type: Any) -> None: schema = SchemaCreator(plugins=[PydanticSchemaPlugin()]).for_field_definition(field_definition) assert schema.type == OpenAPIType.STRING # type: ignore[union-attr] assert schema.format == OpenAPIFormat.URL # type: ignore[union-attr] + + +@pytest.mark.parametrize("prefer_alias", [True, False]) +def test_create_for_computed_field(prefer_alias: bool) -> None: + class Sample(pydantic_v2.BaseModel): + property_one: str + + @pydantic_v2.computed_field( + description="a description", title="a title", alias="prop_two" if prefer_alias else None + ) + def property_two(self) -> bool: + return True + + field_definition = FieldDefinition.from_annotation(Sample) + schema_creator = SchemaCreator(plugins=[PydanticSchemaPlugin()]) + ref = schema_creator.for_field_definition(field_definition) + assert isinstance(ref, Reference) + assert len(schema_creator.schemas) == 1 + schema = next(iter(schema_creator.schemas.values())) + assert schema.required == ["property_one", "property_two"] if not prefer_alias else ["property_one", "prop_two"] + properties = schema.properties + assert properties is not None + assert properties.keys() == {"property_one", "property_two"} if not prefer_alias else {"property_one", "prop_two"} + property_two = properties["property_two"] if not prefer_alias else properties["prop_two"] + assert isinstance(property_two, Schema) + assert property_two.type == OpenAPIType.BOOLEAN + assert property_two.description == "a description" + assert 
property_two.title == "a title" + assert property_two.read_only From d26ee096bb396cc922ec86ec9449de6ae8bddcdd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janek=20Nouvertn=C3=A9?= <25355197+provinzkraut@users.noreply.github.com> Date: Sat, 2 Dec 2023 13:09:12 +0100 Subject: [PATCH 40/45] v2.4.2 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Janek Nouvertné <25355197+provinzkraut@users.noreply.github.com> --- docs/release-notes/changelog.rst | 45 ++++++++++++++++++++++++++++++++ pyproject.toml | 2 +- 2 files changed, 46 insertions(+), 1 deletion(-) diff --git a/docs/release-notes/changelog.rst b/docs/release-notes/changelog.rst index aafef3927c..4e0caf8b86 100644 --- a/docs/release-notes/changelog.rst +++ b/docs/release-notes/changelog.rst @@ -3,6 +3,51 @@ 2.x Changelog ============= +.. changelog:: 2.4.2 + :date: 2023/12/02 + + .. change:: Fix OpenAPI handling of parameters with duplicated names + :type: bugfix + :issue: 2662 + :pr: 2788 + + Fix a bug where schema generation would consider two parameters with the same + name but declared in different places (eg., header, cookie) as an error. + + .. change:: Fix late failure where ``DTOData`` is used without a DTO + :type: bugfix + :issue: 2779 + :pr: 2789 + + Fix an issue where a handler would be allowed to be registered with a + ``DTOData`` annotation without having a DTO defined, which would result in a + runtime exception. In cases like these, a configuration error is now raised + during startup. + + .. change:: Correctly propagate camelCase names on OpenAPI schema + :type: bugfix + :pr: 2800 + + Fix a bug where OpenAPI schema fields would be inappropriately propagated as + camelCase where they should have been snake_case + + .. change:: Fix error handling in event handler stream + :type: bugfix + :pr: 2810, 2814 + + Fix a class of errors that could result in the event listener stream being + terminated when an exception occurred within an event listener. 
Errors in + event listeners are now not propagated anymore but handled by the backend and + logged instead. + + .. change:: Fix OpenAPI schema for pydantic computed fields + :type: bugfix + :pr: 2797 + :issue: 2792 + + Add support for including computed fields in schemas generated from pydantic + models. + .. changelog:: 2.4.1 :date: 2023/11/28 diff --git a/pyproject.toml b/pyproject.toml index 2efff42f4c..528aa8948b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,7 +56,7 @@ maintainers = [ name = "litestar" readme = "README.md" requires-python = ">=3.8,<4.0" -version = "2.4.1" +version = "2.4.2" [project.urls] Blog = "https://blog.litestar.dev" From cbb54d0d8c555b1a08bca07bab05cd9a92254283 Mon Sep 17 00:00:00 2001 From: "allcontributors[bot]" <46447321+allcontributors[bot]@users.noreply.github.com> Date: Sat, 2 Dec 2023 15:41:22 +0100 Subject: [PATCH 41/45] docs: add hzhou0 as a contributor for bug, and code (#2819) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: update README.md [skip ci] * docs: update .all-contributorsrc [skip ci] --------- Co-authored-by: allcontributors[bot] <46447321+allcontributors[bot]@users.noreply.github.com> Co-authored-by: Janek Nouvertné --- .all-contributorsrc | 10 ++++++++++ README.md | 1 + 2 files changed, 11 insertions(+) diff --git a/.all-contributorsrc b/.all-contributorsrc index 25a50a50c8..94c8719719 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -1469,6 +1469,16 @@ "contributions": [ "doc" ] + }, + { + "login": "hzhou0", + "name": "Henry Zhou", + "avatar_url": "https://avatars.githubusercontent.com/u/43188301?v=4", + "profile": "https://github.com/hzhou0", + "contributions": [ + "bug", + "code" + ] } ], "contributorsPerLine": 7, diff --git a/README.md b/README.md index 50e772ada1..885ffe051f 100644 --- a/README.md +++ b/README.md @@ -508,6 +508,7 @@ see [the contribution guide](CONTRIBUTING.rst). Hunter Boyd
Hunter Boyd

📖 Cesar Giulietti
Cesar Giulietti

📖 Marcus Lim
Marcus Lim

📖 + Henry Zhou
Henry Zhou

🐛 💻 From a281ce7ebae2bebe9f58bdf62b7fb251e79d6050 Mon Sep 17 00:00:00 2001 From: Henry Zhou <43188301+hzhou0@users.noreply.github.com> Date: Sat, 2 Dec 2023 10:18:02 -0800 Subject: [PATCH 42/45] fix: correct OpenAPI schema for enum constraints (#2812) (#2818) This PR fixes OpenAPI schema generation when enum constraints are present, such as in python Enums or Literal types. The JSONSchema "type" constraint is now a list of all types present in the enum constraints, which is the specified schema expectation. Fixes tests to expect this behaviour. Co-authored-by: Cody Fincher <204685+cofin@users.noreply.github.com> --- litestar/_openapi/schema_generation/schema.py | 34 ++++++++++++++++--- tests/unit/test_openapi/test_schema.py | 6 ++-- 2 files changed, 33 insertions(+), 7 deletions(-) diff --git a/litestar/_openapi/schema_generation/schema.py b/litestar/_openapi/schema_generation/schema.py index 811cd92a77..4f94194a16 100644 --- a/litestar/_openapi/schema_generation/schema.py +++ b/litestar/_openapi/schema_generation/schema.py @@ -58,6 +58,7 @@ from litestar.params import BodyKwarg, ParameterKwarg from litestar.plugins import OpenAPISchemaPlugin from litestar.types import Empty +from litestar.types.builtin_types import NoneType from litestar.typing import FieldDefinition from litestar.utils.helpers import get_name from litestar.utils.predicates import ( @@ -116,6 +117,7 @@ MutableMapping: Schema(type=OpenAPIType.OBJECT), MutableSequence: Schema(type=OpenAPIType.ARRAY), None: Schema(type=OpenAPIType.NULL), + NoneType: Schema(type=OpenAPIType.NULL), OrderedDict: Schema(type=OpenAPIType.OBJECT), Path: Schema(type=OpenAPIType.STRING, format=OpenAPIFormat.URI), Pattern: Schema(type=OpenAPIType.STRING, format=OpenAPIFormat.REGEX), @@ -146,6 +148,29 @@ } +def _types_in_list(lst: list[Any]) -> list[OpenAPIType] | OpenAPIType: + """Extract unique OpenAPITypes present in the values of a list. + + Args: + lst: A list of values + + Returns: + OpenAPIType in the given list. 
If more than one exists, return + a list of OpenAPITypes. + """ + schema_types: list[OpenAPIType] = [] + for item in lst: + schema_type = TYPE_MAP[type(item)].type + if isinstance(schema_type, OpenAPIType): + schema_types.append(schema_type) + elif schema_type is None: + raise RuntimeError("Item in TYPE_MAP must have a type that is not None") + else: + schema_types.extend(schema_type) + schema_types = list(set(schema_types)) + return schema_types[0] if len(schema_types) == 1 else schema_types + + def _get_type_schema_name(field_definition: FieldDefinition) -> str: """Extract the schema name from a data container. @@ -178,10 +203,9 @@ def create_enum_schema(annotation: EnumMeta, include_null: bool = False) -> Sche A schema instance. """ enum_values: list[str | int | None] = [v.value for v in annotation] # type: ignore - if include_null: + if include_null and None not in enum_values: enum_values.append(None) - openapi_type = OpenAPIType.STRING if isinstance(enum_values[0], str) else OpenAPIType.INTEGER - return Schema(type=openapi_type, enum=enum_values) + return Schema(type=_types_in_list(enum_values), enum=enum_values) def _iter_flat_literal_args(annotation: Any) -> Iterable[Any]: @@ -211,9 +235,9 @@ def create_literal_schema(annotation: Any, include_null: bool = False) -> Schema A schema instance. 
""" args = list(_iter_flat_literal_args(annotation)) - if include_null: + if include_null and None not in args: args.append(None) - schema = copy(TYPE_MAP[type(args[0])]) + schema = Schema(type=_types_in_list(args)) if len(args) > 1: schema.enum = args else: diff --git a/tests/unit/test_openapi/test_schema.py b/tests/unit/test_openapi/test_schema.py index fee3da93cc..2cb113ec6e 100644 --- a/tests/unit/test_openapi/test_schema.py +++ b/tests/unit/test_openapi/test_schema.py @@ -466,14 +466,16 @@ class Foo(Enum): schema = SchemaCreator().for_field_definition(FieldDefinition.from_annotation(Optional[Foo])) assert isinstance(schema, Schema) - assert schema.type == OpenAPIType.INTEGER + assert schema.type is not None + assert set(schema.type) == {OpenAPIType.INTEGER, OpenAPIType.NULL} assert schema.enum == [1, 2, None] def test_optional_literal() -> None: schema = SchemaCreator().for_field_definition(FieldDefinition.from_annotation(Optional[Literal[1]])) assert isinstance(schema, Schema) - assert schema.type == OpenAPIType.INTEGER + assert schema.type is not None + assert set(schema.type) == {OpenAPIType.INTEGER, OpenAPIType.NULL} assert schema.enum == [1, None] From 143e61240d416d37768f0ea613a66535d85a310e Mon Sep 17 00:00:00 2001 From: William Stam Date: Sat, 2 Dec 2023 20:35:45 +0200 Subject: [PATCH 43/45] docs: Adding docker and supervisor deployment guides (#2724) * docs: Adding docker and supervisor deployment guides * docs: Adding docker and supervisor deployment guides * docs: Adding docker and supervisor deployment guides * Update docs/topics/deployment/index.rst --------- Co-authored-by: William Stam Co-authored-by: Cody Fincher <204685+cofin@users.noreply.github.com> --- docs/topics/deployment/docker.rst | 40 ++++++ docs/topics/deployment/index.rst | 2 + docs/topics/deployment/supervisor.rst | 187 ++++++++++++++++++++++++++ 3 files changed, 229 insertions(+) create mode 100644 docs/topics/deployment/docker.rst create mode 100644 
docs/topics/deployment/supervisor.rst diff --git a/docs/topics/deployment/docker.rst b/docs/topics/deployment/docker.rst new file mode 100644 index 0000000000..b3ce5e4700 --- /dev/null +++ b/docs/topics/deployment/docker.rst @@ -0,0 +1,40 @@ +Docker +=========== + +This uses the default python container https://hub.docker.com/_/python + +Dockerfile +----------- + +.. code-block:: docker + + FROM python:3.12 + WORKDIR /code + COPY ./requirements.txt /code/requirements.txt + RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt + COPY ./src / + CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "80"] + + +This copies the `src` folder on your machine to the `/code` in the docker container and runs your app via uvicorn. Adjust according to whichever asgi server you choose. + +:doc:`manually-with-asgi-server` + + +Docker-compose +--------------- + +If you want to run the container as part of a docker-compose setup then you can simply use this compose file + +.. code-block:: yaml + + services: + api: + build: + context: ./ + dockerfile: Dockerfile + container_name: "api" + depends_on: + - database + ports: + - "80:80" diff --git a/docs/topics/deployment/index.rst b/docs/topics/deployment/index.rst index 20daa77b4a..f7a7ff8f75 100644 --- a/docs/topics/deployment/index.rst +++ b/docs/topics/deployment/index.rst @@ -12,3 +12,5 @@ Contents nginx-unit manually-with-asgi-server + docker + supervisor diff --git a/docs/topics/deployment/supervisor.rst b/docs/topics/deployment/supervisor.rst new file mode 100644 index 0000000000..f093bb0153 --- /dev/null +++ b/docs/topics/deployment/supervisor.rst @@ -0,0 +1,187 @@ +Supervisor (linux) +================== + +To keep a litestar app running you need to set it as a service. The two main ways to do that on Ubuntu are to use systemctl or supervisor. 
Both use unit files to define the service. + +Supervisor is an additional package you need to install, but I find it much easier to monitor the service with than with systemctl. + +.. code-block:: sh + + sudo apt install supervisor + +.. _conf_file: + +Conf file +---------- + +Supervisord uses a config file for defining services: http://supervisord.org/configuration.html + +.. code-block:: text + + [program:api] + directory=/opt/api/src + command=/opt/api/venv/bin/python main.py + redirect_stderr=true + stdout_logfile=/var/log/api.log + stdout_logfile_backups=10 + autostart=true + autorestart=true + + +`[program:api]` will be your service name, so `supervisorctl start api` + +`directory=/...` the directory where the service must run from + +`command=...` the script the service must run. Notice the Python executable path; this uses the venv's Python to run the app. + +You will need to reload the Supervisor config to load your new service file. Do so with: + +.. code-block:: sh + + sudo supervisorctl reread + sudo supervisorctl update + + +To start/stop the service: + +.. code-block:: sh + + sudo supervisorctl start api + sudo supervisorctl stop api + + +To get the status: + +.. code-block:: sh + + sudo supervisorctl status api + + +To watch the output: + +.. code-block:: sh + + sudo supervisorctl tail -f api + + +Start the service if it's not started, make sure it's running, and check the output to make sure there aren't any errors. If all that went according to plan, your Litestar application should be accessible on +http://yyy.yyy.yyy.yyy:80 + + +Alias for easy control +========================================= + +This follows on from the Supervisor setup. + +To make things easier to handle the service, here's an alias to use that will make things much easier for you. This introduces some commands like: + +.. 
code-block:: sh + + api start + api stop + api restart + api status + api watch + + +Create an alias file `/etc/profile.d/api.sh`; this is where the magic happens to let us simply use `api start` instead of `sudo supervisorctl start api` (all that extra typing.. urrgghhh). Adding it to `/etc/profile.d/` makes the alias available for all users on that system. They would, however, still need to pass sudo for these commands. + +.. code-block:: sh + + api() { + case $1 in + start) + echo "Starting" + sudo supervisorctl start api || true + ;; + stop) + echo "Stopping" + sudo supervisorctl stop api || true + ;; + restart) + echo "Stopping" + sudo supervisorctl stop api || true + sleep 2 + echo "Starting" + sudo supervisorctl start api || true + ;; + status) + echo "Status" + sudo supervisorctl status api || true + ;; + watch) + sudo supervisorctl tail -f api + ;; + + help) + echo "Available options:" + echo " api start" + echo " api stop" + echo " api restart" + echo " api status" + echo " api watch" + ;; + + *) + cd /opt/api + ;; + esac + } + +To activate the alias without restarting your session, use `source /etc/profile.d/api.sh`. + +Using the `watch` command lets you monitor the real-time output of your application. + + +Updating your application +-------------------------- + +A cool tip that the whole alias brings to the table is that if you include your Supervisor conf file and the alias in your code base, you can do something like this for updating your entire application. + +.. code-block:: sh + + api() { + case $1 in + # ... 
# + update) + echo " > Stopping" + sudo supervisorctl stop api || true + + echo " > Updating files" + cd /opt/api + git reset --hard origin/master + git pull origin master + + sleep 2 + + echo " > Linking supervisord service file" + sudo ln -sf /opt/api/server/service.conf /etc/supervisor/conf.d/api.conf + echo " > Linking service alias" + sudo ln -sf /opt/api/server/alias.sh /etc/profile.d/api.sh + source /etc/profile.d/api.sh + + sleep 2 + + echo " > Updating supervisord services" + sudo supervisorctl reread + sudo supervisorctl update + + sleep 2 + + source venv/bin/activate + echo " > Updating dependencies" + pip install -U -r requirements.txt + + echo "------------------------" + echo "Done" + + read -p "Start the service? (y/n) " -n 1 -r + echo # (optional) move to a new line + if [[ $REPLY =~ ^[Yy]$ ]] + then + echo "Starting" + sudo supervisorctl start api || true + fi + ;; + +You can symlink both the alias file and the conf file into their respective locations and load them up after a git pull. 
From 5b1dec41e6ae2fa12e9684119c3e2cf1aafaf0f5 Mon Sep 17 00:00:00 2001 From: "allcontributors[bot]" <46447321+allcontributors[bot]@users.noreply.github.com> Date: Sat, 2 Dec 2023 12:36:07 -0600 Subject: [PATCH 44/45] docs: add WilliamStam as a contributor for doc (#2826) * docs: update README.md [skip ci] * docs: update .all-contributorsrc [skip ci] --------- Co-authored-by: allcontributors[bot] <46447321+allcontributors[bot]@users.noreply.github.com> --- .all-contributorsrc | 9 +++++++++ README.md | 3 +++ 2 files changed, 12 insertions(+) diff --git a/.all-contributorsrc b/.all-contributorsrc index 94c8719719..cea2cfeaaf 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -1479,6 +1479,15 @@ "bug", "code" ] + }, + { + "login": "WilliamStam", + "name": "William Stam", + "avatar_url": "https://avatars.githubusercontent.com/u/182800?v=4", + "profile": "https://github.com/WilliamStam", + "contributions": [ + "doc" + ] } ], "contributorsPerLine": 7, diff --git a/README.md b/README.md index 885ffe051f..536ec863fd 100644 --- a/README.md +++ b/README.md @@ -510,6 +510,9 @@ see [the contribution guide](CONTRIBUTING.rst). Marcus Lim
Marcus Lim

📖 Henry Zhou
Henry Zhou

🐛 💻 + + William Stam
William Stam

📖 + From 7979771d941ab2cc3be6249f59d31dc48ccc6039 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janek=20Nouvertn=C3=A9?= Date: Sun, 3 Dec 2023 12:43:44 +0100 Subject: [PATCH 45/45] ci: validate feature PR target (#2831) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * validate feature PR target --------- Signed-off-by: Janek Nouvertné <25355197+provinzkraut@users.noreply.github.com> --- .github/workflows/pr-target.yml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 .github/workflows/pr-target.yml diff --git a/.github/workflows/pr-target.yml b/.github/workflows/pr-target.yml new file mode 100644 index 0000000000..707a8633a6 --- /dev/null +++ b/.github/workflows/pr-target.yml @@ -0,0 +1,25 @@ +name: "Validate PR target" + +on: + pull_request: + types: + - opened + - edited + - synchronize + branches: + - main + +permissions: + pull-requests: read + +jobs: + main: + name: Validate PR target branch + runs-on: ubuntu-latest + steps: + - name: Check PR target + if: ${{ startsWith(github.event.pull_request.title, 'feat') }} + uses: actions/github-script@v3 + with: + script: | + core.setFailed('Cannot merge feature type PR into main. Merge into a feature branch or develop')