From 5d8c659373ae2b169892fc9d99d54bd1b3baf65a Mon Sep 17 00:00:00 2001 From: Sean Quah <8349537+squahtx@users.noreply.github.com> Date: Tue, 30 May 2023 14:37:39 +0100 Subject: [PATCH 01/75] Remove unused `FederationServer.__str__` override (#15690) Signed-off-by: Sean Quah --- changelog.d/15690.misc | 1 + synapse/federation/federation_server.py | 3 --- 2 files changed, 1 insertion(+), 3 deletions(-) create mode 100644 changelog.d/15690.misc diff --git a/changelog.d/15690.misc b/changelog.d/15690.misc new file mode 100644 index 000000000000..c6c259eb7d14 --- /dev/null +++ b/changelog.d/15690.misc @@ -0,0 +1 @@ +Remove some unused code. diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index f4ca70a69806..e17cb840de99 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -1291,9 +1291,6 @@ async def _process_incoming_pdus_in_room_inner( return lock = new_lock - def __str__(self) -> str: - return "" % self.server_name - async def exchange_third_party_invite( self, sender_user_id: str, target_user_id: str, room_id: str, signed: Dict ) -> None: From e2c8458bba5ab20f84c93a6c68e293b2d304cdc0 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 17 Jun 2022 14:48:55 +0200 Subject: [PATCH 02/75] Make the api.auth.Auth a Protocol --- synapse/api/auth/__init__.py | 175 ++++++++++++++ synapse/api/auth/base.py | 273 ++++++++++++++++++++++ synapse/api/{auth.py => auth/internal.py} | 249 +------------------- synapse/server.py | 3 +- tests/api/test_auth.py | 4 +- tests/handlers/test_register.py | 4 +- tests/test_state.py | 4 +- 7 files changed, 464 insertions(+), 248 deletions(-) create mode 100644 synapse/api/auth/__init__.py create mode 100644 synapse/api/auth/base.py rename synapse/api/{auth.py => auth/internal.py} (61%) diff --git a/synapse/api/auth/__init__.py b/synapse/api/auth/__init__.py new file mode 100644 index 000000000000..90cfe39d7623 --- /dev/null +++ b/synapse/api/auth/__init__.py @@ -0,0 +1,175 @@ +# Copyright 2023 The Matrix.org Foundation. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Optional, Tuple + +from typing_extensions import Protocol + +from twisted.web.server import Request + +from synapse.appservice import ApplicationService +from synapse.http.site import SynapseRequest +from synapse.types import Requester + +# guests always get this device id. +GUEST_DEVICE_ID = "guest_device" + + +class Auth(Protocol): + """The interface that an auth provider must implement.""" + + async def check_user_in_room( + self, + room_id: str, + requester: Requester, + allow_departed_users: bool = False, + ) -> Tuple[str, Optional[str]]: + """Check if the user is in the room, or was at some point. + Args: + room_id: The room to check. + + user_id: The user to check. + + current_state: Optional map of the current state of the room. + If provided then that map is used to check whether they are a + member of the room. Otherwise the current membership is + loaded from the database. 
+ + allow_departed_users: if True, accept users that were previously + members but have now departed. + + Raises: + AuthError if the user is/was not in the room. + Returns: + The current membership of the user in the room and the + membership event ID of the user. + """ + + async def get_user_by_req( + self, + request: SynapseRequest, + allow_guest: bool = False, + allow_expired: bool = False, + ) -> Requester: + """Get a registered user's ID. + + Args: + request: An HTTP request with an access_token query parameter. + allow_guest: If False, will raise an AuthError if the user making the + request is a guest. + allow_expired: If True, allow the request through even if the account + is expired, or session token lifetime has ended. Note that + /login will deliver access tokens regardless of expiration. + + Returns: + Resolves to the requester + Raises: + InvalidClientCredentialsError if no user by that token exists or the token + is invalid. + AuthError if access is denied for the user in the access token + """ + + async def validate_appservice_can_control_user_id( + self, app_service: ApplicationService, user_id: str + ) -> None: + """Validates that the app service is allowed to control + the given user. + + Args: + app_service: The app service that controls the user + user_id: The author MXID that the app service is controlling + + Raises: + AuthError: If the application service is not allowed to control the user + (user namespace regex does not match, wrong homeserver, etc) + or if the user has not been registered yet. + """ + + async def get_user_by_access_token( + self, + token: str, + allow_expired: bool = False, + ) -> Requester: + """Validate access token and get user_id from it + + Args: + token: The access token to get the user by + allow_expired: If False, raises an InvalidClientTokenError + if the token is expired + + Raises: + InvalidClientTokenError if a user by that token exists, but the token is + expired + InvalidClientCredentialsError if no user by that token exists or the token + is invalid + """ + + async def is_server_admin(self, requester: Requester) -> bool: + """Check if the given user is a local server admin. + + Args: + requester: user to check + + Returns: + True if the user is an admin + """ + + async def check_can_change_room_list( + self, room_id: str, requester: Requester + ) -> bool: + """Determine whether the user is allowed to edit the room's entry in the + published room list. + + Args: + room_id + user + """ + + @staticmethod + def has_access_token(request: Request) -> bool: + """Checks if the request has an access_token. + + Returns: + False if no access_token was given, True otherwise. + """ + + @staticmethod + def get_access_token_from_request(request: Request) -> str: + """Extracts the access_token from the request. + + Args: + request: The http request. + Returns: + The access_token + Raises: + MissingClientTokenError: If there isn't a single access_token in the + request + """ + + async def check_user_in_room_or_world_readable( + self, room_id: str, requester: Requester, allow_departed_users: bool = False + ) -> Tuple[str, Optional[str]]: + """Checks that the user is or was in the room or the room is world + readable. If it isn't then an exception is raised. + + Args: + room_id: room to check + user_id: user to check + allow_departed_users: if True, accept users that were previously + members but have now departed + + Returns: + Resolves to the current membership of the user in the room and the + membership event ID of the user. 
If the user is not in the room and + never has been, then `(Membership.JOIN, None)` is returned. + """ diff --git a/synapse/api/auth/base.py b/synapse/api/auth/base.py new file mode 100644 index 000000000000..240f2b90dee2 --- /dev/null +++ b/synapse/api/auth/base.py @@ -0,0 +1,273 @@ +# Copyright 2023 The Matrix.org Foundation. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import logging +from typing import TYPE_CHECKING, Optional, Tuple + +from twisted.web.server import Request + +from synapse import event_auth +from synapse.api.constants import EventTypes, HistoryVisibility, Membership +from synapse.api.errors import ( + AuthError, + Codes, + MissingClientTokenError, + UnstableSpecAuthError, +) +from synapse.appservice import ApplicationService +from synapse.logging.opentracing import trace +from synapse.types import Requester + +if TYPE_CHECKING: + from synapse.server import HomeServer + +logger = logging.getLogger(__name__) + + +class BaseAuth: + """Common base class for all auth implementations.""" + + def __init__(self, hs: "HomeServer"): + self.hs = hs + self.store = hs.get_datastores().main + self._storage_controllers = hs.get_storage_controllers() + + async def check_user_in_room( + self, + room_id: str, + requester: Requester, + allow_departed_users: bool = False, + ) -> Tuple[str, Optional[str]]: + """Check if the user is in the room, or was at some point. + Args: + room_id: The room to check. + + requester: The user making the request, according to the access token. + + current_state: Optional map of the current state of the room. + If provided then that map is used to check whether they are a + member of the room. Otherwise the current membership is + loaded from the database. + + allow_departed_users: if True, accept users that were previously + members but have now departed. + + Raises: + AuthError if the user is/was not in the room. + Returns: + The current membership of the user in the room and the + membership event ID of the user. + """ + + user_id = requester.user.to_string() + ( + membership, + member_event_id, + ) = await self.store.get_local_current_membership_for_user_in_room( + user_id=user_id, + room_id=room_id, + ) + + if membership: + if membership == Membership.JOIN: + return membership, member_event_id + + # XXX this looks totally bogus. Why do we not allow users who have been banned, + # or those who were members previously and have been re-invited? + if allow_departed_users and membership == Membership.LEAVE: + forgot = await self.store.did_forget(user_id, room_id) + if not forgot: + return membership, member_event_id + raise UnstableSpecAuthError( + 403, + "User %s not in room %s" % (user_id, room_id), + errcode=Codes.NOT_JOINED, + ) + + @trace + async def check_user_in_room_or_world_readable( + self, room_id: str, requester: Requester, allow_departed_users: bool = False + ) -> Tuple[str, Optional[str]]: + """Checks that the user is or was in the room or the room is world + readable. If it isn't then an exception is raised. 
+ + Args: + room_id: room to check + user_id: user to check + allow_departed_users: if True, accept users that were previously + members but have now departed + + Returns: + Resolves to the current membership of the user in the room and the + membership event ID of the user. If the user is not in the room and + never has been, then `(Membership.JOIN, None)` is returned. + """ + + try: + # check_user_in_room will return the most recent membership + # event for the user if: + # * The user is a non-guest user, and was ever in the room + # * The user is a guest user, and has joined the room + # else it will throw. + return await self.check_user_in_room( + room_id, requester, allow_departed_users=allow_departed_users + ) + except AuthError: + visibility = await self._storage_controllers.state.get_current_state_event( + room_id, EventTypes.RoomHistoryVisibility, "" + ) + if ( + visibility + and visibility.content.get("history_visibility") + == HistoryVisibility.WORLD_READABLE + ): + return Membership.JOIN, None + raise AuthError( + 403, + "User %r not in room %s, and room previews are disabled" + % (requester.user, room_id), + ) + + async def validate_appservice_can_control_user_id( + self, app_service: ApplicationService, user_id: str + ) -> None: + """Validates that the app service is allowed to control + the given user. + + Args: + app_service: The app service that controls the user + user_id: The author MXID that the app service is controlling + + Raises: + AuthError: If the application service is not allowed to control the user + (user namespace regex does not match, wrong homeserver, etc) + or if the user has not been registered yet. + """ + + # It's ok if the app service is trying to use the sender from their registration + if app_service.sender == user_id: + pass + # Check to make sure the app service is allowed to control the user + elif not app_service.is_interested_in_user(user_id): + raise AuthError( + 403, + "Application service cannot masquerade as this user (%s)." % user_id, + ) + # Check to make sure the user is already registered on the homeserver + elif not (await self.store.get_user_by_id(user_id)): + raise AuthError( + 403, "Application service has not registered this user (%s)" % user_id + ) + + async def is_server_admin(self, requester: Requester) -> bool: + """Check if the given user is a local server admin. + + Args: + requester: user to check + + Returns: + True if the user is an admin + """ + raise NotImplementedError() + + async def check_can_change_room_list( + self, room_id: str, requester: Requester + ) -> bool: + """Determine whether the user is allowed to edit the room's entry in the + published room list. + + Args: + room_id + user + """ + + is_admin = await self.is_server_admin(requester) + if is_admin: + return True + + await self.check_user_in_room(room_id, requester) + + # We currently require the user is a "moderator" in the room. 
We do this + # by checking if they would (theoretically) be able to change the + # m.room.canonical_alias events + + power_level_event = ( + await self._storage_controllers.state.get_current_state_event( + room_id, EventTypes.PowerLevels, "" + ) + ) + + auth_events = {} + if power_level_event: + auth_events[(EventTypes.PowerLevels, "")] = power_level_event + + send_level = event_auth.get_send_level( + EventTypes.CanonicalAlias, "", power_level_event + ) + user_level = event_auth.get_user_power_level( + requester.user.to_string(), auth_events + ) + + return user_level >= send_level + + @staticmethod + def has_access_token(request: Request) -> bool: + """Checks if the request has an access_token. + + Returns: + False if no access_token was given, True otherwise. + """ + # This will always be set by the time Twisted calls us. + assert request.args is not None + + query_params = request.args.get(b"access_token") + auth_headers = request.requestHeaders.getRawHeaders(b"Authorization") + return bool(query_params) or bool(auth_headers) + + @staticmethod + def get_access_token_from_request(request: Request) -> str: + """Extracts the access_token from the request. + + Args: + request: The http request. + Returns: + The access_token + Raises: + MissingClientTokenError: If there isn't a single access_token in the + request + """ + # This will always be set by the time Twisted calls us. + assert request.args is not None + + auth_headers = request.requestHeaders.getRawHeaders(b"Authorization") + query_params = request.args.get(b"access_token") + if auth_headers: + # Try the get the access_token from a "Authorization: Bearer" + # header + if query_params is not None: + raise MissingClientTokenError( + "Mixing Authorization headers and access_token query parameters." + ) + if len(auth_headers) > 1: + raise MissingClientTokenError("Too many Authorization headers.") + parts = auth_headers[0].split(b" ") + if parts[0] == b"Bearer" and len(parts) == 2: + return parts[1].decode("ascii") + else: + raise MissingClientTokenError("Invalid Authorization header.") + else: + # Try to get the access_token from the query params. + if not query_params: + raise MissingClientTokenError() + + return query_params[0].decode("ascii") diff --git a/synapse/api/auth.py b/synapse/api/auth/internal.py similarity index 61% rename from synapse/api/auth.py rename to synapse/api/auth/internal.py index 66e869bc2db2..813d537e537c 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth/internal.py @@ -1,4 +1,4 @@ -# Copyright 2014 - 2016 OpenMarket Ltd +# Copyright 2023 The Matrix.org Foundation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,113 +12,49 @@ # See the License for the specific language governing permissions and # limitations under the License. 
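+
+# This module is the "internal" implementation of the `Auth` protocol defined
+# in `synapse/api/auth/__init__.py`: it authenticates requests by validating
+# access tokens issued by Synapse itself. `HomeServer.get_auth()` returns an
+# instance of `InternalAuth`.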
import logging -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional import pymacaroons from netaddr import IPAddress from twisted.web.server import Request -from synapse import event_auth -from synapse.api.constants import EventTypes, HistoryVisibility, Membership from synapse.api.errors import ( AuthError, Codes, InvalidClientTokenError, MissingClientTokenError, - UnstableSpecAuthError, ) -from synapse.appservice import ApplicationService from synapse.http import get_request_user_agent from synapse.http.site import SynapseRequest -from synapse.logging.opentracing import ( - active_span, - force_tracing, - start_active_span, - trace, -) +from synapse.logging.opentracing import active_span, force_tracing, start_active_span from synapse.types import Requester, create_requester from synapse.util.cancellation import cancellable +from . import GUEST_DEVICE_ID +from .base import BaseAuth + if TYPE_CHECKING: from synapse.server import HomeServer logger = logging.getLogger(__name__) -# guests always get this device id. -GUEST_DEVICE_ID = "guest_device" - - -class Auth: +class InternalAuth(BaseAuth): """ This class contains functions for authenticating users of our client-server API. """ def __init__(self, hs: "HomeServer"): - self.hs = hs + super().__init__(hs) self.clock = hs.get_clock() - self.store = hs.get_datastores().main self._account_validity_handler = hs.get_account_validity_handler() - self._storage_controllers = hs.get_storage_controllers() self._macaroon_generator = hs.get_macaroon_generator() self._track_appservice_user_ips = hs.config.appservice.track_appservice_user_ips self._track_puppeted_user_ips = hs.config.api.track_puppeted_user_ips self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users - async def check_user_in_room( - self, - room_id: str, - requester: Requester, - allow_departed_users: bool = False, - ) -> Tuple[str, Optional[str]]: - """Check if the user is in the room, or was at some point. - Args: - room_id: The room to check. - - requester: The user making the request, according to the access token. - - current_state: Optional map of the current state of the room. - If provided then that map is used to check whether they are a - member of the room. Otherwise the current membership is - loaded from the database. - - allow_departed_users: if True, accept users that were previously - members but have now departed. - - Raises: - AuthError if the user is/was not in the room. - Returns: - The current membership of the user in the room and the - membership event ID of the user. - """ - - user_id = requester.user.to_string() - ( - membership, - member_event_id, - ) = await self.store.get_local_current_membership_for_user_in_room( - user_id=user_id, - room_id=room_id, - ) - - if membership: - if membership == Membership.JOIN: - return membership, member_event_id - - # XXX this looks totally bogus. Why do we not allow users who have been banned, - # or those who were members previously and have been re-invited? 
- if allow_departed_users and membership == Membership.LEAVE: - forgot = await self.store.did_forget(user_id, room_id) - if not forgot: - return membership, member_event_id - raise UnstableSpecAuthError( - 403, - "User %s not in room %s" % (user_id, room_id), - errcode=Codes.NOT_JOINED, - ) - @cancellable async def get_user_by_req( self, @@ -253,37 +189,6 @@ async def _wrapped_get_user_by_req( except KeyError: raise MissingClientTokenError() - async def validate_appservice_can_control_user_id( - self, app_service: ApplicationService, user_id: str - ) -> None: - """Validates that the app service is allowed to control - the given user. - - Args: - app_service: The app service that controls the user - user_id: The author MXID that the app service is controlling - - Raises: - AuthError: If the application service is not allowed to control the user - (user namespace regex does not match, wrong homeserver, etc) - or if the user has not been registered yet. - """ - - # It's ok if the app service is trying to use the sender from their registration - if app_service.sender == user_id: - pass - # Check to make sure the app service is allowed to control the user - elif not app_service.is_interested_in_user(user_id): - raise AuthError( - 403, - "Application service cannot masquerade as this user (%s)." % user_id, - ) - # Check to make sure the user is already registered on the homeserver - elif not (await self.store.get_user_by_id(user_id)): - raise AuthError( - 403, "Application service has not registered this user (%s)" % user_id - ) - @cancellable async def _get_appservice_user(self, request: Request) -> Optional[Requester]: """ @@ -462,141 +367,3 @@ async def is_server_admin(self, requester: Requester) -> bool: True if the user is an admin """ return await self.store.is_server_admin(requester.user) - - async def check_can_change_room_list( - self, room_id: str, requester: Requester - ) -> bool: - """Determine whether the user is allowed to edit the room's entry in the - published room list. - - Args: - room_id: The room to check. - requester: The user making the request, according to the access token. - """ - - is_admin = await self.is_server_admin(requester) - if is_admin: - return True - - await self.check_user_in_room(room_id, requester) - - # We currently require the user is a "moderator" in the room. We do this - # by checking if they would (theoretically) be able to change the - # m.room.canonical_alias events - - power_level_event = ( - await self._storage_controllers.state.get_current_state_event( - room_id, EventTypes.PowerLevels, "" - ) - ) - - auth_events = {} - if power_level_event: - auth_events[(EventTypes.PowerLevels, "")] = power_level_event - - send_level = event_auth.get_send_level( - EventTypes.CanonicalAlias, "", power_level_event - ) - user_level = event_auth.get_user_power_level( - requester.user.to_string(), auth_events - ) - - return user_level >= send_level - - @staticmethod - def has_access_token(request: Request) -> bool: - """Checks if the request has an access_token. - - Returns: - False if no access_token was given, True otherwise. - """ - # This will always be set by the time Twisted calls us. - assert request.args is not None - - query_params = request.args.get(b"access_token") - auth_headers = request.requestHeaders.getRawHeaders(b"Authorization") - return bool(query_params) or bool(auth_headers) - - @staticmethod - @cancellable - def get_access_token_from_request(request: Request) -> str: - """Extracts the access_token from the request. 
- - Args: - request: The http request. - Returns: - The access_token - Raises: - MissingClientTokenError: If there isn't a single access_token in the - request - """ - # This will always be set by the time Twisted calls us. - assert request.args is not None - - auth_headers = request.requestHeaders.getRawHeaders(b"Authorization") - query_params = request.args.get(b"access_token") - if auth_headers: - # Try the get the access_token from a "Authorization: Bearer" - # header - if query_params is not None: - raise MissingClientTokenError( - "Mixing Authorization headers and access_token query parameters." - ) - if len(auth_headers) > 1: - raise MissingClientTokenError("Too many Authorization headers.") - parts = auth_headers[0].split(b" ") - if parts[0] == b"Bearer" and len(parts) == 2: - return parts[1].decode("ascii") - else: - raise MissingClientTokenError("Invalid Authorization header.") - else: - # Try to get the access_token from the query params. - if not query_params: - raise MissingClientTokenError() - - return query_params[0].decode("ascii") - - @trace - async def check_user_in_room_or_world_readable( - self, room_id: str, requester: Requester, allow_departed_users: bool = False - ) -> Tuple[str, Optional[str]]: - """Checks that the user is or was in the room or the room is world - readable. If it isn't then an exception is raised. - - Args: - room_id: The room to check. - requester: The user making the request, according to the access token. - allow_departed_users: If True, accept users that were previously - members but have now departed. - - Returns: - Resolves to the current membership of the user in the room and the - membership event ID of the user. If the user is not in the room and - never has been, then `(Membership.JOIN, None)` is returned. - """ - - try: - # check_user_in_room will return the most recent membership - # event for the user if: - # * The user is a non-guest user, and was ever in the room - # * The user is a guest user, and has joined the room - # else it will throw. 
- return await self.check_user_in_room( - room_id, requester, allow_departed_users=allow_departed_users - ) - except AuthError: - visibility = await self._storage_controllers.state.get_current_state_event( - room_id, EventTypes.RoomHistoryVisibility, "" - ) - if ( - visibility - and visibility.content.get("history_visibility") - == HistoryVisibility.WORLD_READABLE - ): - return Membership.JOIN, None - raise UnstableSpecAuthError( - 403, - "User %s not in room %s, and room previews are disabled" - % (requester.user, room_id), - errcode=Codes.NOT_JOINED, - ) diff --git a/synapse/server.py b/synapse/server.py index cce5fb66ff02..df88af12a999 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -31,6 +31,7 @@ from twisted.web.resource import Resource from synapse.api.auth import Auth +from synapse.api.auth.internal import InternalAuth from synapse.api.auth_blocking import AuthBlocking from synapse.api.filtering import Filtering from synapse.api.ratelimiting import Ratelimiter, RequestRatelimiter @@ -427,7 +428,7 @@ def get_replication_notifier(self) -> ReplicationNotifier: @cache_in_self def get_auth(self) -> Auth: - return Auth(self) + return InternalAuth(self) @cache_in_self def get_auth_blocking(self) -> AuthBlocking: diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 6e36e73f0d75..3dac52d178ad 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -18,7 +18,7 @@ from twisted.test.proto_helpers import MemoryReactor -from synapse.api.auth import Auth +from synapse.api.auth.internal import InternalAuth from synapse.api.auth_blocking import AuthBlocking from synapse.api.constants import UserTypes from synapse.api.errors import ( @@ -48,7 +48,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: # have been called by the HomeserverTestCase machinery. hs.datastores.main = self.store # type: ignore[union-attr] hs.get_auth_handler().store = self.store - self.auth = Auth(hs) + self.auth = InternalAuth(hs) # AuthBlocking reads from the hs' config on initialization. 
We need to # modify its config instead of the hs' diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 73822b07a599..8d8584609b8c 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -17,7 +17,7 @@ from twisted.test.proto_helpers import MemoryReactor -from synapse.api.auth import Auth +from synapse.api.auth.internal import InternalAuth from synapse.api.constants import UserTypes from synapse.api.errors import ( CodeMessageException, @@ -683,7 +683,7 @@ def test_spam_checker_shadow_ban(self) -> None: request = Mock(args={}) request.args[b"access_token"] = [token.encode("ascii")] request.requestHeaders.getRawHeaders = mock_getRawHeaders() - auth = Auth(self.hs) + auth = InternalAuth(self.hs) requester = self.get_success(auth.get_user_by_req(request)) self.assertTrue(requester.shadow_banned) diff --git a/tests/test_state.py b/tests/test_state.py index ddf59916b172..7a49b8795320 100644 --- a/tests/test_state.py +++ b/tests/test_state.py @@ -28,7 +28,7 @@ from twisted.internet import defer -from synapse.api.auth import Auth +from synapse.api.auth.internal import InternalAuth from synapse.api.constants import EventTypes, Membership from synapse.api.room_versions import RoomVersions from synapse.events import EventBase, make_event_from_dict @@ -240,7 +240,7 @@ def setUp(self) -> None: hs.get_macaroon_generator.return_value = MacaroonGenerator( clock, "tesths", b"verysecret" ) - hs.get_auth.return_value = Auth(hs) + hs.get_auth.return_value = InternalAuth(hs) hs.get_state_resolution_handler = lambda: StateResolutionHandler(hs) hs.get_storage_controllers.return_value = storage_controllers From 765244faeef9e20c573d2c7935f05f76aeca1c28 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 13 Sep 2022 17:54:32 +0200 Subject: [PATCH 03/75] Initial MSC3964 support: delegation of auth to OIDC server --- synapse/api/auth/oauth_delegated.py | 227 ++++++++++++++++++++++++++++ synapse/config/auth.py | 30 +++- synapse/server.py | 4 + 3 files changed, 260 insertions(+), 1 deletion(-) create mode 100644 synapse/api/auth/oauth_delegated.py diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/oauth_delegated.py new file mode 100644 index 000000000000..b3b5c29a949e --- /dev/null +++ b/synapse/api/auth/oauth_delegated.py @@ -0,0 +1,227 @@ +# Copyright 2023 The Matrix.org Foundation. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
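+
+# Experimental support for delegating authentication to an OAuth 2.0 / OpenID
+# Connect provider (MSC3861): instead of validating access tokens locally,
+# Synapse asks the configured provider whether a token is active via RFC 7662
+# token introspection, and derives the user, device and privileges from the
+# returned claims and MSC2967 scopes.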
+import logging +from typing import TYPE_CHECKING, Any, Dict, List, Optional +from urllib.parse import urlencode + +from authlib.oauth2 import ClientAuth +from authlib.oauth2.auth import encode_client_secret_basic, encode_client_secret_post +from authlib.oauth2.rfc7523 import ClientSecretJWT, PrivateKeyJWT, private_key_jwt_sign +from authlib.oauth2.rfc7662 import IntrospectionToken +from authlib.oidc.discovery import OpenIDProviderMetadata, get_well_known_url + +from twisted.web.client import readBody +from twisted.web.http_headers import Headers + +from synapse.api.auth.base import BaseAuth +from synapse.api.errors import AuthError, StoreError +from synapse.http.site import SynapseRequest +from synapse.logging.context import make_deferred_yieldable +from synapse.types import Requester, UserID, create_requester +from synapse.util import json_decoder +from synapse.util.caches.cached_call import RetryOnExceptionCachedCall + +if TYPE_CHECKING: + from synapse.server import HomeServer + +logger = logging.getLogger(__name__) + + +def scope_to_list(scope: str) -> List[str]: + """Convert a scope string to a list of scope tokens""" + return scope.strip().split(" ") + + +class PrivateKeyJWTWithKid(PrivateKeyJWT): + """An implementation of the private_key_jwt client auth method that includes a kid header. + + This is needed because some providers (Keycloak) require the kid header to figure + out which key to use to verify the signature. + """ + + def sign(self, auth: Any, token_endpoint: str) -> bytes: + return private_key_jwt_sign( + auth.client_secret, + client_id=auth.client_id, + token_endpoint=token_endpoint, + claims=self.claims, + header={"kid": auth.client_secret["kid"]}, + ) + + +class OAuthDelegatedAuth(BaseAuth): + AUTH_METHODS = { + "client_secret_post": encode_client_secret_post, + "client_secret_basic": encode_client_secret_basic, + "client_secret_jwt": ClientSecretJWT(), + "private_key_jwt": PrivateKeyJWTWithKid(), + } + + def __init__(self, hs: "HomeServer"): + super().__init__(hs) + + self._config = hs.config.auth + assert self._config.oauth_delegation_enabled, "OAuth delegation is not enabled" + assert self._config.oauth_delegation_issuer, "No issuer provided" + assert self._config.oauth_delegation_client_id, "No client_id provided" + assert self._config.oauth_delegation_client_secret, "No client_secret provided" + assert ( + self._config.oauth_delegation_client_auth_method + in OAuthDelegatedAuth.AUTH_METHODS + ), "Invalid client_auth_method" + + self._http_client = hs.get_proxied_http_client() + self._hostname = hs.hostname + + self._issuer_metadata = RetryOnExceptionCachedCall(self._load_metadata) + secret = self._config.oauth_delegation_client_secret + self._client_auth = ClientAuth( + self._config.oauth_delegation_client_id, + secret, + OAuthDelegatedAuth.AUTH_METHODS[ + self._config.oauth_delegation_client_auth_method + ], + ) + + async def _load_metadata(self) -> OpenIDProviderMetadata: + if self._config.oauth_delegation_issuer_metadata is not None: + return OpenIDProviderMetadata( + **self._config.oauth_delegation_issuer_metadata + ) + url = get_well_known_url(self._config.oauth_delegation_issuer, external=True) + response = await self._http_client.get_json(url) + metadata = OpenIDProviderMetadata(**response) + # metadata.validate_introspection_endpoint() + return metadata + + async def _introspect_token(self, token: str) -> IntrospectionToken: + metadata = await self._issuer_metadata.get() + introspection_endpoint = metadata.get("introspection_endpoint") + raw_headers: 
Dict[str, str] = { + "Content-Type": "application/x-www-form-urlencoded", + "User-Agent": str(self._http_client.user_agent, "utf-8"), + "Accept": "application/json", + } + + args = {"token": token, "token_type_hint": "access_token"} + body = urlencode(args, True) + + # Fill the body/headers with credentials + uri, raw_headers, body = self._client_auth.prepare( + method="POST", uri=introspection_endpoint, headers=raw_headers, body=body + ) + headers = Headers({k: [v] for (k, v) in raw_headers.items()}) + + # Do the actual request + # We're not using the SimpleHttpClient util methods as we don't want to + # check the HTTP status code and we do the body encoding ourself. + response = await self._http_client.request( + method="POST", + uri=uri, + data=body.encode("utf-8"), + headers=headers, + ) + + resp_body = await make_deferred_yieldable(readBody(response)) + # TODO: Let's not worry about 5xx errors & co. for now and just try + # decoding that as JSON. We should also do some validation of the + # response + resp = json_decoder.decode(resp_body.decode("utf-8")) + return IntrospectionToken(**resp) + + async def get_user_by_req( + self, + request: SynapseRequest, + allow_guest: bool = False, + allow_expired: bool = False, + ) -> Requester: + access_token = self.get_access_token_from_request(request) + return await self.get_user_by_access_token(access_token, allow_expired) + + async def get_user_by_access_token( + self, + token: str, + allow_expired: bool = False, + ) -> Requester: + introspection_result = await self._introspect_token(token) + + logger.info(f"Introspection result: {introspection_result!r}") + + # TODO: introspection verification should be more extensive, especially: + # - verify the scopes + # - verify the audience + if not introspection_result.get("active"): + raise AuthError( + 403, + "Invalid access token", + ) + + # TODO: claim mapping should be configurable + username: Optional[str] = introspection_result.get("username") + if username is None or not isinstance(username, str): + raise AuthError( + 500, + "Invalid username claim in the introspection result", + ) + + # Let's look at the scope + scope: List[str] = scope_to_list(introspection_result.get("scope", "")) + device_id = None + # Find device_id in scope + for tok in scope: + if tok.startswith("urn:matrix:org.matrix.msc2967.client:device:"): + parts = tok.split(":") + if len(parts) == 5: + device_id = parts[4] + + user_id = UserID(username, self._hostname) + user_info = await self.store.get_userinfo_by_id(user_id=user_id.to_string()) + + # If the user does not exist, we should create it on the fly + # TODO: we could use SCIM to provision users ahead of time and listen + # for SCIM SET events if those ever become standard: + # https://datatracker.ietf.org/doc/html/draft-hunt-scim-notify-00 + if not user_info: + await self.store.register_user(user_id=user_id.to_string()) + user_info = await self.store.get_userinfo_by_id(user_id=user_id.to_string()) + if not user_info: + raise AuthError( + 500, + "Could not create user on the fly", + ) + + if device_id: + # Create the device on the fly if it does not exist + try: + await self.store.get_device( + user_id=user_id.to_string(), device_id=device_id + ) + except StoreError: + await self.store.store_device( + user_id=user_id.to_string(), + device_id=device_id, + initial_device_display_name="OIDC-native client", + ) + + # TODO: there is a few things missing in the requester here, which still need + # to be figured out, like: + # - impersonation, with the `authenticated_entity`, 
which is used for + # rate-limiting, MAU limits, etc. + # - shadow-banning, with the `shadow_banned` flag + # - a proper solution for appservices, which still needs to be figured out in + # the context of MSC3861 + return create_requester( + user_id=user_id, + device_id=device_id, + ) diff --git a/synapse/config/auth.py b/synapse/config/auth.py index 35774962c0be..25b5cc60dcf1 100644 --- a/synapse/config/auth.py +++ b/synapse/config/auth.py @@ -14,9 +14,11 @@ # limitations under the License. from typing import Any +from authlib.jose.rfc7517 import JsonWebKey + from synapse.types import JsonDict -from ._base import Config +from ._base import Config, ConfigError class AuthConfig(Config): @@ -53,3 +55,29 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: self.ui_auth_session_timeout = self.parse_duration( ui_auth.get("session_timeout", 0) ) + + oauth_delegation = config.get("oauth_delegation", {}) + self.oauth_delegation_enabled = oauth_delegation.get("enabled", False) + self.oauth_delegation_issuer = oauth_delegation.get("issuer", "") + self.oauth_delegation_issuer_metadata = oauth_delegation.get("issuer_metadata") + self.oauth_delegation_account = oauth_delegation.get("account", "") + self.oauth_delegation_client_id = oauth_delegation.get("client_id", "") + self.oauth_delegation_client_secret = oauth_delegation.get("client_secret", "") + self.oauth_delegation_client_auth_method = oauth_delegation.get( + "client_auth_method", "client_secret_post" + ) + + self.password_enabled = password_config.get( + "enabled", not self.oauth_delegation_enabled + ) + + if self.oauth_delegation_client_auth_method == "private_key_jwt": + self.oauth_delegation_client_secret = JsonWebKey.import_key( + self.oauth_delegation_client_secret + ) + + # If we are delegating via OAuth then password cannot be supported as well + if self.oauth_delegation_enabled and self.password_enabled: + raise ConfigError( + "Password auth cannot be enabled when OAuth delegation is enabled" + ) diff --git a/synapse/server.py b/synapse/server.py index df88af12a999..1c82500f3024 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -428,6 +428,10 @@ def get_replication_notifier(self) -> ReplicationNotifier: @cache_in_self def get_auth(self) -> Auth: + if self.config.auth.oauth_delegation_enabled: + from synapse.api.auth.oauth_delegated import OAuthDelegatedAuth + + return OAuthDelegatedAuth(self) return InternalAuth(self) @cache_in_self From 8f576aa462684e13b20dc380e759a76e6db821b6 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 16 May 2023 15:36:40 +0200 Subject: [PATCH 04/75] Expose the public keys used for client authentication on an endpoint --- synapse/rest/synapse/client/__init__.py | 6 +++ synapse/rest/synapse/client/jwks.py | 72 +++++++++++++++++++++++++ 2 files changed, 78 insertions(+) create mode 100644 synapse/rest/synapse/client/jwks.py diff --git a/synapse/rest/synapse/client/__init__.py b/synapse/rest/synapse/client/__init__.py index e55924f5979b..dcfd0ad6aac6 100644 --- a/synapse/rest/synapse/client/__init__.py +++ b/synapse/rest/synapse/client/__init__.py @@ -46,6 +46,12 @@ def build_synapse_client_resource_tree(hs: "HomeServer") -> Mapping[str, Resourc "/_synapse/client/unsubscribe": UnsubscribeResource(hs), } + # Expose the JWKS endpoint if OAuth2 delegation is enabled + if hs.config.auth.oauth_delegation_enabled: + from synapse.rest.synapse.client.jwks import JwksResource + + resources["/_synapse/jwks"] = JwksResource(hs) + # provider-specific SSO bits. 
Only load these if they are enabled, since they # rely on optional dependencies. if hs.config.oidc.oidc_enabled: diff --git a/synapse/rest/synapse/client/jwks.py b/synapse/rest/synapse/client/jwks.py new file mode 100644 index 000000000000..818585843eaa --- /dev/null +++ b/synapse/rest/synapse/client/jwks.py @@ -0,0 +1,72 @@ +# Copyright 2022 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import logging +from typing import TYPE_CHECKING, Tuple + +from synapse.http.server import DirectServeJsonResource +from synapse.http.site import SynapseRequest +from synapse.types import JsonDict + +if TYPE_CHECKING: + from synapse.server import HomeServer + +logger = logging.getLogger(__name__) + + +class JwksResource(DirectServeJsonResource): + def __init__(self, hs: "HomeServer"): + from authlib.jose.rfc7517 import Key + + super().__init__(extract_context=True) + + # Parameters that are allowed to be exposed in the public key. + # This is done manually, because authlib's private to public key conversion + # is unreliable depending on the version. Instead, we just serialize the private + # key and only keep the public parameters. + # List from https://www.iana.org/assignments/jose/jose.xhtml#web-key-parameters + public_parameters = { + "kty", + "use", + "key_ops", + "alg", + "kid", + "x5u", + "x5c", + "x5t", + "x5t#S256", + "crv", + "x", + "y", + "n", + "e", + "ext", + } + + secret = hs.config.auth.oauth_delegation_client_secret + + if isinstance(secret, Key): + private_key = secret.as_dict() + public_key = { + k: v for k, v in private_key.items() if k in public_parameters + } + keys = [public_key] + else: + keys = [] + + self.res = { + "keys": keys, + } + + async def _async_render_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + return 200, self.res From e82ec6d00819253d15d22a41ba3b75ad77dce98f Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Thu, 18 Nov 2021 15:21:00 +0100 Subject: [PATCH 05/75] MSC2965: OIDC Provider discovery via well-known document --- synapse/rest/well_known.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py index e2174fdfea03..fd3b17a5ad51 100644 --- a/synapse/rest/well_known.py +++ b/synapse/rest/well_known.py @@ -44,6 +44,15 @@ def get_well_known(self) -> Optional[JsonDict]: "base_url": self._config.registration.default_identity_server } + if self._config.auth.oauth_delegation_enabled: + result["org.matrix.msc2965.authentication"] = { + "issuer": self._config.auth.oauth_delegation_issuer + } + if self._config.auth.oauth_delegation_account != "": + result["org.matrix.msc2965.authentication"][ + "account" + ] = self._config.auth.oauth_delegation_account + if self._config.server.extra_well_known_client_content: for ( key, From c5cf1b421d8e0d765f812880ff41fe5d244a0919 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 17 Jun 2022 16:58:05 +0200 Subject: [PATCH 06/75] Save the scopes in the requester --- synapse/api/auth/oauth_delegated.py | 1 + 
synapse/types/__init__.py | 8 ++++++++ tests/api/test_auth.py | 2 ++ 3 files changed, 11 insertions(+) diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/oauth_delegated.py index b3b5c29a949e..2715127e3285 100644 --- a/synapse/api/auth/oauth_delegated.py +++ b/synapse/api/auth/oauth_delegated.py @@ -224,4 +224,5 @@ async def get_user_by_access_token( return create_requester( user_id=user_id, device_id=device_id, + scope=scope, ) diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index 42baf8ac6bb4..dfc95e8ebb8a 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -131,6 +131,7 @@ class Requester: user: "UserID" access_token_id: Optional[int] is_guest: bool + scope: Set[str] shadow_banned: bool device_id: Optional[str] app_service: Optional["ApplicationService"] @@ -147,6 +148,7 @@ def serialize(self) -> Dict[str, Any]: "user_id": self.user.to_string(), "access_token_id": self.access_token_id, "is_guest": self.is_guest, + "scope": list(self.scope), "shadow_banned": self.shadow_banned, "device_id": self.device_id, "app_server_id": self.app_service.id if self.app_service else None, @@ -175,6 +177,7 @@ def deserialize( user=UserID.from_string(input["user_id"]), access_token_id=input["access_token_id"], is_guest=input["is_guest"], + scope=set(input["scope"]), shadow_banned=input["shadow_banned"], device_id=input["device_id"], app_service=appservice, @@ -186,6 +189,7 @@ def create_requester( user_id: Union[str, "UserID"], access_token_id: Optional[int] = None, is_guest: bool = False, + scope: StrCollection = (), shadow_banned: bool = False, device_id: Optional[str] = None, app_service: Optional["ApplicationService"] = None, @@ -199,6 +203,7 @@ def create_requester( access_token_id: *ID* of the access token used for this request, or None if it came via the appservice API or similar is_guest: True if the user making this request is a guest user + scope: the scope of the access token used for this request, if any shadow_banned: True if the user making this request is shadow-banned. 
device_id: device_id which was set at authentication time app_service: the AS requesting on behalf of the user @@ -215,10 +220,13 @@ def create_requester( if authenticated_entity is None: authenticated_entity = user_id.to_string() + scope = set(scope) + return Requester( user_id, access_token_id, is_guest, + scope, shadow_banned, device_id, app_service, diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 3dac52d178ad..cdb0048122a1 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -426,6 +426,7 @@ def test_blocking_mau__appservice_requester_allowed_when_not_tracking_ips( access_token_id=None, device_id="FOOBAR", is_guest=False, + scope=set(), shadow_banned=False, app_service=appservice, authenticated_entity="@appservice:server", @@ -456,6 +457,7 @@ def test_blocking_mau__appservice_requester_disallowed_when_tracking_ips( access_token_id=None, device_id="FOOBAR", is_guest=False, + scope=set(), shadow_banned=False, app_service=appservice, authenticated_entity="@appservice:server", From 7628dbf4e9b48d9714ccbd0530af579d9c290fed Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Mon, 20 Jun 2022 11:17:48 +0200 Subject: [PATCH 07/75] Handle the Synapse admin scope --- synapse/api/auth/oauth_delegated.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/oauth_delegated.py index 2715127e3285..ff1f395e5864 100644 --- a/synapse/api/auth/oauth_delegated.py +++ b/synapse/api/auth/oauth_delegated.py @@ -140,6 +140,9 @@ async def _introspect_token(self, token: str) -> IntrospectionToken: resp = json_decoder.decode(resp_body.decode("utf-8")) return IntrospectionToken(**resp) + async def is_server_admin(self, requester: Requester) -> bool: + return "urn:synapse:admin:*" in requester.scope + async def get_user_by_req( self, request: SynapseRequest, From f9cd549f6485620381443f2b4b75a1bd0a88d39f Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 13 Sep 2022 16:13:20 +0200 Subject: [PATCH 08/75] Record the `sub` claims as an external_id --- synapse/api/auth/oauth_delegated.py | 59 ++++++++++++++++++----------- 1 file changed, 37 insertions(+), 22 deletions(-) diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/oauth_delegated.py index ff1f395e5864..5565ef0a1a73 100644 --- a/synapse/api/auth/oauth_delegated.py +++ b/synapse/api/auth/oauth_delegated.py @@ -68,6 +68,8 @@ class OAuthDelegatedAuth(BaseAuth): "private_key_jwt": PrivateKeyJWTWithKid(), } + EXTERNAL_ID_PROVIDER = "oauth-delegated" + def __init__(self, hs: "HomeServer"): super().__init__(hs) @@ -170,13 +172,42 @@ async def get_user_by_access_token( "Invalid access token", ) - # TODO: claim mapping should be configurable - username: Optional[str] = introspection_result.get("username") - if username is None or not isinstance(username, str): - raise AuthError( - 500, - "Invalid username claim in the introspection result", + # Match via the sub claim + sub: Optional[str] = introspection_result.get("sub") + if sub is None: + raise AuthError(500, "Invalid sub claim in the introspection result") + + user_id_str = await self.store.get_user_by_external_id( + OAuthDelegatedAuth.EXTERNAL_ID_PROVIDER, sub + ) + if user_id_str is None: + # If we could not find a user via the external_id, it either does not exist, + # or the external_id was never recorded + + # TODO: claim mapping should be configurable + username: Optional[str] = introspection_result.get("username") + if username is None or not isinstance(username, str): + raise AuthError( + 500, + 
"Invalid username claim in the introspection result", + ) + user_id = UserID(username, self._hostname) + + # First try to find a user from the username claim + user_info = await self.store.get_userinfo_by_id(user_id=user_id.to_string()) + if user_info is None: + # If the user does not exist, we should create it on the fly + # TODO: we could use SCIM to provision users ahead of time and listen + # for SCIM SET events if those ever become standard: + # https://datatracker.ietf.org/doc/html/draft-hunt-scim-notify-00 + await self.store.register_user(user_id=user_id.to_string()) + + # And record the sub as external_id + await self.store.record_user_external_id( + OAuthDelegatedAuth.EXTERNAL_ID_PROVIDER, sub, user_id.to_string() ) + else: + user_id = UserID.from_string(user_id_str) # Let's look at the scope scope: List[str] = scope_to_list(introspection_result.get("scope", "")) @@ -188,22 +219,6 @@ async def get_user_by_access_token( if len(parts) == 5: device_id = parts[4] - user_id = UserID(username, self._hostname) - user_info = await self.store.get_userinfo_by_id(user_id=user_id.to_string()) - - # If the user does not exist, we should create it on the fly - # TODO: we could use SCIM to provision users ahead of time and listen - # for SCIM SET events if those ever become standard: - # https://datatracker.ietf.org/doc/html/draft-hunt-scim-notify-00 - if not user_info: - await self.store.register_user(user_id=user_id.to_string()) - user_info = await self.store.get_userinfo_by_id(user_id=user_id.to_string()) - if not user_info: - raise AuthError( - 500, - "Could not create user on the fly", - ) - if device_id: # Create the device on the fly if it does not exist try: From d20669971a5be17776a2991c77f5348662bb3902 Mon Sep 17 00:00:00 2001 From: Hugh Nimmo-Smith Date: Tue, 20 Sep 2022 12:54:18 +0100 Subject: [PATCH 09/75] Use `name` claim as display name when registering users on the fly. This makes is so that the `name` claim got when introspecting the token is used as the display name when registering a user on the fly. 
--- synapse/api/auth/oauth_delegated.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/oauth_delegated.py index 5565ef0a1a73..9e01e3fadc82 100644 --- a/synapse/api/auth/oauth_delegated.py +++ b/synapse/api/auth/oauth_delegated.py @@ -200,7 +200,14 @@ async def get_user_by_access_token( # TODO: we could use SCIM to provision users ahead of time and listen # for SCIM SET events if those ever become standard: # https://datatracker.ietf.org/doc/html/draft-hunt-scim-notify-00 - await self.store.register_user(user_id=user_id.to_string()) + + # TODO: claim mapping should be configurable + # If present, use the name claim as the displayname + name: Optional[str] = introspection_result.get("name") + + await self.store.register_user( + user_id=user_id.to_string(), create_profile_with_displayname=name + ) # And record the sub as external_id await self.store.record_user_external_id( From a1374b5c70fc8520930a1777dc131403812d7967 Mon Sep 17 00:00:00 2001 From: Hugh Nimmo-Smith Date: Wed, 16 Nov 2022 11:05:05 +0000 Subject: [PATCH 10/75] MSC2967: Check access token scope for use as user and add guest support --- synapse/api/auth/oauth_delegated.py | 30 +++++++++++++++++++---------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/oauth_delegated.py index 9e01e3fadc82..cfa178218cb8 100644 --- a/synapse/api/auth/oauth_delegated.py +++ b/synapse/api/auth/oauth_delegated.py @@ -25,7 +25,7 @@ from twisted.web.http_headers import Headers from synapse.api.auth.base import BaseAuth -from synapse.api.errors import AuthError, StoreError +from synapse.api.errors import AuthError, InvalidClientTokenError, StoreError from synapse.http.site import SynapseRequest from synapse.logging.context import make_deferred_yieldable from synapse.types import Requester, UserID, create_requester @@ -164,18 +164,29 @@ async def get_user_by_access_token( logger.info(f"Introspection result: {introspection_result!r}") # TODO: introspection verification should be more extensive, especially: - # - verify the scopes # - verify the audience if not introspection_result.get("active"): - raise AuthError( - 403, - "Invalid access token", - ) + raise InvalidClientTokenError("Token is not active") + + # Let's look at the scope + scope: List[str] = scope_to_list(introspection_result.get("scope", "")) + + # Determine type of user based on presence of particular scopes + has_admin_scope = "urn:synapse:admin:*" in scope + has_user_scope = "urn:matrix:org.matrix.msc2967.client:api:*" in scope + has_guest_scope = "urn:matrix:org.matrix.msc2967.client:api:guest" in scope + is_user = has_user_scope or has_admin_scope + is_guest = has_guest_scope and not is_user + + if not is_user and not is_guest: + raise InvalidClientTokenError("No scope in token granting user rights") # Match via the sub claim sub: Optional[str] = introspection_result.get("sub") if sub is None: - raise AuthError(500, "Invalid sub claim in the introspection result") + raise InvalidClientTokenError( + "Invalid sub claim in the introspection result" + ) user_id_str = await self.store.get_user_by_external_id( OAuthDelegatedAuth.EXTERNAL_ID_PROVIDER, sub @@ -216,10 +227,8 @@ async def get_user_by_access_token( else: user_id = UserID.from_string(user_id_str) - # Let's look at the scope - scope: List[str] = scope_to_list(introspection_result.get("scope", "")) - device_id = None # Find device_id in scope + device_id = None for tok in scope: if 
tok.startswith("urn:matrix:org.matrix.msc2967.client:device:"): parts = tok.split(":") @@ -250,4 +259,5 @@ async def get_user_by_access_token( user_id=user_id, device_id=device_id, scope=scope, + is_guest=is_guest, ) From 28a9663bdf092541250ae1209f201e57b663dc81 Mon Sep 17 00:00:00 2001 From: Hugh Nimmo-Smith Date: Wed, 16 Nov 2022 17:44:13 +0000 Subject: [PATCH 11/75] Initial tests for OAuth delegation --- tests/handlers/test_oauth_delegation.py | 345 ++++++++++++++++++++++++ 1 file changed, 345 insertions(+) create mode 100644 tests/handlers/test_oauth_delegation.py diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py new file mode 100644 index 000000000000..54f48948196a --- /dev/null +++ b/tests/handlers/test_oauth_delegation.py @@ -0,0 +1,345 @@ +# Copyright 2022 Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Any, Dict +from unittest.mock import ANY, Mock +from urllib.parse import parse_qs + +from twisted.test.proto_helpers import MemoryReactor + +from synapse.api.errors import InvalidClientTokenError +from synapse.server import HomeServer +from synapse.types import JsonDict +from synapse.util import Clock + +from tests.test_utils import FakeResponse, get_awaitable_result, simple_async_mock +from tests.unittest import HomeserverTestCase, skip_unless +from tests.utils import mock_getRawHeaders + +try: + import authlib # noqa: F401 + + HAS_AUTHLIB = True +except ImportError: + HAS_AUTHLIB = False + + +# These are a few constants that are used as config parameters in the tests. 
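+# The scope URNs below mirror the ones OAuthDelegatedAuth understands: the
+# MSC2967 scopes grant full or guest Client-Server API access and bind a
+# device ID, while SYNAPSE_ADMIN_SCOPE is treated as granting server-admin
+# rights.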
+SERVER_NAME = "test" +ISSUER = "https://issuer/" +CLIENT_ID = "test-client-id" +CLIENT_SECRET = "test-client-secret" +BASE_URL = "https://synapse/" +SCOPES = ["openid"] + +AUTHORIZATION_ENDPOINT = ISSUER + "authorize" +TOKEN_ENDPOINT = ISSUER + "token" +USERINFO_ENDPOINT = ISSUER + "userinfo" +WELL_KNOWN = ISSUER + ".well-known/openid-configuration" +JWKS_URI = ISSUER + ".well-known/jwks.json" +INTROSPECTION_ENDPOINT = ISSUER + "introspect" + +SYNAPSE_ADMIN_SCOPE = "urn:synapse:admin:*" +MATRIX_USER_SCOPE = "urn:matrix:org.matrix.msc2967.client:api:*" +MATRIX_GUEST_SCOPE = "urn:matrix:org.matrix.msc2967.client:api:guest" +DEVICE = "AABBCCDD" +MATRIX_DEVICE_SCOPE = "urn:matrix:org.matrix.msc2967.client:device:" + DEVICE +SUBJECT = "abc-def-ghi" +USERNAME = "test-user" + + +async def get_json(url: str) -> JsonDict: + # Mock get_json calls to handle jwks & oidc discovery endpoints + if url == WELL_KNOWN: + # Minimal discovery document, as defined in OpenID.Discovery + # https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata + return { + "issuer": ISSUER, + "authorization_endpoint": AUTHORIZATION_ENDPOINT, + "token_endpoint": TOKEN_ENDPOINT, + "jwks_uri": JWKS_URI, + "userinfo_endpoint": USERINFO_ENDPOINT, + "introspection_endpoint": INTROSPECTION_ENDPOINT, + "response_types_supported": ["code"], + "subject_types_supported": ["public"], + "id_token_signing_alg_values_supported": ["RS256"], + } + elif url == JWKS_URI: + return {"keys": []} + + return {} + + +@skip_unless(HAS_AUTHLIB, "requires authlib") +class MSC3861OAuthDelegation(HomeserverTestCase): + def default_config(self) -> Dict[str, Any]: + config = super().default_config() + config["public_baseurl"] = BASE_URL + config["oauth_delegation"] = { + "enabled": True, + "issuer": ISSUER, + "client_id": CLIENT_ID, + "client_auth_method": "client_secret_post", + "client_secret": CLIENT_SECRET, + } + return config + + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: + self.http_client = Mock(spec=["get_json"]) + self.http_client.get_json.side_effect = get_json + self.http_client.user_agent = b"Synapse Test" + + hs = self.setup_test_homeserver(proxied_http_client=self.http_client) + + self.auth = hs.get_auth() + + return hs + + def _assertParams(self) -> None: + """Assert that the request parameters are correct.""" + params = parse_qs(self.http_client.request.call_args[1]["data"].decode("utf-8")) + self.assertEqual(params["token"], ["mockAccessToken"]) + self.assertEqual(params["client_id"], [CLIENT_ID]) + self.assertEqual(params["client_secret"], [CLIENT_SECRET]) + + def test_inactive_token(self) -> None: + """The handler should return a 403 where the token is inactive.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={"active": False}, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + self.get_failure(self.auth.get_user_by_req(request), InvalidClientTokenError) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + + def test_active_no_scope(self) -> None: + """The handler should return a 403 where no scope is given.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={"active": True}, + ) + ) + request = Mock(args={}) + 
request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + self.get_failure(self.auth.get_user_by_req(request), InvalidClientTokenError) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + + def test_active_user_no_subject(self) -> None: + """The handler should return a 500 when no subject is present.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={"active": True, "scope": " ".join([MATRIX_USER_SCOPE])}, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + self.get_failure(self.auth.get_user_by_req(request), InvalidClientTokenError) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + + def test_active_no_user_scope(self) -> None: + """The handler should return a 500 when no subject is present.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([MATRIX_DEVICE_SCOPE]), + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + self.get_failure(self.auth.get_user_by_req(request), InvalidClientTokenError) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + + def test_active_admin(self) -> None: + """The handler should return a requester with admin rights.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([SYNAPSE_ADMIN_SCOPE]), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + requester = self.get_success(self.auth.get_user_by_req(request)) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + self.assertEqual(requester.user.to_string(), "@%s:%s" % (USERNAME, SERVER_NAME)) + self.assertEqual(requester.is_guest, False) + self.assertEqual(requester.device_id, None) + self.assertEqual( + get_awaitable_result(self.auth.is_server_admin(requester)), True + ) + + def test_active_admin_highest_privilege(self) -> None: + """The handler should resolve to the most permissive scope.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join( + [SYNAPSE_ADMIN_SCOPE, MATRIX_USER_SCOPE, MATRIX_GUEST_SCOPE] + ), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + requester = self.get_success(self.auth.get_user_by_req(request)) + 
self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + self.assertEqual(requester.user.to_string(), "@%s:%s" % (USERNAME, SERVER_NAME)) + self.assertEqual(requester.is_guest, False) + self.assertEqual(requester.device_id, None) + self.assertEqual( + get_awaitable_result(self.auth.is_server_admin(requester)), True + ) + + def test_active_user(self) -> None: + """The handler should return a requester with normal user rights.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([MATRIX_USER_SCOPE]), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + requester = self.get_success(self.auth.get_user_by_req(request)) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + self.assertEqual(requester.user.to_string(), "@%s:%s" % (USERNAME, SERVER_NAME)) + self.assertEqual(requester.is_guest, False) + self.assertEqual(requester.device_id, None) + self.assertEqual( + get_awaitable_result(self.auth.is_server_admin(requester)), False + ) + + def test_active_user_with_device(self) -> None: + """The handler should return a requester with normal user rights and a device ID.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([MATRIX_USER_SCOPE, MATRIX_DEVICE_SCOPE]), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + requester = self.get_success(self.auth.get_user_by_req(request)) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + self.assertEqual(requester.user.to_string(), "@%s:%s" % (USERNAME, SERVER_NAME)) + self.assertEqual(requester.is_guest, False) + self.assertEqual( + get_awaitable_result(self.auth.is_server_admin(requester)), False + ) + self.assertEqual(requester.device_id, DEVICE) + + def test_active_guest_with_device(self) -> None: + """The handler should return a requester with guest user rights and a device ID.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([MATRIX_GUEST_SCOPE, MATRIX_DEVICE_SCOPE]), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + requester = self.get_success(self.auth.get_user_by_req(request)) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + self.assertEqual(requester.user.to_string(), "@%s:%s" % (USERNAME, SERVER_NAME)) + self.assertEqual(requester.is_guest, True) + self.assertEqual( + 
get_awaitable_result(self.auth.is_server_admin(requester)), False
+        )
+        self.assertEqual(requester.device_id, DEVICE)

From 5fe96082d09d1af3dc33b62b6a47a6baca02703c Mon Sep 17 00:00:00 2001
From: Hugh Nimmo-Smith
Date: Thu, 17 Nov 2022 14:34:11 +0000
Subject: [PATCH 12/75] Actually enforce guest + return www-authenticate header

---
 synapse/api/auth/oauth_delegated.py     | 18 +++++++++--
 synapse/api/errors.py                   | 28 ++++++++++++++--
 synapse/http/server.py                  |  6 ++++
 tests/handlers/test_oauth_delegation.py | 43 +++++++++++++++++++++++--
 4 files changed, 87 insertions(+), 8 deletions(-)

diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/oauth_delegated.py
index cfa178218cb8..9cb6eb7f7944 100644
--- a/synapse/api/auth/oauth_delegated.py
+++ b/synapse/api/auth/oauth_delegated.py
@@ -25,7 +25,12 @@
 from twisted.web.http_headers import Headers
 
 from synapse.api.auth.base import BaseAuth
-from synapse.api.errors import AuthError, InvalidClientTokenError, StoreError
+from synapse.api.errors import (
+    AuthError,
+    InvalidClientTokenError,
+    OAuthInsufficientScopeError,
+    StoreError,
+)
 from synapse.http.site import SynapseRequest
 from synapse.logging.context import make_deferred_yieldable
 from synapse.types import Requester, UserID, create_requester
@@ -152,7 +157,16 @@ async def get_user_by_req(
         allow_expired: bool = False,
     ) -> Requester:
         access_token = self.get_access_token_from_request(request)
-        return await self.get_user_by_access_token(access_token, allow_expired)
+
+        # TODO: we probably want to assert allow_guest inside this call, so that we don't provision the user if they don't have enough permission.
+        requester = await self.get_user_by_access_token(access_token, allow_expired)
+
+        if not allow_guest and requester.is_guest:
+            raise OAuthInsufficientScopeError(
+                ["urn:matrix:org.matrix.msc2967.client:api:*"]
+            )
+
+        return requester
 
     async def get_user_by_access_token(
         self,
diff --git a/synapse/api/errors.py b/synapse/api/errors.py
index 8c7c94b04568..af894243f8d3 100644
--- a/synapse/api/errors.py
+++ b/synapse/api/errors.py
@@ -119,14 +119,20 @@ class Codes(str, Enum):
 
 
 class CodeMessageException(RuntimeError):
-    """An exception with integer code and message string attributes.
+    """An exception with an integer code, a message string, and optional headers.
 
     Attributes:
         code: HTTP error code
         msg: string describing the error
+        headers: optional response headers to send
     """
 
-    def __init__(self, code: Union[int, HTTPStatus], msg: str):
+    def __init__(
+        self,
+        code: Union[int, HTTPStatus],
+        msg: str,
+        headers: Optional[Dict[str, str]] = None,
+    ):
         super().__init__("%d: %s" % (code, msg))
 
         # Some calls to this method pass instances of http.HTTPStatus for `code`.
@@ -137,6 +143,7 @@ def __init__(self, code: Union[int, HTTPStatus], msg: str):
         # To eliminate this behaviour, we convert them to their integer equivalents here.
         self.code = int(code)
         self.msg = msg
+        self.headers = headers
 
 
 class RedirectException(CodeMessageException):
@@ -182,6 +189,7 @@ def __init__(
         msg: str,
         errcode: str = Codes.UNKNOWN,
         additional_fields: Optional[Dict] = None,
+        headers: Optional[Dict[str, str]] = None,
     ):
         """Constructs a synapse error.
 
@@ -190,7 +198,7 @@ def __init__(
            code: The integer error code (an HTTP response code)
            msg: The human-readable error message.
errcode: The matrix error code e.g 'M_FORBIDDEN' """ - super().__init__(code, msg) + super().__init__(code, msg, headers) self.errcode = errcode if additional_fields is None: self._additional_fields: Dict = {} @@ -335,6 +343,20 @@ def __init__( super().__init__(code, msg, errcode, additional_fields) +class OAuthInsufficientScopeError(SynapseError): + """An error raised when the caller does not have sufficient scope to perform the requested action""" + + def __init__( + self, + required_scopes: List[str], + ): + headers = { + "WWW-Authenticate": 'Bearer error="insufficient_scope", scope="%s"' + % (" ".join(required_scopes)) + } + super().__init__(401, "Insufficient scope", Codes.FORBIDDEN, None, headers) + + class UnstableSpecAuthError(AuthError): """An error raised when a new error code is being proposed to replace a previous one. This error will return a "org.matrix.unstable.errcode" property with the new error code, diff --git a/synapse/http/server.py b/synapse/http/server.py index 101dc2e747d0..04768c6a237f 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -111,6 +111,9 @@ def return_json_error( exc: SynapseError = f.value # type: ignore error_code = exc.code error_dict = exc.error_dict(config) + if exc.headers is not None: + for header, value in exc.headers.items(): + request.setHeader(header, value) logger.info("%s SynapseError: %s - %s", request, error_code, exc.msg) elif f.check(CancelledError): error_code = HTTP_STATUS_REQUEST_CANCELLED @@ -172,6 +175,9 @@ def return_html_error( cme: CodeMessageException = f.value # type: ignore code = cme.code msg = cme.msg + if cme.headers is not None: + for header, value in cme.headers.items(): + request.setHeader(header, value) if isinstance(cme, RedirectException): logger.info("%s redirect to %s", request, cme.location) diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index 54f48948196a..bca9db16267b 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -17,7 +17,8 @@ from twisted.test.proto_helpers import MemoryReactor -from synapse.api.errors import InvalidClientTokenError +from synapse.api.errors import InvalidClientTokenError, OAuthInsufficientScopeError +from synapse.rest.client import devices from synapse.server import HomeServer from synapse.types import JsonDict from synapse.util import Clock @@ -82,6 +83,10 @@ async def get_json(url: str) -> JsonDict: @skip_unless(HAS_AUTHLIB, "requires authlib") class MSC3861OAuthDelegation(HomeserverTestCase): + servlets = [ + devices.register_servlets, + ] + def default_config(self) -> Dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL @@ -314,7 +319,37 @@ def test_active_user_with_device(self) -> None: ) self.assertEqual(requester.device_id, DEVICE) - def test_active_guest_with_device(self) -> None: + def test_active_guest_not_allowed(self) -> None: + """The handler should return an insufficient scope error.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([MATRIX_GUEST_SCOPE, MATRIX_DEVICE_SCOPE]), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + error = self.get_failure( + self.auth.get_user_by_req(request), OAuthInsufficientScopeError + ) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + 
self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + self.assertEqual( + getattr(error.value, "headers", {})["WWW-Authenticate"], + 'Bearer error="insufficient_scope", scope="urn:matrix:org.matrix.msc2967.client:api:*"', + ) + + def test_active_guest_allowed(self) -> None: """The handler should return a requester with guest user rights and a device ID.""" self.http_client.request = simple_async_mock( @@ -331,7 +366,9 @@ def test_active_guest_with_device(self) -> None: request = Mock(args={}) request.args[b"access_token"] = [b"mockAccessToken"] request.requestHeaders.getRawHeaders = mock_getRawHeaders() - requester = self.get_success(self.auth.get_user_by_req(request)) + requester = self.get_success( + self.auth.get_user_by_req(request, allow_guest=True) + ) self.http_client.get_json.assert_called_once_with(WELL_KNOWN) self.http_client.request.assert_called_once_with( method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY From 31691d61511d41286272d779727502e396ce86eb Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Wed, 10 May 2023 16:08:43 +0200 Subject: [PATCH 13/75] Disable account related endpoints when using OAuth delegation --- synapse/handlers/auth.py | 8 +- synapse/rest/client/account.py | 24 ++-- synapse/rest/client/devices.py | 11 +- synapse/rest/client/keys.py | 30 +++- synapse/rest/client/login.py | 3 + synapse/rest/client/logout.py | 3 + synapse/rest/client/register.py | 3 + tests/handlers/test_oauth_delegation.py | 180 +++++++++++++++++++++++- 8 files changed, 243 insertions(+), 19 deletions(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index d001f2fb2f0b..a53984be336f 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -274,6 +274,8 @@ def __init__(self, hs: "HomeServer"): # response. self._extra_attributes: Dict[str, SsoLoginExtraAttributes] = {} + self.oauth_delegation_enabled = hs.config.auth.oauth_delegation_enabled + async def validate_user_via_ui_auth( self, requester: Requester, @@ -322,8 +324,12 @@ async def validate_user_via_ui_auth( LimitExceededError if the ratelimiter's failed request count for this user is too high to proceed - """ + if self.oauth_delegation_enabled: + raise SynapseError( + HTTPStatus.INTERNAL_SERVER_ERROR, "UIA shouldn't be used with MSC3861" + ) + if not requester.access_token_id: raise ValueError("Cannot validate a user without an access token") if can_skip_ui_auth and self._ui_auth_session_timeout: diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py index 3d0c55daa05c..ccd1f7509cde 100644 --- a/synapse/rest/client/account.py +++ b/synapse/rest/client/account.py @@ -27,6 +27,7 @@ from synapse.api.errors import ( Codes, InteractiveAuthIncompleteError, + NotFoundError, SynapseError, ThreepidValidationError, ) @@ -600,6 +601,9 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: # ThreePidBindRestServelet.PostBody with an `alias_generator` to handle # `threePidCreds` versus `three_pid_creds`. 
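 # (Under MSC3861 delegation the on_POST handler below is hidden outright:
 # it raises a 404 with errcode M_UNRECOGNIZED before doing any work, the
 # same pattern this patch applies to the other account-management servlets.)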
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + if self.hs.config.auth.oauth_delegation_enabled: + raise NotFoundError(errcode=Codes.UNRECOGNIZED) + if not self.hs.config.registration.enable_3pid_changes: raise SynapseError( 400, "3PID changes are disabled on this server", Codes.FORBIDDEN @@ -890,19 +894,21 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: if hs.config.worker.worker_app is None: - EmailPasswordRequestTokenRestServlet(hs).register(http_server) - PasswordRestServlet(hs).register(http_server) - DeactivateAccountRestServlet(hs).register(http_server) - EmailThreepidRequestTokenRestServlet(hs).register(http_server) - MsisdnThreepidRequestTokenRestServlet(hs).register(http_server) - AddThreepidEmailSubmitTokenServlet(hs).register(http_server) - AddThreepidMsisdnSubmitTokenServlet(hs).register(http_server) + if not hs.config.auth.oauth_delegation_enabled: + EmailPasswordRequestTokenRestServlet(hs).register(http_server) + DeactivateAccountRestServlet(hs).register(http_server) + PasswordRestServlet(hs).register(http_server) + EmailThreepidRequestTokenRestServlet(hs).register(http_server) + MsisdnThreepidRequestTokenRestServlet(hs).register(http_server) + AddThreepidEmailSubmitTokenServlet(hs).register(http_server) + AddThreepidMsisdnSubmitTokenServlet(hs).register(http_server) ThreepidRestServlet(hs).register(http_server) if hs.config.worker.worker_app is None: - ThreepidAddRestServlet(hs).register(http_server) ThreepidBindRestServlet(hs).register(http_server) ThreepidUnbindRestServlet(hs).register(http_server) - ThreepidDeleteRestServlet(hs).register(http_server) + if not hs.config.auth.oauth_delegation_enabled: + ThreepidAddRestServlet(hs).register(http_server) + ThreepidDeleteRestServlet(hs).register(http_server) WhoamiRestServlet(hs).register(http_server) if hs.config.worker.worker_app is None and hs.config.experimental.msc3720_enabled: diff --git a/synapse/rest/client/devices.py b/synapse/rest/client/devices.py index e97d0bf475ba..00e9bff43f5c 100644 --- a/synapse/rest/client/devices.py +++ b/synapse/rest/client/devices.py @@ -19,7 +19,7 @@ from pydantic import Extra, StrictStr from synapse.api import errors -from synapse.api.errors import NotFoundError +from synapse.api.errors import NotFoundError, UnrecognizedRequestError from synapse.handlers.device import DeviceHandler from synapse.http.server import HttpServer from synapse.http.servlet import ( @@ -135,6 +135,7 @@ def __init__(self, hs: "HomeServer"): self.device_handler = handler self.auth_handler = hs.get_auth_handler() self._msc3852_enabled = hs.config.experimental.msc3852_enabled + self.oauth_delegation_enabled = hs.config.auth.oauth_delegation_enabled async def on_GET( self, request: SynapseRequest, device_id: str @@ -166,6 +167,9 @@ class DeleteBody(RequestBodyModel): async def on_DELETE( self, request: SynapseRequest, device_id: str ) -> Tuple[int, JsonDict]: + if self.oauth_delegation_enabled: + raise UnrecognizedRequestError(code=404) + requester = await self.auth.get_user_by_req(request) try: @@ -344,7 +348,10 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - if hs.config.worker.worker_app is None: + if ( + hs.config.worker.worker_app is None + and not hs.config.auth.oauth_delegation_enabled + ): DeleteDevicesRestServlet(hs).register(http_server) 
DevicesRestServlet(hs).register(http_server) if hs.config.worker.worker_app is None: diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py index 413edd8a4d7b..c3ca83c0c88e 100644 --- a/synapse/rest/client/keys.py +++ b/synapse/rest/client/keys.py @@ -17,9 +17,10 @@ import logging import re from collections import Counter +from http import HTTPStatus from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple -from synapse.api.errors import InvalidAPICallError, SynapseError +from synapse.api.errors import Codes, InvalidAPICallError, SynapseError from synapse.http.server import HttpServer from synapse.http.servlet import ( RestServlet, @@ -375,9 +376,29 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: user_id = requester.user.to_string() body = parse_json_object_from_request(request) - if self.hs.config.experimental.msc3967_enabled: - if await self.e2e_keys_handler.is_cross_signing_set_up_for_user(user_id): - # If we already have a master key then cross signing is set up and we require UIA to reset + is_cross_signing_setup = ( + await self.e2e_keys_handler.is_cross_signing_set_up_for_user(user_id) + ) + + # Before MSC3967 we required UIA both when setting up cross signing for the + # first time and when resetting the device signing key. With MSC3967 we only + # require UIA when resetting cross-signing, and not when setting up the first + # time. Because there is no UIA in MSC3861, for now we throw an error if the + # user tries to reset the device signing key when MSC3861 is enabled, but allow + # first-time setup. + if self.hs.config.auth.oauth_delegation_enabled: + # There is no way to reset the device signing key with MSC3861 + if is_cross_signing_setup: + raise SynapseError( + HTTPStatus.NOT_IMPLEMENTED, + "Resetting cross signing keys is not yet supported with MSC3861", + Codes.UNRECOGNIZED, + ) + # But first-time setup is fine + + elif self.hs.config.experimental.msc3967_enabled: + # If we already have a master key then cross signing is set up and we require UIA to reset + if is_cross_signing_setup: await self.auth_handler.validate_user_via_ui_auth( requester, request, @@ -387,6 +408,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: can_skip_ui_auth=False, ) # Otherwise we don't require UIA since we are setting up cross signing for first time + else: # Previous behaviour is to always require UIA but allow it to be skipped await self.auth_handler.validate_user_via_ui_auth( diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index 6ca61ffbd08e..4d0eabcb840e 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -633,6 +633,9 @@ async def on_GET(self, request: SynapseRequest) -> None: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: + if hs.config.auth.oauth_delegation_enabled: + return + LoginRestServlet(hs).register(http_server) if ( hs.config.worker.worker_app is None diff --git a/synapse/rest/client/logout.py b/synapse/rest/client/logout.py index 6d34625ad5d6..b64a6d5961b7 100644 --- a/synapse/rest/client/logout.py +++ b/synapse/rest/client/logout.py @@ -80,5 +80,8 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: + if hs.config.auth.oauth_delegation_enabled: + return + LogoutRestServlet(hs).register(http_server) LogoutAllRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index 
7f84a17e2964..6866988c3809 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -955,6 +955,9 @@ def _calculate_registration_flows( def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: + if hs.config.auth.oauth_delegation_enabled: + return + if hs.config.worker.worker_app is None: EmailRegisterRequestTokenRestServlet(hs).register(http_server) MsisdnRegisterRequestTokenRestServlet(hs).register(http_server) diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index bca9db16267b..ee1bc5ca7ac6 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -11,14 +11,27 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Dict + +from http import HTTPStatus +from typing import Any, Dict, Union from unittest.mock import ANY, Mock from urllib.parse import parse_qs +from signedjson.key import ( + encode_verify_key_base64, + generate_signing_key, + get_verify_key, +) +from signedjson.sign import sign_json + from twisted.test.proto_helpers import MemoryReactor -from synapse.api.errors import InvalidClientTokenError, OAuthInsufficientScopeError -from synapse.rest.client import devices +from synapse.api.errors import ( + Codes, + InvalidClientTokenError, + OAuthInsufficientScopeError, +) +from synapse.rest.client import account, devices, keys, login, logout, register from synapse.server import HomeServer from synapse.types import JsonDict from synapse.util import Clock @@ -57,6 +70,7 @@ MATRIX_DEVICE_SCOPE = "urn:matrix:org.matrix.msc2967.client:device:" + DEVICE SUBJECT = "abc-def-ghi" USERNAME = "test-user" +USER_ID = "@" + USERNAME + ":" + SERVER_NAME async def get_json(url: str) -> JsonDict: @@ -84,7 +98,12 @@ async def get_json(url: str) -> JsonDict: @skip_unless(HAS_AUTHLIB, "requires authlib") class MSC3861OAuthDelegation(HomeserverTestCase): servlets = [ + account.register_servlets, devices.register_servlets, + keys.register_servlets, + register.register_servlets, + login.register_servlets, + logout.register_servlets, ] def default_config(self) -> Dict[str, Any]: @@ -380,3 +399,158 @@ def test_active_guest_allowed(self) -> None: get_awaitable_result(self.auth.is_server_admin(requester)), False ) self.assertEqual(requester.device_id, DEVICE) + + def make_device_keys(self, user_id: str, device_id: str) -> JsonDict: + # We only generate a master key to simplify the test. 
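+        # A real client would also upload self-signing and user-signing keys,
+        # each signed by the master key; a lone master key, self-signed via
+        # sign_json below, is enough for the server to consider cross-signing
+        # "set up" for this user.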
+ master_signing_key = generate_signing_key(device_id) + master_verify_key = encode_verify_key_base64(get_verify_key(master_signing_key)) + + return { + "master_key": sign_json( + { + "user_id": user_id, + "usage": ["master"], + "keys": {"ed25519:" + master_verify_key: master_verify_key}, + }, + user_id, + master_signing_key, + ), + } + + def test_cross_signing(self) -> None: + """Try uploading device keys with OAuth delegation enabled.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([MATRIX_USER_SCOPE, MATRIX_DEVICE_SCOPE]), + "username": USERNAME, + }, + ) + ) + keys_upload_body = self.make_device_keys(USER_ID, DEVICE) + channel = self.make_request( + "POST", + "/_matrix/client/v3/keys/device_signing/upload", + keys_upload_body, + access_token="mockAccessToken", + ) + + self.assertEqual(channel.code, 200, channel.json_body) + + channel = self.make_request( + "POST", + "/_matrix/client/v3/keys/device_signing/upload", + keys_upload_body, + access_token="mockAccessToken", + ) + + self.assertEqual(channel.code, HTTPStatus.NOT_IMPLEMENTED, channel.json_body) + + def expect_unauthorized( + self, method: str, path: str, content: Union[bytes, str, JsonDict] = "" + ) -> None: + channel = self.make_request(method, path, content, shorthand=False) + + self.assertEqual(channel.code, 401, channel.json_body) + + def expect_unrecognized( + self, method: str, path: str, content: Union[bytes, str, JsonDict] = "" + ) -> None: + channel = self.make_request(method, path, content) + + self.assertEqual(channel.code, 404, channel.json_body) + self.assertEqual( + channel.json_body["errcode"], Codes.UNRECOGNIZED, channel.json_body + ) + + def test_uia_endpoints(self) -> None: + """Test that endpoints that were removed in MSC2964 are no longer available.""" + + # This is just an endpoint that should remain visible (but requires auth): + self.expect_unauthorized("GET", "/_matrix/client/v3/devices") + + # This remains usable, but will require a uia scope: + self.expect_unauthorized( + "POST", "/_matrix/client/v3/keys/device_signing/upload" + ) + + def test_3pid_endpoints(self) -> None: + """Test that 3pid account management endpoints that were removed in MSC2964 are no longer available.""" + + # Remains and requires auth: + self.expect_unauthorized("GET", "/_matrix/client/v3/account/3pid") + self.expect_unauthorized( + "POST", + "/_matrix/client/v3/account/3pid/bind", + { + "client_secret": "foo", + "id_access_token": "bar", + "id_server": "foo", + "sid": "bar", + }, + ) + self.expect_unauthorized("POST", "/_matrix/client/v3/account/3pid/unbind", {}) + + # These are gone: + self.expect_unrecognized( + "POST", "/_matrix/client/v3/account/3pid" + ) # deprecated + self.expect_unrecognized("POST", "/_matrix/client/v3/account/3pid/add") + self.expect_unrecognized("POST", "/_matrix/client/v3/account/3pid/delete") + self.expect_unrecognized( + "POST", "/_matrix/client/v3/account/3pid/email/requestToken" + ) + self.expect_unrecognized( + "POST", "/_matrix/client/v3/account/3pid/msisdn/requestToken" + ) + + def test_account_management_endpoints_removed(self) -> None: + """Test that account management endpoints that were removed in MSC2964 are no longer available.""" + self.expect_unrecognized("POST", "/_matrix/client/v3/account/deactivate") + self.expect_unrecognized("POST", "/_matrix/client/v3/account/password") + self.expect_unrecognized( + "POST", "/_matrix/client/v3/account/password/email/requestToken" + 
        )
+        self.expect_unrecognized(
+            "POST", "/_matrix/client/v3/account/password/msisdn/requestToken"
+        )
+
+    def test_registration_endpoints_removed(self) -> None:
+        """Test that registration endpoints that were removed in MSC2964 are no longer available."""
+        self.expect_unrecognized(
+            "GET", "/_matrix/client/v1/register/m.login.registration_token/validity"
+        )
+        self.expect_unrecognized("POST", "/_matrix/client/v3/register")
+        self.expect_unrecognized("GET", "/_matrix/client/v3/register")
+        self.expect_unrecognized("GET", "/_matrix/client/v3/register/available")
+        self.expect_unrecognized(
+            "POST", "/_matrix/client/v3/register/email/requestToken"
+        )
+        self.expect_unrecognized(
+            "POST", "/_matrix/client/v3/register/msisdn/requestToken"
+        )
+
+    def test_session_management_endpoints_removed(self) -> None:
+        """Test that session management endpoints that were removed in MSC2964 are no longer available."""
+        self.expect_unrecognized("GET", "/_matrix/client/v3/login")
+        self.expect_unrecognized("POST", "/_matrix/client/v3/login")
+        self.expect_unrecognized("GET", "/_matrix/client/v3/login/sso/redirect")
+        self.expect_unrecognized("POST", "/_matrix/client/v3/logout")
+        self.expect_unrecognized("POST", "/_matrix/client/v3/logout/all")
+        self.expect_unrecognized("POST", "/_matrix/client/v3/refresh")
+        self.expect_unrecognized("GET", "/_matrix/static/client/login")
+
+    def test_device_management_endpoints_removed(self) -> None:
+        """Test that device management endpoints that were removed in MSC2964 are no longer available."""
+        self.expect_unrecognized("POST", "/_matrix/client/v3/delete_devices")
+        self.expect_unrecognized("DELETE", f"/_matrix/client/v3/devices/{DEVICE}")
+
+    def test_openid_endpoints_removed(self) -> None:
+        """Test that OpenID id_token endpoints that were removed in MSC2964 are no longer available."""
+        self.expect_unrecognized(
+            "POST", f"/_matrix/client/v3/user/{USERNAME}/openid/request_token"
+        )

From 03920bdd4e9390d74762ecd923ddf0d6c75d222e Mon Sep 17 00:00:00 2001
From: Hugh Nimmo-Smith
Date: Mon, 6 Feb 2023 17:12:42 +0000
Subject: [PATCH 14/75] Test MSC2965 implementation: well-known discovery document

---
 tests/rest/test_well_known.py | 38 +++++++++++++++++++++++++++++++++++
 1 file changed, 38 insertions(+)

diff --git a/tests/rest/test_well_known.py b/tests/rest/test_well_known.py
index 2091b08d89cf..34333d88df25 100644
--- a/tests/rest/test_well_known.py
+++ b/tests/rest/test_well_known.py
@@ -17,6 +17,13 @@
 from tests import unittest
 
+try:
+    import authlib  # noqa: F401
+
+    HAS_AUTHLIB = True
+except ImportError:
+    HAS_AUTHLIB = False
+
 
 class WellKnownTests(unittest.HomeserverTestCase):
     def create_test_resource(self) -> Resource:
@@ -96,3 +103,34 @@ def test_server_well_known_disabled(self) -> None:
             "GET", "/.well-known/matrix/server", shorthand=False
         )
         self.assertEqual(channel.code, 404)
+
+    @unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
+    @unittest.override_config(
+        {
+            "public_baseurl": "https://homeserver",  # this is only required so that client well known is served
+            "oauth_delegation": {
+                "enabled": True,
+                "issuer": "https://issuer",
+                "account": "https://my-account.issuer",
+                "client_id": "id",
+                "client_auth_method": "client_secret_post",
+                "client_secret": "secret",
+            },
+        }
+    )
+    def test_client_well_known_msc3861_oauth_delegation(self) -> None:
+        channel = self.make_request(
+            "GET", "/.well-known/matrix/client", shorthand=False
+        )
+
+        self.assertEqual(channel.code, 200)
+        self.assertEqual(
+            channel.json_body,
+            {
+                "m.homeserver": {"base_url": 
"https://homeserver/"}, + "org.matrix.msc2965.authentication": { + "issuer": "https://issuer", + "account": "https://my-account.issuer", + }, + }, + ) From 249f4a338dde0c1bcde5e14121d8d9fa156f185f Mon Sep 17 00:00:00 2001 From: Hugh Nimmo-Smith Date: Tue, 9 May 2023 16:20:04 +0200 Subject: [PATCH 15/75] Refactor config to be an experimental feature Also enforce you can't combine it with incompatible config options --- ...auth_delegated.py => msc3861_delegated.py} | 53 ++--- synapse/config/auth.py | 39 +--- synapse/config/experimental.py | 193 ++++++++++++++++- synapse/handlers/auth.py | 4 +- synapse/module_api/__init__.py | 7 + synapse/rest/client/account.py | 6 +- synapse/rest/client/devices.py | 6 +- synapse/rest/client/keys.py | 2 +- synapse/rest/client/login.py | 2 +- synapse/rest/client/logout.py | 2 +- synapse/rest/client/register.py | 2 +- synapse/rest/synapse/client/__init__.py | 2 +- synapse/rest/synapse/client/jwks.py | 8 +- synapse/rest/well_known.py | 9 +- synapse/server.py | 6 +- tests/config/test_oauth_delegation.py | 202 ++++++++++++++++++ tests/handlers/test_oauth_delegation.py | 15 +- tests/rest/test_well_known.py | 17 +- 18 files changed, 479 insertions(+), 96 deletions(-) rename synapse/api/auth/{oauth_delegated.py => msc3861_delegated.py} (87%) create mode 100644 tests/config/test_oauth_delegation.py diff --git a/synapse/api/auth/oauth_delegated.py b/synapse/api/auth/msc3861_delegated.py similarity index 87% rename from synapse/api/auth/oauth_delegated.py rename to synapse/api/auth/msc3861_delegated.py index 9cb6eb7f7944..4ca3280bd3c4 100644 --- a/synapse/api/auth/oauth_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -65,7 +65,7 @@ def sign(self, auth: Any, token_endpoint: str) -> bytes: ) -class OAuthDelegatedAuth(BaseAuth): +class MSC3861DelegatedAuth(BaseAuth): AUTH_METHODS = { "client_secret_post": encode_client_secret_post, "client_secret_basic": encode_client_secret_basic, @@ -78,35 +78,38 @@ class OAuthDelegatedAuth(BaseAuth): def __init__(self, hs: "HomeServer"): super().__init__(hs) - self._config = hs.config.auth - assert self._config.oauth_delegation_enabled, "OAuth delegation is not enabled" - assert self._config.oauth_delegation_issuer, "No issuer provided" - assert self._config.oauth_delegation_client_id, "No client_id provided" - assert self._config.oauth_delegation_client_secret, "No client_secret provided" - assert ( - self._config.oauth_delegation_client_auth_method - in OAuthDelegatedAuth.AUTH_METHODS - ), "Invalid client_auth_method" + self._config = hs.config.experimental.msc3861 + auth_method = MSC3861DelegatedAuth.AUTH_METHODS.get( + self._config.client_auth_method.value, None + ) + # Those assertions are already checked when parsing the config + assert self._config.enabled, "OAuth delegation is not enabled" + assert self._config.issuer, "No issuer provided" + assert self._config.client_id, "No client_id provided" + assert auth_method is not None, "Invalid client_auth_method provided" self._http_client = hs.get_proxied_http_client() self._hostname = hs.hostname self._issuer_metadata = RetryOnExceptionCachedCall(self._load_metadata) - secret = self._config.oauth_delegation_client_secret - self._client_auth = ClientAuth( - self._config.oauth_delegation_client_id, - secret, - OAuthDelegatedAuth.AUTH_METHODS[ - self._config.oauth_delegation_client_auth_method - ], - ) - async def _load_metadata(self) -> OpenIDProviderMetadata: - if self._config.oauth_delegation_issuer_metadata is not None: - return OpenIDProviderMetadata( - 
**self._config.oauth_delegation_issuer_metadata + if isinstance(auth_method, PrivateKeyJWTWithKid): + # Use the JWK as the client secret when using the private_key_jwt method + assert self._config.jwk, "No JWK provided" + self._client_auth = ClientAuth( + self._config.client_id, self._config.jwk, auth_method ) - url = get_well_known_url(self._config.oauth_delegation_issuer, external=True) + else: + # Else use the client secret + assert self._config.client_secret, "No client_secret provided" + self._client_auth = ClientAuth( + self._config.client_id, self._config.client_secret, auth_method + ) + + async def _load_metadata(self) -> OpenIDProviderMetadata: + if self._config.issuer_metadata is not None: + return OpenIDProviderMetadata(**self._config.issuer_metadata) + url = get_well_known_url(self._config.issuer, external=True) response = await self._http_client.get_json(url) metadata = OpenIDProviderMetadata(**response) # metadata.validate_introspection_endpoint() @@ -203,7 +206,7 @@ async def get_user_by_access_token( ) user_id_str = await self.store.get_user_by_external_id( - OAuthDelegatedAuth.EXTERNAL_ID_PROVIDER, sub + MSC3861DelegatedAuth.EXTERNAL_ID_PROVIDER, sub ) if user_id_str is None: # If we could not find a user via the external_id, it either does not exist, @@ -236,7 +239,7 @@ async def get_user_by_access_token( # And record the sub as external_id await self.store.record_user_external_id( - OAuthDelegatedAuth.EXTERNAL_ID_PROVIDER, sub, user_id.to_string() + MSC3861DelegatedAuth.EXTERNAL_ID_PROVIDER, sub, user_id.to_string() ) else: user_id = UserID.from_string(user_id_str) diff --git a/synapse/config/auth.py b/synapse/config/auth.py index 25b5cc60dcf1..12e853980e3f 100644 --- a/synapse/config/auth.py +++ b/synapse/config/auth.py @@ -14,11 +14,9 @@ # limitations under the License. from typing import Any -from authlib.jose.rfc7517 import JsonWebKey - from synapse.types import JsonDict -from ._base import Config, ConfigError +from ._base import Config class AuthConfig(Config): @@ -31,7 +29,14 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: if password_config is None: password_config = {} - passwords_enabled = password_config.get("enabled", True) + # The default value of password_config.enabled is True, unless msc3861 is enabled. + msc3861_enabled = ( + config.get("experimental_features", {}) + .get("msc3861", {}) + .get("enabled", False) + ) + passwords_enabled = password_config.get("enabled", not msc3861_enabled) + # 'only_for_reauth' allows users who have previously set a password to use it, # even though passwords would otherwise be disabled. 
passwords_for_reauth_only = passwords_enabled == "only_for_reauth" @@ -55,29 +60,3 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: self.ui_auth_session_timeout = self.parse_duration( ui_auth.get("session_timeout", 0) ) - - oauth_delegation = config.get("oauth_delegation", {}) - self.oauth_delegation_enabled = oauth_delegation.get("enabled", False) - self.oauth_delegation_issuer = oauth_delegation.get("issuer", "") - self.oauth_delegation_issuer_metadata = oauth_delegation.get("issuer_metadata") - self.oauth_delegation_account = oauth_delegation.get("account", "") - self.oauth_delegation_client_id = oauth_delegation.get("client_id", "") - self.oauth_delegation_client_secret = oauth_delegation.get("client_secret", "") - self.oauth_delegation_client_auth_method = oauth_delegation.get( - "client_auth_method", "client_secret_post" - ) - - self.password_enabled = password_config.get( - "enabled", not self.oauth_delegation_enabled - ) - - if self.oauth_delegation_client_auth_method == "private_key_jwt": - self.oauth_delegation_client_secret = JsonWebKey.import_key( - self.oauth_delegation_client_secret - ) - - # If we are delegating via OAuth then password cannot be supported as well - if self.oauth_delegation_enabled and self.password_enabled: - raise ConfigError( - "Password auth cannot be enabled when OAuth delegation is enabled" - ) diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index d769b7f6686e..b9607975f903 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -12,15 +12,196 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Optional +import enum +from typing import TYPE_CHECKING, Any, Optional import attr +import attr.validators from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions from synapse.config import ConfigError -from synapse.config._base import Config +from synapse.config._base import Config, RootConfig from synapse.types import JsonDict +# Determine whether authlib is installed. +try: + import authlib # noqa: F401 + + HAS_AUTHLIB = True +except ImportError: + HAS_AUTHLIB = False + +if TYPE_CHECKING: + # Only import this if we're type checking, as it might not be installed at runtime. + from authlib.jose.rfc7517 import JsonWebKey + + +class ClientAuthMethod(enum.Enum): + """List of supported client auth methods.""" + + CLIENT_SECRET_POST = "client_secret_post" + CLIENT_SECRET_BASIC = "client_secret_basic" + CLIENT_SECRET_JWT = "client_secret_jwt" + PRIVATE_KEY_JWT = "private_key_jwt" + + +def _parse_jwks(jwks: Optional[JsonDict]) -> Optional["JsonWebKey"]: + """A helper function to parse a JWK dict into a JsonWebKey.""" + + if jwks is None: + return None + + from authlib.jose.rfc7517 import JsonWebKey + + return JsonWebKey.import_key(jwks) + + +@attr.s(slots=True, frozen=True) +class MSC3861: + """Configuration for MSC3861: Matrix architecture change to delegate authentication via OIDC""" + + enabled: bool = attr.ib(default=False, validator=attr.validators.instance_of(bool)) + """Whether to enable MSC3861 auth delegation.""" + + @enabled.validator + def _check_enabled(self, attribute: attr.Attribute, value: bool) -> None: + # Only allow enabling MSC3861 if authlib is installed + if value and not HAS_AUTHLIB: + raise ConfigError( + "MSC3861 is enabled but authlib is not installed. " + "Please install authlib to use MSC3861." 
+ ) + + issuer: str = attr.ib(default="", validator=attr.validators.instance_of(str)) + """The URL of the OIDC Provider.""" + + issuer_metadata: Optional[JsonDict] = attr.ib(default=None) + """The issuer metadata to use, otherwise discovered from /.well-known/openid-configuration as per MSC2965.""" + + client_id: str = attr.ib( + default="", + validator=attr.validators.instance_of(str), + ) + """The client ID to use when calling the introspection endpoint.""" + + client_auth_method: ClientAuthMethod = attr.ib( + default=ClientAuthMethod.CLIENT_SECRET_POST, converter=ClientAuthMethod + ) + """The auth method used when calling the introspection endpoint.""" + + client_secret: Optional[str] = attr.ib( + default=None, + validator=attr.validators.optional(attr.validators.instance_of(str)), + ) + """ + The client secret to use when calling the introspection endpoint, + when using any of the client_secret_* client auth methods. + """ + + jwk: Optional["JsonWebKey"] = attr.ib(default=None, converter=_parse_jwks) + """ + The JWKS to use when calling the introspection endpoint, + when using the private_key_jwt client auth method. + """ + + @client_auth_method.validator + def _check_client_auth_method( + self, attribute: attr.Attribute, value: ClientAuthMethod + ) -> None: + # Check that the right client credentials are provided for the client auth method. + if not self.enabled: + return + + if value == ClientAuthMethod.PRIVATE_KEY_JWT and self.jwk is None: + raise ConfigError( + "A JWKS must be provided when using the private_key_jwt client auth method" + ) + + if ( + value + in ( + ClientAuthMethod.CLIENT_SECRET_POST, + ClientAuthMethod.CLIENT_SECRET_BASIC, + ClientAuthMethod.CLIENT_SECRET_JWT, + ) + and self.client_secret is None + ): + raise ConfigError( + f"A client secret must be provided when using the {value} client auth method" + ) + + account_management_url: Optional[str] = attr.ib( + default=None, + validator=attr.validators.optional(attr.validators.instance_of(str)), + ) + """The URL of the My Account page on the OIDC Provider as per MSC2965.""" + + def check_config_conflicts(self, root: RootConfig) -> None: + """Checks for any configuration conflicts with other parts of Synapse. + + Raises: + ConfigError: If there are any configuration conflicts. 
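+
+        A minimal configuration that should pass these checks, sketched in the
+        dict form a test's default_config might build (illustrative values;
+        assumes password login, registration, SSO and CAPTCHA are all left
+        disabled):
+
+            config["experimental_features"] = {
+                "msc3861": {
+                    "enabled": True,
+                    "issuer": "https://issuer.example.com/",
+                    "client_id": "some-client-id",
+                    "client_auth_method": "client_secret_post",
+                    "client_secret": "some-client-secret",
+                }
+            }
+
+        With client_auth_method "private_key_jwt", a "jwk" would be required
+        instead of "client_secret".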
+ """ + + if not self.enabled: + return + + if ( + root.auth.password_enabled_for_reauth + or root.auth.password_enabled_for_login + ): + raise ConfigError( + "Password auth cannot be enabled when OAuth delegation is enabled" + ) + + if root.registration.enable_registration: + raise ConfigError( + "Registration cannot be enabled when OAuth delegation is enabled" + ) + + if ( + root.oidc.oidc_enabled + or root.saml2.saml2_enabled + or root.cas.cas_enabled + or root.jwt.jwt_enabled + ): + raise ConfigError("SSO cannot be enabled when OAuth delegation is enabled") + + if bool(root.authproviders.password_providers): + raise ConfigError( + "Password auth providers cannot be enabled when OAuth delegation is enabled" + ) + + if root.captcha.enable_registration_captcha: + raise ConfigError( + "CAPTCHA cannot be enabled when OAuth delegation is enabled" + ) + + if root.experimental.msc3882_enabled: + raise ConfigError( + "MSC3882 cannot be enabled when OAuth delegation is enabled" + ) + + if root.registration.refresh_token_lifetime: + raise ConfigError( + "refresh_token_lifetime cannot be set when OAuth delegation is enabled" + ) + + if root.registration.nonrefreshable_access_token_lifetime: + raise ConfigError( + "nonrefreshable_access_token_lifetime cannot be set when OAuth delegation is enabled" + ) + + if root.registration.session_lifetime: + raise ConfigError( + "session_lifetime cannot be set when OAuth delegation is enabled" + ) + + if not root.experimental.msc3970_enabled: + raise ConfigError( + "experimental_features.msc3970_enabled must be 'true' when OAuth delegation is enabled" + ) + @attr.s(auto_attribs=True, frozen=True, slots=True) class MSC3866Config: @@ -182,8 +363,14 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: "msc3981_recurse_relations", False ) + # MSC3861: Matrix architecture change to delegate authentication via OIDC + self.msc3861 = MSC3861(**experimental.get("msc3861", {})) + # MSC3970: Scope transaction IDs to devices - self.msc3970_enabled = experimental.get("msc3970_enabled", False) + self.msc3970_enabled = experimental.get("msc3970_enabled", self.msc3861.enabled) + + # Check that none of the other config options conflict with MSC3861 when enabled + self.msc3861.check_config_conflicts(self.root) # MSC4009: E.164 Matrix IDs self.msc4009_e164_mxids = experimental.get("msc4009_e164_mxids", False) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index a53984be336f..4f986d90cbd9 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -274,7 +274,7 @@ def __init__(self, hs: "HomeServer"): # response. 
self._extra_attributes: Dict[str, SsoLoginExtraAttributes] = {} - self.oauth_delegation_enabled = hs.config.auth.oauth_delegation_enabled + self.msc3861_oauth_delegation_enabled = hs.config.experimental.msc3861.enabled async def validate_user_via_ui_auth( self, @@ -325,7 +325,7 @@ async def validate_user_via_ui_auth( LimitExceededError if the ratelimiter's failed request count for this user is too high to proceed """ - if self.oauth_delegation_enabled: + if self.msc3861_oauth_delegation_enabled: raise SynapseError( HTTPStatus.INTERNAL_SERVER_ERROR, "UIA shouldn't be used with MSC3861" ) diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 0e9f366cba62..134bd2e62021 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -38,6 +38,7 @@ from synapse.api import errors from synapse.api.errors import SynapseError +from synapse.config import ConfigError from synapse.events import EventBase from synapse.events.presence_router import ( GET_INTERESTED_USERS_CALLBACK, @@ -252,6 +253,7 @@ def __init__(self, hs: "HomeServer", auth_handler: AuthHandler) -> None: self._device_handler = hs.get_device_handler() self.custom_template_dir = hs.config.server.custom_template_directory self._callbacks = hs.get_module_api_callbacks() + self.msc3861_oauth_delegation_enabled = hs.config.experimental.msc3861.enabled try: app_name = self._hs.config.email.email_app_name @@ -419,6 +421,11 @@ def register_password_auth_provider_callbacks( Added in Synapse v1.46.0. """ + if self.msc3861_oauth_delegation_enabled: + raise ConfigError( + "Cannot use password auth provider callbacks when OAuth delegation is enabled" + ) + return self._password_auth_provider.register_password_auth_provider_callbacks( check_3pid_auth=check_3pid_auth, on_logged_out=on_logged_out, diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py index ccd1f7509cde..679ab9f266c7 100644 --- a/synapse/rest/client/account.py +++ b/synapse/rest/client/account.py @@ -601,7 +601,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: # ThreePidBindRestServelet.PostBody with an `alias_generator` to handle # `threePidCreds` versus `three_pid_creds`. 
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: - if self.hs.config.auth.oauth_delegation_enabled: + if self.hs.config.experimental.msc3861.enabled: raise NotFoundError(errcode=Codes.UNRECOGNIZED) if not self.hs.config.registration.enable_3pid_changes: @@ -894,7 +894,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: if hs.config.worker.worker_app is None: - if not hs.config.auth.oauth_delegation_enabled: + if not hs.config.experimental.msc3861.enabled: EmailPasswordRequestTokenRestServlet(hs).register(http_server) DeactivateAccountRestServlet(hs).register(http_server) PasswordRestServlet(hs).register(http_server) @@ -906,7 +906,7 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: if hs.config.worker.worker_app is None: ThreepidBindRestServlet(hs).register(http_server) ThreepidUnbindRestServlet(hs).register(http_server) - if not hs.config.auth.oauth_delegation_enabled: + if not hs.config.experimental.msc3861.enabled: ThreepidAddRestServlet(hs).register(http_server) ThreepidDeleteRestServlet(hs).register(http_server) WhoamiRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/devices.py b/synapse/rest/client/devices.py index 00e9bff43f5c..38dff9703f96 100644 --- a/synapse/rest/client/devices.py +++ b/synapse/rest/client/devices.py @@ -135,7 +135,7 @@ def __init__(self, hs: "HomeServer"): self.device_handler = handler self.auth_handler = hs.get_auth_handler() self._msc3852_enabled = hs.config.experimental.msc3852_enabled - self.oauth_delegation_enabled = hs.config.auth.oauth_delegation_enabled + self._msc3861_oauth_delegation_enabled = hs.config.experimental.msc3861.enabled async def on_GET( self, request: SynapseRequest, device_id: str @@ -167,7 +167,7 @@ class DeleteBody(RequestBodyModel): async def on_DELETE( self, request: SynapseRequest, device_id: str ) -> Tuple[int, JsonDict]: - if self.oauth_delegation_enabled: + if self._msc3861_oauth_delegation_enabled: raise UnrecognizedRequestError(code=404) requester = await self.auth.get_user_by_req(request) @@ -350,7 +350,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: if ( hs.config.worker.worker_app is None - and not hs.config.auth.oauth_delegation_enabled + and not hs.config.experimental.msc3861.enabled ): DeleteDevicesRestServlet(hs).register(http_server) DevicesRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py index c3ca83c0c88e..70b8be1aa237 100644 --- a/synapse/rest/client/keys.py +++ b/synapse/rest/client/keys.py @@ -386,7 +386,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: # time. Because there is no UIA in MSC3861, for now we throw an error if the # user tries to reset the device signing key when MSC3861 is enabled, but allow # first-time setup. 
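         # (Decision summary, as implemented below:
         #    MSC3861 enabled, keys already set up -> 501 Not Implemented;
         #    MSC3861 enabled, first-time setup    -> allowed without UIA;
         #    MSC3967 enabled, keys already set up -> UIA required;
         #    MSC3967 enabled, first-time setup    -> allowed without UIA;
         #    otherwise                            -> UIA required, though it
         #    may be skipped.)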
- if self.hs.config.auth.oauth_delegation_enabled: + if self.hs.config.experimental.msc3861.enabled: # There is no way to reset the device signing key with MSC3861 if is_cross_signing_setup: raise SynapseError( diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index 4d0eabcb840e..d4dc2462b9c0 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -633,7 +633,7 @@ async def on_GET(self, request: SynapseRequest) -> None: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - if hs.config.auth.oauth_delegation_enabled: + if hs.config.experimental.msc3861.enabled: return LoginRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/logout.py b/synapse/rest/client/logout.py index b64a6d5961b7..94ad90942f39 100644 --- a/synapse/rest/client/logout.py +++ b/synapse/rest/client/logout.py @@ -80,7 +80,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - if hs.config.auth.oauth_delegation_enabled: + if hs.config.experimental.msc3861.enabled: return LogoutRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index 6866988c3809..f8fb0e1dee46 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -955,7 +955,7 @@ def _calculate_registration_flows( def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - if hs.config.auth.oauth_delegation_enabled: + if hs.config.experimental.msc3861.enabled: return if hs.config.worker.worker_app is None: diff --git a/synapse/rest/synapse/client/__init__.py b/synapse/rest/synapse/client/__init__.py index dcfd0ad6aac6..57335fb913fc 100644 --- a/synapse/rest/synapse/client/__init__.py +++ b/synapse/rest/synapse/client/__init__.py @@ -47,7 +47,7 @@ def build_synapse_client_resource_tree(hs: "HomeServer") -> Mapping[str, Resourc } # Expose the JWKS endpoint if OAuth2 delegation is enabled - if hs.config.auth.oauth_delegation_enabled: + if hs.config.experimental.msc3861.enabled: from synapse.rest.synapse.client.jwks import JwksResource resources["/_synapse/jwks"] = JwksResource(hs) diff --git a/synapse/rest/synapse/client/jwks.py b/synapse/rest/synapse/client/jwks.py index 818585843eaa..7c0a1223fb82 100644 --- a/synapse/rest/synapse/client/jwks.py +++ b/synapse/rest/synapse/client/jwks.py @@ -26,8 +26,6 @@ class JwksResource(DirectServeJsonResource): def __init__(self, hs: "HomeServer"): - from authlib.jose.rfc7517 import Key - super().__init__(extract_context=True) # Parameters that are allowed to be exposed in the public key. 
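The JWKS resource registered above serves the public half of the client's signing key. Once a homeserver with MSC3861 is running, the endpoint can be sanity-checked by hand; a rough sketch (base URL made up, response shape matching the JWKS tests added later in this series):

    import json
    from urllib.request import urlopen

    # fetch the public JWKS document Synapse now serves
    with urlopen("https://synapse/_synapse/jwks") as resp:
        jwks = json.load(resp)

    # e.g. {"keys": [{"kty": "RSA", "kid": "test", "e": "AQAB", "n": "..."}]}
    print(jwks)

The allow-list of JWK members that may be exposed publicly continues below.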
@@ -53,10 +51,10 @@ def __init__(self, hs: "HomeServer"): "ext", } - secret = hs.config.auth.oauth_delegation_client_secret + key = hs.config.experimental.msc3861.jwk - if isinstance(secret, Key): - private_key = secret.as_dict() + if key is not None: + private_key = key.as_dict() public_key = { k: v for k, v in private_key.items() if k in public_parameters } diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py index fd3b17a5ad51..b8b4b5379b82 100644 --- a/synapse/rest/well_known.py +++ b/synapse/rest/well_known.py @@ -44,14 +44,15 @@ def get_well_known(self) -> Optional[JsonDict]: "base_url": self._config.registration.default_identity_server } - if self._config.auth.oauth_delegation_enabled: + # We use the MSC3861 values as they are used by multiple MSCs + if self._config.experimental.msc3861.enabled: result["org.matrix.msc2965.authentication"] = { - "issuer": self._config.auth.oauth_delegation_issuer + "issuer": self._config.experimental.msc3861.issuer } - if self._config.auth.oauth_delegation_account != "": + if self._config.experimental.msc3861.account_management_url is not None: result["org.matrix.msc2965.authentication"][ "account" - ] = self._config.auth.oauth_delegation_account + ] = self._config.experimental.msc3861.account_management_url if self._config.server.extra_well_known_client_content: for ( diff --git a/synapse/server.py b/synapse/server.py index 1c82500f3024..0f36ef69cb91 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -428,10 +428,10 @@ def get_replication_notifier(self) -> ReplicationNotifier: @cache_in_self def get_auth(self) -> Auth: - if self.config.auth.oauth_delegation_enabled: - from synapse.api.auth.oauth_delegated import OAuthDelegatedAuth + if self.config.experimental.msc3861.enabled: + from synapse.api.auth.msc3861_delegated import MSC3861DelegatedAuth - return OAuthDelegatedAuth(self) + return MSC3861DelegatedAuth(self) return InternalAuth(self) @cache_in_self diff --git a/tests/config/test_oauth_delegation.py b/tests/config/test_oauth_delegation.py new file mode 100644 index 000000000000..c5fc6d6ebb0b --- /dev/null +++ b/tests/config/test_oauth_delegation.py @@ -0,0 +1,202 @@ +# Copyright 2023 Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any, Dict +from unittest.mock import Mock + +from synapse.config import ConfigError +from synapse.module_api import ModuleApi +from synapse.types import JsonDict + +from tests.server import get_clock +from tests.unittest import HomeserverTestCase, override_config, skip_unless + +try: + import authlib # noqa: F401 + + HAS_AUTHLIB = True +except ImportError: + HAS_AUTHLIB = False + + +# These are a few constants that are used as config parameters in the tests. 
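Putting the well-known change above together: with MSC3861 enabled, the client discovery document comes out roughly as follows (values mirror the test constants defined just below; the "account" key is only present when account_management_url is set):

    # approximate /.well-known/matrix/client payload
    client_well_known = {
        "m.homeserver": {"base_url": "https://synapse/"},
        "org.matrix.msc2965.authentication": {
            "issuer": "https://issuer/",
            "account": "https://my-account.issuer",
        },
    }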
+SERVER_NAME = "test" +ISSUER = "https://issuer/" +CLIENT_ID = "test-client-id" +CLIENT_SECRET = "test-client-secret" +BASE_URL = "https://synapse/" + + +class CustomAuthModule: + """A module which registers a password auth provider.""" + + @staticmethod + def parse_config(config: JsonDict) -> None: + pass + + def __init__(self, config: None, api: ModuleApi): + api.register_password_auth_provider_callbacks( + auth_checkers={("m.login.password", ("password",)): Mock()}, + ) + + +@skip_unless(HAS_AUTHLIB, "requires authlib") +class MSC3861OAuthDelegation(HomeserverTestCase): + """Test that the Homeserver fails to initialize if the config is invalid.""" + + def setUp(self) -> None: + self.reactor, self.clock = get_clock() + self._hs_args = {"clock": self.clock, "reactor": self.reactor} + + def default_config(self) -> Dict[str, Any]: + config = super().default_config() + config["public_baseurl"] = BASE_URL + if "experimental_features" not in config: + config["experimental_features"] = {} + config["experimental_features"]["msc3861"] = { + "enabled": True, + "issuer": ISSUER, + "client_id": CLIENT_ID, + "client_auth_method": "client_secret_post", + "client_secret": CLIENT_SECRET, + } + return config + + def test_registration_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "password_config": { + "enabled": True, + }, + } + ) + def test_password_config_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "oidc_providers": [ + { + "idp_id": "microsoft", + "idp_name": "Microsoft", + "issuer": "https://login.microsoftonline.com//v2.0", + "client_id": "", + "client_secret": "", + "scopes": ["openid", "profile"], + "authorization_endpoint": "https://login.microsoftonline.com//oauth2/v2.0/authorize", + "token_endpoint": "https://login.microsoftonline.com//oauth2/v2.0/token", + "userinfo_endpoint": "https://graph.microsoft.com/oidc/userinfo", + } + ], + } + ) + def test_oidc_sso_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "cas_config": { + "enabled": True, + "server_url": "https://cas-server.com", + "displayname_attribute": "name", + "required_attributes": {"userGroup": "staff", "department": "None"}, + }, + } + ) + def test_cas_sso_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "modules": [ + { + "module": f"{__name__}.{CustomAuthModule.__qualname__}", + "config": {}, + } + ], + } + ) + def test_auth_providers_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "jwt_config": { + "enabled": True, + "secret": "my-secret-token", + "algorithm": "HS256", + }, + } + ) + def test_jwt_auth_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "experimental_features": { + "msc3882_enabled": True, + }, + } + ) + def test_msc3882_auth_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "recaptcha_public_key": "test", + 
"recaptcha_private_key": "test", + "enable_registration_captcha": True, + } + ) + def test_captcha_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "refresh_token_lifetime": "24h", + "refreshable_access_token_lifetime": "10m", + "nonrefreshable_access_token_lifetime": "24h", + } + ) + def test_refreshable_tokens_cannot_be_enabled(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "session_lifetime": "24h", + } + ) + def test_session_lifetime_cannot_be_set(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index ee1bc5ca7ac6..081fef51ecd8 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -109,12 +109,15 @@ class MSC3861OAuthDelegation(HomeserverTestCase): def default_config(self) -> Dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL - config["oauth_delegation"] = { - "enabled": True, - "issuer": ISSUER, - "client_id": CLIENT_ID, - "client_auth_method": "client_secret_post", - "client_secret": CLIENT_SECRET, + config["disable_registration"] = True + config["experimental_features"] = { + "msc3861": { + "enabled": True, + "issuer": ISSUER, + "client_id": CLIENT_ID, + "client_auth_method": "client_secret_post", + "client_secret": CLIENT_SECRET, + } } return config diff --git a/tests/rest/test_well_known.py b/tests/rest/test_well_known.py index 34333d88df25..377243a1706d 100644 --- a/tests/rest/test_well_known.py +++ b/tests/rest/test_well_known.py @@ -108,14 +108,17 @@ def test_server_well_known_disabled(self) -> None: @unittest.override_config( { "public_baseurl": "https://homeserver", # this is only required so that client well known is served - "oauth_delegation": { - "enabled": True, - "issuer": "https://issuer", - "account": "https://my-account.issuer", - "client_id": "id", - "client_auth_method": "client_secret_post", - "client_secret": "secret", + "experimental_features": { + "msc3861": { + "enabled": True, + "issuer": "https://issuer", + "account_management_url": "https://my-account.issuer", + "client_id": "id", + "client_auth_method": "client_secret_post", + "client_secret": "secret", + }, }, + "disable_registration": True, } ) def test_client_well_known_msc3861_oauth_delegation(self) -> None: From bad1f2cd3558d908b579b6c191bcd7bebecd32be Mon Sep 17 00:00:00 2001 From: Hugh Nimmo-Smith Date: Tue, 7 Feb 2023 12:55:54 +0000 Subject: [PATCH 16/75] Tests for JWKS endpoint --- tests/config/test_oauth_delegation.py | 117 +++++++++++++++++++++++--- tests/rest/admin/test_jwks.py | 106 +++++++++++++++++++++++ 2 files changed, 212 insertions(+), 11 deletions(-) create mode 100644 tests/rest/admin/test_jwks.py diff --git a/tests/config/test_oauth_delegation.py b/tests/config/test_oauth_delegation.py index c5fc6d6ebb0b..6d294e0144f6 100644 --- a/tests/config/test_oauth_delegation.py +++ b/tests/config/test_oauth_delegation.py @@ -51,6 +51,34 @@ def __init__(self, config: None, api: ModuleApi): ) +def _dict_merge(merge_dict: dict, into_dict: dict) -> None: + """Do a deep merge of two dicts + + Recursively merges `merge_dict` into `into_dict`: + * For keys where both `merge_dict` and `into_dict` have a dict value, the values + are recursively merged + * For all other keys, the values 
in `into_dict` (if any) are overwritten with + the value from `merge_dict`. + + Args: + merge_dict: dict to merge + into_dict: target dict to be modified + """ + for k, v in merge_dict.items(): + if k not in into_dict: + into_dict[k] = v + continue + + current_val = into_dict[k] + + if isinstance(v, dict) and isinstance(current_val, dict): + _dict_merge(v, current_val) + continue + + # otherwise we just overwrite + into_dict[k] = v + + @skip_unless(HAS_AUTHLIB, "requires authlib") class MSC3861OAuthDelegation(HomeserverTestCase): """Test that the Homeserver fails to initialize if the config is invalid.""" @@ -60,18 +88,85 @@ def setUp(self) -> None: self._hs_args = {"clock": self.clock, "reactor": self.reactor} def default_config(self) -> Dict[str, Any]: - config = super().default_config() - config["public_baseurl"] = BASE_URL - if "experimental_features" not in config: - config["experimental_features"] = {} - config["experimental_features"]["msc3861"] = { - "enabled": True, - "issuer": ISSUER, - "client_id": CLIENT_ID, - "client_auth_method": "client_secret_post", - "client_secret": CLIENT_SECRET, + default_extra_config = { + "public_baseurl": BASE_URL, + "experimental_features": { + "msc3861": { + "enabled": True, + "issuer": ISSUER, + "client_id": CLIENT_ID, + "client_auth_method": "client_secret_post", + "client_secret": CLIENT_SECRET, + } + }, + } + _dict_merge( + {} if self._extra_config is None else self._extra_config, + default_extra_config, + ) + self._extra_config = default_extra_config + return super().default_config() + + @override_config( + { + "enable_registration": False, } - return config + ) + def test_client_secret_post_works(self) -> None: + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "experimental_features": { + "msc3861": { + "client_auth_method": "invalid", + } + }, + } + ) + def test_invalid_client_auth_method(self) -> None: + with self.assertRaises(ValueError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "experimental_features": { + "msc3861": { + "client_auth_method": "private_key_jwt", + } + }, + } + ) + def test_invalid_private_key_jwt(self) -> None: + with self.assertRaises(ConfigError): + self.setup_test_homeserver() + + @override_config( + { + "enable_registration": False, + "experimental_features": { + "msc3861": { + "client_auth_method": "private_key_jwt", + "jwk": { + "p": "-frVdP_tZ-J_nIR6HNMDq1N7aunwm51nAqNnhqIyuA8ikx7LlQED1tt2LD3YEvYyW8nxE2V95HlCRZXQPMiRJBFOsbmYkzl2t-MpavTaObB_fct_JqcRtdXddg4-_ihdjRDwUOreq_dpWh6MIKsC3UyekfkHmeEJg5YpOTL15j8", + "kty": "RSA", + "q": "oFw-Enr_YozQB1ab-kawn4jY3yHi8B1nSmYT0s8oTCflrmps5BFJfCkHL5ij3iY15z0o2m0N-jjB1oSJ98O4RayEEYNQlHnTNTl0kRIWzpoqblHUIxVcahIpP_xTovBJzwi8XXoLGqHOOMA-r40LSyVgP2Ut8D9qBwV6_UfT0LU", + "d": "WFkDPYo4b4LIS64D_QtQfGGuAObPvc3HFfp9VZXyq3SJR58XZRHE0jqtlEMNHhOTgbMYS3w8nxPQ_qVzY-5hs4fIanwvB64mAoOGl0qMHO65DTD_WsGFwzYClJPBVniavkLE2Hmpu8IGe6lGliN8vREC6_4t69liY-XcN_ECboVtC2behKkLOEASOIMuS7YcKAhTJFJwkl1dqDlliEn5A4u4xy7nuWQz3juB1OFdKlwGA5dfhDNglhoLIwNnkLsUPPFO-WB5ZNEW35xxHOToxj4bShvDuanVA6mJPtTKjz0XibjB36bj_nF_j7EtbE2PdGJ2KevAVgElR4lqS4ISgQ", + "e": "AQAB", + "kid": "test", + "qi": "cPfNk8l8W5exVNNea4d7QZZ8Qr8LgHghypYAxz8PQh1fNa8Ya1SNUDVzC2iHHhszxxA0vB9C7jGze8dBrvnzWYF1XvQcqNIVVgHhD57R1Nm3dj2NoHIKe0Cu4bCUtP8xnZQUN4KX7y4IIcgRcBWG1hT6DEYZ4BxqicnBXXNXAUI", + "dp": 
"dKlMHvslV1sMBQaKWpNb3gPq0B13TZhqr3-E2_8sPlvJ3fD8P4CmwwnOn50JDuhY3h9jY5L06sBwXjspYISVv8hX-ndMLkEeF3lrJeA5S70D8rgakfZcPIkffm3tlf1Ok3v5OzoxSv3-67Df4osMniyYwDUBCB5Oq1tTx77xpU8", + "dq": "S4ooU1xNYYcjl9FcuJEEMqKsRrAXzzSKq6laPTwIp5dDwt2vXeAm1a4eDHXC-6rUSZGt5PbqVqzV4s-cjnJMI8YYkIdjNg4NSE1Ac_YpeDl3M3Colb5CQlU7yUB7xY2bt0NOOFp9UJZYJrOo09mFMGjy5eorsbitoZEbVqS3SuE", + "n": "nJbYKqFwnURKimaviyDFrNLD3gaKR1JW343Qem25VeZxoMq1665RHVoO8n1oBm4ClZdjIiZiVdpyqzD5-Ow12YQgQEf1ZHP3CCcOQQhU57Rh5XvScTe5IxYVkEW32IW2mp_CJ6WfjYpfeL4azarVk8H3Vr59d1rSrKTVVinVdZer9YLQyC_rWAQNtHafPBMrf6RYiNGV9EiYn72wFIXlLlBYQ9Fx7bfe1PaL6qrQSsZP3_rSpuvVdLh1lqGeCLR0pyclA9uo5m2tMyCXuuGQLbA_QJm5xEc7zd-WFdux2eXF045oxnSZ_kgQt-pdN7AxGWOVvwoTf9am6mSkEdv6iw", + }, + } + }, + } + ) + def test_private_key_jwt_works(self) -> None: + self.setup_test_homeserver() def test_registration_cannot_be_enabled(self) -> None: with self.assertRaises(ConfigError): diff --git a/tests/rest/admin/test_jwks.py b/tests/rest/admin/test_jwks.py new file mode 100644 index 000000000000..a9a6191c7346 --- /dev/null +++ b/tests/rest/admin/test_jwks.py @@ -0,0 +1,106 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Dict + +from twisted.web.resource import Resource + +from synapse.rest.synapse.client import build_synapse_client_resource_tree + +from tests.unittest import HomeserverTestCase, override_config, skip_unless + +try: + import authlib # noqa: F401 + + HAS_AUTHLIB = True +except ImportError: + HAS_AUTHLIB = False + + +@skip_unless(HAS_AUTHLIB, "requires authlib") +class JWKSTestCase(HomeserverTestCase): + """Test /_synapse/jwks JWKS data.""" + + def create_resource_dict(self) -> Dict[str, Resource]: + d = super().create_resource_dict() + d.update(build_synapse_client_resource_tree(self.hs)) + return d + + def test_empty_jwks(self) -> None: + """Test that the JWKS endpoint is not present by default.""" + channel = self.make_request("GET", "/_synapse/jwks") + self.assertEqual(404, channel.code, channel.result) + + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc3861": { + "enabled": True, + "issuer": "https://issuer/", + "client_id": "test-client-id", + "client_auth_method": "client_secret_post", + "client_secret": "secret", + }, + }, + } + ) + def test_empty_jwks_for_msc3861_client_secret_post(self) -> None: + """Test that the JWKS endpoint is empty when plain auth is used.""" + channel = self.make_request("GET", "/_synapse/jwks") + self.assertEqual(200, channel.code, channel.result) + self.assertEqual({"keys": []}, channel.json_body) + + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc3861": { + "enabled": True, + "issuer": "https://issuer/", + "client_id": "test-client-id", + "client_auth_method": "private_key_jwt", + "jwk": { + "p": "-frVdP_tZ-J_nIR6HNMDq1N7aunwm51nAqNnhqIyuA8ikx7LlQED1tt2LD3YEvYyW8nxE2V95HlCRZXQPMiRJBFOsbmYkzl2t-MpavTaObB_fct_JqcRtdXddg4-_ihdjRDwUOreq_dpWh6MIKsC3UyekfkHmeEJg5YpOTL15j8", + "kty": 
"RSA", + "q": "oFw-Enr_YozQB1ab-kawn4jY3yHi8B1nSmYT0s8oTCflrmps5BFJfCkHL5ij3iY15z0o2m0N-jjB1oSJ98O4RayEEYNQlHnTNTl0kRIWzpoqblHUIxVcahIpP_xTovBJzwi8XXoLGqHOOMA-r40LSyVgP2Ut8D9qBwV6_UfT0LU", + "d": "WFkDPYo4b4LIS64D_QtQfGGuAObPvc3HFfp9VZXyq3SJR58XZRHE0jqtlEMNHhOTgbMYS3w8nxPQ_qVzY-5hs4fIanwvB64mAoOGl0qMHO65DTD_WsGFwzYClJPBVniavkLE2Hmpu8IGe6lGliN8vREC6_4t69liY-XcN_ECboVtC2behKkLOEASOIMuS7YcKAhTJFJwkl1dqDlliEn5A4u4xy7nuWQz3juB1OFdKlwGA5dfhDNglhoLIwNnkLsUPPFO-WB5ZNEW35xxHOToxj4bShvDuanVA6mJPtTKjz0XibjB36bj_nF_j7EtbE2PdGJ2KevAVgElR4lqS4ISgQ", + "e": "AQAB", + "kid": "test", + "qi": "cPfNk8l8W5exVNNea4d7QZZ8Qr8LgHghypYAxz8PQh1fNa8Ya1SNUDVzC2iHHhszxxA0vB9C7jGze8dBrvnzWYF1XvQcqNIVVgHhD57R1Nm3dj2NoHIKe0Cu4bCUtP8xnZQUN4KX7y4IIcgRcBWG1hT6DEYZ4BxqicnBXXNXAUI", + "dp": "dKlMHvslV1sMBQaKWpNb3gPq0B13TZhqr3-E2_8sPlvJ3fD8P4CmwwnOn50JDuhY3h9jY5L06sBwXjspYISVv8hX-ndMLkEeF3lrJeA5S70D8rgakfZcPIkffm3tlf1Ok3v5OzoxSv3-67Df4osMniyYwDUBCB5Oq1tTx77xpU8", + "dq": "S4ooU1xNYYcjl9FcuJEEMqKsRrAXzzSKq6laPTwIp5dDwt2vXeAm1a4eDHXC-6rUSZGt5PbqVqzV4s-cjnJMI8YYkIdjNg4NSE1Ac_YpeDl3M3Colb5CQlU7yUB7xY2bt0NOOFp9UJZYJrOo09mFMGjy5eorsbitoZEbVqS3SuE", + "n": "nJbYKqFwnURKimaviyDFrNLD3gaKR1JW343Qem25VeZxoMq1665RHVoO8n1oBm4ClZdjIiZiVdpyqzD5-Ow12YQgQEf1ZHP3CCcOQQhU57Rh5XvScTe5IxYVkEW32IW2mp_CJ6WfjYpfeL4azarVk8H3Vr59d1rSrKTVVinVdZer9YLQyC_rWAQNtHafPBMrf6RYiNGV9EiYn72wFIXlLlBYQ9Fx7bfe1PaL6qrQSsZP3_rSpuvVdLh1lqGeCLR0pyclA9uo5m2tMyCXuuGQLbA_QJm5xEc7zd-WFdux2eXF045oxnSZ_kgQt-pdN7AxGWOVvwoTf9am6mSkEdv6iw", + }, + }, + }, + } + ) + def test_key_returned_for_msc3861_client_secret_post(self) -> None: + """Test that the JWKS includes public part of JWK for private_key_jwt auth is used.""" + channel = self.make_request("GET", "/_synapse/jwks") + self.assertEqual(200, channel.code, channel.result) + self.assertEqual( + { + "keys": [ + { + "kty": "RSA", + "e": "AQAB", + "kid": "test", + "n": "nJbYKqFwnURKimaviyDFrNLD3gaKR1JW343Qem25VeZxoMq1665RHVoO8n1oBm4ClZdjIiZiVdpyqzD5-Ow12YQgQEf1ZHP3CCcOQQhU57Rh5XvScTe5IxYVkEW32IW2mp_CJ6WfjYpfeL4azarVk8H3Vr59d1rSrKTVVinVdZer9YLQyC_rWAQNtHafPBMrf6RYiNGV9EiYn72wFIXlLlBYQ9Fx7bfe1PaL6qrQSsZP3_rSpuvVdLh1lqGeCLR0pyclA9uo5m2tMyCXuuGQLbA_QJm5xEc7zd-WFdux2eXF045oxnSZ_kgQt-pdN7AxGWOVvwoTf9am6mSkEdv6iw", + } + ] + }, + channel.json_body, + ) From c008b44b4f7bb3604be77709c62e6ec78389f8ed Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 4 Apr 2023 18:11:17 +0200 Subject: [PATCH 17/75] Add an admin token for MAS -> Synapse calls --- synapse/api/auth/msc3861_delegated.py | 15 +++++++++++++++ synapse/config/experimental.py | 9 +++++++++ 2 files changed, 24 insertions(+) diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index 4ca3280bd3c4..a84b7730b340 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -90,6 +90,7 @@ def __init__(self, hs: "HomeServer"): self._http_client = hs.get_proxied_http_client() self._hostname = hs.hostname + self._admin_token = self._config.admin_token self._issuer_metadata = RetryOnExceptionCachedCall(self._load_metadata) @@ -176,6 +177,20 @@ async def get_user_by_access_token( token: str, allow_expired: bool = False, ) -> Requester: + if self._admin_token is not None and token == self._admin_token: + # XXX: This is a temporary solution so that the admin API can be called by + # the OIDC provider. This will be removed once we have OIDC client + # credentials grant support in matrix-authentication-service. + logging.info("Admin toked used") + # XXX: that user doesn't exist and won't be provisioned. 
+            # This is mostly fine for admin calls, but we should also think about
+            # supporting requesters without a user_id.
+            admin_user = UserID("__oidc_admin", self._hostname)
+            return create_requester(
+                user_id=admin_user,
+                scope=["urn:synapse:admin:*"],
+            )
+
         introspection_result = await self._introspect_token(token)

         logger.info(f"Introspection result: {introspection_result!r}")
diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py
index b9607975f903..d4dff22b0bae 100644
--- a/synapse/config/experimental.py
+++ b/synapse/config/experimental.py
@@ -136,6 +136,15 @@ def _check_client_auth_method(
         )
     """The URL of the My Account page on the OIDC Provider as per MSC2965."""

+    admin_token: Optional[str] = attr.ib(
+        default=None,
+        validator=attr.validators.optional(attr.validators.instance_of(str)),
+    )
+    """
+    A token that should be treated as an admin token.
+    This is used by the OIDC provider to make admin calls to Synapse.
+    """
+
     def check_config_conflicts(self, root: RootConfig) -> None:
         """Checks for any configuration conflicts with other parts of Synapse.

From 4d0231b3648d5d70a8e0f4d99a0c040f12f15669 Mon Sep 17 00:00:00 2001
From: Quentin Gliech
Date: Tue, 16 May 2023 10:52:37 +0200
Subject: [PATCH 18/75] Make AS tokens work & allow ASes to /register

---
 synapse/api/auth/base.py                | 80 +++++++++++++++++++++++-
 synapse/api/auth/internal.py            | 82 +-------------------------
 synapse/api/auth/msc3861_delegated.py   | 9 ++-
 synapse/rest/client/register.py         | 69 +++++++++++++++++++++
 tests/handlers/test_oauth_delegation.py | 4 +-
 5 files changed, 159 insertions(+), 85 deletions(-)

diff --git a/synapse/api/auth/base.py b/synapse/api/auth/base.py
index 240f2b90dee2..9321d6f18637 100644
--- a/synapse/api/auth/base.py
+++ b/synapse/api/auth/base.py
@@ -14,6 +14,8 @@
 import logging
 from typing import TYPE_CHECKING, Optional, Tuple

+from netaddr import IPAddress
+
 from twisted.web.server import Request

 from synapse import event_auth
@@ -26,7 +28,8 @@
 )
 from synapse.appservice import ApplicationService
 from synapse.logging.opentracing import trace
-from synapse.types import Requester
+from synapse.types import Requester, create_requester
+from synapse.util.cancellation import cancellable

 if TYPE_CHECKING:
     from synapse.server import HomeServer
@@ -271,3 +274,78 @@ def get_access_token_from_request(request: Request) -> str:
             raise MissingClientTokenError()

         return query_params[0].decode("ascii")
+
+    @cancellable
+    async def get_appservice_user(
+        self, request: Request, access_token: str
+    ) -> Optional[Requester]:
+        """
+        Given a request, reads the request parameters to determine:
+        - whether it's an application service that's making this request
+        - what user the application service should be treated as controlling
+          (the user_id URI parameter allows an application service to masquerade
+          as any applicable user in its namespace)
+        - what device the application service should be treated as controlling
+          (the device_id[^1] URI parameter allows an application service to masquerade
+          as any device that exists for the relevant user)
+
+        [^1] Unstable and provided by MSC3202.
+             Must use `org.matrix.msc3202.device_id` in place of `device_id` for now.
+ + Returns: + the application service `Requester` of that request + + Postconditions: + - The `app_service` field in the returned `Requester` is set + - The `user_id` field in the returned `Requester` is either the application + service sender or the controlled user set by the `user_id` URI parameter + - The returned application service is permitted to control the returned user ID. + - The returned device ID, if present, has been checked to be a valid device ID + for the returned user ID. + """ + DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id" + + app_service = self.store.get_app_service_by_token(access_token) + if app_service is None: + return None + + if app_service.ip_range_whitelist: + ip_address = IPAddress(request.getClientAddress().host) + if ip_address not in app_service.ip_range_whitelist: + return None + + # This will always be set by the time Twisted calls us. + assert request.args is not None + + if b"user_id" in request.args: + effective_user_id = request.args[b"user_id"][0].decode("utf8") + await self.validate_appservice_can_control_user_id( + app_service, effective_user_id + ) + else: + effective_user_id = app_service.sender + + effective_device_id: Optional[str] = None + + if ( + self.hs.config.experimental.msc3202_device_masquerading_enabled + and DEVICE_ID_ARG_NAME in request.args + ): + effective_device_id = request.args[DEVICE_ID_ARG_NAME][0].decode("utf8") + # We only just set this so it can't be None! + assert effective_device_id is not None + device_opt = await self.store.get_device( + effective_user_id, effective_device_id + ) + if device_opt is None: + # For now, use 400 M_EXCLUSIVE if the device doesn't exist. + # This is an open thread of discussion on MSC3202 as of 2021-12-09. + raise AuthError( + 400, + f"Application service trying to use a device that doesn't exist ('{effective_device_id}' for {effective_user_id})", + Codes.EXCLUSIVE, + ) + + return create_requester( + effective_user_id, app_service=app_service, device_id=effective_device_id + ) diff --git a/synapse/api/auth/internal.py b/synapse/api/auth/internal.py index 813d537e537c..e2ae198b196e 100644 --- a/synapse/api/auth/internal.py +++ b/synapse/api/auth/internal.py @@ -12,12 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
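To make the masquerading parameters handled by `get_appservice_user` concrete, a request from an application service could look roughly like this sketch (homeserver, user, device and token values are all made up; the query parameter names are the ones parsed above):

    from urllib.parse import urlencode

    params = urlencode(
        {
            "user_id": "@irc_alice:example.org",
            "org.matrix.msc3202.device_id": "IRCBRIDGE1",
        }
    )
    url = f"https://synapse/_matrix/client/v3/sync?{params}"
    headers = {"Authorization": "Bearer <as_token>"}  # the appservice's token

The corresponding removal of the old private helper from InternalAuth follows.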
import logging -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING import pymacaroons -from netaddr import IPAddress - -from twisted.web.server import Request from synapse.api.errors import ( AuthError, @@ -122,7 +119,7 @@ async def _wrapped_get_user_by_req( access_token = self.get_access_token_from_request(request) # First check if it could be a request from an appservice - requester = await self._get_appservice_user(request) + requester = await self.get_appservice_user(request, access_token) if not requester: # If not, it should be from a regular user requester = await self.get_user_by_access_token( @@ -189,81 +186,6 @@ async def _wrapped_get_user_by_req( except KeyError: raise MissingClientTokenError() - @cancellable - async def _get_appservice_user(self, request: Request) -> Optional[Requester]: - """ - Given a request, reads the request parameters to determine: - - whether it's an application service that's making this request - - what user the application service should be treated as controlling - (the user_id URI parameter allows an application service to masquerade - any applicable user in its namespace) - - what device the application service should be treated as controlling - (the device_id[^1] URI parameter allows an application service to masquerade - as any device that exists for the relevant user) - - [^1] Unstable and provided by MSC3202. - Must use `org.matrix.msc3202.device_id` in place of `device_id` for now. - - Returns: - the application service `Requester` of that request - - Postconditions: - - The `app_service` field in the returned `Requester` is set - - The `user_id` field in the returned `Requester` is either the application - service sender or the controlled user set by the `user_id` URI parameter - - The returned application service is permitted to control the returned user ID. - - The returned device ID, if present, has been checked to be a valid device ID - for the returned user ID. - """ - DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id" - - app_service = self.store.get_app_service_by_token( - self.get_access_token_from_request(request) - ) - if app_service is None: - return None - - if app_service.ip_range_whitelist: - ip_address = IPAddress(request.getClientAddress().host) - if ip_address not in app_service.ip_range_whitelist: - return None - - # This will always be set by the time Twisted calls us. - assert request.args is not None - - if b"user_id" in request.args: - effective_user_id = request.args[b"user_id"][0].decode("utf8") - await self.validate_appservice_can_control_user_id( - app_service, effective_user_id - ) - else: - effective_user_id = app_service.sender - - effective_device_id: Optional[str] = None - - if ( - self.hs.config.experimental.msc3202_device_masquerading_enabled - and DEVICE_ID_ARG_NAME in request.args - ): - effective_device_id = request.args[DEVICE_ID_ARG_NAME][0].decode("utf8") - # We only just set this so it can't be None! - assert effective_device_id is not None - device_opt = await self.store.get_device( - effective_user_id, effective_device_id - ) - if device_opt is None: - # For now, use 400 M_EXCLUSIVE if the device doesn't exist. - # This is an open thread of discussion on MSC3202 as of 2021-12-09. 
-            raise AuthError(
-                400,
-                f"Application service trying to use a device that doesn't exist ('{effective_device_id}' for {effective_user_id})",
-                Codes.EXCLUSIVE,
-            )
-
-        return create_requester(
-            effective_user_id, app_service=app_service, device_id=effective_device_id
-        )
-
     async def get_user_by_access_token(
         self,
         token: str,
diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py
index a84b7730b340..b84dce2563a7 100644
--- a/synapse/api/auth/msc3861_delegated.py
+++ b/synapse/api/auth/msc3861_delegated.py
@@ -162,14 +162,19 @@ async def get_user_by_req(
     ) -> Requester:
         access_token = self.get_access_token_from_request(request)

-        # TODO: we probably want to assert the allow_guest inside this call so that we don't provision the user if they don't have enough permission:
-        requester = await self.get_user_by_access_token(access_token, allow_expired)
+        requester = await self.get_appservice_user(request, access_token)
+        if not requester:
+            # TODO: we probably want to assert the allow_guest inside this call
+            # so that we don't provision the user if they don't have enough permission:
+            requester = await self.get_user_by_access_token(access_token, allow_expired)

         if not allow_guest and requester.is_guest:
             raise OAuthInsufficientScopeError(
                 ["urn:matrix:org.matrix.msc2967.client:api:*"]
             )

+        request.requester = requester
+
         return requester

     async def get_user_by_access_token(
diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py
index f8fb0e1dee46..d59669f0b6da 100644
--- a/synapse/rest/client/register.py
+++ b/synapse/rest/client/register.py
@@ -869,6 +869,74 @@ async def _do_guest_registration(
         return 200, result


+class RegisterAppServiceOnlyRestServlet(RestServlet):
+    """An alternative registration API endpoint that only allows ASes to register
+
+    This replaces the regular /register endpoint when MSC3861 is enabled. There are
+    two notable differences from the regular /register endpoint:
+    - It only allows the `m.login.application_service` login type
+    - It does not create a device or access token for the just-registered user
+
+    Note that the exact behaviour of this endpoint is not yet finalised. It should be
+    just good enough to make most ASes work.
+    """
+
+    PATTERNS = client_patterns("/register$")
+    CATEGORY = "Registration/login requests"
+
+    def __init__(self, hs: "HomeServer"):
+        super().__init__()
+
+        self.auth = hs.get_auth()
+        self.registration_handler = hs.get_registration_handler()
+        self.ratelimiter = hs.get_registration_ratelimiter()
+
+    @interactive_auth_handler
+    async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
+        body = parse_json_object_from_request(request)
+
+        client_addr = request.getClientAddress().host
+
+        await self.ratelimiter.ratelimit(None, client_addr, update=False)
+
+        kind = parse_string(request, "kind", default="user")
+
+        if kind == "guest":
+            raise SynapseError(403, "Guest access is disabled")
+        elif kind != "user":
+            raise UnrecognizedRequestError(
+                f"Do not understand membership kind: {kind}",
+            )
+
+        # Pull out the provided username and do basic sanity checks early since
+        # the auth layer will store these in sessions.
+        desired_username = body.get("username")
+        if not isinstance(desired_username, str) or len(desired_username) > 512:
+            raise SynapseError(400, "Invalid username")
+
+        # Allow only ASes to use this API.
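For reference, the one request shape this servlet is meant to accept looks roughly like the following sketch (URL, token and username are invented; the body keys follow the checks in this method, which resume just below):

    import json
    from urllib.request import Request

    # hypothetical appservice registration call against this endpoint
    req = Request(
        "https://synapse/_matrix/client/v3/register",
        data=json.dumps(
            {"type": "m.login.application_service", "username": "irc_alice"}
        ).encode("utf-8"),
        headers={
            "Authorization": "Bearer <as_token>",  # the appservice's as_token
            "Content-Type": "application/json",
        },
        method="POST",
    )
    # a successful response is 200 with {"user_id": "@irc_alice:<server>"} and,
    # unlike the regular endpoint, no device or access token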
+ if body.get("type") != APP_SERVICE_REGISTRATION_TYPE: + raise SynapseError(403, "Non-application service registration type") + + if not self.auth.has_access_token(request): + raise SynapseError( + 400, + "Appservice token must be provided when using a type of m.login.application_service", + ) + + # XXX we should check that desired_username is valid. Currently + # we give appservices carte blanche for any insanity in mxids, + # because the IRC bridges rely on being able to register stupid + # IDs. + + as_token = self.auth.get_access_token_from_request(request) + + user_id = await self.registration_handler.appservice_register( + desired_username, as_token + ) + return 200, {"user_id": user_id} + + def _calculate_registration_flows( config: HomeServerConfig, auth_handler: AuthHandler ) -> List[List[str]]: @@ -956,6 +1024,7 @@ def _calculate_registration_flows( def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: if hs.config.experimental.msc3861.enabled: + RegisterAppServiceOnlyRestServlet(hs).register(http_server) return if hs.config.worker.worker_app is None: diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index 081fef51ecd8..e53020a58a2a 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -527,8 +527,8 @@ def test_registration_endpoints_removed(self) -> None: self.expect_unrecognized( "GET", "/_matrix/client/v1/register/m.login.registration_token/validity" ) - self.expect_unrecognized("POST", "/_matrix/client/v3/register") - self.expect_unrecognized("GET", "/_matrix/client/v3/register") + # This is still available for AS registrations + # self.expect_unrecognized("POST", "/_matrix/client/v3/register") self.expect_unrecognized("GET", "/_matrix/client/v3/register/available") self.expect_unrecognized( "POST", "/_matrix/client/v3/register/email/requestToken" From e343125b3880bfc55223735a784eb1894db5e9be Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Wed, 10 May 2023 18:05:06 +0200 Subject: [PATCH 19/75] Disable incompatible Admin API endpoints --- synapse/rest/admin/__init__.py | 21 +++++++++++++-------- synapse/rest/admin/users.py | 8 ++++++++ tests/handlers/test_oauth_delegation.py | 19 +++++++++++++++++++ 3 files changed, 40 insertions(+), 8 deletions(-) diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index c729364839c0..fe8177ed4dcc 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -257,9 +257,11 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: DeleteRoomStatusByRoomIdRestServlet(hs).register(http_server) JoinRoomAliasServlet(hs).register(http_server) VersionServlet(hs).register(http_server) - UserAdminServlet(hs).register(http_server) + if not hs.config.experimental.msc3861.enabled: + UserAdminServlet(hs).register(http_server) UserMembershipRestServlet(hs).register(http_server) - UserTokenRestServlet(hs).register(http_server) + if not hs.config.experimental.msc3861.enabled: + UserTokenRestServlet(hs).register(http_server) UserRestServletV2(hs).register(http_server) UsersRestServletV2(hs).register(http_server) UserMediaStatisticsRestServlet(hs).register(http_server) @@ -274,9 +276,10 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: RoomEventContextServlet(hs).register(http_server) RateLimitRestServlet(hs).register(http_server) UsernameAvailableRestServlet(hs).register(http_server) - ListRegistrationTokensRestServlet(hs).register(http_server) - 
NewRegistrationTokenRestServlet(hs).register(http_server)
-    RegistrationTokenRestServlet(hs).register(http_server)
+    if not hs.config.experimental.msc3861.enabled:
+        ListRegistrationTokensRestServlet(hs).register(http_server)
+        NewRegistrationTokenRestServlet(hs).register(http_server)
+        RegistrationTokenRestServlet(hs).register(http_server)
     DestinationMembershipRestServlet(hs).register(http_server)
     DestinationResetConnectionRestServlet(hs).register(http_server)
     DestinationRestServlet(hs).register(http_server)
@@ -306,10 +309,12 @@ def register_servlets_for_client_rest_resource(
     # The following resources can only be run on the main process.
     if hs.config.worker.worker_app is None:
         DeactivateAccountRestServlet(hs).register(http_server)
-        ResetPasswordRestServlet(hs).register(http_server)
+        if not hs.config.experimental.msc3861.enabled:
+            ResetPasswordRestServlet(hs).register(http_server)
         SearchUsersRestServlet(hs).register(http_server)
-        UserRegisterServlet(hs).register(http_server)
-        AccountValidityRenewServlet(hs).register(http_server)
+        if not hs.config.experimental.msc3861.enabled:
+            UserRegisterServlet(hs).register(http_server)
+            AccountValidityRenewServlet(hs).register(http_server)

     # Load the media repo ones if we're using them. Otherwise load the servlets which
     # don't need a media repo (typically readonly admin APIs).
diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py
index 932333ae5715..407fe9c8043a 100644
--- a/synapse/rest/admin/users.py
+++ b/synapse/rest/admin/users.py
@@ -71,6 +71,7 @@ def __init__(self, hs: "HomeServer"):
         self.auth = hs.get_auth()
         self.admin_handler = hs.get_admin_handler()
         self._msc3866_enabled = hs.config.experimental.msc3866.enabled
+        self._msc3861_enabled = hs.config.experimental.msc3861.enabled

     async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         await assert_requester_is_admin(self.auth, request)
@@ -94,7 +95,14 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         user_id = parse_string(request, "user_id")
         name = parse_string(request, "name")
+
         guests = parse_boolean(request, "guests", default=True)
+        if self._msc3861_enabled and guests:
+            raise SynapseError(
+                HTTPStatus.BAD_REQUEST,
+                "The guests parameter is not supported when MSC3861 is enabled.",
+                errcode=Codes.INVALID_PARAM,
+            )
         deactivated = parse_boolean(request, "deactivated", default=False)

         # If support for MSC3866 is not enabled, apply no filtering based on the
diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py
index e53020a58a2a..b79c43a42472 100644
--- a/tests/handlers/test_oauth_delegation.py
+++ b/tests/handlers/test_oauth_delegation.py
@@ -31,6 +31,7 @@
     InvalidClientTokenError,
     OAuthInsufficientScopeError,
 )
+from synapse.rest import admin
 from synapse.rest.client import account, devices, keys, login, logout, register
 from synapse.server import HomeServer
 from synapse.types import JsonDict
@@ -104,6 +105,7 @@ class MSC3861OAuthDelegation(HomeserverTestCase):
         register.register_servlets,
         login.register_servlets,
         logout.register_servlets,
+        admin.register_servlets,
     ]

     def default_config(self) -> Dict[str, Any]:
@@ -557,3 +559,20 @@ def test_openid_endpoints_removed(self) -> None:
         self.expect_unrecognized(
             "POST", "/_matrix/client/v3/user/{USERNAME}/openid/request_token"
         )
+
+    def test_admin_api_endpoints_removed(self) -> None:
+        """Test that admin API endpoints that are incompatible with MSC3861 are no longer available."""
+        self.expect_unrecognized("GET", 
"/_synapse/admin/v1/registration_tokens") + self.expect_unrecognized("POST", "/_synapse/admin/v1/registration_tokens/new") + self.expect_unrecognized("GET", "/_synapse/admin/v1/registration_tokens/abcd") + self.expect_unrecognized("PUT", "/_synapse/admin/v1/registration_tokens/abcd") + self.expect_unrecognized( + "DELETE", "/_synapse/admin/v1/registration_tokens/abcd" + ) + self.expect_unrecognized("POST", "/_synapse/admin/v1/reset_password/foo") + self.expect_unrecognized("POST", "/_synapse/admin/v1/users/foo/login") + self.expect_unrecognized("GET", "/_synapse/admin/v1/register") + self.expect_unrecognized("POST", "/_synapse/admin/v1/register") + self.expect_unrecognized("GET", "/_synapse/admin/v1/users/foo/admin") + self.expect_unrecognized("PUT", "/_synapse/admin/v1/users/foo/admin") + self.expect_unrecognized("POST", "/_synapse/admin/v1/account_validity/validity") From ec9379d7e298c24f3530cf48ee34c30aa038feb2 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 12 May 2023 15:22:46 +0200 Subject: [PATCH 20/75] Newsfile. --- changelog.d/15582.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/15582.feature diff --git a/changelog.d/15582.feature b/changelog.d/15582.feature new file mode 100644 index 000000000000..00959500a54e --- /dev/null +++ b/changelog.d/15582.feature @@ -0,0 +1 @@ +Experimental [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) support: delegate auth to an OIDC provider. From 14a5be9c4d69b5669792f2cdc658c266847a8c4a Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Mon, 22 May 2023 15:48:57 +0200 Subject: [PATCH 21/75] Handle errors when introspecting tokens This returns a proper 503 when the introspection endpoint is not working for some reason, which should avoid logging out clients in those cases. 
---
 synapse/api/auth/msc3861_delegated.py   | 42 ++++++++++++++++++++++---
 tests/handlers/test_oauth_delegation.py | 35 +++++++++++++++++++++
 tests/test_utils/__init__.py            |  4 +--
 3 files changed, 74 insertions(+), 7 deletions(-)

diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py
index b84dce2563a7..82c66691dae1 100644
--- a/synapse/api/auth/msc3861_delegated.py
+++ b/synapse/api/auth/msc3861_delegated.py
@@ -27,9 +27,11 @@
 from synapse.api.auth.base import BaseAuth
 from synapse.api.errors import (
     AuthError,
+    HttpResponseException,
     InvalidClientTokenError,
     OAuthInsufficientScopeError,
     StoreError,
+    SynapseError,
 )
 from synapse.http.site import SynapseRequest
 from synapse.logging.context import make_deferred_yieldable
@@ -117,6 +119,21 @@ async def _load_metadata(self) -> OpenIDProviderMetadata:
         return metadata

     async def _introspect_token(self, token: str) -> IntrospectionToken:
+        """
+        Send a token to the introspection endpoint and return the introspection response.
+
+        Args:
+            token: The token to introspect
+
+        Raises:
+            HttpResponseException: If the introspection endpoint returns a non-2xx response
+            ValueError: If the introspection endpoint returns an invalid JSON response
+            JSONDecodeError: If the introspection endpoint returns a non-JSON response
+            Exception: If the HTTP request fails
+
+        Returns:
+            The introspection response
+        """
         metadata = await self._issuer_metadata.get()
         introspection_endpoint = metadata.get("introspection_endpoint")
         raw_headers: Dict[str, str] = {
@@ -136,7 +153,7 @@ async def _introspect_token(self, token: str) -> IntrospectionToken:

         # Do the actual request
         # We're not using the SimpleHttpClient util methods as we don't want to
-        # check the HTTP status code and we do the body encoding ourself.
+        # check the HTTP status code, and we do the body encoding ourselves.
         response = await self._http_client.request(
             method="POST",
             uri=uri,
@@ -145,10 +162,21 @@ async def _introspect_token(self, token: str) -> IntrospectionToken:
         )

         resp_body = await make_deferred_yieldable(readBody(response))
-        # TODO: Let's not worry about 5xx errors & co. for now and just try
-        # decoding that as JSON. We should also do some validation of the
-        # response
+
+        if response.code < 200 or response.code >= 300:
+            raise HttpResponseException(
+                response.code,
+                response.phrase.decode("ascii", errors="replace"),
+                resp_body,
+            )
+
         resp = json_decoder.decode(resp_body.decode("utf-8"))
+
+        if not isinstance(resp, dict):
+            raise ValueError(
+                "The introspection endpoint returned an invalid JSON response."
+ ) + return IntrospectionToken(**resp) async def is_server_admin(self, requester: Requester) -> bool: @@ -196,7 +224,11 @@ async def get_user_by_access_token( scope=["urn:synapse:admin:*"], ) - introspection_result = await self._introspect_token(token) + try: + introspection_result = await self._introspect_token(token) + except Exception: + logger.exception("Failed to introspect token") + raise SynapseError(503, "Unable to introspect the access token") logger.info(f"Introspection result: {introspection_result!r}") diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index b79c43a42472..16ce2c069d60 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -30,6 +30,7 @@ Codes, InvalidClientTokenError, OAuthInsufficientScopeError, + SynapseError, ) from synapse.rest import admin from synapse.rest.client import account, devices, keys, login, logout, register @@ -405,6 +406,40 @@ def test_active_guest_allowed(self) -> None: ) self.assertEqual(requester.device_id, DEVICE) + def test_unavailable_introspection_endpoint(self) -> None: + """The handler should return an internal server error.""" + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + + # The introspection endpoint is returning an error. + self.http_client.request = simple_async_mock( + return_value=FakeResponse(code=500, body=b"Internal Server Error") + ) + error = self.get_failure(self.auth.get_user_by_req(request), SynapseError) + self.assertEqual(error.value.code, 503) + + # The introspection endpoint request fails. + self.http_client.request = simple_async_mock(raises=Exception()) + error = self.get_failure(self.auth.get_user_by_req(request), SynapseError) + self.assertEqual(error.value.code, 503) + + # The introspection endpoint does not return a JSON object. + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, payload=["this is an array", "not an object"] + ) + ) + error = self.get_failure(self.auth.get_user_by_req(request), SynapseError) + self.assertEqual(error.value.code, 503) + + # The introspection endpoint does not return valid JSON. + self.http_client.request = simple_async_mock( + return_value=FakeResponse(code=200, body=b"this is not valid JSON") + ) + error = self.get_failure(self.auth.get_user_by_req(request), SynapseError) + self.assertEqual(error.value.code, 503) + def make_device_keys(self, user_id: str, device_id: str) -> JsonDict: # We only generate a master key to simplify the test. 
master_signing_key = generate_signing_key(device_id) diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py index e5dae670a70e..c8cc841d9540 100644 --- a/tests/test_utils/__init__.py +++ b/tests/test_utils/__init__.py @@ -33,7 +33,7 @@ from twisted.web.http_headers import Headers from twisted.web.iweb import IResponse -from synapse.types import JsonDict +from synapse.types import JsonSerializable if TYPE_CHECKING: from sys import UnraisableHookArgs @@ -145,7 +145,7 @@ def deliverBody(self, protocol: IProtocol) -> None: protocol.connectionLost(Failure(ResponseDone())) @classmethod - def json(cls, *, code: int = 200, payload: JsonDict) -> "FakeResponse": + def json(cls, *, code: int = 200, payload: JsonSerializable) -> "FakeResponse": headers = Headers({"Content-Type": ["application/json"]}) body = json.dumps(payload).encode("utf-8") return cls(code=code, body=body, headers=headers) From 98afc57d59df118a13f894fc66f206bc7409e14a Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Mon, 22 May 2023 17:17:49 +0200 Subject: [PATCH 22/75] Make OIDC scope constants --- synapse/api/auth/msc3861_delegated.py | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index 82c66691dae1..5b0e678c0f59 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -44,6 +44,15 @@ logger = logging.getLogger(__name__) +# Scope as defined by MSC2967 +# https://github.com/matrix-org/matrix-spec-proposals/pull/2967 +SCOPE_MATRIX_API = "urn:matrix:org.matrix.msc2967.client:api:*" +SCOPE_MATRIX_GUEST = "urn:matrix:org.matrix.msc2967.client:api:guest" +SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:org.matrix.msc2967.client:device:" + +# Scope which allows access to the Synapse admin API +SCOPE_SYNAPSE_ADMIN = "urn:synapse:admin:*" + def scope_to_list(scope: str) -> List[str]: """Convert a scope string to a list of scope tokens""" @@ -197,9 +206,7 @@ async def get_user_by_req( requester = await self.get_user_by_access_token(access_token, allow_expired) if not allow_guest and requester.is_guest: - raise OAuthInsufficientScopeError( - ["urn:matrix:org.matrix.msc2967.client:api:*"] - ) + raise OAuthInsufficientScopeError([SCOPE_MATRIX_API]) request.requester = requester @@ -241,9 +248,9 @@ async def get_user_by_access_token( scope: List[str] = scope_to_list(introspection_result.get("scope", "")) # Determine type of user based on presence of particular scopes - has_admin_scope = "urn:synapse:admin:*" in scope - has_user_scope = "urn:matrix:org.matrix.msc2967.client:api:*" in scope - has_guest_scope = "urn:matrix:org.matrix.msc2967.client:api:guest" in scope + has_admin_scope = SCOPE_SYNAPSE_ADMIN in scope + has_user_scope = SCOPE_MATRIX_API in scope + has_guest_scope = SCOPE_MATRIX_GUEST in scope is_user = has_user_scope or has_admin_scope is_guest = has_guest_scope and not is_user @@ -299,10 +306,8 @@ async def get_user_by_access_token( # Find device_id in scope device_id = None for tok in scope: - if tok.startswith("urn:matrix:org.matrix.msc2967.client:device:"): - parts = tok.split(":") - if len(parts) == 5: - device_id = parts[4] + if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX): + device_id = tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :] if device_id: # Create the device on the fly if it does not exist From f739bde962daa9bc425c8343f35993ae889dbc67 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 23 May 2023 16:59:53 +0200 Subject: [PATCH 23/75] Reject 
tokens with multiple device scopes --- synapse/api/auth/msc3861_delegated.py | 30 ++++++++++++++++++++----- tests/handlers/test_oauth_delegation.py | 29 +++++++++++++++++++++++- 2 files changed, 52 insertions(+), 7 deletions(-) diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index 5b0e678c0f59..e4b16c0b5c87 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -303,13 +303,31 @@ async def get_user_by_access_token( else: user_id = UserID.from_string(user_id_str) - # Find device_id in scope - device_id = None - for tok in scope: - if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX): - device_id = tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :] + # Find device_ids in scope + # We only allow a single device_id in the scope, so we find them all in the + # scope list, and raise if there are more than one. The OIDC server should be + # the one enforcing valid scopes, so we raise a 500 if we find an invalid scope. + device_ids = [ + tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :] + for tok in scope + if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX) + ] + + if len(device_ids) > 1: + raise AuthError( + 500, + "Multiple device IDs in scope", + ) + + device_id = device_ids[0] if device_ids else None + if device_id is not None: + # Sanity check the device_id + if len(device_id) > 255 or len(device_id) < 1: + raise AuthError( + 500, + "Invalid device ID in scope", + ) - if device_id: # Create the device on the fly if it does not exist try: await self.store.get_device( diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index 16ce2c069d60..064153551263 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -27,6 +27,7 @@ from twisted.test.proto_helpers import MemoryReactor from synapse.api.errors import ( + AuthError, Codes, InvalidClientTokenError, OAuthInsufficientScopeError, @@ -68,8 +69,9 @@ SYNAPSE_ADMIN_SCOPE = "urn:synapse:admin:*" MATRIX_USER_SCOPE = "urn:matrix:org.matrix.msc2967.client:api:*" MATRIX_GUEST_SCOPE = "urn:matrix:org.matrix.msc2967.client:api:guest" +MATRIX_DEVICE_SCOPE_PREFIX = "urn:matrix:org.matrix.msc2967.client:device:" DEVICE = "AABBCCDD" -MATRIX_DEVICE_SCOPE = "urn:matrix:org.matrix.msc2967.client:device:" + DEVICE +MATRIX_DEVICE_SCOPE = MATRIX_DEVICE_SCOPE_PREFIX + DEVICE SUBJECT = "abc-def-ghi" USERNAME = "test-user" USER_ID = "@" + USERNAME + ":" + SERVER_NAME @@ -344,6 +346,31 @@ def test_active_user_with_device(self) -> None: ) self.assertEqual(requester.device_id, DEVICE) + def test_multiple_devices(self) -> None: + """The handler should raise an error if multiple devices are found in the scope.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join( + [ + MATRIX_USER_SCOPE, + f"{MATRIX_DEVICE_SCOPE_PREFIX}AABBCC", + f"{MATRIX_DEVICE_SCOPE_PREFIX}DDEEFF", + ] + ), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + self.get_failure(self.auth.get_user_by_req(request), AuthError) + def test_active_guest_not_allowed(self) -> None: """The handler should return an insufficient scope error.""" From 32a2f050042531ad4673b42789e833e9cd307740 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 26 May 2023 14:50:19 +0200 Subject: [PATCH 24/75] Make the config tests spawn the homeserver only 
when needed --- synapse/config/experimental.py | 40 ++- tests/config/test_oauth_delegation.py | 348 ++++++++++++-------------- 2 files changed, 182 insertions(+), 206 deletions(-) diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index d4dff22b0bae..1d189b2e26cf 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -69,7 +69,8 @@ def _check_enabled(self, attribute: attr.Attribute, value: bool) -> None: if value and not HAS_AUTHLIB: raise ConfigError( "MSC3861 is enabled but authlib is not installed. " - "Please install authlib to use MSC3861." + "Please install authlib to use MSC3861.", + ("experimental", "msc3861", "enabled"), ) issuer: str = attr.ib(default="", validator=attr.validators.instance_of(str)) @@ -114,7 +115,8 @@ def _check_client_auth_method( if value == ClientAuthMethod.PRIVATE_KEY_JWT and self.jwk is None: raise ConfigError( - "A JWKS must be provided when using the private_key_jwt client auth method" + "A JWKS must be provided when using the private_key_jwt client auth method", + ("experimental", "msc3861", "client_auth_method"), ) if ( @@ -127,7 +129,8 @@ def _check_client_auth_method( and self.client_secret is None ): raise ConfigError( - f"A client secret must be provided when using the {value} client auth method" + f"A client secret must be provided when using the {value} client auth method", + ("experimental", "msc3861", "client_auth_method"), ) account_management_url: Optional[str] = attr.ib( @@ -160,12 +163,14 @@ def check_config_conflicts(self, root: RootConfig) -> None: or root.auth.password_enabled_for_login ): raise ConfigError( - "Password auth cannot be enabled when OAuth delegation is enabled" + "Password auth cannot be enabled when OAuth delegation is enabled", + ("password_config", "enabled"), ) if root.registration.enable_registration: raise ConfigError( - "Registration cannot be enabled when OAuth delegation is enabled" + "Registration cannot be enabled when OAuth delegation is enabled", + ("enable_registration",), ) if ( @@ -183,32 +188,38 @@ def check_config_conflicts(self, root: RootConfig) -> None: if root.captcha.enable_registration_captcha: raise ConfigError( - "CAPTCHA cannot be enabled when OAuth delegation is enabled" + "CAPTCHA cannot be enabled when OAuth delegation is enabled", + ("captcha", "enable_registration_captcha"), ) if root.experimental.msc3882_enabled: raise ConfigError( - "MSC3882 cannot be enabled when OAuth delegation is enabled" + "MSC3882 cannot be enabled when OAuth delegation is enabled", + ("experimental_features", "msc3882_enabled"), ) if root.registration.refresh_token_lifetime: raise ConfigError( - "refresh_token_lifetime cannot be set when OAuth delegation is enabled" + "refresh_token_lifetime cannot be set when OAuth delegation is enabled", + ("refresh_token_lifetime",), ) if root.registration.nonrefreshable_access_token_lifetime: raise ConfigError( - "nonrefreshable_access_token_lifetime cannot be set when OAuth delegation is enabled" + "nonrefreshable_access_token_lifetime cannot be set when OAuth delegation is enabled", + ("nonrefreshable_access_token_lifetime",), ) if root.registration.session_lifetime: raise ConfigError( - "session_lifetime cannot be set when OAuth delegation is enabled" + "session_lifetime cannot be set when OAuth delegation is enabled", + ("session_lifetime",), ) if not root.experimental.msc3970_enabled: raise ConfigError( - "experimental_features.msc3970_enabled must be 'true' when OAuth delegation is enabled" + 
"experimental_features.msc3970_enabled must be 'true' when OAuth delegation is enabled", + ("experimental_features", "msc3970_enabled"), ) @@ -373,7 +384,12 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: ) # MSC3861: Matrix architecture change to delegate authentication via OIDC - self.msc3861 = MSC3861(**experimental.get("msc3861", {})) + try: + self.msc3861 = MSC3861(**experimental.get("msc3861", {})) + except ValueError as exc: + raise ConfigError( + "Invalid MSC3861 configuration", ("experimental", "msc3861") + ) from exc # MSC3970: Scope transaction IDs to devices self.msc3970_enabled = experimental.get("msc3970_enabled", self.msc3861.enabled) diff --git a/tests/config/test_oauth_delegation.py b/tests/config/test_oauth_delegation.py index 6d294e0144f6..2ead721b00fc 100644 --- a/tests/config/test_oauth_delegation.py +++ b/tests/config/test_oauth_delegation.py @@ -12,15 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Dict from unittest.mock import Mock from synapse.config import ConfigError +from synapse.config.homeserver import HomeServerConfig from synapse.module_api import ModuleApi from synapse.types import JsonDict -from tests.server import get_clock -from tests.unittest import HomeserverTestCase, override_config, skip_unless +from tests.server import get_clock, setup_test_homeserver +from tests.unittest import TestCase, skip_unless +from tests.utils import default_config try: import authlib # noqa: F401 @@ -51,45 +52,15 @@ def __init__(self, config: None, api: ModuleApi): ) -def _dict_merge(merge_dict: dict, into_dict: dict) -> None: - """Do a deep merge of two dicts - - Recursively merges `merge_dict` into `into_dict`: - * For keys where both `merge_dict` and `into_dict` have a dict value, the values - are recursively merged - * For all other keys, the values in `into_dict` (if any) are overwritten with - the value from `merge_dict`. 
- - Args: - merge_dict: dict to merge - into_dict: target dict to be modified - """ - for k, v in merge_dict.items(): - if k not in into_dict: - into_dict[k] = v - continue - - current_val = into_dict[k] - - if isinstance(v, dict) and isinstance(current_val, dict): - _dict_merge(v, current_val) - continue - - # otherwise we just overwrite - into_dict[k] = v - - @skip_unless(HAS_AUTHLIB, "requires authlib") -class MSC3861OAuthDelegation(HomeserverTestCase): +class MSC3861OAuthDelegation(TestCase): """Test that the Homeserver fails to initialize if the config is invalid.""" def setUp(self) -> None: - self.reactor, self.clock = get_clock() - self._hs_args = {"clock": self.clock, "reactor": self.reactor} - - def default_config(self) -> Dict[str, Any]: - default_extra_config = { + self.config_dict: JsonDict = { + **default_config("test"), "public_baseurl": BASE_URL, + "enable_registration": False, "experimental_features": { "msc3861": { "enabled": True, @@ -100,198 +71,187 @@ def default_config(self) -> Dict[str, Any]: } }, } - _dict_merge( - {} if self._extra_config is None else self._extra_config, - default_extra_config, - ) - self._extra_config = default_extra_config - return super().default_config() - @override_config( - { - "enable_registration": False, - } - ) + def parse_config(self) -> HomeServerConfig: + config = HomeServerConfig() + config.parse_config_dict(self.config_dict, "", "") + return config + def test_client_secret_post_works(self) -> None: - self.setup_test_homeserver() + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="client_secret_post", + client_secret=CLIENT_SECRET, + ) + + self.parse_config() + + def test_client_secret_post_requires_client_secret(self) -> None: + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="client_secret_post", + client_secret=None, + ) + + with self.assertRaises(ConfigError): + self.parse_config() + + def test_client_secret_basic_works(self) -> None: + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="client_secret_basic", + client_secret=CLIENT_SECRET, + ) + + self.parse_config() + + def test_client_secret_basic_requires_client_secret(self) -> None: + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="client_secret_basic", + client_secret=None, + ) + + with self.assertRaises(ConfigError): + self.parse_config() + + def test_client_secret_jwt_works(self) -> None: + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="client_secret_jwt", + client_secret=CLIENT_SECRET, + ) + + self.parse_config() + + def test_client_secret_jwt_requires_client_secret(self) -> None: + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="client_secret_jwt", + client_secret=None, + ) + + with self.assertRaises(ConfigError): + self.parse_config() - @override_config( - { - "enable_registration": False, - "experimental_features": { - "msc3861": { - "client_auth_method": "invalid", - } - }, - } - ) def test_invalid_client_auth_method(self) -> None: - with self.assertRaises(ValueError): - self.setup_test_homeserver() + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="invalid", + ) - @override_config( - { - "enable_registration": False, - "experimental_features": { - "msc3861": { - "client_auth_method": "private_key_jwt", - } - }, - } - ) - def test_invalid_private_key_jwt(self) -> None: with self.assertRaises(ConfigError): - self.setup_test_homeserver() 
+ self.parse_config() + + def test_private_key_jwt_requires_jwk(self) -> None: + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="private_key_jwt", + ) + + with self.assertRaises(ConfigError): + self.parse_config() - @override_config( - { - "enable_registration": False, - "experimental_features": { - "msc3861": { - "client_auth_method": "private_key_jwt", - "jwk": { - "p": "-frVdP_tZ-J_nIR6HNMDq1N7aunwm51nAqNnhqIyuA8ikx7LlQED1tt2LD3YEvYyW8nxE2V95HlCRZXQPMiRJBFOsbmYkzl2t-MpavTaObB_fct_JqcRtdXddg4-_ihdjRDwUOreq_dpWh6MIKsC3UyekfkHmeEJg5YpOTL15j8", - "kty": "RSA", - "q": "oFw-Enr_YozQB1ab-kawn4jY3yHi8B1nSmYT0s8oTCflrmps5BFJfCkHL5ij3iY15z0o2m0N-jjB1oSJ98O4RayEEYNQlHnTNTl0kRIWzpoqblHUIxVcahIpP_xTovBJzwi8XXoLGqHOOMA-r40LSyVgP2Ut8D9qBwV6_UfT0LU", - "d": "WFkDPYo4b4LIS64D_QtQfGGuAObPvc3HFfp9VZXyq3SJR58XZRHE0jqtlEMNHhOTgbMYS3w8nxPQ_qVzY-5hs4fIanwvB64mAoOGl0qMHO65DTD_WsGFwzYClJPBVniavkLE2Hmpu8IGe6lGliN8vREC6_4t69liY-XcN_ECboVtC2behKkLOEASOIMuS7YcKAhTJFJwkl1dqDlliEn5A4u4xy7nuWQz3juB1OFdKlwGA5dfhDNglhoLIwNnkLsUPPFO-WB5ZNEW35xxHOToxj4bShvDuanVA6mJPtTKjz0XibjB36bj_nF_j7EtbE2PdGJ2KevAVgElR4lqS4ISgQ", - "e": "AQAB", - "kid": "test", - "qi": "cPfNk8l8W5exVNNea4d7QZZ8Qr8LgHghypYAxz8PQh1fNa8Ya1SNUDVzC2iHHhszxxA0vB9C7jGze8dBrvnzWYF1XvQcqNIVVgHhD57R1Nm3dj2NoHIKe0Cu4bCUtP8xnZQUN4KX7y4IIcgRcBWG1hT6DEYZ4BxqicnBXXNXAUI", - "dp": "dKlMHvslV1sMBQaKWpNb3gPq0B13TZhqr3-E2_8sPlvJ3fD8P4CmwwnOn50JDuhY3h9jY5L06sBwXjspYISVv8hX-ndMLkEeF3lrJeA5S70D8rgakfZcPIkffm3tlf1Ok3v5OzoxSv3-67Df4osMniyYwDUBCB5Oq1tTx77xpU8", - "dq": "S4ooU1xNYYcjl9FcuJEEMqKsRrAXzzSKq6laPTwIp5dDwt2vXeAm1a4eDHXC-6rUSZGt5PbqVqzV4s-cjnJMI8YYkIdjNg4NSE1Ac_YpeDl3M3Colb5CQlU7yUB7xY2bt0NOOFp9UJZYJrOo09mFMGjy5eorsbitoZEbVqS3SuE", - "n": "nJbYKqFwnURKimaviyDFrNLD3gaKR1JW343Qem25VeZxoMq1665RHVoO8n1oBm4ClZdjIiZiVdpyqzD5-Ow12YQgQEf1ZHP3CCcOQQhU57Rh5XvScTe5IxYVkEW32IW2mp_CJ6WfjYpfeL4azarVk8H3Vr59d1rSrKTVVinVdZer9YLQyC_rWAQNtHafPBMrf6RYiNGV9EiYn72wFIXlLlBYQ9Fx7bfe1PaL6qrQSsZP3_rSpuvVdLh1lqGeCLR0pyclA9uo5m2tMyCXuuGQLbA_QJm5xEc7zd-WFdux2eXF045oxnSZ_kgQt-pdN7AxGWOVvwoTf9am6mSkEdv6iw", - }, - } - }, - } - ) def test_private_key_jwt_works(self) -> None: - self.setup_test_homeserver() + self.config_dict["experimental_features"]["msc3861"].update( + client_auth_method="private_key_jwt", + jwk={ + "p": "-frVdP_tZ-J_nIR6HNMDq1N7aunwm51nAqNnhqIyuA8ikx7LlQED1tt2LD3YEvYyW8nxE2V95HlCRZXQPMiRJBFOsbmYkzl2t-MpavTaObB_fct_JqcRtdXddg4-_ihdjRDwUOreq_dpWh6MIKsC3UyekfkHmeEJg5YpOTL15j8", + "kty": "RSA", + "q": "oFw-Enr_YozQB1ab-kawn4jY3yHi8B1nSmYT0s8oTCflrmps5BFJfCkHL5ij3iY15z0o2m0N-jjB1oSJ98O4RayEEYNQlHnTNTl0kRIWzpoqblHUIxVcahIpP_xTovBJzwi8XXoLGqHOOMA-r40LSyVgP2Ut8D9qBwV6_UfT0LU", + "d": "WFkDPYo4b4LIS64D_QtQfGGuAObPvc3HFfp9VZXyq3SJR58XZRHE0jqtlEMNHhOTgbMYS3w8nxPQ_qVzY-5hs4fIanwvB64mAoOGl0qMHO65DTD_WsGFwzYClJPBVniavkLE2Hmpu8IGe6lGliN8vREC6_4t69liY-XcN_ECboVtC2behKkLOEASOIMuS7YcKAhTJFJwkl1dqDlliEn5A4u4xy7nuWQz3juB1OFdKlwGA5dfhDNglhoLIwNnkLsUPPFO-WB5ZNEW35xxHOToxj4bShvDuanVA6mJPtTKjz0XibjB36bj_nF_j7EtbE2PdGJ2KevAVgElR4lqS4ISgQ", + "e": "AQAB", + "kid": "test", + "qi": "cPfNk8l8W5exVNNea4d7QZZ8Qr8LgHghypYAxz8PQh1fNa8Ya1SNUDVzC2iHHhszxxA0vB9C7jGze8dBrvnzWYF1XvQcqNIVVgHhD57R1Nm3dj2NoHIKe0Cu4bCUtP8xnZQUN4KX7y4IIcgRcBWG1hT6DEYZ4BxqicnBXXNXAUI", + "dp": "dKlMHvslV1sMBQaKWpNb3gPq0B13TZhqr3-E2_8sPlvJ3fD8P4CmwwnOn50JDuhY3h9jY5L06sBwXjspYISVv8hX-ndMLkEeF3lrJeA5S70D8rgakfZcPIkffm3tlf1Ok3v5OzoxSv3-67Df4osMniyYwDUBCB5Oq1tTx77xpU8", + "dq": 
"S4ooU1xNYYcjl9FcuJEEMqKsRrAXzzSKq6laPTwIp5dDwt2vXeAm1a4eDHXC-6rUSZGt5PbqVqzV4s-cjnJMI8YYkIdjNg4NSE1Ac_YpeDl3M3Colb5CQlU7yUB7xY2bt0NOOFp9UJZYJrOo09mFMGjy5eorsbitoZEbVqS3SuE", + "n": "nJbYKqFwnURKimaviyDFrNLD3gaKR1JW343Qem25VeZxoMq1665RHVoO8n1oBm4ClZdjIiZiVdpyqzD5-Ow12YQgQEf1ZHP3CCcOQQhU57Rh5XvScTe5IxYVkEW32IW2mp_CJ6WfjYpfeL4azarVk8H3Vr59d1rSrKTVVinVdZer9YLQyC_rWAQNtHafPBMrf6RYiNGV9EiYn72wFIXlLlBYQ9Fx7bfe1PaL6qrQSsZP3_rSpuvVdLh1lqGeCLR0pyclA9uo5m2tMyCXuuGQLbA_QJm5xEc7zd-WFdux2eXF045oxnSZ_kgQt-pdN7AxGWOVvwoTf9am6mSkEdv6iw", + }, + ) + self.parse_config() def test_registration_cannot_be_enabled(self) -> None: + self.config_dict["enable_registration"] = True with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "password_config": { - "enabled": True, - }, - } - ) def test_password_config_cannot_be_enabled(self) -> None: + self.config_dict["password_config"] = {"enabled": True} with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "oidc_providers": [ - { - "idp_id": "microsoft", - "idp_name": "Microsoft", - "issuer": "https://login.microsoftonline.com//v2.0", - "client_id": "", - "client_secret": "", - "scopes": ["openid", "profile"], - "authorization_endpoint": "https://login.microsoftonline.com//oauth2/v2.0/authorize", - "token_endpoint": "https://login.microsoftonline.com//oauth2/v2.0/token", - "userinfo_endpoint": "https://graph.microsoft.com/oidc/userinfo", - } - ], - } - ) def test_oidc_sso_cannot_be_enabled(self) -> None: + self.config_dict["oidc_providers"] = [ + { + "idp_id": "microsoft", + "idp_name": "Microsoft", + "issuer": "https://login.microsoftonline.com//v2.0", + "client_id": "", + "client_secret": "", + "scopes": ["openid", "profile"], + "authorization_endpoint": "https://login.microsoftonline.com//oauth2/v2.0/authorize", + "token_endpoint": "https://login.microsoftonline.com//oauth2/v2.0/token", + "userinfo_endpoint": "https://graph.microsoft.com/oidc/userinfo", + } + ] + with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "cas_config": { - "enabled": True, - "server_url": "https://cas-server.com", - "displayname_attribute": "name", - "required_attributes": {"userGroup": "staff", "department": "None"}, - }, - } - ) def test_cas_sso_cannot_be_enabled(self) -> None: + self.config_dict["cas_config"] = { + "enabled": True, + "server_url": "https://cas-server.com", + "displayname_attribute": "name", + "required_attributes": {"userGroup": "staff", "department": "None"}, + } + with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "modules": [ - { - "module": f"{__name__}.{CustomAuthModule.__qualname__}", - "config": {}, - } - ], - } - ) def test_auth_providers_cannot_be_enabled(self) -> None: + self.config_dict["modules"] = [ + { + "module": f"{__name__}.{CustomAuthModule.__qualname__}", + "config": {}, + } + ] + + # This requires actually setting up an HS, as the module will be run on setup, + # which should raise as the module tries to register an auth provider + config = self.parse_config() + reactor, clock = get_clock() with self.assertRaises(ConfigError): - self.setup_test_homeserver() + setup_test_homeserver( + self.addCleanup, reactor=reactor, clock=clock, config=config + ) - @override_config( - { - 
"enable_registration": False, - "jwt_config": { - "enabled": True, - "secret": "my-secret-token", - "algorithm": "HS256", - }, - } - ) def test_jwt_auth_cannot_be_enabled(self) -> None: + self.config_dict["jwt_config"] = { + "enabled": True, + "secret": "my-secret-token", + "algorithm": "HS256", + } + with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "experimental_features": { - "msc3882_enabled": True, - }, - } - ) def test_msc3882_auth_cannot_be_enabled(self) -> None: + self.config_dict["experimental_features"]["msc3882_enabled"] = True with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "recaptcha_public_key": "test", - "recaptcha_private_key": "test", - "enable_registration_captcha": True, - } - ) def test_captcha_cannot_be_enabled(self) -> None: + self.config_dict.update( + enable_registration_captcha=True, + recaptcha_public_key="test", + recaptcha_private_key="test", + ) with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "refresh_token_lifetime": "24h", - "refreshable_access_token_lifetime": "10m", - "nonrefreshable_access_token_lifetime": "24h", - } - ) def test_refreshable_tokens_cannot_be_enabled(self) -> None: + self.config_dict.update( + refresh_token_lifetime="24h", + refreshable_access_token_lifetime="10m", + nonrefreshable_access_token_lifetime="24h", + ) with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() - @override_config( - { - "enable_registration": False, - "session_lifetime": "24h", - } - ) def test_session_lifetime_cannot_be_set(self) -> None: + self.config_dict["session_lifetime"] = "24h" with self.assertRaises(ConfigError): - self.setup_test_homeserver() + self.parse_config() From ceb3dd77db0d3ce992d40175c3f53f6b6ddfa168 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 26 May 2023 15:16:34 +0200 Subject: [PATCH 25/75] Enforce that an admin token also has the basic Matrix API scope --- synapse/api/auth/msc3861_delegated.py | 7 ++----- tests/handlers/test_oauth_delegation.py | 26 ++++++++++++++++++++++++- 2 files changed, 27 insertions(+), 6 deletions(-) diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index e4b16c0b5c87..31c1de0119a2 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -248,13 +248,10 @@ async def get_user_by_access_token( scope: List[str] = scope_to_list(introspection_result.get("scope", "")) # Determine type of user based on presence of particular scopes - has_admin_scope = SCOPE_SYNAPSE_ADMIN in scope has_user_scope = SCOPE_MATRIX_API in scope has_guest_scope = SCOPE_MATRIX_GUEST in scope - is_user = has_user_scope or has_admin_scope - is_guest = has_guest_scope and not is_user - if not is_user and not is_guest: + if not has_user_scope and not has_guest_scope: raise InvalidClientTokenError("No scope in token granting user rights") # Match via the sub claim @@ -351,5 +348,5 @@ async def get_user_by_access_token( user_id=user_id, device_id=device_id, scope=scope, - is_guest=is_guest, + is_guest=(has_guest_scope and not has_user_scope), ) diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index 064153551263..6309d7b36e8a 100644 --- a/tests/handlers/test_oauth_delegation.py +++ 
b/tests/handlers/test_oauth_delegation.py @@ -224,6 +224,30 @@ def test_active_no_user_scope(self) -> None: ) self._assertParams() + def test_active_admin_not_user(self) -> None: + """The handler should raise when the scope has admin right but not user.""" + + self.http_client.request = simple_async_mock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([SYNAPSE_ADMIN_SCOPE]), + "username": USERNAME, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + self.get_failure(self.auth.get_user_by_req(request), InvalidClientTokenError) + self.http_client.get_json.assert_called_once_with(WELL_KNOWN) + self.http_client.request.assert_called_once_with( + method="POST", uri=INTROSPECTION_ENDPOINT, data=ANY, headers=ANY + ) + self._assertParams() + def test_active_admin(self) -> None: """The handler should return a requester with admin rights.""" @@ -233,7 +257,7 @@ def test_active_admin(self) -> None: payload={ "active": True, "sub": SUBJECT, - "scope": " ".join([SYNAPSE_ADMIN_SCOPE]), + "scope": " ".join([SYNAPSE_ADMIN_SCOPE, MATRIX_USER_SCOPE]), "username": USERNAME, }, ) From c01343de43b86eb4a6c055547369d07c198a435f Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 31 May 2023 07:18:29 -0400 Subject: [PATCH 26/75] Add stricter mypy options (#15694) Enable warn_unused_configs, strict_concatenate, disallow_subclassing_any, and disallow_incomplete_defs. --- changelog.d/15694.misc | 1 + mypy.ini | 23 ++++++++++++++++++++--- synapse/api/auth/msc3861_delegated.py | 2 +- synapse/federation/federation_server.py | 4 ++-- synapse/handlers/oidc.py | 2 +- synapse/handlers/pagination.py | 4 ++-- synapse/http/server.py | 14 +++++++------- synapse/util/__init__.py | 4 ++-- synapse/util/async_helpers.py | 2 +- synapse/util/caches/lrucache.py | 6 ++---- tests/server.py | 2 +- 11 files changed, 40 insertions(+), 24 deletions(-) create mode 100644 changelog.d/15694.misc diff --git a/changelog.d/15694.misc b/changelog.d/15694.misc new file mode 100644 index 000000000000..93ceaeafc9b9 --- /dev/null +++ b/changelog.d/15694.misc @@ -0,0 +1 @@ +Improve type hints. diff --git a/mypy.ini b/mypy.ini index a7ec66196d41..56cd1d560ea8 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2,17 +2,29 @@ namespace_packages = True plugins = pydantic.mypy, mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py follow_imports = normal -check_untyped_defs = True show_error_codes = True show_traceback = True mypy_path = stubs warn_unreachable = True -warn_unused_ignores = True local_partial_types = True no_implicit_optional = True + +# Strict checks, see mypy --help +warn_unused_configs = True +# disallow_any_generics = True +disallow_subclassing_any = True +# disallow_untyped_calls = True disallow_untyped_defs = True -strict_equality = True +disallow_incomplete_defs = True +# check_untyped_defs = True +# disallow_untyped_decorators = True warn_redundant_casts = True +warn_unused_ignores = True +# warn_return_any = True +# no_implicit_reexport = True +strict_equality = True +strict_concatenate = True + # Run mypy type checking with the minimum supported Python version to catch new usage # that isn't backwards-compatible (types, overloads, etc). 
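# Aside: a toy illustration (not code from the tree) of what the newly
# enabled `disallow_incomplete_defs` flag catches. `disallow_untyped_defs`
# already rejects wholly unannotated functions; this flag also rejects
# partially annotated ones.

# Flagged: `b` and the return type are missing their annotations.
def scale_before(a: int, b):
    return a * b

# Accepted: fully annotated.
def scale_after(a: int, b: int) -> int:
    return a * b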
python_version = 3.8 @@ -31,6 +43,7 @@ warn_unused_ignores = False [mypy-synapse.util.caches.treecache] disallow_untyped_defs = False +disallow_incomplete_defs = False ;; Dependencies without annotations ;; Before ignoring a module, check to see if type stubs are available. @@ -40,6 +53,7 @@ disallow_untyped_defs = False ;; which we can pull in as a dev dependency by adding to `pyproject.toml`'s ;; `[tool.poetry.dev-dependencies]` list. +# https://github.com/lepture/authlib/issues/460 [mypy-authlib.*] ignore_missing_imports = True @@ -49,9 +63,11 @@ ignore_missing_imports = True [mypy-lxml] ignore_missing_imports = True +# https://github.com/msgpack/msgpack-python/issues/448 [mypy-msgpack] ignore_missing_imports = True +# https://github.com/wolever/parameterized/issues/143 [mypy-parameterized.*] ignore_missing_imports = True @@ -73,6 +89,7 @@ ignore_missing_imports = True [mypy-srvlookup.*] ignore_missing_imports = True +# https://github.com/twisted/treq/pull/366 [mypy-treq.*] ignore_missing_imports = True diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index 31c1de0119a2..bd4fc9c0ee3d 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -59,7 +59,7 @@ def scope_to_list(scope: str) -> List[str]: return scope.strip().split(" ") -class PrivateKeyJWTWithKid(PrivateKeyJWT): +class PrivateKeyJWTWithKid(PrivateKeyJWT): # type: ignore[misc] """An implementation of the private_key_jwt client auth method that includes a kid header. This is needed because some providers (Keycloak) require the kid header to figure diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index e17cb840de99..149351dda025 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -515,7 +515,7 @@ async def process_pdu(pdu: EventBase) -> JsonDict: logger.error( "Failed to handle PDU %s", event_id, - exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore + exc_info=(f.type, f.value, f.getTracebackObject()), ) return {"error": str(e)} @@ -1247,7 +1247,7 @@ async def _process_incoming_pdus_in_room_inner( logger.error( "Failed to handle PDU %s", event.event_id, - exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore + exc_info=(f.type, f.value, f.getTracebackObject()), ) received_ts = await self.store.remove_received_event_from_staging( diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py index e7e0b5e049b3..24b68e03012d 100644 --- a/synapse/handlers/oidc.py +++ b/synapse/handlers/oidc.py @@ -1354,7 +1354,7 @@ async def handle_backchannel_logout( finish_request(request) -class LogoutToken(JWTClaims): +class LogoutToken(JWTClaims): # type: ignore[misc] """ Holds and verify claims of a logout token, as per https://openid.net/specs/openid-connect-backchannel-1_0.html#LogoutToken diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index 63b35c8d621f..d5257acb7da3 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -360,7 +360,7 @@ async def _purge_history( except Exception: f = Failure() logger.error( - "[purge] failed", exc_info=(f.type, f.value, f.getTracebackObject()) # type: ignore + "[purge] failed", exc_info=(f.type, f.value, f.getTracebackObject()) ) self._purges_by_id[purge_id].status = PurgeStatus.STATUS_FAILED self._purges_by_id[purge_id].error = f.getErrorMessage() @@ -689,7 +689,7 @@ async def _shutdown_and_purge_room( f = Failure() logger.error( "failed", - 
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore + exc_info=(f.type, f.value, f.getTracebackObject()), ) self._delete_by_id[delete_id].status = DeleteStatus.STATUS_FAILED self._delete_by_id[delete_id].error = f.getErrorMessage() diff --git a/synapse/http/server.py b/synapse/http/server.py index 04768c6a237f..933172c87327 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -108,7 +108,7 @@ def return_json_error( if f.check(SynapseError): # mypy doesn't understand that f.check asserts the type. - exc: SynapseError = f.value # type: ignore + exc: SynapseError = f.value error_code = exc.code error_dict = exc.error_dict(config) if exc.headers is not None: @@ -124,7 +124,7 @@ def return_json_error( "Got cancellation before client disconnection from %r: %r", request.request_metrics.name, request, - exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] + exc_info=(f.type, f.value, f.getTracebackObject()), ) else: error_code = 500 @@ -134,7 +134,7 @@ def return_json_error( "Failed handle request via %r: %r", request.request_metrics.name, request, - exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] + exc_info=(f.type, f.value, f.getTracebackObject()), ) # Only respond with an error response if we haven't already started writing, @@ -172,7 +172,7 @@ def return_html_error( """ if f.check(CodeMessageException): # mypy doesn't understand that f.check asserts the type. - cme: CodeMessageException = f.value # type: ignore + cme: CodeMessageException = f.value code = cme.code msg = cme.msg if cme.headers is not None: @@ -189,7 +189,7 @@ def return_html_error( logger.error( "Failed handle request %r", request, - exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] + exc_info=(f.type, f.value, f.getTracebackObject()), ) elif f.check(CancelledError): code = HTTP_STATUS_REQUEST_CANCELLED @@ -199,7 +199,7 @@ def return_html_error( logger.error( "Got cancellation before client disconnection when handling request %r", request, - exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] + exc_info=(f.type, f.value, f.getTracebackObject()), ) else: code = HTTPStatus.INTERNAL_SERVER_ERROR @@ -208,7 +208,7 @@ def return_html_error( logger.error( "Failed handle request %r", request, - exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] + exc_info=(f.type, f.value, f.getTracebackObject()), ) if isinstance(error_template, str): diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index 9ddd26ccaa2d..7ea0c4c36bcc 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -76,7 +76,7 @@ def unwrapFirstError(failure: Failure) -> Failure: # the subFailure's value, which will do a better job of preserving stacktraces. 
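# Aside: the `# type: ignore` comments removed around these logging calls
# all guard the same Twisted idiom; a standalone sketch (toy logger call,
# not Synapse code) of why the triple works:

import logging

from twisted.python.failure import Failure

logger = logging.getLogger(__name__)

try:
    1 / 0
except ZeroDivisionError:
    # Failure() with no arguments captures the exception currently being
    # handled; (f.type, f.value, f.getTracebackObject()) then mirrors the
    # sys.exc_info() triple that stdlib logging expects for exc_info=.
    f = Failure()
    logger.error("demo failed", exc_info=(f.type, f.value, f.getTracebackObject()))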
# (actually, you probably want to use yieldable_gather_results anyway) failure.trap(defer.FirstError) - return failure.value.subFailure # type: ignore[union-attr] # Issue in Twisted's annotations + return failure.value.subFailure P = ParamSpec("P") @@ -178,7 +178,7 @@ def log_failure( """ logger.error( - msg, exc_info=(failure.type, failure.value, failure.getTracebackObject()) # type: ignore[arg-type] + msg, exc_info=(failure.type, failure.value, failure.getTracebackObject()) ) if not consumeErrors: diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index 01e3cd46f650..4041e49e71dc 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -138,7 +138,7 @@ def errback(f: Failure) -> Optional[Failure]: for observer in observers: # This is a little bit of magic to correctly propagate stack # traces when we `await` on one of the observer deferreds. - f.value.__failure__ = f # type: ignore[union-attr] + f.value.__failure__ = f try: observer.errback(f) except Exception as e: diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index 452d5d04c1c0..ed0da17227d3 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -93,10 +93,8 @@ def _get_size_of(val: Any, *, recurse: bool = True) -> int: # a general type var, distinct from either KT or VT T = TypeVar("T") -P = TypeVar("P") - -class _TimedListNode(ListNode[P]): +class _TimedListNode(ListNode[T]): """A `ListNode` that tracks last access time.""" __slots__ = ["last_access_ts_secs"] @@ -821,7 +819,7 @@ class AsyncLruCache(Generic[KT, VT]): utilize external cache systems that require await behaviour to be created. """ - def __init__(self, *args, **kwargs): # type: ignore + def __init__(self, *args: Any, **kwargs: Any): self._lru_cache: LruCache[KT, VT] = LruCache(*args, **kwargs) async def get( diff --git a/tests/server.py b/tests/server.py index 7296f0a55281..a12c3e3b9a09 100644 --- a/tests/server.py +++ b/tests/server.py @@ -642,7 +642,7 @@ def runInteraction( pool.runWithConnection = runWithConnection # type: ignore[assignment] pool.runInteraction = runInteraction # type: ignore[assignment] # Replace the thread pool with a threadless 'thread' pool - pool.threadpool = ThreadPool(clock._reactor) # type: ignore[assignment] + pool.threadpool = ThreadPool(clock._reactor) pool.running = True # We've just changed the Databases to run DB transactions on the same From daf3a679089770e00d1b70d8ed2f91ab108b73e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gabriel=20F=C3=A9ron?= Date: Wed, 31 May 2023 15:18:37 +0200 Subject: [PATCH 27/75] Add get_canonical_room_alias to module API (#15450) Co-authored-by: Boxdot --- changelog.d/15450.feature | 1 + synapse/module_api/__init__.py | 27 +++++++++++++++++++++++++++ synapse/storage/controllers/state.py | 2 +- 3 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15450.feature diff --git a/changelog.d/15450.feature b/changelog.d/15450.feature new file mode 100644 index 000000000000..2102381143ba --- /dev/null +++ b/changelog.d/15450.feature @@ -0,0 +1 @@ +Support resolving a room's [canonical alias](https://spec.matrix.org/v1.7/client-server-api/#mroomcanonical_alias) via the module API. 
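As a usage sketch, a hypothetical third-party module could resolve a room's preferred name via the new helper like this (the `describe_room` function and its wiring are illustrative only, not part of the patch):

    from typing import Optional

    from synapse.module_api import ModuleApi
    from synapse.types import RoomAlias, RoomID

    async def describe_room(api: ModuleApi, room_id_str: str) -> str:
        room_id = RoomID.from_string(room_id_str)
        # Returns None when the room does not exist or declares no
        # canonical alias; fall back to the opaque room ID in that case.
        alias: Optional[RoomAlias] = await api.get_canonical_room_alias(room_id)
        return alias.to_string() if alias is not None else room_id_str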
\ No newline at end of file diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 134bd2e62021..a8d6224a4528 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -122,6 +122,7 @@ JsonMapping, Requester, RoomAlias, + RoomID, StateMap, UserID, UserInfo, @@ -1570,6 +1571,32 @@ async def get_monthly_active_users_by_service( start_timestamp, end_timestamp ) + async def get_canonical_room_alias(self, room_id: RoomID) -> Optional[RoomAlias]: + """ + Retrieve the given room's current canonical alias. + + A room may declare an alias as "canonical", meaning that it is the + preferred alias to use when referring to the room. This function + retrieves that alias from the room's state. + + Added in Synapse v1.86.0. + + Args: + room_id: The Room ID to find the alias of. + + Returns: + None if the room ID does not exist, or if the room exists but has no canonical alias. + Otherwise, the parsed room alias. + """ + room_alias_str = ( + await self._storage_controllers.state.get_canonical_alias_for_room( + room_id.to_string() + ) + ) + if room_alias_str: + return RoomAlias.from_string(room_alias_str) + return None + async def lookup_room_alias(self, room_alias: str) -> Tuple[str, List[str]]: """ Get the room ID associated with a room alias. diff --git a/synapse/storage/controllers/state.py b/synapse/storage/controllers/state.py index 7089b0a1d85d..233df7cce24e 100644 --- a/synapse/storage/controllers/state.py +++ b/synapse/storage/controllers/state.py @@ -485,7 +485,7 @@ async def get_canonical_alias_for_room(self, room_id: str) -> Optional[str]: if not event: return None - return event.content.get("canonical_alias") + return event.content.get("alias") @trace @tag_args From 874378c0523bb82314434f1f0f2c5e1462a34a5b Mon Sep 17 00:00:00 2001 From: Jason Little Date: Wed, 31 May 2023 10:13:31 -0500 Subject: [PATCH 28/75] Docker fully qualified image names (#15689) * Fully qualified docker image names for the main Dockerfile and Complement related. * Fully qualified docker image names for Dockerfiles associated with building Debian release artifacts. This one is harder and is separate from the other commit in case it wasn't correct or was unwanted. I decided to do the expansion on the docker images in the Dockerfile itself, instead of the various source places that build which distribution that is selected, as it would have been more invasive with the scripts breaking up the string for tagging and such. This one is untested. 
* Changelog * Update docker/Dockerfile-workers * Update docker/complement/Dockerfile --------- Co-authored-by: reivilibre --- .github/workflows/release-artifacts.yml | 1 + changelog.d/15689.misc | 1 + docker/Dockerfile | 6 +++--- docker/Dockerfile-dhvirtualenv | 4 ++-- docker/Dockerfile-workers | 4 ++-- docker/complement/Dockerfile | 5 +++-- docker/editable.Dockerfile | 2 +- scripts-dev/build_debian_packages.py | 2 ++ 8 files changed, 15 insertions(+), 10 deletions(-) create mode 100644 changelog.d/15689.misc diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index ebd7d298a9e9..09812004017f 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -34,6 +34,7 @@ jobs: - id: set-distros run: | # if we're running from a tag, get the full list of distros; otherwise just use debian:sid + # NOTE: inside the actual Dockerfile-dhvirtualenv, the image name is expanded into its full image path dists='["debian:sid"]' if [[ $GITHUB_REF == refs/tags/* ]]; then dists=$(scripts-dev/build_debian_packages.py --show-dists-json) diff --git a/changelog.d/15689.misc b/changelog.d/15689.misc new file mode 100644 index 000000000000..4262cc951507 --- /dev/null +++ b/changelog.d/15689.misc @@ -0,0 +1 @@ +Add fully qualified docker image names to Dockerfiles. diff --git a/docker/Dockerfile b/docker/Dockerfile index 6107dced43f9..12cff84131b7 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -27,7 +27,7 @@ ARG PYTHON_VERSION=3.11 ### # We hardcode the use of Debian bullseye here because this could change upstream # and other Dockerfiles used for testing are expecting bullseye. -FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as requirements +FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye as requirements # RUN --mount is specific to buildkit and is documented at # https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount. @@ -87,7 +87,7 @@ RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \ ### ### Stage 1: builder ### -FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as builder +FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye as builder # install the OS build deps RUN \ @@ -158,7 +158,7 @@ RUN --mount=type=cache,target=/synapse/target,sharing=locked \ ### Stage 2: runtime ### -FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye +FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse' LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md' diff --git a/docker/Dockerfile-dhvirtualenv b/docker/Dockerfile-dhvirtualenv index 2013732422ce..861129ebc225 100644 --- a/docker/Dockerfile-dhvirtualenv +++ b/docker/Dockerfile-dhvirtualenv @@ -24,7 +24,7 @@ ARG distro="" # https://launchpad.net/~jyrki-pulliainen/+archive/ubuntu/dh-virtualenv, but # it's not obviously easier to use that than to build our own.) 
-FROM ${distro} as builder +FROM docker.io/library/${distro} as builder RUN apt-get update -qq -o Acquire::Languages=none RUN env DEBIAN_FRONTEND=noninteractive apt-get install \ @@ -55,7 +55,7 @@ RUN cd /dh-virtualenv && DEB_BUILD_OPTIONS=nodoc dpkg-buildpackage -us -uc -b ### ### Stage 1 ### -FROM ${distro} +FROM docker.io/library/${distro} # Get the distro we want to pull from as a dynamic build variable # (We need to define it in each build stage) diff --git a/docker/Dockerfile-workers b/docker/Dockerfile-workers index faf7f2cef8a5..adb9a725e33f 100644 --- a/docker/Dockerfile-workers +++ b/docker/Dockerfile-workers @@ -7,7 +7,7 @@ ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION # target image. For repeated rebuilds, this is much faster than apt installing # each time. -FROM debian:bullseye-slim AS deps_base +FROM docker.io/library/debian:bullseye-slim AS deps_base RUN \ --mount=type=cache,target=/var/cache/apt,sharing=locked \ --mount=type=cache,target=/var/lib/apt,sharing=locked \ @@ -21,7 +21,7 @@ FROM debian:bullseye-slim AS deps_base # which makes it much easier to copy (but we need to make sure we use an image # based on the same debian version as the synapse image, to make sure we get # the expected version of libc. -FROM redis:6-bullseye AS redis_base +FROM docker.io/library/redis:6-bullseye AS redis_base # now build the final image, based on the the regular Synapse docker image FROM $FROM diff --git a/docker/complement/Dockerfile b/docker/complement/Dockerfile index be1aa1c55e09..5103068a49fc 100644 --- a/docker/complement/Dockerfile +++ b/docker/complement/Dockerfile @@ -7,6 +7,7 @@ # https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse ARG SYNAPSE_VERSION=latest +# This is an intermediate image, to be built locally (not pulled from a registry). ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION FROM $FROM @@ -19,8 +20,8 @@ FROM $FROM # the same debian version as Synapse's docker image (so the versions of the # shared libraries match). RUN adduser --system --uid 999 postgres --home /var/lib/postgresql - COPY --from=postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql - COPY --from=postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql + COPY --from=docker.io/library/postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql + COPY --from=docker.io/library/postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql ENV PATH="${PATH}:/usr/lib/postgresql/13/bin" ENV PGDATA=/var/lib/postgresql/data diff --git a/docker/editable.Dockerfile b/docker/editable.Dockerfile index 0e8cf2e712ff..c53ce1c718e8 100644 --- a/docker/editable.Dockerfile +++ b/docker/editable.Dockerfile @@ -10,7 +10,7 @@ ARG PYTHON_VERSION=3.9 ### # We hardcode the use of Debian bullseye here because this could change upstream # and other Dockerfiles used for testing are expecting bullseye. 
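# Aside: the rewrite rule applied across these Dockerfiles, expressed as a
# toy Python helper (a sketch under the assumption that only registry-less,
# namespace-less names need expanding; not part of the patch):

def fully_qualify(image: str, registry: str = "docker.io") -> str:
    # Bare official images such as "debian:bullseye" implicitly live in
    # Docker Hub's "library" namespace; spelling that out makes the pull
    # source explicit.
    if "/" not in image:
        return f"{registry}/library/{image}"
    return image

assert fully_qualify("debian:bullseye") == "docker.io/library/debian:bullseye"
assert fully_qualify("matrixdotorg/synapse:latest") == "matrixdotorg/synapse:latest"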
-FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye +FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye # Install Rust and other dependencies (stolen from normal Dockerfile) # install the OS build deps diff --git a/scripts-dev/build_debian_packages.py b/scripts-dev/build_debian_packages.py index ede766501100..4c9f134ddd83 100755 --- a/scripts-dev/build_debian_packages.py +++ b/scripts-dev/build_debian_packages.py @@ -20,6 +20,8 @@ from types import FrameType from typing import Collection, Optional, Sequence, Set +# These are expanded inside the dockerfile to be a fully qualified image name. +# e.g. docker.io/library/debian:bullseye DISTS = ( "debian:buster", # oldstable: EOL 2022-08 "debian:bullseye", From 6f18812bb044a2959fdc9881c328578adb7b33f2 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 31 May 2023 13:06:57 -0400 Subject: [PATCH 29/75] Add stubs package for lxml. (#15697) The stubs have some issues so this has some generous cast and ignores in it, but it is better than not having stubs. Note that confusing that Element is a function which creates _Element instances (and similarly for Comment). --- changelog.d/15697.misc | 1 + mypy.ini | 3 -- poetry.lock | 25 +++++++-- pyproject.toml | 1 + synapse/media/oembed.py | 32 ++++++----- synapse/media/preview_html.py | 79 ++++++++++++++++++++-------- tests/media/test_html_preview.py | 18 ++++++- tests/media/test_oembed.py | 2 +- tests/media/test_url_previewer.py | 2 +- tests/rest/media/test_url_preview.py | 2 +- 10 files changed, 117 insertions(+), 48 deletions(-) create mode 100644 changelog.d/15697.misc diff --git a/changelog.d/15697.misc b/changelog.d/15697.misc new file mode 100644 index 000000000000..93ceaeafc9b9 --- /dev/null +++ b/changelog.d/15697.misc @@ -0,0 +1 @@ +Improve type hints. diff --git a/mypy.ini b/mypy.ini index 56cd1d560ea8..1038b7d8c7a4 100644 --- a/mypy.ini +++ b/mypy.ini @@ -60,9 +60,6 @@ ignore_missing_imports = True [mypy-ijson.*] ignore_missing_imports = True -[mypy-lxml] -ignore_missing_imports = True - # https://github.com/msgpack/msgpack-python/issues/448 [mypy-msgpack] ignore_missing_imports = True diff --git a/poetry.lock b/poetry.lock index 0879e64cf176..d8964f5719de 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. 
[[package]] name = "alabaster" @@ -1215,6 +1215,21 @@ html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] source = ["Cython (>=0.29.7)"] +[[package]] +name = "lxml-stubs" +version = "0.4.0" +description = "Type annotations for the lxml package" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "lxml-stubs-0.4.0.tar.gz", hash = "sha256:184877b42127256abc2b932ba8bd0ab5ea80bd0b0fee618d16daa40e0b71abee"}, + {file = "lxml_stubs-0.4.0-py3-none-any.whl", hash = "sha256:3b381e9e82397c64ea3cc4d6f79d1255d015f7b114806d4826218805c10ec003"}, +] + +[package.extras] +test = ["coverage[toml] (==5.2)", "pytest (>=6.0.0)", "pytest-mypy-plugins (==1.9.3)"] + [[package]] name = "markdown-it-py" version = "2.2.0" @@ -3409,22 +3424,22 @@ docs = ["Sphinx", "repoze.sphinx.autointerface"] test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"] [extras] -all = ["matrix-synapse-ldap3", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pysaml2", "authlib", "lxml", "sentry-sdk", "jaeger-client", "opentracing", "txredisapi", "hiredis", "Pympler", "pyicu"] +all = ["Pympler", "authlib", "hiredis", "jaeger-client", "lxml", "matrix-synapse-ldap3", "opentracing", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pyicu", "pysaml2", "sentry-sdk", "txredisapi"] cache-memory = ["Pympler"] jwt = ["authlib"] matrix-synapse-ldap3 = ["matrix-synapse-ldap3"] oidc = ["authlib"] opentracing = ["jaeger-client", "opentracing"] postgres = ["psycopg2", "psycopg2cffi", "psycopg2cffi-compat"] -redis = ["txredisapi", "hiredis"] +redis = ["hiredis", "txredisapi"] saml2 = ["pysaml2"] sentry = ["sentry-sdk"] systemd = ["systemd-python"] -test = ["parameterized", "idna"] +test = ["idna", "parameterized"] url-preview = ["lxml"] user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.7.1" -content-hash = "ef3a16dd66177f7141239e1a2d3e07cc14c08f1e4e0c5127184d022bc062da52" +content-hash = "7ad11e62a675e09444cf33ca2de3216fc4efc5874a2575e54d95d577a52439d3" diff --git a/pyproject.toml b/pyproject.toml index 7227bc752316..4476f57ca74f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -314,6 +314,7 @@ black = ">=22.3.0" ruff = "0.0.265" # Typechecking +lxml-stubs = ">=0.4.0" mypy = "*" mypy-zope = "*" types-bleach = ">=4.1.0" diff --git a/synapse/media/oembed.py b/synapse/media/oembed.py index c0eaf04be544..5ad9eec80b97 100644 --- a/synapse/media/oembed.py +++ b/synapse/media/oembed.py @@ -14,7 +14,7 @@ import html import logging import urllib.parse -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, List, Optional, cast import attr @@ -98,7 +98,7 @@ def get_oembed_url(self, url: str) -> Optional[str]: # No match. return None - def autodiscover_from_html(self, tree: "etree.Element") -> Optional[str]: + def autodiscover_from_html(self, tree: "etree._Element") -> Optional[str]: """ Search an HTML document for oEmbed autodiscovery information. @@ -109,18 +109,22 @@ def autodiscover_from_html(self, tree: "etree.Element") -> Optional[str]: The URL to use for oEmbed information, or None if no URL was found. """ # Search for link elements with the proper rel and type attributes. - for tag in tree.xpath( - "//link[@rel='alternate'][@type='application/json+oembed']" + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. 
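# Aside: why every xpath() call here needs a cast, shown in isolation (a
# standalone snippet, not code from this module). The lxml stubs type
# xpath() as a union (bool, float, str, list, ...) because the result shape
# depends on the expression string, which mypy cannot inspect.

from typing import List, cast

from lxml import etree

tree = etree.fromstring(
    b"<html><body><img src='a.png'/></body></html>",
    etree.HTMLParser(),
)
# This expression always yields a list of elements, but only the human
# reader knows that, hence the cast.
imgs = cast(List["etree._Element"], tree.xpath("//img[@src]"))
print(cast(str, imgs[0].attrib["src"]))  # -> a.png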
+ for tag in cast( + List["etree._Element"], + tree.xpath("//link[@rel='alternate'][@type='application/json+oembed']"), ): if "href" in tag.attrib: - return tag.attrib["href"] + return cast(str, tag.attrib["href"]) # Some providers (e.g. Flickr) use alternative instead of alternate. - for tag in tree.xpath( - "//link[@rel='alternative'][@type='application/json+oembed']" + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. + for tag in cast( + List["etree._Element"], + tree.xpath("//link[@rel='alternative'][@type='application/json+oembed']"), ): if "href" in tag.attrib: - return tag.attrib["href"] + return cast(str, tag.attrib["href"]) return None @@ -212,11 +216,12 @@ def parse_oembed_response(self, url: str, raw_body: bytes) -> OEmbedResult: return OEmbedResult(open_graph_response, author_name, cache_age) -def _fetch_urls(tree: "etree.Element", tag_name: str) -> List[str]: +def _fetch_urls(tree: "etree._Element", tag_name: str) -> List[str]: results = [] - for tag in tree.xpath("//*/" + tag_name): + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. + for tag in cast(List["etree._Element"], tree.xpath("//*/" + tag_name)): if "src" in tag.attrib: - results.append(tag.attrib["src"]) + results.append(cast(str, tag.attrib["src"])) return results @@ -244,11 +249,12 @@ def calc_description_and_urls(open_graph_response: JsonDict, html_body: str) -> parser = etree.HTMLParser(recover=True, encoding="utf-8") # Attempt to parse the body. If this fails, log and return no metadata. - tree = etree.fromstring(html_body, parser) + # TODO Develop of lxml-stubs has this correct. + tree = etree.fromstring(html_body, parser) # type: ignore[arg-type] # The data was successfully parsed, but no tree was found. if tree is None: - return + return # type: ignore[unreachable] # Attempt to find interesting URLs (images, videos, embeds). if "og:image" not in open_graph_response: diff --git a/synapse/media/preview_html.py b/synapse/media/preview_html.py index 516d0434f095..1bc7ccb7f3b5 100644 --- a/synapse/media/preview_html.py +++ b/synapse/media/preview_html.py @@ -24,6 +24,7 @@ Optional, Set, Union, + cast, ) if TYPE_CHECKING: @@ -115,7 +116,7 @@ def _get_html_media_encodings( def decode_body( body: bytes, uri: str, content_type: Optional[str] = None -) -> Optional["etree.Element"]: +) -> Optional["etree._Element"]: """ This uses lxml to parse the HTML document. @@ -152,11 +153,12 @@ def decode_body( # Attempt to parse the body. Returns None if the body was successfully # parsed, but no tree was found. - return etree.fromstring(body, parser) + # TODO Develop of lxml-stubs has this correct. + return etree.fromstring(body, parser) # type: ignore[arg-type] def _get_meta_tags( - tree: "etree.Element", + tree: "etree._Element", property: str, prefix: str, property_mapper: Optional[Callable[[str], Optional[str]]] = None, @@ -175,9 +177,15 @@ def _get_meta_tags( Returns: A map of tag name to value. """ + # This actually returns Dict[str, str], but the caller sets this as a variable + # which is Dict[str, Optional[str]]. results: Dict[str, Optional[str]] = {} - for tag in tree.xpath( - f"//*/meta[starts-with(@{property}, '{prefix}:')][@content][not(@content='')]" + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. 
+ for tag in cast( + List["etree._Element"], + tree.xpath( + f"//*/meta[starts-with(@{property}, '{prefix}:')][@content][not(@content='')]" + ), ): # if we've got more than 50 tags, someone is taking the piss if len(results) >= 50: @@ -187,14 +195,15 @@ def _get_meta_tags( ) return {} - key = tag.attrib[property] + key = cast(str, tag.attrib[property]) if property_mapper: - key = property_mapper(key) + new_key = property_mapper(key) # None is a special value used to ignore a value. - if key is None: + if new_key is None: continue + key = new_key - results[key] = tag.attrib["content"] + results[key] = cast(str, tag.attrib["content"]) return results @@ -219,7 +228,7 @@ def _map_twitter_to_open_graph(key: str) -> Optional[str]: return "og" + key[7:] -def parse_html_to_open_graph(tree: "etree.Element") -> Dict[str, Optional[str]]: +def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]]: """ Parse the HTML document into an Open Graph response. @@ -276,24 +285,36 @@ def parse_html_to_open_graph(tree: "etree.Element") -> Dict[str, Optional[str]]: if "og:title" not in og: # Attempt to find a title from the title tag, or the biggest header on the page. - title = tree.xpath("((//title)[1] | (//h1)[1] | (//h2)[1] | (//h3)[1])/text()") + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. + title = cast( + List["etree._ElementUnicodeResult"], + tree.xpath("((//title)[1] | (//h1)[1] | (//h2)[1] | (//h3)[1])/text()"), + ) if title: og["og:title"] = title[0].strip() else: og["og:title"] = None if "og:image" not in og: - meta_image = tree.xpath( - "//*/meta[translate(@itemprop, 'IMAGE', 'image')='image'][not(@content='')]/@content[1]" + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. + meta_image = cast( + List["etree._ElementUnicodeResult"], + tree.xpath( + "//*/meta[translate(@itemprop, 'IMAGE', 'image')='image'][not(@content='')]/@content[1]" + ), ) # If a meta image is found, use it. if meta_image: og["og:image"] = meta_image[0] else: # Try to find images which are larger than 10px by 10px. + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. # # TODO: consider inlined CSS styles as well as width & height attribs - images = tree.xpath("//img[@src][number(@width)>10][number(@height)>10]") + images = cast( + List["etree._Element"], + tree.xpath("//img[@src][number(@width)>10][number(@height)>10]"), + ) images = sorted( images, key=lambda i: ( @@ -302,20 +323,29 @@ def parse_html_to_open_graph(tree: "etree.Element") -> Dict[str, Optional[str]]: ) # If no images were found, try to find *any* images. if not images: - images = tree.xpath("//img[@src][1]") + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. + images = cast(List["etree._Element"], tree.xpath("//img[@src][1]")) if images: - og["og:image"] = images[0].attrib["src"] + og["og:image"] = cast(str, images[0].attrib["src"]) # Finally, fallback to the favicon if nothing else. else: - favicons = tree.xpath("//link[@href][contains(@rel, 'icon')]/@href[1]") + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. + favicons = cast( + List["etree._ElementUnicodeResult"], + tree.xpath("//link[@href][contains(@rel, 'icon')]/@href[1]"), + ) if favicons: og["og:image"] = favicons[0] if "og:description" not in og: # Check the first meta description tag for content. 
- meta_description = tree.xpath( - "//*/meta[translate(@name, 'DESCRIPTION', 'description')='description'][not(@content='')]/@content[1]" + # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. + meta_description = cast( + List["etree._ElementUnicodeResult"], + tree.xpath( + "//*/meta[translate(@name, 'DESCRIPTION', 'description')='description'][not(@content='')]/@content[1]" + ), ) # If a meta description is found with content, use it. if meta_description: @@ -332,7 +362,7 @@ def parse_html_to_open_graph(tree: "etree.Element") -> Dict[str, Optional[str]]: return og -def parse_html_description(tree: "etree.Element") -> Optional[str]: +def parse_html_description(tree: "etree._Element") -> Optional[str]: """ Calculate a text description based on an HTML document. @@ -368,6 +398,9 @@ def parse_html_description(tree: "etree.Element") -> Optional[str]: "canvas", "img", "picture", + # etree.Comment is a function which creates an etree._Comment element. + # The "tag" attribute of an etree._Comment instance is confusingly the + # etree.Comment function instead of a string. etree.Comment, } @@ -381,8 +414,8 @@ def parse_html_description(tree: "etree.Element") -> Optional[str]: def _iterate_over_text( - tree: Optional["etree.Element"], - tags_to_ignore: Set[Union[str, "etree.Comment"]], + tree: Optional["etree._Element"], + tags_to_ignore: Set[object], stack_limit: int = 1024, ) -> Generator[str, None, None]: """Iterate over the tree returning text nodes in a depth first fashion, @@ -402,7 +435,7 @@ def _iterate_over_text( # This is a stack whose items are elements to iterate over *or* strings # to be returned. - elements: List[Union[str, "etree.Element"]] = [tree] + elements: List[Union[str, "etree._Element"]] = [tree] while elements: el = elements.pop() diff --git a/tests/media/test_html_preview.py b/tests/media/test_html_preview.py index e7da75db3ee0..ea84bb3d3d53 100644 --- a/tests/media/test_html_preview.py +++ b/tests/media/test_html_preview.py @@ -24,7 +24,7 @@ try: import lxml except ImportError: - lxml = None + lxml = None # type: ignore[assignment] class SummarizeTestCase(unittest.TestCase): @@ -160,6 +160,7 @@ def test_simple(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) @@ -176,6 +177,7 @@ def test_comment(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) @@ -195,6 +197,7 @@ def test_comment2(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual( @@ -217,6 +220,7 @@ def test_script(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) @@ -231,6 +235,7 @@ def test_missing_title(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": None, "og:description": "Some text."}) @@ -246,6 +251,7 @@ def test_missing_title(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) 
self.assertEqual(og, {"og:title": "Title", "og:description": "Title"}) @@ -261,6 +267,7 @@ def test_h1_as_title(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "Title", "og:description": "Some text."}) @@ -281,6 +288,7 @@ def test_empty_description(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "Title", "og:description": "Finally!"}) @@ -296,6 +304,7 @@ def test_missing_title_and_broken_h1(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": None, "og:description": "Some text."}) @@ -324,6 +333,7 @@ def test_xml(self) -> None: FooSome text. """.strip() tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) @@ -338,6 +348,7 @@ def test_invalid_encoding(self) -> None: """ tree = decode_body(html, "http://example.com/test.html", "invalid-encoding") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) @@ -353,6 +364,7 @@ def test_invalid_encoding2(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "ÿÿ Foo", "og:description": "Some text."}) @@ -367,6 +379,7 @@ def test_windows_1252(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual(og, {"og:title": "ó", "og:description": "Some text."}) @@ -380,6 +393,7 @@ def test_twitter_tag(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual( og, @@ -401,6 +415,7 @@ def test_twitter_tag(self) -> None: """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual( og, @@ -419,6 +434,7 @@ def test_nested_nodes(self) -> None: with a cheeky SVG and some tail text """ tree = decode_body(html, "http://example.com/test.html") + assert tree is not None og = parse_html_to_open_graph(tree) self.assertEqual( og, diff --git a/tests/media/test_oembed.py b/tests/media/test_oembed.py index c8bf8421daf1..3bc19cb1ccd0 100644 --- a/tests/media/test_oembed.py +++ b/tests/media/test_oembed.py @@ -28,7 +28,7 @@ try: import lxml except ImportError: - lxml = None + lxml = None # type: ignore[assignment] class OEmbedTests(HomeserverTestCase): diff --git a/tests/media/test_url_previewer.py b/tests/media/test_url_previewer.py index 3c4c7d676520..46ecde534441 100644 --- a/tests/media/test_url_previewer.py +++ b/tests/media/test_url_previewer.py @@ -24,7 +24,7 @@ try: import lxml except ImportError: - lxml = None + lxml = None # type: ignore[assignment] class URLPreviewTests(unittest.HomeserverTestCase): diff --git a/tests/rest/media/test_url_preview.py b/tests/rest/media/test_url_preview.py index 170fb0534ad5..05d5e39cabd4 100644 --- a/tests/rest/media/test_url_preview.py +++ b/tests/rest/media/test_url_preview.py @@ -40,7 +40,7 @@ try: import lxml except ImportError: - lxml = None + lxml = None # type: 
ignore[assignment]


 class URLPreviewTests(unittest.HomeserverTestCase):

From 0b5f64ff09d44338d2514cbdba80aa4a4f11d1aa Mon Sep 17 00:00:00 2001
From: Eric Eastwood
Date: Wed, 31 May 2023 14:35:49 -0500
Subject: [PATCH 30/75] Add Synapse version deploy annotations to Grafana
 dashboard (#15674)

Fix https://github.com/matrix-org/synapse/issues/15662

This manifests as purple annotation lines that show up on all time series
panels; you can hover over them to see which version was deployed.

Also added a new "Deployed Synapse versions over time" panel where the color
block changes with each version, and mixed this color block into the "Up"
time series panel.

To get the Grafana dashboard JSON to copy here: use the **Share** icon at the
top -> **Export** -> check the **Export for sharing externally** option ->
**View JSON** or **Save to file**
---
 changelog.d/15674.feature    |    1 +
 contrib/grafana/synapse.json | 1240 +++++++++++++++++++++++++++-------
 2 files changed, 981 insertions(+), 260 deletions(-)
 create mode 100644 changelog.d/15674.feature

diff --git a/changelog.d/15674.feature b/changelog.d/15674.feature
new file mode 100644
index 000000000000..68cf207dc0f6
--- /dev/null
+++ b/changelog.d/15674.feature
@@ -0,0 +1 @@
+Add Synapse version deploy annotations to the Grafana dashboard, enabling easy correlation between behavior changes witnessed in a graph and a particular Synapse version, to help nail down regressions.
diff --git a/contrib/grafana/synapse.json b/contrib/grafana/synapse.json
index f09cd6f87c28..f3253b32b929 100644
--- a/contrib/grafana/synapse.json
+++ b/contrib/grafana/synapse.json
@@ -56,6 +56,17 @@
       "name": "Annotations & Alerts",
       "showIn": 0,
       "type": "dashboard"
+      },
+      {
+        "datasource": {
+          "type": "prometheus",
+          "uid": "${DS_PROMETHEUS}"
+        },
+        "enable": true,
+        "expr": "changes(process_start_time_seconds{instance=\"matrix.org\",job=~\"synapse\"}[$bucket_size]) * on (instance, job) group_left(version) synapse_build_info{instance=\"matrix.org\",job=\"synapse\"}",
+        "iconColor": "purple",
+        "name": "deploys",
+        "titleFormat": "Deployed {{version}}"
       }
     ]
   },
@@ -670,6 +681,95 @@
         "align": false
       }
     },
+    {
+      "datasource": {
+        "type": "prometheus",
+        "uid": "${DS_PROMETHEUS}"
+      },
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisCenteredZero": false,
+            "axisColorMode": "text",
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "axisSoftMax": 1,
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 10,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "never",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "off"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 7,
+        "w": 12,
+        "x": 0,
+        "y": 19
+      },
+      "id": 245,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "list",
+          "placement": "bottom",
+          "showLegend": true
+        },
+        "tooltip": {
+          "mode": "single",
+          "sort": "none"
+        }
+      },
+      "targets": [
+        {
+          "datasource": {
+            "type": "prometheus",
+            "uid": "${DS_PROMETHEUS}"
+          },
+          "editorMode": "code",
+          "expr": "synapse_build_info{instance=\"$instance\", job=\"synapse\"} - 1",
+          "legendFormat": "version {{version}}",
+          "range": true,
+          "refId": "deployed_synapse_versions"
+        }
+      ],
"title": "Deployed Synapse versions over time", + "type": "timeseries" + }, { "aliasColors": {}, "bars": false, @@ -809,6 +909,7 @@ "dashLength": 10, "dashes": false, "datasource": { + "type": "prometheus", "uid": "$datasource" }, "editable": true, @@ -874,11 +975,13 @@ "datasource": { "uid": "$datasource" }, + "editorMode": "code", "expr": "rate(process_cpu_system_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", "format": "time_series", "intervalFactor": 1, "legendFormat": "{{job}}-{{index}} system ", "metric": "", + "range": true, "refId": "B", "step": 20 }, @@ -1328,6 +1431,7 @@ "dashLength": 10, "dashes": false, "datasource": { + "type": "prometheus", "uid": "$datasource" }, "fieldConfig": { @@ -1368,7 +1472,15 @@ "pointradius": 5, "points": false, "renderer": "flot", - "seriesOverrides": [], + "seriesOverrides": [ + { + "$$hashKey": "object:116", + "alias": "/^version .*/", + "lines": true, + "linewidth": 6, + "points": false + } + ], "spaceLength": 10, "stack": false, "steppedLine": false, @@ -1377,11 +1489,25 @@ "datasource": { "uid": "$datasource" }, + "editorMode": "code", "expr": "min_over_time(up{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", "format": "time_series", "intervalFactor": 2, "legendFormat": "{{job}}-{{index}}", + "range": true, "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "$datasource" + }, + "editorMode": "code", + "expr": "synapse_build_info{instance=\"$instance\", job=\"synapse\"} - 1", + "hide": false, + "legendFormat": "version {{version}}", + "range": true, + "refId": "deployed_synapse_versions" } ], "thresholds": [], @@ -1788,7 +1914,7 @@ "h": 9, "w": 12, "x": 0, - "y": 56 + "y": 28 }, "heatmap": {}, "hideZeroBuckets": false, @@ -1890,7 +2016,7 @@ "h": 9, "w": 12, "x": 12, - "y": 56 + "y": 28 }, "hiddenSeries": false, "id": 33, @@ -1982,7 +2108,7 @@ "h": 7, "w": 12, "x": 0, - "y": 65 + "y": 37 }, "hiddenSeries": false, "id": 40, @@ -2070,7 +2196,7 @@ "h": 7, "w": 12, "x": 12, - "y": 65 + "y": 37 }, "hiddenSeries": false, "id": 46, @@ -2161,7 +2287,7 @@ "h": 7, "w": 12, "x": 0, - "y": 72 + "y": 44 }, "hiddenSeries": false, "id": 44, @@ -2253,7 +2379,7 @@ "h": 7, "w": 12, "x": 12, - "y": 72 + "y": 44 }, "hiddenSeries": false, "id": 45, @@ -2354,7 +2480,7 @@ "h": 9, "w": 12, "x": 0, - "y": 79 + "y": 51 }, "hiddenSeries": false, "id": 118, @@ -2547,7 +2673,7 @@ "h": 9, "w": 12, "x": 12, - "y": 79 + "y": 51 }, "id": 222, "options": { @@ -2646,7 +2772,7 @@ "h": 8, "w": 12, "x": 0, - "y": 57 + "y": 29 }, "hiddenSeries": false, "id": 4, @@ -2768,7 +2894,7 @@ "h": 8, "w": 12, "x": 12, - "y": 57 + "y": 29 }, "hiddenSeries": false, "id": 32, @@ -2867,7 +2993,7 @@ "h": 8, "w": 12, "x": 0, - "y": 65 + "y": 37 }, "hiddenSeries": false, "id": 139, @@ -2989,7 +3115,7 @@ "h": 8, "w": 12, "x": 12, - "y": 65 + "y": 37 }, "hiddenSeries": false, "id": 52, @@ -3111,7 +3237,7 @@ "h": 8, "w": 12, "x": 0, - "y": 73 + "y": 45 }, "hiddenSeries": false, "id": 7, @@ -3212,7 +3338,7 @@ "h": 8, "w": 12, "x": 12, - "y": 73 + "y": 45 }, "hiddenSeries": false, "id": 47, @@ -3310,7 +3436,7 @@ "h": 9, "w": 12, "x": 0, - "y": 81 + "y": 53 }, "hiddenSeries": false, "id": 103, @@ -3445,7 +3571,7 @@ "h": 9, "w": 12, "x": 0, - "y": 5 + "y": 30 }, "hiddenSeries": false, "id": 99, @@ -3467,7 +3593,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "8.4.3", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -3538,7 +3664,7 @@ "h": 9, "w": 12, "x": 12, - "y": 5 + 
"y": 30 }, "hiddenSeries": false, "id": 101, @@ -3560,7 +3686,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "8.4.3", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -3631,7 +3757,7 @@ "h": 8, "w": 12, "x": 0, - "y": 14 + "y": 39 }, "hiddenSeries": false, "id": 138, @@ -3651,7 +3777,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "8.4.3", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -3746,7 +3872,7 @@ "h": 9, "w": 12, "x": 0, - "y": 59 + "y": 31 }, "hiddenSeries": false, "id": 79, @@ -3846,7 +3972,7 @@ "h": 9, "w": 12, "x": 12, - "y": 59 + "y": 31 }, "hiddenSeries": false, "id": 83, @@ -3934,6 +4060,7 @@ "dashLength": 10, "dashes": false, "datasource": { + "type": "prometheus", "uid": "$datasource" }, "fieldConfig": { @@ -3948,7 +4075,7 @@ "h": 9, "w": 12, "x": 0, - "y": 68 + "y": 40 }, "hiddenSeries": false, "id": 109, @@ -3983,11 +4110,13 @@ "datasource": { "uid": "$datasource" }, - "expr": "sum(rate(synapse_federation_client_sent_pdu_destinations:total_total{instance=\"$instance\"}[$bucket_size]))", + "editorMode": "code", + "expr": "sum(rate(synapse_federation_client_sent_pdu_destinations_count_total{instance=\"$instance\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 1, "legendFormat": "pdus", + "range": true, "refId": "A" }, { @@ -4052,7 +4181,7 @@ "h": 9, "w": 12, "x": 12, - "y": 68 + "y": 40 }, "hiddenSeries": false, "id": 111, @@ -4130,119 +4259,363 @@ } }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "$datasource" + "uid": "${DS_PROMETHEUS}" }, - "description": "The number of events in the in-memory queues ", + "description": "Triangular growth may indicate a problem with federation sending from the remote host --- but it may also be the case that everyone is asleep and no messages are being sent.\n\nSee https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#federation_metrics_domains", "fieldConfig": { "defaults": { - "links": [] + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "axisSoftMax": 60, + "axisSoftMin": 0, + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 60 + } + ] + }, + "unit": "m" }, - "overrides": [] + "overrides": [ + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + "mode": "exclude", + "names": [ + "libera.chat " + ], + "prefix": "All except:", + "readOnly": true + } + }, + "properties": [ + { + "id": "custom.hideFrom", + "value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] + } + ] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 8, "w": 12, "x": 0, - "y": 77 - }, - "hiddenSeries": false, - "id": 142, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, 
- "values": false + "y": 49 }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", + "id": 243, "options": { - "alertThreshold": true + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } }, - "percentage": false, - "pluginVersion": "9.2.2", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, "targets": [ { "datasource": { "type": "prometheus", - "uid": "$datasource" + "uid": "${DS_PROMETHEUS}" }, "editorMode": "code", - "expr": "synapse_federation_transaction_queue_pending_pdus{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", - "interval": "", - "legendFormat": "pending PDUs {{job}}-{{index}}", + "exemplar": false, + "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_received_pdu_time[10m]))) / 60", + "instant": false, + "legendFormat": "{{server_name}} ", "range": true, "refId": "A" - }, - { - "datasource": { - "type": "prometheus", - "uid": "$datasource" - }, - "expr": "synapse_federation_transaction_queue_pending_edus{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", - "interval": "", - "legendFormat": "pending EDUs {{job}}-{{index}}", - "refId": "B" - } - ], - "thresholds": [], - "timeRegions": [], - "title": "In-memory federation transmission queues", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:547", - "format": "short", - "label": "events", - "logBase": 1, - "min": "0", - "show": true - }, - { - "$$hashKey": "object:548", - "format": "short", - "label": "", - "logBase": 1, - "show": true } ], - "yaxis": { - "align": false - } + "title": "Age of last PDU received from nominated hosts", + "type": "timeseries" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "description": "Triangular growth may indicate a problem with federation senders on the monitored instance---but it may also be the case that everyone is asleep and no messages are being sent.\n\nSee https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#federation_metrics_domains", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "axisSoftMax": 60, + "axisSoftMin": 0, + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 60 + } + ] + }, + "unit": "m" + }, + "overrides": [ + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + "mode": "exclude", + "names": [ + "libera.chat" + ], + "prefix": "All except:", + "readOnly": true + } + }, + "properties": [ + { + "id": "custom.hideFrom", + 
"value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 49 + }, + "id": 241, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "exemplar": false, + "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_sent_pdu_time[10m]))) / 60", + "instant": false, + "legendFormat": "{{server_name}}", + "range": true, + "refId": "A" + } + ], + "title": "Age of last PDU sent to nominated hosts", + "type": "timeseries" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": { + "type": "prometheus", + "uid": "$datasource" + }, + "description": "The number of events in the in-memory queues ", + "fieldConfig": { + "defaults": { + "links": [] + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 57 + }, + "hiddenSeries": false, + "id": 142, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "9.2.2", + "pointradius": 2, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "$datasource" + }, + "editorMode": "code", + "expr": "synapse_federation_transaction_queue_pending_pdus{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", + "interval": "", + "legendFormat": "pending PDUs {{job}}-{{index}}", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "$datasource" + }, + "expr": "synapse_federation_transaction_queue_pending_edus{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", + "interval": "", + "legendFormat": "pending EDUs {{job}}-{{index}}", + "refId": "B" + } + ], + "thresholds": [], + "timeRegions": [], + "title": "In-memory federation transmission queues", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "mode": "time", + "show": true, + "values": [] + }, + "yaxes": [ + { + "$$hashKey": "object:547", + "format": "short", + "label": "events", + "logBase": 1, + "min": "0", + "show": true + }, + { + "$$hashKey": "object:548", + "format": "short", + "label": "", + "logBase": 1, + "show": true + } + ], + "yaxis": { + "align": false + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, "datasource": { "uid": "$datasource" }, @@ -4259,7 +4632,7 @@ "h": 9, "w": 12, "x": 12, - "y": 77 + "y": 57 }, "hiddenSeries": false, "id": 140, @@ -4428,7 +4801,7 @@ "h": 9, "w": 12, "x": 0, - "y": 85 + "y": 66 }, "heatmap": {}, "hideZeroBuckets": false, @@ -4533,7 +4906,7 @@ "h": 9, "w": 12, "x": 12, - "y": 86 + "y": 66 }, "hiddenSeries": false, "id": 162, @@ -4745,11 +5118,26 @@ "datasource": { "uid": "$datasource" }, + "fieldConfig": { + "defaults": { + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "scaleDistribution": { + "type": "linear" + } + } + }, + "overrides": [] + }, "gridPos": { 
"h": 9, "w": 12, "x": 0, - "y": 94 + "y": 75 }, "heatmap": {}, "hideZeroBuckets": false, @@ -4759,6 +5147,48 @@ "show": false }, "links": [], + "options": { + "calculate": false, + "calculation": {}, + "cellGap": -1, + "cellValues": { + "decimals": 2 + }, + "color": { + "exponent": 0.5, + "fill": "#b4ff00", + "min": 0, + "mode": "scheme", + "reverse": false, + "scale": "exponential", + "scheme": "Inferno", + "steps": 128 + }, + "exemplars": { + "color": "rgba(255,0,255,0.7)" + }, + "filterValues": { + "le": 1e-9 + }, + "legend": { + "show": false + }, + "rowsFrame": { + "layout": "auto" + }, + "showValue": "never", + "tooltip": { + "show": true, + "yHistogram": true + }, + "yAxis": { + "axisPlacement": "left", + "decimals": 0, + "reverse": false, + "unit": "s" + } + }, + "pluginVersion": "9.2.2", "reverseYBuckets": false, "targets": [ { @@ -4798,6 +5228,7 @@ "dashLength": 10, "dashes": false, "datasource": { + "type": "prometheus", "uid": "$datasource" }, "editable": true, @@ -4815,7 +5246,7 @@ "h": 9, "w": 12, "x": 12, - "y": 95 + "y": 75 }, "hiddenSeries": false, "id": 203, @@ -4837,7 +5268,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -4850,11 +5281,13 @@ "datasource": { "uid": "$datasource" }, - "expr": "synapse_federation_server_oldest_inbound_pdu_in_staging{job=\"$job\",index=~\"$index\",instance=\"$instance\"}", + "editorMode": "code", + "expr": "synapse_federation_server_oldest_inbound_pdu_in_staging{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}", "format": "time_series", "interval": "", "intervalFactor": 1, - "legendFormat": "rss {{index}}", + "legendFormat": "{{job}}-{{index}}", + "range": true, "refId": "A", "step": 4 } @@ -4899,6 +5332,7 @@ "dashLength": 10, "dashes": false, "datasource": { + "type": "prometheus", "uid": "$datasource" }, "editable": true, @@ -4916,7 +5350,7 @@ "h": 9, "w": 12, "x": 0, - "y": 103 + "y": 84 }, "hiddenSeries": false, "id": 202, @@ -4938,7 +5372,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -4951,11 +5385,13 @@ "datasource": { "uid": "$datasource" }, - "expr": "synapse_federation_server_number_inbound_pdu_in_staging{job=\"$job\",index=~\"$index\",instance=\"$instance\"}", + "editorMode": "code", + "expr": "synapse_federation_server_number_inbound_pdu_in_staging{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}", "format": "time_series", "interval": "", "intervalFactor": 1, - "legendFormat": "rss {{index}}", + "legendFormat": "{{job}}-{{index}}", + "range": true, "refId": "A", "step": 4 } @@ -5009,7 +5445,7 @@ "h": 8, "w": 12, "x": 12, - "y": 104 + "y": 84 }, "hiddenSeries": false, "id": 205, @@ -5029,7 +5465,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -5115,6 +5551,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -5162,7 +5600,7 @@ "h": 8, "w": 12, "x": 0, - "y": 1 + "y": 154 }, "id": 239, "options": { @@ -5201,6 +5639,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -5248,7 +5688,7 @@ "h": 8, "w": 12, "x": 12, - "y": 1 + "y": 154 }, "id": 235, "options": { 
@@ -5288,6 +5728,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -5335,7 +5777,7 @@ "h": 8, "w": 12, "x": 0, - "y": 9 + "y": 162 }, "id": 237, "options": { @@ -5376,6 +5818,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -5423,7 +5867,7 @@ "h": 8, "w": 12, "x": 12, - "y": 9 + "y": 162 }, "id": 233, "options": { @@ -5474,7 +5918,7 @@ "h": 8, "w": 12, "x": 0, - "y": 17 + "y": 170 }, "hiddenSeries": false, "id": 229, @@ -5497,7 +5941,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -5709,6 +6153,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -5773,7 +6219,7 @@ "h": 8, "w": 12, "x": 12, - "y": 17 + "y": 170 }, "id": 231, "options": { @@ -5832,65 +6278,96 @@ "id": 60, "panels": [ { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { + "type": "prometheus", "uid": "$datasource" }, "fieldConfig": { "defaults": { - "links": [] + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "links": [], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "hertz" }, "overrides": [] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 8, "w": 12, "x": 0, - "y": 32 + "y": 155 }, - "hiddenSeries": false, "id": 51, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, "links": [], - "nullPointMode": "null", "options": { - "alertThreshold": true + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } }, - "paceLength": 10, - "percentage": false, - "pluginVersion": "8.4.3", - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "pluginVersion": "9.2.2", "targets": [ { "datasource": { "uid": "$datasource" }, + "editorMode": "code", "expr": "rate(synapse_http_httppusher_http_pushes_processed_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) and on (instance, job, index) (synapse_http_httppusher_http_pushes_failed_total + synapse_http_httppusher_http_pushes_processed_total) > 0", "format": "time_series", "interval": "", "intervalFactor": 2, - "legendFormat": "processed {{job}}", + "legendFormat": "processed {{job}}-{{index}}", + "range": true, "refId": "A", "step": 20 }, @@ -5898,43 +6375,18 @@ "datasource": { "uid": "$datasource" }, + 
"editorMode": "code", "expr": "rate(synapse_http_httppusher_http_pushes_failed_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) and on (instance, job, index) (synapse_http_httppusher_http_pushes_failed_total + synapse_http_httppusher_http_pushes_processed_total) > 0", "format": "time_series", "intervalFactor": 2, - "legendFormat": "failed {{job}}", + "legendFormat": "failed {{job}}-{{index}}", + "range": true, "refId": "B", "step": 20 } ], - "thresholds": [], - "timeRegions": [], "title": "HTTP Push rate", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "hertz", - "logBase": 1, - "show": true - }, - { - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { "aliasColors": {}, @@ -5957,7 +6409,7 @@ "h": 8, "w": 12, "x": 12, - "y": 32 + "y": 155 }, "hiddenSeries": false, "id": 134, @@ -5978,7 +6430,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "8.4.3", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -7344,7 +7796,7 @@ "h": 13, "w": 12, "x": 0, - "y": 35 + "y": 158 }, "hiddenSeries": false, "id": 12, @@ -7367,7 +7819,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -7442,7 +7894,7 @@ "h": 13, "w": 12, "x": 12, - "y": 35 + "y": 158 }, "hiddenSeries": false, "id": 26, @@ -7465,7 +7917,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -7541,7 +7993,7 @@ "h": 13, "w": 12, "x": 0, - "y": 48 + "y": 171 }, "hiddenSeries": false, "id": 13, @@ -7564,7 +8016,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -7645,7 +8097,7 @@ "h": 13, "w": 12, "x": 12, - "y": 48 + "y": 171 }, "hiddenSeries": false, "id": 27, @@ -7668,7 +8120,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -7743,7 +8195,7 @@ "h": 13, "w": 12, "x": 0, - "y": 61 + "y": 184 }, "hiddenSeries": false, "id": 28, @@ -7765,7 +8217,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -7840,7 +8292,7 @@ "h": 13, "w": 12, "x": 12, - "y": 61 + "y": 184 }, "hiddenSeries": false, "id": 25, @@ -7862,7 +8314,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -7930,7 +8382,7 @@ "h": 15, "w": 12, "x": 0, - "y": 74 + "y": 197 }, "hiddenSeries": false, "id": 154, @@ -7951,7 +8403,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -9363,7 +9815,7 @@ "h": 7, "w": 12, "x": 0, - "y": 40 + "y": 162 }, "hiddenSeries": false, "id": 43, @@ -9385,7 +9837,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -9449,6 +9901,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + 
"axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -9498,7 +9952,7 @@ "h": 7, "w": 12, "x": 12, - "y": 40 + "y": 162 }, "id": 41, "links": [], @@ -9545,6 +9999,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -9595,7 +10051,7 @@ "h": 7, "w": 12, "x": 0, - "y": 47 + "y": 169 }, "id": 42, "links": [], @@ -9642,6 +10098,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "axisSoftMin": 1, @@ -9693,7 +10151,7 @@ "h": 7, "w": 12, "x": 12, - "y": 47 + "y": 169 }, "id": 220, "links": [], @@ -9751,7 +10209,7 @@ "h": 7, "w": 12, "x": 0, - "y": 54 + "y": 176 }, "hiddenSeries": false, "id": 144, @@ -9771,7 +10229,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -9844,7 +10302,7 @@ "h": 7, "w": 12, "x": 12, - "y": 54 + "y": 176 }, "hiddenSeries": false, "id": 115, @@ -9866,7 +10324,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -9938,7 +10396,7 @@ "h": 7, "w": 12, "x": 0, - "y": 61 + "y": 183 }, "hiddenSeries": false, "id": 113, @@ -9960,7 +10418,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "9.0.4", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -10058,7 +10516,6 @@ }, "fieldConfig": { "defaults": { - "custom": {}, "links": [] }, "overrides": [] @@ -10069,7 +10526,7 @@ "h": 9, "w": 12, "x": 0, - "y": 41 + "y": 163 }, "hiddenSeries": false, "id": 67, @@ -10091,7 +10548,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "7.3.7", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -10154,7 +10611,6 @@ }, "fieldConfig": { "defaults": { - "custom": {}, "links": [] }, "overrides": [] @@ -10165,7 +10621,7 @@ "h": 9, "w": 12, "x": 12, - "y": 41 + "y": 163 }, "hiddenSeries": false, "id": 71, @@ -10187,7 +10643,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "7.3.7", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -10250,7 +10706,6 @@ }, "fieldConfig": { "defaults": { - "custom": {}, "links": [] }, "overrides": [] @@ -10261,7 +10716,7 @@ "h": 9, "w": 12, "x": 0, - "y": 50 + "y": 172 }, "hiddenSeries": false, "id": 121, @@ -10284,7 +10739,7 @@ }, "paceLength": 10, "percentage": false, - "pluginVersion": "7.3.7", + "pluginVersion": "9.2.2", "pointradius": 5, "points": false, "renderer": "flot", @@ -10383,7 +10838,16 @@ "description": "Colour reflects the number of rooms with the given number of forward extremities, or fewer.\n\nThis is only updated once an hour.", "fieldConfig": { "defaults": { - "custom": {} + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "scaleDistribution": { + "type": "linear" + } + } }, "overrides": [] }, @@ -10400,7 +10864,47 @@ "legend": { "show": true }, - "links": [], + "links": [], + "options": { + "calculate": false, + "calculation": {}, + "cellGap": 1, + "cellValues": {}, + "color": { + "exponent": 0.5, + "fill": "#B877D9", + "min": 0, + "mode": "opacity", + "reverse": false, + "scale": "exponential", + "scheme": "Oranges", + "steps": 128 + }, + "exemplars": { + "color": "rgba(255,0,255,0.7)" + }, + 
"filterValues": { + "le": 1e-9 + }, + "legend": { + "show": true + }, + "rowsFrame": { + "layout": "auto" + }, + "showValue": "never", + "tooltip": { + "show": true, + "yHistogram": true + }, + "yAxis": { + "axisPlacement": "left", + "decimals": 0, + "reverse": false, + "unit": "short" + } + }, + "pluginVersion": "9.2.2", "reverseYBuckets": false, "targets": [ { @@ -10442,7 +10946,6 @@ "description": "Number of rooms with the given number of forward extremities or fewer.\n\nThis is only updated once an hour.", "fieldConfig": { "defaults": { - "custom": {}, "links": [] }, "overrides": [] @@ -10471,8 +10974,11 @@ "linewidth": 1, "links": [], "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, "percentage": false, - "pluginVersion": "7.1.3", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -10543,7 +11049,16 @@ "description": "Colour reflects the number of events persisted to rooms with the given number of forward extremities, or fewer.", "fieldConfig": { "defaults": { - "custom": {} + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "scaleDistribution": { + "type": "linear" + } + } }, "overrides": [] }, @@ -10561,6 +11076,46 @@ "show": true }, "links": [], + "options": { + "calculate": false, + "calculation": {}, + "cellGap": 1, + "cellValues": {}, + "color": { + "exponent": 0.5, + "fill": "#5794F2", + "min": 0, + "mode": "opacity", + "reverse": false, + "scale": "exponential", + "scheme": "Oranges", + "steps": 128 + }, + "exemplars": { + "color": "rgba(255,0,255,0.7)" + }, + "filterValues": { + "le": 1e-9 + }, + "legend": { + "show": true + }, + "rowsFrame": { + "layout": "auto" + }, + "showValue": "never", + "tooltip": { + "show": true, + "yHistogram": true + }, + "yAxis": { + "axisPlacement": "left", + "decimals": 0, + "reverse": false, + "unit": "short" + } + }, + "pluginVersion": "9.2.2", "reverseYBuckets": false, "targets": [ { @@ -10602,7 +11157,6 @@ "description": "For a given percentage P, the number X where P% of events were persisted to rooms with X forward extremities or fewer.", "fieldConfig": { "defaults": { - "custom": {}, "links": [] }, "overrides": [] @@ -10630,8 +11184,11 @@ "linewidth": 1, "links": [], "nullPointMode": "null", + "options": { + "alertThreshold": true + }, "percentage": false, - "pluginVersion": "7.1.3", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -10732,7 +11289,16 @@ "description": "Colour reflects the number of events persisted to rooms with the given number of stale forward extremities, or fewer.\n\nStale forward extremities are those that were in the previous set of extremities as well as the new.", "fieldConfig": { "defaults": { - "custom": {} + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "scaleDistribution": { + "type": "linear" + } + } }, "overrides": [] }, @@ -10750,6 +11316,46 @@ "show": true }, "links": [], + "options": { + "calculate": false, + "calculation": {}, + "cellGap": 1, + "cellValues": {}, + "color": { + "exponent": 0.5, + "fill": "#FF9830", + "min": 0, + "mode": "opacity", + "reverse": false, + "scale": "exponential", + "scheme": "Oranges", + "steps": 128 + }, + "exemplars": { + "color": "rgba(255,0,255,0.7)" + }, + "filterValues": { + "le": 1e-9 + }, + "legend": { + "show": true + }, + "rowsFrame": { + "layout": "auto" + }, + "showValue": "never", + "tooltip": { + "show": true, + "yHistogram": true + }, + "yAxis": { + "axisPlacement": "left", + 
"decimals": 0, + "reverse": false, + "unit": "short" + } + }, + "pluginVersion": "9.2.2", "reverseYBuckets": false, "targets": [ { @@ -10791,7 +11397,6 @@ "description": "For given percentage P, the number X where P% of events were persisted to rooms with X stale forward extremities or fewer.\n\nStale forward extremities are those that were in the previous set of extremities as well as the new.", "fieldConfig": { "defaults": { - "custom": {}, "links": [] }, "overrides": [] @@ -10819,8 +11424,11 @@ "linewidth": 1, "links": [], "nullPointMode": "null", + "options": { + "alertThreshold": true + }, "percentage": false, - "pluginVersion": "7.1.3", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -10921,7 +11529,16 @@ "description": "Colour reflects the number of state resolution operations performed over the given number of state groups, or fewer.", "fieldConfig": { "defaults": { - "custom": {} + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "scaleDistribution": { + "type": "linear" + } + } }, "overrides": [] }, @@ -10939,6 +11556,46 @@ "show": true }, "links": [], + "options": { + "calculate": false, + "calculation": {}, + "cellGap": 1, + "cellValues": {}, + "color": { + "exponent": 0.5, + "fill": "#73BF69", + "min": 0, + "mode": "opacity", + "reverse": false, + "scale": "exponential", + "scheme": "Oranges", + "steps": 128 + }, + "exemplars": { + "color": "rgba(255,0,255,0.7)" + }, + "filterValues": { + "le": 1e-9 + }, + "legend": { + "show": true + }, + "rowsFrame": { + "layout": "auto" + }, + "showValue": "never", + "tooltip": { + "show": true, + "yHistogram": true + }, + "yAxis": { + "axisPlacement": "left", + "decimals": 0, + "reverse": false, + "unit": "short" + } + }, + "pluginVersion": "9.2.2", "reverseYBuckets": false, "targets": [ { @@ -10976,12 +11633,12 @@ "dashLength": 10, "dashes": false, "datasource": { + "type": "prometheus", "uid": "$datasource" }, "description": "For a given percentage P, the number X where P% of state resolution operations took place over X state groups or fewer.", "fieldConfig": { "defaults": { - "custom": {}, "links": [] }, "overrides": [] @@ -11010,8 +11667,11 @@ "linewidth": 1, "links": [], "nullPointMode": "null", + "options": { + "alertThreshold": true + }, "percentage": false, - "pluginVersion": "7.1.3", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -11024,11 +11684,13 @@ "datasource": { "uid": "$datasource" }, + "editorMode": "code", "expr": "histogram_quantile(0.5, rate(synapse_state_number_state_groups_in_resolution_bucket{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]))", "format": "time_series", "interval": "", "intervalFactor": 1, "legendFormat": "50%", + "range": true, "refId": "A" }, { @@ -11106,12 +11768,6 @@ "uid": "$datasource" }, "description": "When we do a state res while persisting events we try and see if we can prune any stale extremities.", - "fieldConfig": { - "defaults": { - "custom": {} - }, - "overrides": [] - }, "fill": 1, "fillGradient": 0, "gridPos": { @@ -11134,8 +11790,11 @@ "lines": true, "linewidth": 1, "nullPointMode": "null", + "options": { + "alertThreshold": true + }, "percentage": false, - "pluginVersion": "7.1.3", + "pluginVersion": "9.2.2", "pointradius": 2, "points": false, "renderer": "flot", @@ -12218,6 +12877,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ 
-12266,7 +12927,7 @@
       "h": 8,
       "w": 12,
       "x": 0,
-      "y": 46
+      "y": 47
     },
     "id": 191,
     "options": {
@@ -12314,7 +12975,7 @@
       "h": 8,
       "w": 12,
       "x": 12,
-      "y": 46
+      "y": 47
     },
     "hiddenSeries": false,
     "id": 193,
@@ -12334,7 +12995,7 @@
       "alertThreshold": true
     },
     "percentage": false,
-    "pluginVersion": "9.0.4",
+    "pluginVersion": "9.2.2",
     "pointradius": 2,
     "points": false,
    "renderer": "flot",
@@ -12404,11 +13065,26 @@
       "type": "prometheus",
       "uid": "$datasource"
     },
+    "fieldConfig": {
+      "defaults": {
+        "custom": {
+          "hideFrom": {
+            "legend": false,
+            "tooltip": false,
+            "viz": false
+          },
+          "scaleDistribution": {
+            "type": "linear"
+          }
+        }
+      },
+      "overrides": []
+    },
     "gridPos": {
       "h": 8,
       "w": 12,
       "x": 0,
-      "y": 54
+      "y": 55
     },
     "heatmap": {},
     "hideZeroBuckets": false,
@@ -12418,6 +13094,48 @@
       "show": false
     },
     "links": [],
+    "options": {
+      "calculate": false,
+      "calculation": {},
+      "cellGap": -1,
+      "cellValues": {
+        "decimals": 2
+      },
+      "color": {
+        "exponent": 0.5,
+        "fill": "#b4ff00",
+        "min": 0,
+        "mode": "scheme",
+        "reverse": false,
+        "scale": "exponential",
+        "scheme": "Inferno",
+        "steps": 128
+      },
+      "exemplars": {
+        "color": "rgba(255,0,255,0.7)"
+      },
+      "filterValues": {
+        "le": 1e-9
+      },
+      "legend": {
+        "show": false
+      },
+      "rowsFrame": {
+        "layout": "auto"
+      },
+      "showValue": "never",
+      "tooltip": {
+        "show": true,
+        "yHistogram": true
+      },
+      "yAxis": {
+        "axisPlacement": "left",
+        "decimals": 0,
+        "reverse": false,
+        "unit": "s"
+      }
+    },
+    "pluginVersion": "9.2.2",
     "reverseYBuckets": false,
     "targets": [
       {
@@ -12463,6 +13181,8 @@
           "mode": "palette-classic"
         },
         "custom": {
+          "axisCenteredZero": false,
+          "axisColorMode": "text",
           "axisLabel": "",
           "axisPlacement": "auto",
           "barAlignment": 0,
@@ -12507,7 +13227,7 @@
       "h": 8,
       "w": 12,
       "x": 12,
-      "y": 54
+      "y": 55
     },
     "id": 223,
     "options": {
@@ -12757,6 +13477,6 @@
   "timezone": "",
   "title": "Synapse",
   "uid": "000000012",
-  "version": 150,
+  "version": 160,
   "weekStart": ""
-}
\ No newline at end of file
+}

From d1693f03626391097b59ea9568cd8a869ed89569 Mon Sep 17 00:00:00 2001
From: Hugh Nimmo-Smith
Date: Thu, 1 Jun 2023 13:52:51 +0100
Subject: [PATCH 31/75] Implement stable support for MSC3882 to allow an
 existing device/session to generate a login token for use on a new
 device/session (#15388)

Implements stable support for MSC3882; this involves updating Synapse's
support to match what the MSC / the spec says. Continue to support the
unstable version to allow clients to transition.
---
 changelog.d/15388.feature                     |  1 +
 .../configuration/config_documentation.md     | 65 +++++++++++------
 synapse/config/auth.py                        | 10 +++
 synapse/config/experimental.py                | 13 +---
 synapse/rest/client/capabilities.py           |  3 +
 synapse/rest/client/login.py                  | 31 +++++---
 synapse/rest/client/login_token_request.py    | 47 ++++++++----
 synapse/rest/client/versions.py               |  4 +-
 tests/config/test_oauth_delegation.py         |  4 +-
 tests/rest/client/test_capabilities.py        | 28 ++++++++
 tests/rest/client/test_login.py               | 23 ++++++
 tests/rest/client/test_login_token_request.py | 71 ++++++++++++++-----
 12 files changed, 225 insertions(+), 75 deletions(-)
 create mode 100644 changelog.d/15388.feature

diff --git a/changelog.d/15388.feature b/changelog.d/15388.feature
new file mode 100644
index 000000000000..6cc55cafa2e1
--- /dev/null
+++ b/changelog.d/15388.feature
@@ -0,0 +1 @@
+Stable support for [MSC3882](https://github.com/matrix-org/matrix-spec-proposals/pull/3882) to allow an existing device/session to generate a login token for use on a new device/session.
\ No newline at end of file diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 5ede6d0a827a..0cf6e075ff11 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -2570,7 +2570,50 @@ Example configuration: ```yaml nonrefreshable_access_token_lifetime: 24h ``` +--- +### `ui_auth` + +The amount of time to allow a user-interactive authentication session to be active. +This defaults to 0, meaning the user is queried for their credentials +before every action, but this can be overridden to allow a single +validation to be re-used. This weakens the protections afforded by +the user-interactive authentication process, by allowing for multiple +(and potentially different) operations to use the same validation session. + +This is ignored for potentially "dangerous" operations (including +deactivating an account, modifying an account password, adding a 3PID, +and minting additional login tokens). + +Use the `session_timeout` sub-option here to change the time allowed for credential validation. + +Example configuration: +```yaml +ui_auth: + session_timeout: "15s" +``` +--- +### `login_via_existing_session` + +Matrix supports the ability of an existing session to mint a login token for +another client. + +Synapse disables this by default as it has security ramifications -- a malicious +client could use the mechanism to spawn more than one session. + +The duration of time the generated token is valid for can be configured with the +`token_timeout` sub-option. + +User-interactive authentication is required when this is enabled unless the +`require_ui_auth` sub-option is set to `False`. + +Example configuration: +```yaml +login_via_existing_session: + enabled: true + require_ui_auth: false + token_timeout: "5m" +``` --- ## Metrics Config options related to metrics. @@ -3415,28 +3458,6 @@ password_config: require_uppercase: true ``` --- -### `ui_auth` - -The amount of time to allow a user-interactive authentication session to be active. - -This defaults to 0, meaning the user is queried for their credentials -before every action, but this can be overridden to allow a single -validation to be re-used. This weakens the protections afforded by -the user-interactive authentication process, by allowing for multiple -(and potentially different) operations to use the same validation session. - -This is ignored for potentially "dangerous" operations (including -deactivating an account, modifying an account password, and -adding a 3PID). - -Use the `session_timeout` sub-option here to change the time allowed for credential validation. - -Example configuration: -```yaml -ui_auth: - session_timeout: "15s" -``` ---- ## Push Configuration settings related to push notifications diff --git a/synapse/config/auth.py b/synapse/config/auth.py index 12e853980e3f..c7ab428f28af 100644 --- a/synapse/config/auth.py +++ b/synapse/config/auth.py @@ -60,3 +60,13 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: self.ui_auth_session_timeout = self.parse_duration( ui_auth.get("session_timeout", 0) ) + + # Logging in with an existing session. 
+ login_via_existing = config.get("login_via_existing_session", {}) + self.login_via_existing_enabled = login_via_existing.get("enabled", False) + self.login_via_existing_require_ui_auth = login_via_existing.get( + "require_ui_auth", True + ) + self.login_via_existing_token_timeout = self.parse_duration( + login_via_existing.get("token_timeout", "5m") + ) diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index 1d189b2e26cf..a9e002cf08f8 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -192,10 +192,10 @@ def check_config_conflicts(self, root: RootConfig) -> None: ("captcha", "enable_registration_captcha"), ) - if root.experimental.msc3882_enabled: + if root.auth.login_via_existing_enabled: raise ConfigError( - "MSC3882 cannot be enabled when OAuth delegation is enabled", - ("experimental_features", "msc3882_enabled"), + "Login via existing session cannot be enabled when OAuth delegation is enabled", + ("login_via_existing_session", "enabled"), ) if root.registration.refresh_token_lifetime: @@ -319,13 +319,6 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: # MSC3881: Remotely toggle push notifications for another client self.msc3881_enabled: bool = experimental.get("msc3881_enabled", False) - # MSC3882: Allow an existing session to sign in a new session - self.msc3882_enabled: bool = experimental.get("msc3882_enabled", False) - self.msc3882_ui_auth: bool = experimental.get("msc3882_ui_auth", True) - self.msc3882_token_timeout = self.parse_duration( - experimental.get("msc3882_token_timeout", "5m") - ) - # MSC3874: Filtering /messages with rel_types / not_rel_types. self.msc3874_enabled: bool = experimental.get("msc3874_enabled", False) diff --git a/synapse/rest/client/capabilities.py b/synapse/rest/client/capabilities.py index 0dbf8f6818ab..3154b9f77e95 100644 --- a/synapse/rest/client/capabilities.py +++ b/synapse/rest/client/capabilities.py @@ -65,6 +65,9 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: "m.3pid_changes": { "enabled": self.config.registration.enable_3pid_changes }, + "m.get_login_token": { + "enabled": self.config.auth.login_via_existing_enabled, + }, } } diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index d4dc2462b9c0..6493b00bb803 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -104,6 +104,9 @@ def __init__(self, hs: "HomeServer"): and hs.config.experimental.msc3866.require_approval_for_new_accounts ) + # Whether get login token is enabled. + self._get_login_token_enabled = hs.config.auth.login_via_existing_enabled + self.auth = hs.get_auth() self.clock = hs.get_clock() @@ -142,6 +145,9 @@ def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: # to SSO. flows.append({"type": LoginRestServlet.CAS_TYPE}) + # The login token flow requires m.login.token to be advertised. + support_login_token_flow = self._get_login_token_enabled + if self.cas_enabled or self.saml2_enabled or self.oidc_enabled: flows.append( { @@ -153,14 +159,23 @@ def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: } ) - # While it's valid for us to advertise this login type generally, - # synapse currently only gives out these tokens as part of the - # SSO login flow. - # Generally we don't want to advertise login flows that clients - # don't know how to implement, since they (currently) will always - # fall back to the fallback API if they don't understand one of the - # login flow types returned. 
- flows.append({"type": LoginRestServlet.TOKEN_TYPE}) + # SSO requires a login token to be generated, so we need to advertise that flow + support_login_token_flow = True + + # While it's valid for us to advertise this login type generally, + # synapse currently only gives out these tokens as part of the + # SSO login flow or as part of login via an existing session. + # + # Generally we don't want to advertise login flows that clients + # don't know how to implement, since they (currently) will always + # fall back to the fallback API if they don't understand one of the + # login flow types returned. + if support_login_token_flow: + tokenTypeFlow: Dict[str, Any] = {"type": LoginRestServlet.TOKEN_TYPE} + # If the login token flow is enabled advertise the get_login_token flag. + if self._get_login_token_enabled: + tokenTypeFlow["get_login_token"] = True + flows.append(tokenTypeFlow) flows.extend({"type": t} for t in self.auth_handler.get_supported_login_types()) diff --git a/synapse/rest/client/login_token_request.py b/synapse/rest/client/login_token_request.py index 43ea21d5e6ac..b1629f94a5f8 100644 --- a/synapse/rest/client/login_token_request.py +++ b/synapse/rest/client/login_token_request.py @@ -15,6 +15,7 @@ import logging from typing import TYPE_CHECKING, Tuple +from synapse.api.ratelimiting import Ratelimiter from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.http.site import SynapseRequest @@ -33,7 +34,7 @@ class LoginTokenRequestServlet(RestServlet): Request: - POST /login/token HTTP/1.1 + POST /login/get_token HTTP/1.1 Content-Type: application/json {} @@ -43,30 +44,45 @@ class LoginTokenRequestServlet(RestServlet): HTTP/1.1 200 OK { "login_token": "ABDEFGH", - "expires_in": 3600, + "expires_in_ms": 3600000, } """ - PATTERNS = client_patterns( - "/org.matrix.msc3882/login/token$", releases=[], v1=False, unstable=True - ) + PATTERNS = [ + *client_patterns( + "/login/get_token$", releases=["v1"], v1=False, unstable=False + ), + # TODO: this is no longer needed once unstable MSC3882 does not need to be supported: + *client_patterns( + "/org.matrix.msc3882/login/token$", releases=[], v1=False, unstable=True + ), + ] def __init__(self, hs: "HomeServer"): super().__init__() self.auth = hs.get_auth() - self.store = hs.get_datastores().main - self.clock = hs.get_clock() - self.server_name = hs.config.server.server_name + self._main_store = hs.get_datastores().main self.auth_handler = hs.get_auth_handler() - self.token_timeout = hs.config.experimental.msc3882_token_timeout - self.ui_auth = hs.config.experimental.msc3882_ui_auth + self.token_timeout = hs.config.auth.login_via_existing_token_timeout + self._require_ui_auth = hs.config.auth.login_via_existing_require_ui_auth + + # Ratelimit aggressively to a maxmimum of 1 request per minute. + # + # This endpoint can be used to spawn additional sessions and could be + # abused by a malicious client to create many sessions. 
+ self._ratelimiter = Ratelimiter( + store=self._main_store, + clock=hs.get_clock(), + rate_hz=1 / 60, + burst_count=1, + ) @interactive_auth_handler async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) body = parse_json_object_from_request(request) - if self.ui_auth: + if self._require_ui_auth: await self.auth_handler.validate_user_via_ui_auth( requester, request, @@ -75,9 +91,12 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: can_skip_ui_auth=False, # Don't allow skipping of UI auth ) + # Ensure that this endpoint isn't being used too often. (Ensure this is + # done *after* UI auth.) + await self._ratelimiter.ratelimit(None, requester.user.to_string().lower()) + login_token = await self.auth_handler.create_login_token_for_user_id( user_id=requester.user.to_string(), - auth_provider_id="org.matrix.msc3882.login_token_request", duration_ms=self.token_timeout, ) @@ -85,11 +104,13 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: 200, { "login_token": login_token, + # TODO: this is no longer needed once unstable MSC3882 does not need to be supported: "expires_in": self.token_timeout // 1000, + "expires_in_ms": self.token_timeout, }, ) def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - if hs.config.experimental.msc3882_enabled: + if hs.config.auth.login_via_existing_enabled: LoginTokenRequestServlet(hs).register(http_server) diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 32df054f56c3..547bf34df15e 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -113,8 +113,8 @@ def on_GET(self, request: Request) -> Tuple[int, JsonDict]: "fi.mau.msc2815": self.config.experimental.msc2815_enabled, # Adds a ping endpoint for appservices to check HS->AS connection "fi.mau.msc2659.stable": True, # TODO: remove when "v1.7" is added above - # Adds support for login token requests as per MSC3882 - "org.matrix.msc3882": self.config.experimental.msc3882_enabled, + # TODO: this is no longer needed once unstable MSC3882 does not need to be supported: + "org.matrix.msc3882": self.config.auth.login_via_existing_enabled, # Adds support for remotely enabling/disabling pushers, as per MSC3881 "org.matrix.msc3881": self.config.experimental.msc3881_enabled, # Adds support for filtering /messages by event relation. 
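The `Ratelimiter` constructed above with `rate_hz=1 / 60` and `burst_count=1` amounts to a token-bucket limit of one token mint per user per minute. The following is a rough, self-contained illustration of those semantics only; it is not Synapse's actual `Ratelimiter` implementation, which is asynchronous and backed by the datastore:

```python
import time


class TokenBucket:
    """Minimal token-bucket sketch mirroring rate_hz/burst_count semantics."""

    def __init__(self, rate_hz: float, burst_count: float) -> None:
        self.rate_hz = rate_hz          # tokens refilled per second
        self.burst_count = burst_count  # maximum tokens held at once
        self.tokens = burst_count
        self.last = time.monotonic()

    def allow(self) -> bool:
        now = time.monotonic()
        # Refill at rate_hz tokens per second, capped at burst_count.
        self.tokens = min(self.burst_count, self.tokens + (now - self.last) * self.rate_hz)
        self.last = now
        if self.tokens >= 1:
            self.tokens -= 1
            return True
        return False


# rate_hz=1 / 60, burst_count=1: at most one request per key per minute.
bucket = TokenBucket(rate_hz=1 / 60, burst_count=1)
assert bucket.allow() is True   # the first request is allowed
assert bucket.allow() is False  # an immediate retry is rejected
```

With `burst_count=1` there is no allowance for bursts at all, which matches the "aggressive" limit described in the comment above.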
diff --git a/tests/config/test_oauth_delegation.py b/tests/config/test_oauth_delegation.py index 2ead721b00fc..f57c813a581e 100644 --- a/tests/config/test_oauth_delegation.py +++ b/tests/config/test_oauth_delegation.py @@ -228,8 +228,8 @@ def test_jwt_auth_cannot_be_enabled(self) -> None: with self.assertRaises(ConfigError): self.parse_config() - def test_msc3882_auth_cannot_be_enabled(self) -> None: - self.config_dict["experimental_features"]["msc3882_enabled"] = True + def test_login_via_existing_session_cannot_be_enabled(self) -> None: + self.config_dict["login_via_existing_session"] = {"enabled": True} with self.assertRaises(ConfigError): self.parse_config() diff --git a/tests/rest/client/test_capabilities.py b/tests/rest/client/test_capabilities.py index c16e8d43f419..cf23430f6ad4 100644 --- a/tests/rest/client/test_capabilities.py +++ b/tests/rest/client/test_capabilities.py @@ -186,3 +186,31 @@ def test_get_does_include_msc3244_fields_when_enabled(self) -> None: self.assertGreater(len(details["support"]), 0) for room_version in details["support"]: self.assertTrue(room_version in KNOWN_ROOM_VERSIONS, str(room_version)) + + def test_get_get_token_login_fields_when_disabled(self) -> None: + """By default login via an existing session is disabled.""" + access_token = self.get_success( + self.auth_handler.create_access_token_for_user_id( + self.user, device_id=None, valid_until_ms=None + ) + ) + + channel = self.make_request("GET", self.url, access_token=access_token) + capabilities = channel.json_body["capabilities"] + + self.assertEqual(channel.code, HTTPStatus.OK) + self.assertFalse(capabilities["m.get_login_token"]["enabled"]) + + @override_config({"login_via_existing_session": {"enabled": True}}) + def test_get_get_token_login_fields_when_enabled(self) -> None: + access_token = self.get_success( + self.auth_handler.create_access_token_for_user_id( + self.user, device_id=None, valid_until_ms=None + ) + ) + + channel = self.make_request("GET", self.url, access_token=access_token) + capabilities = channel.json_body["capabilities"] + + self.assertEqual(channel.code, HTTPStatus.OK) + self.assertTrue(capabilities["m.get_login_token"]["enabled"]) diff --git a/tests/rest/client/test_login.py b/tests/rest/client/test_login.py index dc32982e22f8..f3c3bc69a97a 100644 --- a/tests/rest/client/test_login.py +++ b/tests/rest/client/test_login.py @@ -446,6 +446,29 @@ def test_require_approval(self) -> None: ApprovalNoticeMedium.NONE, channel.json_body["approval_notice_medium"] ) + def test_get_login_flows_with_login_via_existing_disabled(self) -> None: + """GET /login should return m.login.token without get_login_token""" + channel = self.make_request("GET", "/_matrix/client/r0/login") + self.assertEqual(channel.code, 200, channel.result) + + flows = {flow["type"]: flow for flow in channel.json_body["flows"]} + self.assertNotIn("m.login.token", flows) + + @override_config({"login_via_existing_session": {"enabled": True}}) + def test_get_login_flows_with_login_via_existing_enabled(self) -> None: + """GET /login should return m.login.token with get_login_token true""" + channel = self.make_request("GET", "/_matrix/client/r0/login") + self.assertEqual(channel.code, 200, channel.result) + + self.assertCountEqual( + channel.json_body["flows"], + [ + {"type": "m.login.token", "get_login_token": True}, + {"type": "m.login.password"}, + {"type": "m.login.application_service"}, + ], + ) + @skip_unless(has_saml2 and HAS_OIDC, "Requires SAML2 and OIDC") class MultiSSOTestCase(unittest.HomeserverTestCase): 
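Taken together, the capability flag (`m.get_login_token`), the `get_login_token: true` marker on the advertised `m.login.token` flow, and the new stable endpoint give a complete client-side flow. As an end-to-end sketch using the `requests` library (the homeserver URL and access token are placeholders, and `require_ui_auth: false` is assumed so that no user-interactive auth step is needed):

```python
import requests

HOMESERVER = "https://example.com"     # placeholder homeserver base URL
ACCESS_TOKEN = "syt_existing_session"  # placeholder token for the existing session

# 1. Ask the server to mint a short-lived login token for this account.
resp = requests.post(
    f"{HOMESERVER}/_matrix/client/v1/login/get_token",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    json={},
)
resp.raise_for_status()
body = resp.json()
login_token = body["login_token"]      # e.g. "ABDEFGH"
expires_in_ms = body["expires_in_ms"]  # e.g. 300000 (five minutes by default)

# 2. The new device redeems the token via the standard m.login.token flow.
resp = requests.post(
    f"{HOMESERVER}/_matrix/client/v3/login",
    json={"type": "m.login.token", "token": login_token},
)
resp.raise_for_status()
new_session = resp.json()  # contains user_id, access_token, device_id, ...
```

The second request is the ordinary spec-defined `m.login.token` login; only the first request is new in this patch.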
diff --git a/tests/rest/client/test_login_token_request.py b/tests/rest/client/test_login_token_request.py index b8187db982bf..f05e619aa86d 100644 --- a/tests/rest/client/test_login_token_request.py +++ b/tests/rest/client/test_login_token_request.py @@ -15,14 +15,14 @@ from twisted.test.proto_helpers import MemoryReactor from synapse.rest import admin -from synapse.rest.client import login, login_token_request +from synapse.rest.client import login, login_token_request, versions from synapse.server import HomeServer from synapse.util import Clock from tests import unittest from tests.unittest import override_config -endpoint = "/_matrix/client/unstable/org.matrix.msc3882/login/token" +GET_TOKEN_ENDPOINT = "/_matrix/client/v1/login/get_token" class LoginTokenRequestServletTestCase(unittest.HomeserverTestCase): @@ -30,6 +30,7 @@ class LoginTokenRequestServletTestCase(unittest.HomeserverTestCase): login.register_servlets, admin.register_servlets, login_token_request.register_servlets, + versions.register_servlets, # TODO: remove once unstable revision 0 support is removed ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: @@ -46,26 +47,26 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.password = "password" def test_disabled(self) -> None: - channel = self.make_request("POST", endpoint, {}, access_token=None) + channel = self.make_request("POST", GET_TOKEN_ENDPOINT, {}, access_token=None) self.assertEqual(channel.code, 404) self.register_user(self.user, self.password) token = self.login(self.user, self.password) - channel = self.make_request("POST", endpoint, {}, access_token=token) + channel = self.make_request("POST", GET_TOKEN_ENDPOINT, {}, access_token=token) self.assertEqual(channel.code, 404) - @override_config({"experimental_features": {"msc3882_enabled": True}}) + @override_config({"login_via_existing_session": {"enabled": True}}) def test_require_auth(self) -> None: - channel = self.make_request("POST", endpoint, {}, access_token=None) + channel = self.make_request("POST", GET_TOKEN_ENDPOINT, {}, access_token=None) self.assertEqual(channel.code, 401) - @override_config({"experimental_features": {"msc3882_enabled": True}}) + @override_config({"login_via_existing_session": {"enabled": True}}) def test_uia_on(self) -> None: user_id = self.register_user(self.user, self.password) token = self.login(self.user, self.password) - channel = self.make_request("POST", endpoint, {}, access_token=token) + channel = self.make_request("POST", GET_TOKEN_ENDPOINT, {}, access_token=token) self.assertEqual(channel.code, 401) self.assertIn({"stages": ["m.login.password"]}, channel.json_body["flows"]) @@ -80,9 +81,9 @@ def test_uia_on(self) -> None: }, } - channel = self.make_request("POST", endpoint, uia, access_token=token) + channel = self.make_request("POST", GET_TOKEN_ENDPOINT, uia, access_token=token) self.assertEqual(channel.code, 200) - self.assertEqual(channel.json_body["expires_in"], 300) + self.assertEqual(channel.json_body["expires_in_ms"], 300000) login_token = channel.json_body["login_token"] @@ -95,15 +96,15 @@ def test_uia_on(self) -> None: self.assertEqual(channel.json_body["user_id"], user_id) @override_config( - {"experimental_features": {"msc3882_enabled": True, "msc3882_ui_auth": False}} + {"login_via_existing_session": {"enabled": True, "require_ui_auth": False}} ) def test_uia_off(self) -> None: user_id = self.register_user(self.user, self.password) token = self.login(self.user, self.password) - channel = 
self.make_request("POST", endpoint, {}, access_token=token)
+        channel = self.make_request("POST", GET_TOKEN_ENDPOINT, {}, access_token=token)
         self.assertEqual(channel.code, 200)
-        self.assertEqual(channel.json_body["expires_in"], 300)
+        self.assertEqual(channel.json_body["expires_in_ms"], 300000)
 
         login_token = channel.json_body["login_token"]
 
@@ -117,10 +118,10 @@ def test_uia_off(self) -> None:
 
     @override_config(
         {
-            "experimental_features": {
-                "msc3882_enabled": True,
-                "msc3882_ui_auth": False,
-                "msc3882_token_timeout": "15s",
+            "login_via_existing_session": {
+                "enabled": True,
+                "require_ui_auth": False,
+                "token_timeout": "15s",
             }
         }
     )
@@ -128,6 +129,40 @@ def test_expires_in(self) -> None:
         self.register_user(self.user, self.password)
         token = self.login(self.user, self.password)
 
-        channel = self.make_request("POST", endpoint, {}, access_token=token)
+        channel = self.make_request("POST", GET_TOKEN_ENDPOINT, {}, access_token=token)
+        self.assertEqual(channel.code, 200)
+        self.assertEqual(channel.json_body["expires_in_ms"], 15000)
+
+    @override_config(
+        {
+            "login_via_existing_session": {
+                "enabled": True,
+                "require_ui_auth": False,
+                "token_timeout": "15s",
+            }
+        }
+    )
+    def test_unstable_support(self) -> None:
+        # TODO: remove once support for unstable MSC3882 is no longer needed
+
+        # check the feature is advertised in the versions response:
+        channel = self.make_request(
+            "GET", "/_matrix/client/versions", {}, access_token=None
+        )
+        self.assertEqual(channel.code, 200)
+        self.assertEqual(
+            channel.json_body["unstable_features"]["org.matrix.msc3882"], True
+        )
+
+        self.register_user(self.user, self.password)
+        token = self.login(self.user, self.password)
+
+        # check the feature is available via the unstable endpoint and returns
+        # an expires_in value in seconds
+        channel = self.make_request(
+            "POST",
+            "/_matrix/client/unstable/org.matrix.msc3882/login/token",
+            {},
+            access_token=token,
+        )
         self.assertEqual(channel.code, 200)
         self.assertEqual(channel.json_body["expires_in"], 15)

From 5ed0e8c61f6b46289fdc5609e8e573b67c2c1982 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Thu, 1 Jun 2023 14:25:20 +0100
Subject: [PATCH 32/75] Cache requests for user's devices from federation
 (#15675)

This should mitigate the issue where lots of different servers request the
same user's devices all at once.
---
 changelog.d/15675.misc                        |  1 +
 synapse/storage/databases/main/devices.py     |  4 ++
 .../storage/databases/main/end_to_end_keys.py | 67 ++++++++++++++++++-
 3 files changed, 70 insertions(+), 2 deletions(-)
 create mode 100644 changelog.d/15675.misc

diff --git a/changelog.d/15675.misc b/changelog.d/15675.misc
new file mode 100644
index 000000000000..05538fdbeff9
--- /dev/null
+++ b/changelog.d/15675.misc
@@ -0,0 +1 @@
+Cache requests for user's devices over federation.
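The diffs that follow pair a `@cached` per-user lookup with a freshness check against the device-list stream, since cache invalidation can lag behind another worker persisting a new device. Below is a self-contained toy model of that pattern, not Synapse's actual classes; names like `StreamCheckedCache` are invented for illustration.

```python
from typing import Callable, Dict, List, Tuple


class StreamCheckedCache:
    """Toy model: cached per-user results are only trusted if no writes
    landed at or after the stream position the caller is answering for."""

    def __init__(self) -> None:
        self._cache: Dict[str, List[dict]] = {}
        # Stand-in for the `device_lists_stream` table: (stream_id, user_id)
        # rows appended on every device change.
        self.stream: List[Tuple[int, str]] = []

    def invalidate(self, user_id: str) -> None:
        self._cache.pop(user_id, None)

    def get(
        self,
        user_id: str,
        now_stream_id: int,
        compute: Callable[[str], List[dict]],
    ) -> List[dict]:
        cached = self._cache.get(user_id)
        if cached is not None:
            # Invalidation can arrive after persistence on another worker,
            # so double-check: any row at or after `now_stream_id` means the
            # cached value may be missing a device.
            stale = any(
                sid >= now_stream_id and uid == user_id
                for sid, uid in self.stream
            )
            if not stale:
                return cached
            self.invalidate(user_id)
        result = compute(user_id)
        self._cache[user_id] = result
        return result
```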
diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index a67fdb3c22ce..f677d048aafb 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -1941,6 +1941,10 @@ def _add_device_change_to_stream_txn( user_id, stream_ids[-1], ) + txn.call_after( + self._get_e2e_device_keys_for_federation_query_inner.invalidate, + (user_id,), + ) min_stream_id = stream_ids[0] diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py index 4bc391f21316..91ae9c457d78 100644 --- a/synapse/storage/databases/main/end_to_end_keys.py +++ b/synapse/storage/databases/main/end_to_end_keys.py @@ -16,6 +16,7 @@ import abc from typing import ( TYPE_CHECKING, + Any, Collection, Dict, Iterable, @@ -39,6 +40,7 @@ TransactionUnusedFallbackKeys, ) from synapse.logging.opentracing import log_kv, set_tag, trace +from synapse.replication.tcp.streams._base import DeviceListsStream from synapse.storage._base import SQLBaseStore, db_to_json from synapse.storage.database import ( DatabasePool, @@ -104,6 +106,23 @@ def __init__( self.hs.config.federation.allow_device_name_lookup_over_federation ) + def process_replication_rows( + self, + stream_name: str, + instance_name: str, + token: int, + rows: Iterable[Any], + ) -> None: + if stream_name == DeviceListsStream.NAME: + for row in rows: + assert isinstance(row, DeviceListsStream.DeviceListsStreamRow) + if row.entity.startswith("@"): + self._get_e2e_device_keys_for_federation_query_inner.invalidate( + (row.entity,) + ) + + super().process_replication_rows(stream_name, instance_name, token, rows) + async def get_e2e_device_keys_for_federation_query( self, user_id: str ) -> Tuple[int, List[JsonDict]]: @@ -114,6 +133,50 @@ async def get_e2e_device_keys_for_federation_query( """ now_stream_id = self.get_device_stream_token() + # We need to be careful with the caching here, as we need to always + # return *all* persisted devices, however there may be a lag between a + # new device being persisted and the cache being invalidated. + cached_results = ( + self._get_e2e_device_keys_for_federation_query_inner.cache.get_immediate( + user_id, None + ) + ) + if cached_results is not None: + # Check that there have been no new devices added by another worker + # after the cache. This should be quick as there should be few rows + # with a higher stream ordering. + # + # Note that we invalidate based on the device stream, so we only + # have to check for potential invalidations after the + # `now_stream_id`. + sql = """ + SELECT user_id FROM device_lists_stream + WHERE stream_id >= ? AND user_id = ? + """ + rows = await self.db_pool.execute( + "get_e2e_device_keys_for_federation_query_check", + None, + sql, + now_stream_id, + user_id, + ) + if not rows: + # No new rows, so cache is still valid. + return now_stream_id, cached_results + + # There has, so let's invalidate the cache and run the query. 
+ self._get_e2e_device_keys_for_federation_query_inner.invalidate((user_id,)) + + results = await self._get_e2e_device_keys_for_federation_query_inner(user_id) + + return now_stream_id, results + + @cached(iterable=True) + async def _get_e2e_device_keys_for_federation_query_inner( + self, user_id: str + ) -> List[JsonDict]: + """Get all devices (with any device keys) for a user""" + devices = await self.get_e2e_device_keys_and_signatures([(user_id, None)]) if devices: @@ -134,9 +197,9 @@ async def get_e2e_device_keys_for_federation_query( results.append(result) - return now_stream_id, results + return results - return now_stream_id, [] + return [] @trace @cancellable From 30a5076da8ad776c150ad2745b5f34b4446012e0 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Thu, 1 Jun 2023 21:27:18 -0500 Subject: [PATCH 33/75] Log when events are (unexpectedly) filtered out of responses in tests (#14213) See https://github.com/matrix-org/synapse/pull/14095#discussion_r990335492 This is useful because when see that a relevant event is an `outlier` or `soft-failed`, then that's a good unexpected indicator explaining why it's not showing up. `filter_events_for_client` is used in `/sync`, `/messages`, `/context` which are all common end-to-end assertion touch points (also notifications, relations). --- changelog.d/14213.misc | 1 + docker/README.md | 3 ++- docker/conf/log.config | 30 +++++++++++++++++++++------ docker/configure_workers_and_start.py | 3 +++ scripts-dev/complement.sh | 4 ++++ synapse/visibility.py | 14 ++++++------- tests/test_utils/logging_setup.py | 12 +++++++++++ 7 files changed, 53 insertions(+), 14 deletions(-) create mode 100644 changelog.d/14213.misc diff --git a/changelog.d/14213.misc b/changelog.d/14213.misc new file mode 100644 index 000000000000..b0689f3d1574 --- /dev/null +++ b/changelog.d/14213.misc @@ -0,0 +1 @@ +Log when events are (maybe unexpectedly) filtered out of responses in tests. diff --git a/docker/README.md b/docker/README.md index eda3221c2339..08372e95c647 100644 --- a/docker/README.md +++ b/docker/README.md @@ -73,7 +73,8 @@ The following environment variables are supported in `generate` mode: will log sensitive information such as access tokens. This should not be needed unless you are a developer attempting to debug something particularly tricky. - +* `SYNAPSE_LOG_TESTING`: if set, Synapse will log additional information useful + for testing. ## Postgres diff --git a/docker/conf/log.config b/docker/conf/log.config index 90b5179838ca..577232120204 100644 --- a/docker/conf/log.config +++ b/docker/conf/log.config @@ -49,17 +49,35 @@ handlers: class: logging.StreamHandler formatter: precise -{% if not SYNAPSE_LOG_SENSITIVE %} -{# - If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO - so that DEBUG entries (containing sensitive information) are not emitted. -#} loggers: + # This is just here so we can leave `loggers` in the config regardless of whether + # we configure other loggers below (avoid empty yaml dict error). + _placeholder: + level: "INFO" + + {% if not SYNAPSE_LOG_SENSITIVE %} + {# + If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO + so that DEBUG entries (containing sensitive information) are not emitted. + #} synapse.storage.SQL: # beware: increasing this to DEBUG will make synapse log sensitive # information such as access tokens. 
level: INFO -{% endif %} + {% endif %} + + {% if SYNAPSE_LOG_TESTING %} + {# + If Synapse is under test, log a few more useful things for a developer + attempting to debug something particularly tricky. + + With `synapse.visibility.filtered_event_debug`, it logs when events are (maybe + unexpectedly) filtered out of responses in tests. It's just nice to be able to + look at the CI log and figure out why an event isn't being returned. + #} + synapse.visibility.filtered_event_debug: + level: DEBUG + {% endif %} root: level: {{ SYNAPSE_LOG_LEVEL or "INFO" }} diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py index 79b5b8739764..87a740e3d433 100755 --- a/docker/configure_workers_and_start.py +++ b/docker/configure_workers_and_start.py @@ -40,6 +40,8 @@ # log level. INFO is the default. # * SYNAPSE_LOG_SENSITIVE: If unset, SQL and SQL values won't be logged, # regardless of the SYNAPSE_LOG_LEVEL setting. +# * SYNAPSE_LOG_TESTING: if set, Synapse will log additional information useful +# for testing. # # NOTE: According to Complement's ENTRYPOINT expectations for a homeserver image (as defined # in the project's README), this script may be run multiple times, and functionality should @@ -947,6 +949,7 @@ def generate_worker_log_config( extra_log_template_args["SYNAPSE_LOG_SENSITIVE"] = environ.get( "SYNAPSE_LOG_SENSITIVE" ) + extra_log_template_args["SYNAPSE_LOG_TESTING"] = environ.get("SYNAPSE_LOG_TESTING") # Render and write the file log_config_filepath = f"/conf/workers/{worker_name}.log.config" diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh index cba2799f1589..131f26234ece 100755 --- a/scripts-dev/complement.sh +++ b/scripts-dev/complement.sh @@ -269,6 +269,10 @@ if [[ -n "$SYNAPSE_TEST_LOG_LEVEL" ]]; then export PASS_SYNAPSE_LOG_SENSITIVE=1 fi +# Log a few more useful things for a developer attempting to debug something +# particularly tricky. +export PASS_SYNAPSE_LOG_TESTING=1 + # Run the tests! 
echo "Images built; running complement" cd "$COMPLEMENT_DIR" diff --git a/synapse/visibility.py b/synapse/visibility.py index 468e22f8f64e..fc71dc92a4e7 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -41,7 +41,7 @@ from synapse.util import Clock logger = logging.getLogger(__name__) - +filtered_event_logger = logging.getLogger("synapse.visibility.filtered_event_debug") VISIBILITY_PRIORITY = ( HistoryVisibility.WORLD_READABLE, @@ -97,8 +97,8 @@ async def filter_events_for_client( events_before_filtering = events events = [e for e in events if not e.internal_metadata.is_soft_failed()] if len(events_before_filtering) != len(events): - if logger.isEnabledFor(logging.DEBUG): - logger.debug( + if filtered_event_logger.isEnabledFor(logging.DEBUG): + filtered_event_logger.debug( "filter_events_for_client: Filtered out soft-failed events: Before=%s, After=%s", [event.event_id for event in events_before_filtering], [event.event_id for event in events], @@ -319,7 +319,7 @@ def _check_client_allowed_to_see_event( _check_filter_send_to_client(event, clock, retention_policy, sender_ignored) == _CheckFilter.DENIED ): - logger.debug( + filtered_event_logger.debug( "_check_client_allowed_to_see_event(event=%s): Filtered out event because `_check_filter_send_to_client` returned `_CheckFilter.DENIED`", event.event_id, ) @@ -341,7 +341,7 @@ def _check_client_allowed_to_see_event( ) return event - logger.debug( + filtered_event_logger.debug( "_check_client_allowed_to_see_event(event=%s): Filtered out event because it's an outlier", event.event_id, ) @@ -367,7 +367,7 @@ def _check_client_allowed_to_see_event( membership_result = _check_membership(user_id, event, visibility, state, is_peeking) if not membership_result.allowed: - logger.debug( + filtered_event_logger.debug( "_check_client_allowed_to_see_event(event=%s): Filtered out event because the user can't see the event because of their membership, membership_result.allowed=%s membership_result.joined=%s", event.event_id, membership_result.allowed, @@ -378,7 +378,7 @@ def _check_client_allowed_to_see_event( # If the sender has been erased and the user was not joined at the time, we # must only return the redacted form. if sender_erased and not membership_result.joined: - logger.debug( + filtered_event_logger.debug( "_check_client_allowed_to_see_event(event=%s): Returning pruned event because `sender_erased` and the user was not joined at the time", event.event_id, ) diff --git a/tests/test_utils/logging_setup.py b/tests/test_utils/logging_setup.py index c37f205ed064..199bb06a81a8 100644 --- a/tests/test_utils/logging_setup.py +++ b/tests/test_utils/logging_setup.py @@ -53,4 +53,16 @@ def setup_logging() -> None: log_level = os.environ.get("SYNAPSE_TEST_LOG_LEVEL", "ERROR") root_logger.setLevel(log_level) + # In order to not add noise by default (since we only log ERROR messages for trial + # tests as configured above), we only enable this for developers for looking for + # more INFO or DEBUG. + if root_logger.isEnabledFor(logging.INFO): + # Log when events are (maybe unexpectedly) filtered out of responses in tests. It's + # just nice to be able to look at the CI log and figure out why an event isn't being + # returned. + logging.getLogger("synapse.visibility.filtered_event_debug").setLevel( + logging.DEBUG + ) + + # Blow away the pyo3-log cache so that it reloads the configuration. 
reset_logging_config() From e0f2429d137c74059f5b7f151297e28dbfd82d48 Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Fri, 2 Jun 2023 15:13:50 +0200 Subject: [PATCH 34/75] Add a catch-all * to the supported relation types when redacting (#15705) This is an update to MSC3912 implementation --- changelog.d/15705.feature | 1 + synapse/handlers/relations.py | 16 ++- synapse/storage/databases/main/relations.py | 30 ++++++ tests/rest/client/test_redactions.py | 104 +++++++++++++++++++- 4 files changed, 143 insertions(+), 8 deletions(-) create mode 100644 changelog.d/15705.feature diff --git a/changelog.d/15705.feature b/changelog.d/15705.feature new file mode 100644 index 000000000000..e3cbb5a12e28 --- /dev/null +++ b/changelog.d/15705.feature @@ -0,0 +1 @@ +Add a catch-all * to the supported relation types when redacting an event and its related events. This is an update to [MSC3912](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) implementation. diff --git a/synapse/handlers/relations.py b/synapse/handlers/relations.py index 48246351625a..db97f7aedee6 100644 --- a/synapse/handlers/relations.py +++ b/synapse/handlers/relations.py @@ -205,16 +205,22 @@ async def redact_events_related_to( event_id: The event IDs to look and redact relations of. initial_redaction_event: The redaction for the event referred to by event_id. - relation_types: The types of relations to look for. + relation_types: The types of relations to look for. If "*" is in the list, + all related events will be redacted regardless of the type. Raises: ShadowBanError if the requester is shadow-banned """ - related_event_ids = ( - await self._main_store.get_all_relations_for_event_with_types( - event_id, relation_types + if "*" in relation_types: + related_event_ids = await self._main_store.get_all_relations_for_event( + event_id + ) + else: + related_event_ids = ( + await self._main_store.get_all_relations_for_event_with_types( + event_id, relation_types + ) ) - ) for related_event_id in related_event_ids: try: diff --git a/synapse/storage/databases/main/relations.py b/synapse/storage/databases/main/relations.py index 4a6c6c724d33..96908f14ba35 100644 --- a/synapse/storage/databases/main/relations.py +++ b/synapse/storage/databases/main/relations.py @@ -365,6 +365,36 @@ def get_all_relation_ids_for_event_with_types_txn( func=get_all_relation_ids_for_event_with_types_txn, ) + async def get_all_relations_for_event( + self, + event_id: str, + ) -> List[str]: + """Get the event IDs of all events that have a relation to the given event. + + Args: + event_id: The event for which to look for related events. + + Returns: + A list of the IDs of the events that relate to the given event. + """ + + def get_all_relation_ids_for_event_txn( + txn: LoggingTransaction, + ) -> List[str]: + rows = self.db_pool.simple_select_list_txn( + txn=txn, + table="event_relations", + keyvalues={"relates_to_id": event_id}, + retcols=["event_id"], + ) + + return [row["event_id"] for row in rows] + + return await self.db_pool.runInteraction( + desc="get_all_relation_ids_for_event", + func=get_all_relation_ids_for_event_txn, + ) + async def event_includes_relation(self, event_id: str) -> bool: """Check if the given event relates to another event. 
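With the catch-all in place, a client redacting under the unstable MSC3912 prefix can pass `"*"` instead of enumerating relation types. The request-body sketch below is hypothetical; the `org.matrix.msc3912.with_relations` field name is an assumption based on the MSC and on the tests' `with_relations` helper argument, and may change while the proposal is unstable.

```python
# Hypothetical body for a redaction request under the unstable MSC3912
# prefix (field name assumed from the MSC, not confirmed by this patch).
redaction_content = {
    "reason": "spam",
    # "*" now matches every relation type, instead of listing, say,
    # ["m.replace", "m.thread", "m.annotation"] individually.
    "org.matrix.msc3912.with_relations": ["*"],
}
```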
diff --git a/tests/rest/client/test_redactions.py b/tests/rest/client/test_redactions.py index 84a60c0b0721..b43e95292c13 100644 --- a/tests/rest/client/test_redactions.py +++ b/tests/rest/client/test_redactions.py @@ -217,9 +217,9 @@ def test_redact_event_as_moderator_ratelimit(self) -> None: self._redact_event(self.mod_access_token, self.room_id, msg_id) @override_config({"experimental_features": {"msc3912_enabled": True}}) - def test_redact_relations(self) -> None: - """Tests that we can redact the relations of an event at the same time as the - event itself. + def test_redact_relations_with_types(self) -> None: + """Tests that we can redact the relations of an event of specific types + at the same time as the event itself. """ # Send a root event. res = self.helper.send_event( @@ -317,6 +317,104 @@ def test_redact_relations(self) -> None: ) self.assertNotIn("redacted_because", event_dict, event_dict) + @override_config({"experimental_features": {"msc3912_enabled": True}}) + def test_redact_all_relations(self) -> None: + """Tests that we can redact all the relations of an event at the same time as the + event itself. + """ + # Send a root event. + res = self.helper.send_event( + room_id=self.room_id, + type=EventTypes.Message, + content={"msgtype": "m.text", "body": "hello"}, + tok=self.mod_access_token, + ) + root_event_id = res["event_id"] + + # Send an edit to this root event. + res = self.helper.send_event( + room_id=self.room_id, + type=EventTypes.Message, + content={ + "body": " * hello world", + "m.new_content": { + "body": "hello world", + "msgtype": "m.text", + }, + "m.relates_to": { + "event_id": root_event_id, + "rel_type": RelationTypes.REPLACE, + }, + "msgtype": "m.text", + }, + tok=self.mod_access_token, + ) + edit_event_id = res["event_id"] + + # Also send a threaded message whose root is the same as the edit's. + res = self.helper.send_event( + room_id=self.room_id, + type=EventTypes.Message, + content={ + "msgtype": "m.text", + "body": "message 1", + "m.relates_to": { + "event_id": root_event_id, + "rel_type": RelationTypes.THREAD, + }, + }, + tok=self.mod_access_token, + ) + threaded_event_id = res["event_id"] + + # Also send a reaction, again with the same root. + res = self.helper.send_event( + room_id=self.room_id, + type=EventTypes.Reaction, + content={ + "m.relates_to": { + "rel_type": RelationTypes.ANNOTATION, + "event_id": root_event_id, + "key": "👍", + } + }, + tok=self.mod_access_token, + ) + reaction_event_id = res["event_id"] + + # Redact the root event, specifying that we also want to delete all events that + # relate to it. + self._redact_event( + self.mod_access_token, + self.room_id, + root_event_id, + with_relations=["*"], + ) + + # Check that the root event got redacted. + event_dict = self.helper.get_event( + self.room_id, root_event_id, self.mod_access_token + ) + self.assertIn("redacted_because", event_dict, event_dict) + + # Check that the edit got redacted. + event_dict = self.helper.get_event( + self.room_id, edit_event_id, self.mod_access_token + ) + self.assertIn("redacted_because", event_dict, event_dict) + + # Check that the threaded message got redacted. + event_dict = self.helper.get_event( + self.room_id, threaded_event_id, self.mod_access_token + ) + self.assertIn("redacted_because", event_dict, event_dict) + + # Check that the reaction got redacted. 
+ event_dict = self.helper.get_event( + self.room_id, reaction_event_id, self.mod_access_token + ) + self.assertIn("redacted_because", event_dict, event_dict) + @override_config({"experimental_features": {"msc3912_enabled": True}}) def test_redact_relations_no_perms(self) -> None: """Tests that, when redacting a message along with its relations, if not all From d0c4257f14addbf0c9072c2e34ae1c8294716ed5 Mon Sep 17 00:00:00 2001 From: Shay Date: Fri, 2 Jun 2023 17:24:13 -0700 Subject: [PATCH 35/75] `N + 3`: Read from column `full_user_id` rather than `user_id` of tables `profiles` and `user_filters` (#15649) --- changelog.d/15649.misc | 1 + synapse/api/filtering.py | 4 +- synapse/handlers/account_validity.py | 2 +- synapse/handlers/admin.py | 2 +- synapse/handlers/auth.py | 2 +- synapse/handlers/deactivate_account.py | 2 +- synapse/handlers/profile.py | 26 ++--- synapse/handlers/register.py | 2 +- synapse/module_api/__init__.py | 4 +- synapse/push/mailer.py | 2 +- synapse/rest/client/filter.py | 2 +- synapse/rest/client/sync.py | 2 +- synapse/storage/databases/main/filtering.py | 12 +-- synapse/storage/databases/main/profile.py | 12 +-- synapse/storage/schema/__init__.py | 5 +- .../78/01_validate_and_update_profiles.py | 92 ++++++++++++++++++ .../78/02_validate_and_update_user_filters.py | 95 +++++++++++++++++++ tests/api/test_filtering.py | 25 ++--- tests/handlers/test_profile.py | 28 ++---- tests/module_api/test_api.py | 6 +- tests/rest/client/test_filter.py | 4 +- tests/storage/test_profile.py | 17 +--- 22 files changed, 252 insertions(+), 95 deletions(-) create mode 100644 changelog.d/15649.misc create mode 100644 synapse/storage/schema/main/delta/78/01_validate_and_update_profiles.py create mode 100644 synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py diff --git a/changelog.d/15649.misc b/changelog.d/15649.misc new file mode 100644 index 000000000000..fca38abe0f60 --- /dev/null +++ b/changelog.d/15649.misc @@ -0,0 +1 @@ +Read from column `full_user_id` rather than `user_id` of tables `profiles` and `user_filters`. 
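The patch below switches reads over to `full_user_id`, and the two new delta scripts backfill any rows the background update missed before validating the NOT NULL constraint. The backfill itself is plain string assembly; here is a toy restatement in Python with illustrative data and an invented `to_full_user_id` helper.

```python
from typing import List, Optional, Tuple


def to_full_user_id(localpart: str, hostname: str) -> str:
    # What `'@' || user_id || ':' || ?` computes in the delta scripts below.
    return f"@{localpart}:{hostname}"


# Rows with a missing full_user_id get fixed up in one pass (the real
# migration does this in SQL, not in Python):
rows: List[Tuple[str, Optional[str]]] = [
    ("alice", None),
    ("bob", "@bob:example.org"),
]
fixed = [(lp, full or to_full_user_id(lp, "example.org")) for lp, full in rows]
assert fixed[0] == ("alice", "@alice:example.org")
```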
diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index 82aeef8d1913..0995ecbe832a 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -152,9 +152,9 @@ def __init__(self, hs: "HomeServer"): self.DEFAULT_FILTER_COLLECTION = FilterCollection(hs, {}) async def get_user_filter( - self, user_localpart: str, filter_id: Union[int, str] + self, user_id: UserID, filter_id: Union[int, str] ) -> "FilterCollection": - result = await self.store.get_user_filter(user_localpart, filter_id) + result = await self.store.get_user_filter(user_id, filter_id) return FilterCollection(self._hs, result) def add_user_filter(self, user_id: UserID, user_filter: JsonDict) -> Awaitable[int]: diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py index 4aa4ebf7e4a8..f1a7a05df6bc 100644 --- a/synapse/handlers/account_validity.py +++ b/synapse/handlers/account_validity.py @@ -164,7 +164,7 @@ async def _send_renewal_email(self, user_id: str, expiration_ts: int) -> None: try: user_display_name = await self.store.get_profile_displayname( - UserID.from_string(user_id).localpart + UserID.from_string(user_id) ) if user_display_name is None: user_display_name = user_id diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py index b06f25b03c21..119c7f838481 100644 --- a/synapse/handlers/admin.py +++ b/synapse/handlers/admin.py @@ -89,7 +89,7 @@ async def get_user(self, user: UserID) -> Optional[JsonDict]: } # Add additional user metadata - profile = await self._store.get_profileinfo(user.localpart) + profile = await self._store.get_profileinfo(user) threepids = await self._store.user_get_threepids(user.to_string()) external_ids = [ ({"auth_provider": auth_provider, "external_id": external_id}) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 4f986d90cbd9..59ecafa6a094 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -1759,7 +1759,7 @@ async def complete_sso_login( return user_profile_data = await self.store.get_profileinfo( - UserID.from_string(registered_user_id).localpart + UserID.from_string(registered_user_id) ) # Store any extra attributes which will be passed in the login response. diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index f299b89a1b2f..67adeae6a7a6 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -297,5 +297,5 @@ async def activate_account(self, user_id: str) -> None: # Add the user to the directory, if necessary. Note that # this must be done after the user is re-activated, because # deactivated users are excluded from the user directory. 
- profile = await self.store.get_profileinfo(user.localpart) + profile = await self.store.get_profileinfo(user) await self.user_directory_handler.handle_local_profile_change(user_id, profile) diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index a9160c87e304..a7f8c5e636f8 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -67,7 +67,7 @@ async def get_profile(self, user_id: str, ignore_backoff: bool = True) -> JsonDi target_user = UserID.from_string(user_id) if self.hs.is_mine(target_user): - profileinfo = await self.store.get_profileinfo(target_user.localpart) + profileinfo = await self.store.get_profileinfo(target_user) if profileinfo.display_name is None: raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) @@ -99,9 +99,7 @@ async def get_profile(self, user_id: str, ignore_backoff: bool = True) -> JsonDi async def get_displayname(self, target_user: UserID) -> Optional[str]: if self.hs.is_mine(target_user): try: - displayname = await self.store.get_profile_displayname( - target_user.localpart - ) + displayname = await self.store.get_profile_displayname(target_user) except StoreError as e: if e.code == 404: raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) @@ -147,7 +145,7 @@ async def set_displayname( raise AuthError(400, "Cannot set another user's displayname") if not by_admin and not self.hs.config.registration.enable_set_displayname: - profile = await self.store.get_profileinfo(target_user.localpart) + profile = await self.store.get_profileinfo(target_user) if profile.display_name: raise SynapseError( 400, @@ -180,7 +178,7 @@ async def set_displayname( await self.store.set_profile_displayname(target_user, displayname_to_set) - profile = await self.store.get_profileinfo(target_user.localpart) + profile = await self.store.get_profileinfo(target_user) await self.user_directory_handler.handle_local_profile_change( target_user.to_string(), profile ) @@ -194,9 +192,7 @@ async def set_displayname( async def get_avatar_url(self, target_user: UserID) -> Optional[str]: if self.hs.is_mine(target_user): try: - avatar_url = await self.store.get_profile_avatar_url( - target_user.localpart - ) + avatar_url = await self.store.get_profile_avatar_url(target_user) except StoreError as e: if e.code == 404: raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) @@ -241,7 +237,7 @@ async def set_avatar_url( raise AuthError(400, "Cannot set another user's avatar_url") if not by_admin and not self.hs.config.registration.enable_set_avatar_url: - profile = await self.store.get_profileinfo(target_user.localpart) + profile = await self.store.get_profileinfo(target_user) if profile.avatar_url: raise SynapseError( 400, "Changing avatar is disabled on this server", Codes.FORBIDDEN @@ -272,7 +268,7 @@ async def set_avatar_url( await self.store.set_profile_avatar_url(target_user, avatar_url_to_set) - profile = await self.store.get_profileinfo(target_user.localpart) + profile = await self.store.get_profileinfo(target_user) await self.user_directory_handler.handle_local_profile_change( target_user.to_string(), profile ) @@ -369,14 +365,10 @@ async def on_profile_query(self, args: JsonDict) -> JsonDict: response = {} try: if just_field is None or just_field == "displayname": - response["displayname"] = await self.store.get_profile_displayname( - user.localpart - ) + response["displayname"] = await self.store.get_profile_displayname(user) if just_field is None or just_field == "avatar_url": - response["avatar_url"] = await 
self.store.get_profile_avatar_url( - user.localpart - ) + response["avatar_url"] = await self.store.get_profile_avatar_url(user) except StoreError as e: if e.code == 404: raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index c80946c2e976..a2d3f03061fc 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -315,7 +315,7 @@ async def register_user( approved=approved, ) - profile = await self.store.get_profileinfo(localpart) + profile = await self.store.get_profileinfo(user) await self.user_directory_handler.handle_local_profile_change( user_id, profile ) diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index a8d6224a4528..84b2aef62086 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -655,7 +655,9 @@ async def get_profile_for_user(self, localpart: str) -> ProfileInfo: Returns: The profile information (i.e. display name and avatar URL). """ - return await self._store.get_profileinfo(localpart) + server_name = self._hs.hostname + user_id = UserID.from_string(f"@{localpart}:{server_name}") + return await self._store.get_profileinfo(user_id) async def get_threepids_for_user(self, user_id: str) -> List[Dict[str, str]]: """Look up the threepids (email addresses and phone numbers) associated with the diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index 491a09b71d54..79e0627b6a66 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -247,7 +247,7 @@ async def send_notification_mail( try: user_display_name = await self.store.get_profile_displayname( - UserID.from_string(user_id).localpart + UserID.from_string(user_id) ) if user_display_name is None: user_display_name = user_id diff --git a/synapse/rest/client/filter.py b/synapse/rest/client/filter.py index 04561f36d7a1..5da1e511a281 100644 --- a/synapse/rest/client/filter.py +++ b/synapse/rest/client/filter.py @@ -58,7 +58,7 @@ async def on_GET( try: filter_collection = await self.filtering.get_user_filter( - user_localpart=target_user.localpart, filter_id=filter_id_int + user_id=target_user, filter_id=filter_id_int ) except StoreError as e: if e.code != 404: diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index 03b05789456b..d7854ed4fd9d 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -178,7 +178,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: else: try: filter_collection = await self.filtering.get_user_filter( - user.localpart, filter_id + user, filter_id ) except StoreError as err: if err.code != 404: diff --git a/synapse/storage/databases/main/filtering.py b/synapse/storage/databases/main/filtering.py index f777777cbf48..fff417f9e3bb 100644 --- a/synapse/storage/databases/main/filtering.py +++ b/synapse/storage/databases/main/filtering.py @@ -145,7 +145,7 @@ def _final_batch(txn: LoggingTransaction, lower_bound_id: str) -> None: @cached(num_args=2) async def get_user_filter( - self, user_localpart: str, filter_id: Union[int, str] + self, user_id: UserID, filter_id: Union[int, str] ) -> JsonDict: # filter_id is BIGINT UNSIGNED, so if it isn't a number, fail # with a coherent error message rather than 500 M_UNKNOWN. 
@@ -156,7 +156,7 @@ async def get_user_filter( def_json = await self.db_pool.simple_select_one_onecol( table="user_filters", - keyvalues={"user_id": user_localpart, "filter_id": filter_id}, + keyvalues={"full_user_id": user_id.to_string(), "filter_id": filter_id}, retcol="filter_json", allow_none=False, desc="get_user_filter", @@ -172,15 +172,15 @@ async def add_user_filter(self, user_id: UserID, user_filter: JsonDict) -> int: def _do_txn(txn: LoggingTransaction) -> int: sql = ( "SELECT filter_id FROM user_filters " - "WHERE user_id = ? AND filter_json = ?" + "WHERE full_user_id = ? AND filter_json = ?" ) - txn.execute(sql, (user_id.localpart, bytearray(def_json))) + txn.execute(sql, (user_id.to_string(), bytearray(def_json))) filter_id_response = txn.fetchone() if filter_id_response is not None: return filter_id_response[0] - sql = "SELECT MAX(filter_id) FROM user_filters WHERE user_id = ?" - txn.execute(sql, (user_id.localpart,)) + sql = "SELECT MAX(filter_id) FROM user_filters WHERE full_user_id = ?" + txn.execute(sql, (user_id.to_string(),)) max_id = cast(Tuple[Optional[int]], txn.fetchone())[0] if max_id is None: filter_id = 0 diff --git a/synapse/storage/databases/main/profile.py b/synapse/storage/databases/main/profile.py index 21d54c7a7a7e..3ba9cc88537a 100644 --- a/synapse/storage/databases/main/profile.py +++ b/synapse/storage/databases/main/profile.py @@ -137,11 +137,11 @@ def _final_batch(txn: LoggingTransaction, lower_bound_id: str) -> None: return 50 - async def get_profileinfo(self, user_localpart: str) -> ProfileInfo: + async def get_profileinfo(self, user_id: UserID) -> ProfileInfo: try: profile = await self.db_pool.simple_select_one( table="profiles", - keyvalues={"user_id": user_localpart}, + keyvalues={"full_user_id": user_id.to_string()}, retcols=("displayname", "avatar_url"), desc="get_profileinfo", ) @@ -156,18 +156,18 @@ async def get_profileinfo(self, user_localpart: str) -> ProfileInfo: avatar_url=profile["avatar_url"], display_name=profile["displayname"] ) - async def get_profile_displayname(self, user_localpart: str) -> Optional[str]: + async def get_profile_displayname(self, user_id: UserID) -> Optional[str]: return await self.db_pool.simple_select_one_onecol( table="profiles", - keyvalues={"user_id": user_localpart}, + keyvalues={"full_user_id": user_id.to_string()}, retcol="displayname", desc="get_profile_displayname", ) - async def get_profile_avatar_url(self, user_localpart: str) -> Optional[str]: + async def get_profile_avatar_url(self, user_id: UserID) -> Optional[str]: return await self.db_pool.simple_select_one_onecol( table="profiles", - keyvalues={"user_id": user_localpart}, + keyvalues={"full_user_id": user_id.to_string()}, retcol="avatar_url", desc="get_profile_avatar_url", ) diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index 5cc786f0303d..fc190a8b13cf 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-SCHEMA_VERSION = 77 # remember to update the list below when updating +SCHEMA_VERSION = 78 # remember to update the list below when updating """Represents the expectations made by the codebase about the database schema This should be incremented whenever the codebase changes its requirements on the @@ -103,6 +103,9 @@ Changes in SCHEMA_VERSION = 77 - (Postgres) Add NOT VALID CHECK (full_user_id IS NOT NULL) to tables profiles and user_filters + +Changes in SCHEMA_VERSION = 78 + - Validate check (full_user_id IS NOT NULL) on tables profiles and user_filters """ diff --git a/synapse/storage/schema/main/delta/78/01_validate_and_update_profiles.py b/synapse/storage/schema/main/delta/78/01_validate_and_update_profiles.py new file mode 100644 index 000000000000..8398d8f54882 --- /dev/null +++ b/synapse/storage/schema/main/delta/78/01_validate_and_update_profiles.py @@ -0,0 +1,92 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from synapse.config.homeserver import HomeServerConfig +from synapse.storage.database import LoggingTransaction +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine + + +def run_upgrade( + cur: LoggingTransaction, + database_engine: BaseDatabaseEngine, + config: HomeServerConfig, +) -> None: + """ + Part 3 of a multi-step migration to drop the column `user_id` and replace it with + `full_user_id`. See the database schema docs for more information on the full + migration steps. + """ + hostname = config.server.server_name + + if isinstance(database_engine, PostgresEngine): + # check if the constraint can be validated + check_sql = """ + SELECT user_id from profiles WHERE full_user_id IS NULL + """ + cur.execute(check_sql) + res = cur.fetchall() + + if res: + # there are rows the background job missed, finish them here before we validate the constraint + process_rows_sql = """ + UPDATE profiles + SET full_user_id = '@' || user_id || ? + WHERE user_id IN ( + SELECT user_id FROM profiles WHERE full_user_id IS NULL + ) + """ + cur.execute(process_rows_sql, (f":{hostname}",)) + + # Now we can validate + validate_sql = """ + ALTER TABLE profiles VALIDATE CONSTRAINT full_user_id_not_null + """ + cur.execute(validate_sql) + + else: + # in SQLite we need to rewrite the table to add the constraint. + # First drop any temporary table that might be here from a previous failed migration. + cur.execute("DROP TABLE IF EXISTS temp_profiles") + + create_sql = """ + CREATE TABLE temp_profiles ( + full_user_id text NOT NULL, + user_id text, + displayname text, + avatar_url text, + UNIQUE (full_user_id), + UNIQUE (user_id) + ) + """ + cur.execute(create_sql) + + copy_sql = """ + INSERT INTO temp_profiles ( + user_id, + displayname, + avatar_url, + full_user_id) + SELECT user_id, displayname, avatar_url, '@' || user_id || ':' || ? 
FROM profiles + """ + cur.execute(copy_sql, (f"{hostname}",)) + + drop_sql = """ + DROP TABLE profiles + """ + cur.execute(drop_sql) + + rename_sql = """ + ALTER TABLE temp_profiles RENAME to profiles + """ + cur.execute(rename_sql) diff --git a/synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py b/synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py new file mode 100644 index 000000000000..8ef63335e7c3 --- /dev/null +++ b/synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py @@ -0,0 +1,95 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from synapse.config.homeserver import HomeServerConfig +from synapse.storage.database import LoggingTransaction +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine + + +def run_upgrade( + cur: LoggingTransaction, + database_engine: BaseDatabaseEngine, + config: HomeServerConfig, +) -> None: + """ + Part 3 of a multi-step migration to drop the column `user_id` and replace it with + `full_user_id`. See the database schema docs for more information on the full + migration steps. + """ + hostname = config.server.server_name + + if isinstance(database_engine, PostgresEngine): + # check if the constraint can be validated + check_sql = """ + SELECT user_id from user_filters WHERE full_user_id IS NULL + """ + cur.execute(check_sql) + res = cur.fetchall() + + if res: + # there are rows the background job missed, finish them here before we validate constraint + process_rows_sql = """ + UPDATE user_filters + SET full_user_id = '@' || user_id || ? + WHERE user_id IN ( + SELECT user_id FROM user_filters WHERE full_user_id IS NULL + ) + """ + cur.execute(process_rows_sql, (f":{hostname}",)) + + # Now we can validate + validate_sql = """ + ALTER TABLE user_filters VALIDATE CONSTRAINT full_user_id_not_null + """ + cur.execute(validate_sql) + + else: + cur.execute("DROP TABLE IF EXISTS temp_user_filters") + create_sql = """ + CREATE TABLE temp_user_filters ( + full_user_id text NOT NULL, + user_id text NOT NULL, + filter_id bigint NOT NULL, + filter_json bytea NOT NULL, + UNIQUE (full_user_id), + UNIQUE (user_id) + ) + """ + cur.execute(create_sql) + + index_sql = """ + CREATE UNIQUE INDEX IF NOT EXISTS user_filters_unique ON + temp_user_filters (user_id, filter_id) + """ + cur.execute(index_sql) + + copy_sql = """ + INSERT INTO temp_user_filters ( + user_id, + filter_id, + filter_json, + full_user_id) + SELECT user_id, filter_id, filter_json, '@' || user_id || ':' || ? 
FROM user_filters + """ + cur.execute(copy_sql, (f"{hostname}",)) + + drop_sql = """ + DROP TABLE user_filters + """ + cur.execute(drop_sql) + + rename_sql = """ + ALTER TABLE temp_user_filters RENAME to user_filters + """ + cur.execute(rename_sql) diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py index aa6af5ad7bb2..868f0c699535 100644 --- a/tests/api/test_filtering.py +++ b/tests/api/test_filtering.py @@ -35,7 +35,6 @@ user_id = UserID.from_string("@test_user:test") user2_id = UserID.from_string("@test_user2:test") -user_localpart = "test_user" class FilteringTestCase(unittest.HomeserverTestCase): @@ -449,9 +448,7 @@ def test_filter_presence_match(self) -> None: ] user_filter = self.get_success( - self.filtering.get_user_filter( - user_localpart=user_localpart, filter_id=filter_id - ) + self.filtering.get_user_filter(user_id=user_id, filter_id=filter_id) ) results = self.get_success(user_filter.filter_presence(presence_states)) @@ -479,9 +476,7 @@ def test_filter_presence_no_match(self) -> None: ] user_filter = self.get_success( - self.filtering.get_user_filter( - user_localpart=user_localpart + "2", filter_id=filter_id - ) + self.filtering.get_user_filter(user_id=user2_id, filter_id=filter_id) ) results = self.get_success(user_filter.filter_presence(presence_states)) @@ -498,9 +493,7 @@ def test_filter_room_state_match(self) -> None: events = [event] user_filter = self.get_success( - self.filtering.get_user_filter( - user_localpart=user_localpart, filter_id=filter_id - ) + self.filtering.get_user_filter(user_id=user_id, filter_id=filter_id) ) results = self.get_success(user_filter.filter_room_state(events=events)) @@ -519,9 +512,7 @@ def test_filter_room_state_no_match(self) -> None: events = [event] user_filter = self.get_success( - self.filtering.get_user_filter( - user_localpart=user_localpart, filter_id=filter_id - ) + self.filtering.get_user_filter(user_id=user_id, filter_id=filter_id) ) results = self.get_success(user_filter.filter_room_state(events)) @@ -603,9 +594,7 @@ def test_add_filter(self) -> None: user_filter_json, ( self.get_success( - self.datastore.get_user_filter( - user_localpart=user_localpart, filter_id=0 - ) + self.datastore.get_user_filter(user_id=user_id, filter_id=0) ) ), ) @@ -620,9 +609,7 @@ def test_get_filter(self) -> None: ) filter = self.get_success( - self.filtering.get_user_filter( - user_localpart=user_localpart, filter_id=filter_id - ) + self.filtering.get_user_filter(user_id=user_id, filter_id=filter_id) ) self.assertEqual(filter.get_filter_json(), user_filter_json) diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index 64a9a22afeca..196ceb0b82d0 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -80,11 +80,7 @@ def test_set_my_name(self) -> None: ) self.assertEqual( - ( - self.get_success( - self.store.get_profile_displayname(self.frank.localpart) - ) - ), + (self.get_success(self.store.get_profile_displayname(self.frank))), "Frank Jr.", ) @@ -96,11 +92,7 @@ def test_set_my_name(self) -> None: ) self.assertEqual( - ( - self.get_success( - self.store.get_profile_displayname(self.frank.localpart) - ) - ), + (self.get_success(self.store.get_profile_displayname(self.frank))), "Frank", ) @@ -112,7 +104,7 @@ def test_set_my_name(self) -> None: ) self.assertIsNone( - self.get_success(self.store.get_profile_displayname(self.frank.localpart)) + self.get_success(self.store.get_profile_displayname(self.frank)) ) def test_set_my_name_if_disabled(self) -> None: @@ -122,11 +114,7 @@ 
def test_set_my_name_if_disabled(self) -> None: self.get_success(self.store.set_profile_displayname(self.frank, "Frank")) self.assertEqual( - ( - self.get_success( - self.store.get_profile_displayname(self.frank.localpart) - ) - ), + (self.get_success(self.store.get_profile_displayname(self.frank))), "Frank", ) @@ -201,7 +189,7 @@ def test_set_my_avatar(self) -> None: ) self.assertEqual( - (self.get_success(self.store.get_profile_avatar_url(self.frank.localpart))), + (self.get_success(self.store.get_profile_avatar_url(self.frank))), "http://my.server/pic.gif", ) @@ -215,7 +203,7 @@ def test_set_my_avatar(self) -> None: ) self.assertEqual( - (self.get_success(self.store.get_profile_avatar_url(self.frank.localpart))), + (self.get_success(self.store.get_profile_avatar_url(self.frank))), "http://my.server/me.png", ) @@ -229,7 +217,7 @@ def test_set_my_avatar(self) -> None: ) self.assertIsNone( - (self.get_success(self.store.get_profile_avatar_url(self.frank.localpart))), + (self.get_success(self.store.get_profile_avatar_url(self.frank))), ) def test_set_my_avatar_if_disabled(self) -> None: @@ -241,7 +229,7 @@ def test_set_my_avatar_if_disabled(self) -> None: ) self.assertEqual( - (self.get_success(self.store.get_profile_avatar_url(self.frank.localpart))), + (self.get_success(self.store.get_profile_avatar_url(self.frank))), "http://my.server/me.png", ) diff --git a/tests/module_api/test_api.py b/tests/module_api/test_api.py index bff7114cd89c..b3310abe1b31 100644 --- a/tests/module_api/test_api.py +++ b/tests/module_api/test_api.py @@ -28,7 +28,7 @@ from synapse.rest import admin from synapse.rest.client import login, notifications, presence, profile, room from synapse.server import HomeServer -from synapse.types import JsonDict, create_requester +from synapse.types import JsonDict, UserID, create_requester from synapse.util import Clock from tests.events.test_presence_router import send_presence_update, sync_presence @@ -103,7 +103,9 @@ def test_can_register_user(self) -> None: self.assertEqual(email["added_at"], 0) # Check that the displayname was assigned - displayname = self.get_success(self.store.get_profile_displayname("bob")) + displayname = self.get_success( + self.store.get_profile_displayname(UserID.from_string("@bob:test")) + ) self.assertEqual(displayname, "Bobberino") def test_can_register_admin_user(self) -> None: diff --git a/tests/rest/client/test_filter.py b/tests/rest/client/test_filter.py index 9faa9de05076..a2d5d340be35 100644 --- a/tests/rest/client/test_filter.py +++ b/tests/rest/client/test_filter.py @@ -46,7 +46,9 @@ def test_add_filter(self) -> None: self.assertEqual(channel.code, 200) self.assertEqual(channel.json_body, {"filter_id": "0"}) filter = self.get_success( - self.store.get_user_filter(user_localpart="apple", filter_id=0) + self.store.get_user_filter( + user_id=UserID.from_string(FilterTestCase.user_id), filter_id=0 + ) ) self.pump() self.assertEqual(filter, self.EXAMPLE_FILTER) diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py index f9cf0fcb82ed..fe5bb7791336 100644 --- a/tests/storage/test_profile.py +++ b/tests/storage/test_profile.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ from twisted.test.proto_helpers import MemoryReactor from synapse.server import HomeServer @@ -35,18 +36,14 @@ def test_displayname(self) -> None: self.assertEqual( "Frank", - ( - self.get_success( - self.store.get_profile_displayname(self.u_frank.localpart) - ) - ), + (self.get_success(self.store.get_profile_displayname(self.u_frank))), ) # test set to None self.get_success(self.store.set_profile_displayname(self.u_frank, None)) self.assertIsNone( - self.get_success(self.store.get_profile_displayname(self.u_frank.localpart)) + self.get_success(self.store.get_profile_displayname(self.u_frank)) ) def test_avatar_url(self) -> None: @@ -58,18 +55,14 @@ def test_avatar_url(self) -> None: self.assertEqual( "http://my.site/here", - ( - self.get_success( - self.store.get_profile_avatar_url(self.u_frank.localpart) - ) - ), + (self.get_success(self.store.get_profile_avatar_url(self.u_frank))), ) # test set to None self.get_success(self.store.set_profile_avatar_url(self.u_frank, None)) self.assertIsNone( - self.get_success(self.store.get_profile_avatar_url(self.u_frank.localpart)) + self.get_success(self.store.get_profile_avatar_url(self.u_frank)) ) def test_profiles_bg_migration(self) -> None: From 8ba530c0e3b157137031d456225b7ba1e0b1627d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 10:31:41 +0100 Subject: [PATCH 36/75] Bump importlib-metadata from 6.1.0 to 6.6.0 (#15711) Bumps [importlib-metadata](https://github.com/python/importlib_metadata) from 6.1.0 to 6.6.0. - [Release notes](https://github.com/python/importlib_metadata/releases) - [Changelog](https://github.com/python/importlib_metadata/blob/main/CHANGES.rst) - [Commits](https://github.com/python/importlib_metadata/compare/v6.1.0...v6.6.0) --- updated-dependencies: - dependency-name: importlib-metadata dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index d8964f5719de..180f2740878c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. 
[[package]] name = "alabaster" @@ -867,14 +867,14 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.1.0" +version = "6.6.0" description = "Read metadata from Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.1.0-py3-none-any.whl", hash = "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09"}, - {file = "importlib_metadata-6.1.0.tar.gz", hash = "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20"}, + {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, + {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, ] [package.dependencies] @@ -3424,18 +3424,18 @@ docs = ["Sphinx", "repoze.sphinx.autointerface"] test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"] [extras] -all = ["Pympler", "authlib", "hiredis", "jaeger-client", "lxml", "matrix-synapse-ldap3", "opentracing", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pyicu", "pysaml2", "sentry-sdk", "txredisapi"] +all = ["matrix-synapse-ldap3", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pysaml2", "authlib", "lxml", "sentry-sdk", "jaeger-client", "opentracing", "txredisapi", "hiredis", "Pympler", "pyicu"] cache-memory = ["Pympler"] jwt = ["authlib"] matrix-synapse-ldap3 = ["matrix-synapse-ldap3"] oidc = ["authlib"] opentracing = ["jaeger-client", "opentracing"] postgres = ["psycopg2", "psycopg2cffi", "psycopg2cffi-compat"] -redis = ["hiredis", "txredisapi"] +redis = ["txredisapi", "hiredis"] saml2 = ["pysaml2"] sentry = ["sentry-sdk"] systemd = ["systemd-python"] -test = ["idna", "parameterized"] +test = ["parameterized", "idna"] url-preview = ["lxml"] user-search = ["pyicu"] From 36a5bcae2cf70f5b7dec44e34c10d7e47ee0bcc2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 10:31:54 +0100 Subject: [PATCH 37/75] Bump library/redis from 6-bullseye to 7-bullseye in /docker (#15712) Bumps library/redis from 6-bullseye to 7-bullseye. --- updated-dependencies: - dependency-name: library/redis dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/Dockerfile-workers | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile-workers b/docker/Dockerfile-workers index adb9a725e33f..31d6d33407c6 100644 --- a/docker/Dockerfile-workers +++ b/docker/Dockerfile-workers @@ -21,7 +21,7 @@ FROM docker.io/library/debian:bullseye-slim AS deps_base # which makes it much easier to copy (but we need to make sure we use an image # based on the same debian version as the synapse image, to make sure we get # the expected version of libc. -FROM docker.io/library/redis:6-bullseye AS redis_base +FROM docker.io/library/redis:7-bullseye AS redis_base # now build the final image, based on the the regular Synapse docker image FROM $FROM From 5feabbdf062d16577f697fed41687c7bffc60c49 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 10:32:07 +0100 Subject: [PATCH 38/75] Bump pyasn1 from 0.4.8 to 0.5.0 (#15713) Bumps [pyasn1](https://github.com/pyasn1/pyasn1) from 0.4.8 to 0.5.0. 
- [Release notes](https://github.com/pyasn1/pyasn1/releases) - [Changelog](https://github.com/pyasn1/pyasn1/blob/main/CHANGES.rst) - [Commits](https://github.com/pyasn1/pyasn1/compare/v0.4.8...v0.5.0) --- updated-dependencies: - dependency-name: pyasn1 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 180f2740878c..d2fc2c1c9c7c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1863,14 +1863,14 @@ psycopg2 = "*" [[package]] name = "pyasn1" -version = "0.4.8" -description = "ASN.1 types and codecs" +version = "0.5.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" category = "main" optional = false -python-versions = "*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, - {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, + {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, + {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, ] [[package]] From 1a7aa81715609555cb4d0a7e3cad262b9c234007 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 10:32:16 +0100 Subject: [PATCH 39/75] Bump sentry-sdk from 1.22.1 to 1.25.0 (#15714) Bumps [sentry-sdk](https://github.com/getsentry/sentry-python) from 1.22.1 to 1.25.0. - [Release notes](https://github.com/getsentry/sentry-python/releases) - [Changelog](https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md) - [Commits](https://github.com/getsentry/sentry-python/compare/1.22.1...1.25.0) --- updated-dependencies: - dependency-name: sentry-sdk dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index d2fc2c1c9c7c..9f918574754d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2397,19 +2397,19 @@ doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "sentry-sdk" -version = "1.22.1" +version = "1.25.0" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = true python-versions = "*" files = [ - {file = "sentry-sdk-1.22.1.tar.gz", hash = "sha256:052dff5069c6f0d836ee014323576824a9b40836fc003fb12489a1f19c60a3c9"}, - {file = "sentry_sdk-1.22.1-py2.py3-none-any.whl", hash = "sha256:c6c6946f8c927adb00af1c5ab6921df38775b2199b9003816d5935a1310352d5"}, + {file = "sentry-sdk-1.25.0.tar.gz", hash = "sha256:5be3296fc574fa8a4d9b213b4dcf8c8d0246c08f8bd78315c6286f386c37555a"}, + {file = "sentry_sdk-1.25.0-py2.py3-none-any.whl", hash = "sha256:fe85cf5d0b3d0aa3480df689f9f6dc487de783defb0a95043368375dc893645e"}, ] [package.dependencies] certifi = "*" -urllib3 = {version = ">=1.26.11,<2.0.0", markers = "python_version >= \"3.6\""} +urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} [package.extras] aiohttp = ["aiohttp (>=3.5)"] @@ -2421,10 +2421,11 @@ chalice = ["chalice (>=1.16.0)"] django = ["django (>=1.8)"] falcon = ["falcon (>=1.4)"] fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] grpcio = ["grpcio (>=1.21.1)"] httpx = ["httpx (>=0.16.0)"] huey = ["huey (>=2)"] +loguru = ["loguru (>=0.5)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"] pure-eval = ["asttokens", "executing", "pure-eval"] pymongo = ["pymongo (>=3.1)"] From 2d97d5b1c359c2a1783365c0db035f17d512dc4c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 10:32:25 +0100 Subject: [PATCH 40/75] Bump types-jsonschema from 4.17.0.7 to 4.17.0.8 (#15716) Bumps [types-jsonschema](https://github.com/python/typeshed) from 4.17.0.7 to 4.17.0.8. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-jsonschema dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9f918574754d..c94daa6cef70 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3038,14 +3038,14 @@ files = [ [[package]] name = "types-jsonschema" -version = "4.17.0.7" +version = "4.17.0.8" description = "Typing stubs for jsonschema" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-jsonschema-4.17.0.7.tar.gz", hash = "sha256:130e57c5f1ca755f95775d0822ad7a3907294e1461306af54baf804f317fd54c"}, - {file = "types_jsonschema-4.17.0.7-py3-none-any.whl", hash = "sha256:e129b52be6df841d97a98f087631dd558f7812eb91ff7b733c3301bd2446271b"}, + {file = "types-jsonschema-4.17.0.8.tar.gz", hash = "sha256:96a56990910f405e62de58862c0bbb3ac29ee6dba6d3d99aa0ba7f874cc547de"}, + {file = "types_jsonschema-4.17.0.8-py3-none-any.whl", hash = "sha256:f5958eb7b53217dfb5125f0412aeaef226a8a9013eac95816c95b5b523f6796b"}, ] [[package]] From ca8906be2cb821a0fb49ad1adf8440e79e64a398 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 10:39:34 +0100 Subject: [PATCH 41/75] Bump types-requests from 2.31.0.0 to 2.31.0.1 (#15715) Bumps [types-requests](https://github.com/python/typeshed) from 2.31.0.0 to 2.31.0.1. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-requests dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index c94daa6cef70..1f5cb3a3a85e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3125,14 +3125,14 @@ files = [ [[package]] name = "types-requests" -version = "2.31.0.0" +version = "2.31.0.1" description = "Typing stubs for requests" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-requests-2.31.0.0.tar.gz", hash = "sha256:c1c29d20ab8d84dff468d7febfe8e0cb0b4664543221b386605e14672b44ea25"}, - {file = "types_requests-2.31.0.0-py3-none-any.whl", hash = "sha256:7c5cea7940f8e92ec560bbc468f65bf684aa3dcf0554a6f8c4710f5f708dc598"}, + {file = "types-requests-2.31.0.1.tar.gz", hash = "sha256:3de667cffa123ce698591de0ad7db034a5317457a596eb0b4944e5a9d9e8d1ac"}, + {file = "types_requests-2.31.0.1-py3-none-any.whl", hash = "sha256:afb06ef8f25ba83d59a1d424bd7a5a939082f94b94e90ab5e6116bd2559deaa3"}, ] [package.dependencies] From f9561b9e37e4cbd97a71dd10549f1f03d3f01b5e Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 5 Jun 2023 23:38:52 -0500 Subject: [PATCH 42/75] Some house keeping on `maybe_backfill()` functions (#15709) --- changelog.d/15709.misc | 1 + synapse/handlers/federation.py | 17 +++++++++++++++++ 2 files changed, 18 insertions(+) create mode 100644 changelog.d/15709.misc diff --git a/changelog.d/15709.misc b/changelog.d/15709.misc new file mode 100644 index 000000000000..e9ce84a94021 --- /dev/null +++ b/changelog.d/15709.misc @@ -0,0 +1 @@ +Update docstring and traces on `maybe_backfill()` functions. 
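
(A minimal sketch of how the two tracing decorators used below compose; the
real implementations in `synapse/logging/opentracing.py` also handle
Deferreds and error reporting. Shown as a free function for brevity:)

    from synapse.logging.opentracing import tag_args, trace

    @trace      # opens an OpenTracing span named after the function
    @tag_args   # records the call's arguments as tags on that span
    async def maybe_backfill(room_id: str, current_depth: int, limit: int) -> bool:
        # ... backfill heuristics elided; returns True if a backfill was attempted
        return False
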
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 2eb28d55ac82..57d6b70cff48 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -200,6 +200,7 @@ def __init__(self, hs: "HomeServer"): ) @trace + @tag_args async def maybe_backfill( self, room_id: str, current_depth: int, limit: int ) -> bool: @@ -214,6 +215,9 @@ async def maybe_backfill( limit: The number of events that the pagination request will return. This is used as part of the heuristic to decide if we should back paginate. + + Returns: + True if we actually tried to backfill something, otherwise False. """ # Starting the processing time here so we can include the room backfill # linearizer lock queue in the timing @@ -227,6 +231,8 @@ async def maybe_backfill( processing_start_time=processing_start_time, ) + @trace + @tag_args async def _maybe_backfill_inner( self, room_id: str, @@ -247,6 +253,9 @@ async def _maybe_backfill_inner( limit: The max number of events to request from the remote federated server. processing_start_time: The time when `maybe_backfill` started processing. Only used for timing. If `None`, no timing observation will be made. + + Returns: + True if we actually tried to backfill something, otherwise False. """ backwards_extremities = [ _BackfillPoint(event_id, depth, _BackfillPointType.BACKWARDS_EXTREMITY) @@ -302,6 +311,14 @@ async def _maybe_backfill_inner( len(sorted_backfill_points), sorted_backfill_points, ) + set_tag( + SynapseTags.RESULT_PREFIX + "sorted_backfill_points", + str(sorted_backfill_points), + ) + set_tag( + SynapseTags.RESULT_PREFIX + "sorted_backfill_points.length", + str(len(sorted_backfill_points)), + ) # If we have no backfill points lower than the `current_depth` then # either we can a) bail or b) still attempt to backfill. We opt to try From f880e64b11bd03d1ebd710b34b541d5b2e044baa Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 6 Jun 2023 04:11:07 -0400 Subject: [PATCH 43/75] Stabilize support for MSC3952: Intentional mentions. (#15520) --- changelog.d/15520.feature | 1 + rust/benches/evaluator.rs | 3 -- rust/src/push/base_rules.rs | 8 ++--- rust/src/push/evaluator.rs | 10 +++--- rust/src/push/mod.rs | 7 ----- stubs/synapse/synapse_rust/push.pyi | 1 - synapse/api/constants.py | 2 +- synapse/config/experimental.py | 5 --- synapse/events/validator.py | 9 ++---- synapse/push/bulk_push_rule_evaluator.py | 8 +---- synapse/rest/client/versions.py | 2 -- synapse/storage/databases/main/push_rule.py | 1 - tests/push/test_bulk_push_rule_evaluator.py | 34 ++++++++------------- 13 files changed, 27 insertions(+), 64 deletions(-) create mode 100644 changelog.d/15520.feature diff --git a/changelog.d/15520.feature b/changelog.d/15520.feature new file mode 100644 index 000000000000..f4fd40ab9451 --- /dev/null +++ b/changelog.d/15520.feature @@ -0,0 +1 @@ +Enable support for [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952): intentional mentions. diff --git a/rust/benches/evaluator.rs b/rust/benches/evaluator.rs index 64e13f6486e8..c2f33258a4e3 100644 --- a/rust/benches/evaluator.rs +++ b/rust/benches/evaluator.rs @@ -13,8 +13,6 @@ // limitations under the License. 
#![feature(test)]
-use std::collections::BTreeSet;
-
 use synapse::push::{
     evaluator::PushRuleEvaluator, Condition, EventMatchCondition, FilteredPushRules, JsonValue,
     PushRules, SimpleJsonValue,
@@ -197,7 +195,6 @@ fn bench_eval_message(b: &mut Bencher) {
         false,
         false,
         false,
-        false,
     );

     b.iter(|| eval.run(&rules, Some("bob"), Some("person")));
diff --git a/rust/src/push/base_rules.rs b/rust/src/push/base_rules.rs
index 51372e15531b..9d6c304d9285 100644
--- a/rust/src/push/base_rules.rs
+++ b/rust/src/push/base_rules.rs
@@ -142,11 +142,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
         default_enabled: true,
     },
     PushRule {
-        rule_id: Cow::Borrowed(".org.matrix.msc3952.is_user_mention"),
+        rule_id: Cow::Borrowed("global/override/.m.is_user_mention"),
         priority_class: 5,
         conditions: Cow::Borrowed(&[Condition::Known(
             KnownCondition::ExactEventPropertyContainsType(EventPropertyIsTypeCondition {
-                key: Cow::Borrowed("content.org\\.matrix\\.msc3952\\.mentions.user_ids"),
+                key: Cow::Borrowed("content.m\\.mentions.user_ids"),
                 value_type: Cow::Borrowed(&EventMatchPatternType::UserId),
             }),
         )]),
@@ -163,11 +163,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
         default_enabled: true,
     },
     PushRule {
-        rule_id: Cow::Borrowed(".org.matrix.msc3952.is_room_mention"),
+        rule_id: Cow::Borrowed("global/override/.m.is_room_mention"),
         priority_class: 5,
         conditions: Cow::Borrowed(&[
             Condition::Known(KnownCondition::EventPropertyIs(EventPropertyIsCondition {
-                key: Cow::Borrowed("content.org\\.matrix\\.msc3952\\.mentions.room"),
+                key: Cow::Borrowed("content.m\\.mentions.room"),
                 value: Cow::Borrowed(&SimpleJsonValue::Bool(true)),
             })),
             Condition::Known(KnownCondition::SenderNotificationPermission {
diff --git a/rust/src/push/evaluator.rs b/rust/src/push/evaluator.rs
index 2d7c4c06be96..59c53b1776c6 100644
--- a/rust/src/push/evaluator.rs
+++ b/rust/src/push/evaluator.rs
@@ -70,7 +70,9 @@ pub struct PushRuleEvaluator {
     /// The "content.body", if any.
     body: String,

-    /// True if the event has a mentions property and MSC3952 support is enabled.
+    /// True if the event has an m.mentions property. (Note that this is a separate
+    /// flag instead of checking flattened_keys since the m.mentions property
+    /// might be an empty map and not appear in flattened_keys.)
     has_mentions: bool,

     /// The number of users in the room.
@@ -155,9 +157,7 @@ impl PushRuleEvaluator {
         let rule_id = &push_rule.rule_id().to_string();

         // For backwards-compatibility the legacy mention rules are disabled
-        // if the event contains the 'm.mentions' property (and if the
-        // experimental feature is enabled, both of these are represented
-        // by the has_mentions flag).
+        // if the event contains the 'm.mentions' property.
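+        // For instance (an illustrative shape, matching this patch's tests): content
+        // containing `"m.mentions": {"user_ids": ["@alice:example.org"], "room": false}`,
+        // or even just `"m.mentions": {}`, sets `has_mentions` and so skips the
+        // legacy `contains_display_name`/`contains_user_name` rules.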
if self.has_mentions && (rule_id == "global/override/.m.rule.contains_display_name" || rule_id == "global/content/.m.rule.contains_user_name" @@ -562,7 +562,7 @@ fn test_requires_room_version_supports_condition() { }; let rules = PushRules::new(vec![custom_rule]); result = evaluator.run( - &FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false, false), + &FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false), None, None, ); diff --git a/rust/src/push/mod.rs b/rust/src/push/mod.rs index f19d3c739f1c..514980579b63 100644 --- a/rust/src/push/mod.rs +++ b/rust/src/push/mod.rs @@ -527,7 +527,6 @@ pub struct FilteredPushRules { msc1767_enabled: bool, msc3381_polls_enabled: bool, msc3664_enabled: bool, - msc3952_intentional_mentions: bool, msc3958_suppress_edits_enabled: bool, } @@ -540,7 +539,6 @@ impl FilteredPushRules { msc1767_enabled: bool, msc3381_polls_enabled: bool, msc3664_enabled: bool, - msc3952_intentional_mentions: bool, msc3958_suppress_edits_enabled: bool, ) -> Self { Self { @@ -549,7 +547,6 @@ impl FilteredPushRules { msc1767_enabled, msc3381_polls_enabled, msc3664_enabled, - msc3952_intentional_mentions, msc3958_suppress_edits_enabled, } } @@ -587,10 +584,6 @@ impl FilteredPushRules { return false; } - if !self.msc3952_intentional_mentions && rule.rule_id.contains("org.matrix.msc3952") - { - return false; - } if !self.msc3958_suppress_edits_enabled && rule.rule_id == "global/override/.com.beeper.suppress_edits" { diff --git a/stubs/synapse/synapse_rust/push.pyi b/stubs/synapse/synapse_rust/push.pyi index 5d0ce4b1a4bd..d573a37b9aff 100644 --- a/stubs/synapse/synapse_rust/push.pyi +++ b/stubs/synapse/synapse_rust/push.pyi @@ -46,7 +46,6 @@ class FilteredPushRules: msc1767_enabled: bool, msc3381_polls_enabled: bool, msc3664_enabled: bool, - msc3952_intentional_mentions: bool, msc3958_suppress_edits_enabled: bool, ): ... def rules(self) -> Collection[Tuple[PushRule, bool]]: ... diff --git a/synapse/api/constants.py b/synapse/api/constants.py index cde9a2ecefb3..faf0770c663a 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -236,7 +236,7 @@ class EventContentFields: AUTHORISING_USER: Final = "join_authorised_via_users_server" # Use for mentioning users. - MSC3952_MENTIONS: Final = "org.matrix.msc3952.mentions" + MENTIONS: Final = "m.mentions" # an unspecced field added to to-device messages to identify them uniquely-ish TO_DEVICE_MSGID: Final = "org.matrix.msgid" diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index a9e002cf08f8..1d5b5ded455d 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -358,11 +358,6 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: # MSC3391: Removing account data. self.msc3391_enabled = experimental.get("msc3391_enabled", False) - # MSC3952: Intentional mentions, this depends on MSC3966. - self.msc3952_intentional_mentions = experimental.get( - "msc3952_intentional_mentions", False - ) - # MSC3959: Do not generate notifications for edits. self.msc3958_supress_edit_notifs = experimental.get( "msc3958_supress_edit_notifs", False diff --git a/synapse/events/validator.py b/synapse/events/validator.py index 47203209db2f..9278f1a1aa65 100644 --- a/synapse/events/validator.py +++ b/synapse/events/validator.py @@ -134,13 +134,8 @@ def validate_new(self, event: EventBase, config: HomeServerConfig) -> None: ) # If the event contains a mentions key, validate it. 
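        # (Illustrative: the `Mentions` model is expected to accept an object like
        #     {"user_ids": ["@alice:example.org"], "room": true}
        # and to reject other shapes with a 400 error.)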
- if ( - EventContentFields.MSC3952_MENTIONS in event.content - and config.experimental.msc3952_intentional_mentions - ): - validate_json_object( - event.content[EventContentFields.MSC3952_MENTIONS], Mentions - ) + if EventContentFields.MENTIONS in event.content: + validate_json_object(event.content[EventContentFields.MENTIONS], Mentions) def _validate_retention(self, event: EventBase) -> None: """Checks that an event that defines the retention policy for a room respects the diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index 320084f5f58c..33002cc0f275 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -120,9 +120,6 @@ def __init__(self, hs: "HomeServer"): self.should_calculate_push_rules = self.hs.config.push.enable_push self._related_event_match_enabled = self.hs.config.experimental.msc3664_enabled - self._intentional_mentions_enabled = ( - self.hs.config.experimental.msc3952_intentional_mentions - ) self.room_push_rule_cache_metrics = register_cache( "cache", @@ -390,10 +387,7 @@ async def _action_for_event_by_user( del notification_levels[key] # Pull out any user and room mentions. - has_mentions = ( - self._intentional_mentions_enabled - and EventContentFields.MSC3952_MENTIONS in event.content - ) + has_mentions = EventContentFields.MENTIONS in event.content evaluator = PushRuleEvaluator( _flatten_dict(event), diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 547bf34df15e..191064875561 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -124,8 +124,6 @@ def on_GET(self, request: Request) -> Tuple[int, JsonDict]: is not None, # Adds support for relation-based redactions as per MSC3912. "org.matrix.msc3912": self.config.experimental.msc3912_enabled, - # Adds support for unstable "intentional mentions" behaviour. - "org.matrix.msc3952_intentional_mentions": self.config.experimental.msc3952_intentional_mentions, # Whether recursively provide relations is supported. "org.matrix.msc3981": self.config.experimental.msc3981_recurse_relations, # Adds support for deleting account data. diff --git a/synapse/storage/databases/main/push_rule.py b/synapse/storage/databases/main/push_rule.py index 9f862f00c1c5..e098ceea3cdd 100644 --- a/synapse/storage/databases/main/push_rule.py +++ b/synapse/storage/databases/main/push_rule.py @@ -88,7 +88,6 @@ def _load_rules( msc1767_enabled=experimental_config.msc1767_enabled, msc3664_enabled=experimental_config.msc3664_enabled, msc3381_polls_enabled=experimental_config.msc3381_polls_enabled, - msc3952_intentional_mentions=experimental_config.msc3952_intentional_mentions, msc3958_suppress_edits_enabled=experimental_config.msc3958_supress_edit_notifs, ) diff --git a/tests/push/test_bulk_push_rule_evaluator.py b/tests/push/test_bulk_push_rule_evaluator.py index 9501096a7732..1e06f8607159 100644 --- a/tests/push/test_bulk_push_rule_evaluator.py +++ b/tests/push/test_bulk_push_rule_evaluator.py @@ -228,7 +228,6 @@ def _create_and_process( ) return len(result) > 0 - @override_config({"experimental_features": {"msc3952_intentional_mentions": True}}) def test_user_mentions(self) -> None: """Test the behavior of an event which includes invalid user mentions.""" bulk_evaluator = BulkPushRuleEvaluator(self.hs) @@ -237,9 +236,7 @@ def test_user_mentions(self) -> None: self.assertFalse(self._create_and_process(bulk_evaluator)) # An empty mentions field should not notify. 
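        # (Deliberate: the mere presence of `m.mentions`, even as an empty dict,
        # disables the legacy body-match mention rules for that event.)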
self.assertFalse( - self._create_and_process( - bulk_evaluator, {EventContentFields.MSC3952_MENTIONS: {}} - ) + self._create_and_process(bulk_evaluator, {EventContentFields.MENTIONS: {}}) ) # Non-dict mentions should be ignored. @@ -253,7 +250,7 @@ def test_user_mentions(self) -> None: for mentions in (None, True, False, 1, "foo", []): self.assertFalse( self._create_and_process( - bulk_evaluator, {EventContentFields.MSC3952_MENTIONS: mentions} + bulk_evaluator, {EventContentFields.MENTIONS: mentions} ) ) @@ -262,7 +259,7 @@ def test_user_mentions(self) -> None: self.assertFalse( self._create_and_process( bulk_evaluator, - {EventContentFields.MSC3952_MENTIONS: {"user_ids": mentions}}, + {EventContentFields.MENTIONS: {"user_ids": mentions}}, ) ) @@ -270,14 +267,14 @@ def test_user_mentions(self) -> None: self.assertTrue( self._create_and_process( bulk_evaluator, - {EventContentFields.MSC3952_MENTIONS: {"user_ids": [self.alice]}}, + {EventContentFields.MENTIONS: {"user_ids": [self.alice]}}, ) ) self.assertTrue( self._create_and_process( bulk_evaluator, { - EventContentFields.MSC3952_MENTIONS: { + EventContentFields.MENTIONS: { "user_ids": ["@another:test", self.alice] } }, @@ -288,11 +285,7 @@ def test_user_mentions(self) -> None: self.assertTrue( self._create_and_process( bulk_evaluator, - { - EventContentFields.MSC3952_MENTIONS: { - "user_ids": [self.alice, self.alice] - } - }, + {EventContentFields.MENTIONS: {"user_ids": [self.alice, self.alice]}}, ) ) @@ -307,7 +300,7 @@ def test_user_mentions(self) -> None: self._create_and_process( bulk_evaluator, { - EventContentFields.MSC3952_MENTIONS: { + EventContentFields.MENTIONS: { "user_ids": [None, True, False, {}, []] } }, @@ -317,7 +310,7 @@ def test_user_mentions(self) -> None: self._create_and_process( bulk_evaluator, { - EventContentFields.MSC3952_MENTIONS: { + EventContentFields.MENTIONS: { "user_ids": [None, True, False, {}, [], self.alice] } }, @@ -331,12 +324,11 @@ def test_user_mentions(self) -> None: { "body": self.alice, "msgtype": "m.text", - EventContentFields.MSC3952_MENTIONS: {}, + EventContentFields.MENTIONS: {}, }, ) ) - @override_config({"experimental_features": {"msc3952_intentional_mentions": True}}) def test_room_mentions(self) -> None: """Test the behavior of an event which includes invalid room mentions.""" bulk_evaluator = BulkPushRuleEvaluator(self.hs) @@ -344,7 +336,7 @@ def test_room_mentions(self) -> None: # Room mentions from those without power should not notify. 
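        # (Room mentions are additionally gated on the sender meeting the room's
        # `notifications.room` power level, via the rule's
        # `sender_notification_permission` condition.)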
self.assertFalse( self._create_and_process( - bulk_evaluator, {EventContentFields.MSC3952_MENTIONS: {"room": True}} + bulk_evaluator, {EventContentFields.MENTIONS: {"room": True}} ) ) @@ -358,7 +350,7 @@ def test_room_mentions(self) -> None: ) self.assertTrue( self._create_and_process( - bulk_evaluator, {EventContentFields.MSC3952_MENTIONS: {"room": True}} + bulk_evaluator, {EventContentFields.MENTIONS: {"room": True}} ) ) @@ -374,7 +366,7 @@ def test_room_mentions(self) -> None: self.assertFalse( self._create_and_process( bulk_evaluator, - {EventContentFields.MSC3952_MENTIONS: {"room": mentions}}, + {EventContentFields.MENTIONS: {"room": mentions}}, ) ) @@ -385,7 +377,7 @@ def test_room_mentions(self) -> None: { "body": "@room", "msgtype": "m.text", - EventContentFields.MSC3952_MENTIONS: {}, + EventContentFields.MENTIONS: {}, }, ) ) From ad690037de0708d932380e3759d57ef3cc981345 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 6 Jun 2023 10:58:32 +0100 Subject: [PATCH 44/75] Fix link in changelog --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index ea13b554baa2..905713b2afc7 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -8,7 +8,7 @@ No significant changes since 1.85.0rc2. The following issues are fixed in 1.85.0 (and RCs). -- [GHSA-26c5-ppr8-f33p](https://github.com/matrix-org/synapse/security/advisories/GHSA-26c5-ppr8-f33p) / [CVE-2023-32682](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-32683) — Low Severity +- [GHSA-26c5-ppr8-f33p](https://github.com/matrix-org/synapse/security/advisories/GHSA-26c5-ppr8-f33p) / [CVE-2023-32682](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-32682) — Low Severity It may be possible for a deactivated user to login when using uncommon configurations. From dfd77f426e3e4a66dd027db7078ed0345a4c74dd Mon Sep 17 00:00:00 2001 From: Sean Quah <8349537+squahtx@users.noreply.github.com> Date: Tue, 6 Jun 2023 12:32:29 +0100 Subject: [PATCH 45/75] Remove some unused `server_name` fields (#15723) Signed-off-by: Sean Quah --- changelog.d/15723.misc | 1 + synapse/handlers/presence.py | 1 - synapse/handlers/read_marker.py | 1 - synapse/handlers/room.py | 1 - synapse/handlers/stats.py | 1 - synapse/rest/media/upload_resource.py | 1 - 6 files changed, 1 insertion(+), 5 deletions(-) create mode 100644 changelog.d/15723.misc diff --git a/changelog.d/15723.misc b/changelog.d/15723.misc new file mode 100644 index 000000000000..ba331adca7f2 --- /dev/null +++ b/changelog.d/15723.misc @@ -0,0 +1 @@ +Removed some unused fields. 
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 4ad223357384..0a219b796271 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -648,7 +648,6 @@ class PresenceHandler(BasePresenceHandler): def __init__(self, hs: "HomeServer"): super().__init__(hs) self.hs = hs - self.server_name = hs.hostname self.wheel_timer: WheelTimer[str] = WheelTimer() self.notifier = hs.get_notifier() self._presence_enabled = hs.config.server.use_presence diff --git a/synapse/handlers/read_marker.py b/synapse/handlers/read_marker.py index 49a497a86011..df5a4f3e22cf 100644 --- a/synapse/handlers/read_marker.py +++ b/synapse/handlers/read_marker.py @@ -27,7 +27,6 @@ class ReadMarkerHandler: def __init__(self, hs: "HomeServer"): - self.server_name = hs.config.server.server_name self.store = hs.get_datastores().main self.account_data_handler = hs.get_account_data_handler() self.read_marker_linearizer = Linearizer(name="read_marker") diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 5e1702d78a4b..cb957f2033a1 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -1490,7 +1490,6 @@ async def filter_evts(events: List[EventBase]) -> List[EventBase]: class TimestampLookupHandler: def __init__(self, hs: "HomeServer"): - self.server_name = hs.hostname self.store = hs.get_datastores().main self.state_handler = hs.get_state_handler() self.federation_client = hs.get_federation_client() diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py index 5c01482acfd7..7cabf7980af1 100644 --- a/synapse/handlers/stats.py +++ b/synapse/handlers/stats.py @@ -42,7 +42,6 @@ def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main self._storage_controllers = hs.get_storage_controllers() self.state = hs.get_state_handler() - self.server_name = hs.hostname self.clock = hs.get_clock() self.notifier = hs.get_notifier() self.is_mine_id = hs.is_mine_id diff --git a/synapse/rest/media/upload_resource.py b/synapse/rest/media/upload_resource.py index 697348613b52..043e8d6077ea 100644 --- a/synapse/rest/media/upload_resource.py +++ b/synapse/rest/media/upload_resource.py @@ -39,7 +39,6 @@ def __init__(self, hs: "HomeServer", media_repo: "MediaRepository"): self.filepaths = media_repo.filepaths self.store = hs.get_datastores().main self.clock = hs.get_clock() - self.server_name = hs.hostname self.auth = hs.get_auth() self.max_upload_size = hs.config.media.max_upload_size self.clock = hs.get_clock() From d43c72a6c85ab7cf7391f1b716dfd57f8fd0bf3d Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Tue, 6 Jun 2023 19:29:54 +0100 Subject: [PATCH 46/75] Prevent "twisted trunk" and "latest deps" workflows from running on forks (#15726) --- .github/workflows/latest_deps.yml | 23 +++++++++++++++++++++-- .github/workflows/twisted_trunk.yml | 24 ++++++++++++++++++++++-- changelog.d/15726.misc | 1 + 3 files changed, 44 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15726.misc diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index 452600ba1633..ec6391cf8fd4 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -22,7 +22,21 @@ concurrency: cancel-in-progress: true jobs: + check_repo: + # Prevent this workflow from running on any fork of Synapse other than matrix-org/synapse, as it is + # only useful to the Synapse core team. 
+ # All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest + # of the workflow will be skipped as well. + runs-on: ubuntu-latest + outputs: + should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }} + steps: + - id: check_condition + run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT" + mypy: + needs: check_repo + if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 @@ -47,6 +61,8 @@ jobs: run: sed '/warn_unused_ignores = True/d' -i mypy.ini - run: poetry run mypy trial: + needs: check_repo + if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest strategy: matrix: @@ -105,6 +121,8 @@ jobs: sytest: + needs: check_repo + if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest container: image: matrixdotorg/sytest-synapse:testing @@ -156,7 +174,8 @@ jobs: complement: - if: "${{ !failure() && !cancelled() }}" + needs: check_repo + if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'" runs-on: ubuntu-latest strategy: @@ -192,7 +211,7 @@ jobs: # Open an issue if the build fails, so we know about it. # Only do this if we're not experimenting with this action in a PR. open-issue: - if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request'" + if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request' && needs.check_repo.outputs.should_run_workflow == 'true'" needs: # TODO: should mypy be included here? It feels more brittle than the others. - mypy diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index 14fc6a0389c3..55081f8133b2 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -18,7 +18,22 @@ concurrency: cancel-in-progress: true jobs: + check_repo: + # Prevent this workflow from running on any fork of Synapse other than matrix-org/synapse, as it is + # only useful to the Synapse core team. + # All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest + # of the workflow will be skipped as well. + if: github.repository == 'matrix-org/synapse' + runs-on: ubuntu-latest + outputs: + should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }} + steps: + - id: check_condition + run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT" + mypy: + needs: check_repo + if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest steps: @@ -41,6 +56,8 @@ jobs: - run: poetry run mypy trial: + needs: check_repo + if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest steps: @@ -75,6 +92,8 @@ jobs: || true sytest: + needs: check_repo + if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest container: image: matrixdotorg/sytest-synapse:buster @@ -119,7 +138,8 @@ jobs: /logs/**/*.log* complement: - if: "${{ !failure() && !cancelled() }}" + needs: check_repo + if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'" runs-on: ubuntu-latest strategy: @@ -166,7 +186,7 @@ jobs: # open an issue if the build fails, so we know about it. 
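 # (The gating pattern sketched, as used throughout this workflow: a cheap
 # `check_repo` job exposes a `should_run_workflow` output, and each gated job
 # declares `needs: check_repo` plus an `if:` on that output, so runs on
 # forks skip the rest of the workflow.)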
open-issue: - if: failure() + if: failure() && needs.check_repo.outputs.should_run_workflow == 'true' needs: - mypy - trial diff --git a/changelog.d/15726.misc b/changelog.d/15726.misc new file mode 100644 index 000000000000..941e541e7766 --- /dev/null +++ b/changelog.d/15726.misc @@ -0,0 +1 @@ +Prevent the `latest_deps` and `twisted_trunk` daily GitHub Actions workflows from running on forks of the codebase. \ No newline at end of file From 6ee96e936646d6ccc55dc076f62f8cf518c90d1e Mon Sep 17 00:00:00 2001 From: Shay Date: Tue, 6 Jun 2023 13:16:03 -0700 Subject: [PATCH 47/75] Improve performance of user directory search (#15729) --- changelog.d/15729.misc | 1 + synapse/storage/databases/main/user_directory.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15729.misc diff --git a/changelog.d/15729.misc b/changelog.d/15729.misc new file mode 100644 index 000000000000..394025430535 --- /dev/null +++ b/changelog.d/15729.misc @@ -0,0 +1 @@ +Improve performance of user directory search. diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py index a0319575f071..b0a06baf4f0c 100644 --- a/synapse/storage/databases/main/user_directory.py +++ b/synapse/storage/databases/main/user_directory.py @@ -1061,12 +1061,15 @@ async def search_user_dir( # The array of numbers are the weights for the various part of the # search: (domain, _, display name, localpart) sql = """ + WITH matching_users AS ( + SELECT user_id, vector FROM user_directory_search WHERE vector @@ to_tsquery('simple', ?) + LIMIT 10000 + ) SELECT d.user_id AS user_id, display_name, avatar_url - FROM user_directory_search as t + FROM matching_users as t INNER JOIN user_directory AS d USING (user_id) WHERE %(where_clause)s - AND vector @@ to_tsquery('simple', ?) ORDER BY (CASE WHEN d.user_id IS NOT NULL THEN 4.0 ELSE 1.0 END) * (CASE WHEN display_name IS NOT NULL THEN 1.2 ELSE 1.0 END) @@ -1095,8 +1098,9 @@ async def search_user_dir( "order_case_statements": " ".join(additional_ordering_statements), } args = ( - join_args - + (full_query, exact_query, prefix_query) + (full_query,) + + join_args + + (exact_query, prefix_query) + ordering_arguments + (limit + 1,) ) From 33c3550887f412f015cf651db82a9082bb12cd9e Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 6 Jun 2023 16:25:03 -0500 Subject: [PATCH 48/75] Add context for when/why to use the `long_retries` option when sending Federation requests (#15721) --- changelog.d/15721.misc | 1 + synapse/http/matrixfederationclient.py | 11 +++++++++-- 2 files changed, 10 insertions(+), 2 deletions(-) create mode 100644 changelog.d/15721.misc diff --git a/changelog.d/15721.misc b/changelog.d/15721.misc new file mode 100644 index 000000000000..f4d892daf970 --- /dev/null +++ b/changelog.d/15721.misc @@ -0,0 +1 @@ +Add context for when/why to use the `long_retries` option when sending Federation requests. diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 9094dab0feb7..abb5ae581521 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -499,8 +499,15 @@ async def _send_request( Note that the above intervals are *in addition* to the time spent waiting for the request to complete (up to `timeout` ms). - NB: the long retry algorithm takes over 20 minutes to complete, with - a default timeout of 60s! + NB: the long retry algorithm takes over 20 minutes to complete, with a + default timeout of 60s! 
It's best not to use the `long_retries` option
+            for something that is blocking a client, so we don't make them wait
+            for ages; whereas for some things, like sending transactions (server
+            to server), we can be a lot more lenient, but it's very fuzzy /
+            hand-wavy.
+
+            In the future, we could be more intelligent about doing this sort of
+            thing by looking at things with the bigger picture in mind,
+            https://github.com/matrix-org/synapse/issues/8917

             ignore_backoff: true to ignore the historical backoff data and
                 try the request anyway.

From 4e6390cb10676d3f621319663587f49baa57bedc Mon Sep 17 00:00:00 2001
From: Eric Eastwood
Date: Tue, 6 Jun 2023 16:26:12 -0500
Subject: [PATCH 49/75] Update error to more plainly explain we can only
 authorize our own events (#15725)

---
 changelog.d/15725.misc                  | 1 +
 synapse/federation/federation_server.py | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
 create mode 100644 changelog.d/15725.misc

diff --git a/changelog.d/15725.misc b/changelog.d/15725.misc
new file mode 100644
index 000000000000..6c7a8a41d883
--- /dev/null
+++ b/changelog.d/15725.misc
@@ -0,0 +1 @@
+Update federation error to more plainly explain we can only authorise our own membership events.
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index 149351dda025..9425b3250703 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -944,7 +944,7 @@ async def _on_send_membership_event(
         if not self._is_mine_server_name(authorising_server):
             raise SynapseError(
                 400,
-                f"Cannot authorise request from resident server: {authorising_server}",
+                f"Cannot authorise membership event for {authorising_server}. We can only authorise requests from our own homeserver",
             )

         event.signatures.update(

From 8bfded81f3378ab6333f174e182f2aae6ef01f49 Mon Sep 17 00:00:00 2001
From: Eric Eastwood
Date: Tue, 6 Jun 2023 17:39:22 -0500
Subject: [PATCH 50/75] Trace functions which return `Awaitable` (#15650)

---
 changelog.d/15650.misc            |  1 +
 synapse/logging/opentracing.py    | 37 ++++++++++++++++++--------
 tests/logging/test_opentracing.py | 43 +++++++++++++++++++++++--------
 3 files changed, 59 insertions(+), 22 deletions(-)
 create mode 100644 changelog.d/15650.misc

diff --git a/changelog.d/15650.misc b/changelog.d/15650.misc
new file mode 100644
index 000000000000..9bbad113e11e
--- /dev/null
+++ b/changelog.d/15650.misc
@@ -0,0 +1 @@
+Add support for tracing functions which return `Awaitable`s.
diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py
index c70eee649c57..75217e3f45bb 100644
--- a/synapse/logging/opentracing.py
+++ b/synapse/logging/opentracing.py
@@ -171,6 +171,7 @@ def set_fates(clotho, lachesis, atropos, father="Zues", mother="Themis"):
 from typing import (
     TYPE_CHECKING,
     Any,
+    Awaitable,
     Callable,
     Collection,
     ContextManager,
@@ -903,6 +904,7 @@ def _wrapping_logic(func: Callable[P, R], *args: P.args, **kwargs: P.kwargs) ->
     """

     if inspect.iscoroutinefunction(func):
+        # For this branch, we handle async functions like `async def func() -> RInner`.
         # In this branch, R = Awaitable[RInner], for some other type RInner
         @wraps(func)
         async def _wrapper(
@@ -914,15 +916,16 @@ async def _wrapper(
                 return await func(*args, **kwargs)  # type: ignore[misc]

     else:
-        # The other case here handles both sync functions and those
-        # decorated with inlineDeferred.
+        # The other case here handles sync functions including those decorated with
+        # `@defer.inlineCallbacks` or that return a `Deferred` or other `Awaitable`.
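+        #
+        # For example (illustrative, mirroring the tests in this patch), all
+        # three of these now get a span that is closed at the right time:
+        #     async def f() -> str: ...              # coroutine function
+        #     def g() -> "defer.Deferred[str]": ...  # returns a Deferred
+        #     def h() -> Awaitable[str]: ...         # sync, returns a coroutine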
@wraps(func) - def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + def _wrapper(*args: P.args, **kwargs: P.kwargs) -> Any: scope = wrapping_logic(func, *args, **kwargs) scope.__enter__() try: result = func(*args, **kwargs) + if isinstance(result, defer.Deferred): def call_back(result: R) -> R: @@ -930,20 +933,32 @@ def call_back(result: R) -> R: return result def err_back(result: R) -> R: + # TODO: Pass the error details into `scope.__exit__(...)` for + # consistency with the other paths. scope.__exit__(None, None, None) return result result.addCallbacks(call_back, err_back) + elif inspect.isawaitable(result): + + async def wrap_awaitable() -> Any: + try: + assert isinstance(result, Awaitable) + awaited_result = await result + scope.__exit__(None, None, None) + return awaited_result + except Exception as e: + scope.__exit__(type(e), None, e.__traceback__) + raise + + # The original method returned an awaitable, eg. a coroutine, so we + # create another awaitable wrapping it that calls + # `scope.__exit__(...)`. + return wrap_awaitable() else: - if inspect.isawaitable(result): - logger.error( - "@trace may not have wrapped %s correctly! " - "The function is not async but returned a %s.", - func.__qualname__, - type(result).__name__, - ) - + # Just a simple sync function so we can just exit the scope and + # return the result without any fuss. scope.__exit__(None, None, None) return result diff --git a/tests/logging/test_opentracing.py b/tests/logging/test_opentracing.py index e28ba84cc2b7..1bc7d64ad9c4 100644 --- a/tests/logging/test_opentracing.py +++ b/tests/logging/test_opentracing.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import cast +from typing import Awaitable, cast from twisted.internet import defer from twisted.test.proto_helpers import MemoryReactorClock @@ -227,8 +227,6 @@ def test_trace_decorator_deferred(self) -> None: Test whether we can use `@trace_with_opname` (`@trace`) and `@tag_args` with functions that return deferreds """ - reactor = MemoryReactorClock() - with LoggingContext("root context"): @trace_with_opname("fixture_deferred_func", tracer=self._tracer) @@ -240,9 +238,6 @@ def fixture_deferred_func() -> "defer.Deferred[str]": result_d1 = fixture_deferred_func() - # let the tasks complete - reactor.pump((2,) * 8) - self.assertEqual(self.successResultOf(result_d1), "foo") # the span should have been reported @@ -256,8 +251,6 @@ def test_trace_decorator_async(self) -> None: Test whether we can use `@trace_with_opname` (`@trace`) and `@tag_args` with async functions """ - reactor = MemoryReactorClock() - with LoggingContext("root context"): @trace_with_opname("fixture_async_func", tracer=self._tracer) @@ -267,9 +260,6 @@ async def fixture_async_func() -> str: d1 = defer.ensureDeferred(fixture_async_func()) - # let the tasks complete - reactor.pump((2,) * 8) - self.assertEqual(self.successResultOf(d1), "foo") # the span should have been reported @@ -277,3 +267,34 @@ async def fixture_async_func() -> str: [span.operation_name for span in self._reporter.get_spans()], ["fixture_async_func"], ) + + def test_trace_decorator_awaitable_return(self) -> None: + """ + Test whether we can use `@trace_with_opname` (`@trace`) and `@tag_args` + with functions that return an awaitable (e.g. 
a coroutine) + """ + with LoggingContext("root context"): + # Something we can return without `await` to get a coroutine + async def fixture_async_func() -> str: + return "foo" + + # The actual kind of function we want to test that returns an awaitable + @trace_with_opname("fixture_awaitable_return_func", tracer=self._tracer) + @tag_args + def fixture_awaitable_return_func() -> Awaitable[str]: + return fixture_async_func() + + # Something we can run with `defer.ensureDeferred(runner())` and pump the + # whole async tasks through to completion. + async def runner() -> str: + return await fixture_awaitable_return_func() + + d1 = defer.ensureDeferred(runner()) + + self.assertEqual(self.successResultOf(d1), "foo") + + # the span should have been reported + self.assertEqual( + [span.operation_name for span in self._reporter.get_spans()], + ["fixture_awaitable_return_func"], + ) From 9d911b0da651893e0b67cb3506e18582cb0d95b5 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 6 Jun 2023 22:19:57 -0500 Subject: [PATCH 51/75] No need for the extra join since `membership` is built-in to `current_state_events` (#15731) This helps with the upstream `is_host_joined()` and `is_host_invited()` functions. `membership` was added to `current_state_events` in https://github.com/matrix-org/synapse/pull/5706 and forced in https://github.com/matrix-org/synapse/pull/13745 --- changelog.d/15731.misc | 1 + synapse/storage/databases/main/roommember.py | 7 +++---- 2 files changed, 4 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15731.misc diff --git a/changelog.d/15731.misc b/changelog.d/15731.misc new file mode 100644 index 000000000000..906bc2696254 --- /dev/null +++ b/changelog.d/15731.misc @@ -0,0 +1 @@ +Remove redundant table join with `room_memberships` when doing a `is_host_joined()`/`is_host_invited()` call (`membership` is already part of the `current_state_events`). diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index ae9c201b87e8..1b8ec67f5487 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -927,11 +927,10 @@ async def _check_host_room_membership( raise Exception("Invalid host name") sql = """ - SELECT state_key FROM current_state_events AS c - INNER JOIN room_memberships AS m USING (event_id) - WHERE m.membership = ? + SELECT state_key FROM current_state_events + WHERE membership = ? AND type = 'm.room.member' - AND c.room_id = ? + AND room_id = ? AND state_key LIKE ? LIMIT 1 """ From 5c24d7b9ebd8dec2c76dac5118cee22a1bb1032a Mon Sep 17 00:00:00 2001 From: Grant McLean Date: Thu, 8 Jun 2023 03:21:25 +1200 Subject: [PATCH 52/75] Check required power levels earlier in createRoom handler. (#15695) * Check required power levels earlier in createRoom handler. - If a server was configured to reject the creation of rooms with E2EE enabled (by specifying an unattainably high power level for "m.room.encryption" in default_power_level_content_override), the 403 error was not being triggered until after the room was created and before the "m.room.power_levels" was sent. This allowed a user to access the partially-configured room and complete the setup of E2EE and power levels manually. - This change causes the power level overrides to be checked earlier and the request to be rejected before the user gains access to the room. - A new `_validate_room_config` method is added to contain checks that should be run before a room is created. 
- The new test case confirms that a user request is rejected by the new validation method. Signed-off-by: Grant McLean * Add a changelog file. * Formatting fix for black. * Remove unneeded line from test. --------- Signed-off-by: Grant McLean --- changelog.d/15695.bugfix | 1 + synapse/handlers/room.py | 76 +++++++++++++++++++++++++++------ tests/rest/client/test_rooms.py | 37 ++++++++++++++++ 3 files changed, 100 insertions(+), 14 deletions(-) create mode 100644 changelog.d/15695.bugfix diff --git a/changelog.d/15695.bugfix b/changelog.d/15695.bugfix new file mode 100644 index 000000000000..99bf1fe05e34 --- /dev/null +++ b/changelog.d/15695.bugfix @@ -0,0 +1 @@ +Check permissions for enabling encryption earlier during room creation to avoid creating broken rooms. diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index cb957f2033a1..bf907b78815c 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -872,6 +872,8 @@ async def create_room( visibility = config.get("visibility", "private") is_public = visibility == "public" + self._validate_room_config(config, visibility) + room_id = await self._generate_and_create_room_id( creator_id=user_id, is_public=is_public, @@ -1111,20 +1113,7 @@ async def create_event( return new_event, new_unpersisted_context - visibility = room_config.get("visibility", "private") - preset_config = room_config.get( - "preset", - RoomCreationPreset.PRIVATE_CHAT - if visibility == "private" - else RoomCreationPreset.PUBLIC_CHAT, - ) - - try: - config = self._presets_dict[preset_config] - except KeyError: - raise SynapseError( - 400, f"'{preset_config}' is not a valid preset", errcode=Codes.BAD_JSON - ) + preset_config, config = self._room_preset_config(room_config) # MSC2175 removes the creator field from the create event. if not room_version.msc2175_implicit_room_creator: @@ -1306,6 +1295,65 @@ async def create_event( assert last_event.internal_metadata.stream_ordering is not None return last_event.internal_metadata.stream_ordering, last_event.event_id, depth + def _validate_room_config( + self, + config: JsonDict, + visibility: str, + ) -> None: + """Checks configuration parameters for a /createRoom request. + + If validation detects invalid parameters an exception may be raised to + cause room creation to be aborted and an error response to be returned + to the client. + + Args: + config: A dict of configuration options. Originally from the body of + the /createRoom request + visibility: One of "public" or "private" + """ + + # Validate the requested preset, raise a 400 error if not valid + preset_name, preset_config = self._room_preset_config(config) + + # If the user is trying to create an encrypted room and this is forbidden + # by the configured default_power_level_content_override, then reject the + # request before the room is created. + raw_initial_state = config.get("initial_state", []) + room_encryption_event = any( + s.get("type", "") == EventTypes.RoomEncryption for s in raw_initial_state + ) + + if preset_config["encrypted"] or room_encryption_event: + if self._default_power_level_content_override: + override = self._default_power_level_content_override.get(preset_name) + if override is not None: + event_levels = override.get("events", {}) + room_admin_level = event_levels.get(EventTypes.PowerLevels, 100) + encryption_level = event_levels.get(EventTypes.RoomEncryption, 100) + if encryption_level > room_admin_level: + raise SynapseError( + 403, + f"You cannot create an encrypted room. 
user_level ({room_admin_level}) < send_level ({encryption_level})", + ) + + def _room_preset_config(self, room_config: JsonDict) -> Tuple[str, dict]: + # The spec says rooms should default to private visibility if + # `visibility` is not specified. + visibility = room_config.get("visibility", "private") + preset_name = room_config.get( + "preset", + RoomCreationPreset.PRIVATE_CHAT + if visibility == "private" + else RoomCreationPreset.PUBLIC_CHAT, + ) + try: + preset_config = self._presets_dict[preset_name] + except KeyError: + raise SynapseError( + 400, f"'{preset_name}' is not a valid preset", errcode=Codes.BAD_JSON + ) + return preset_name, preset_config + def _generate_room_id(self) -> str: """Generates a random room ID. diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index 4d39c89f6f19..f1b4e1ad2fc1 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -1941,6 +1941,43 @@ def test_config_override_applies_only_to_specific_preset(self) -> None: channel.json_body["error"], ) + @unittest.override_config( + { + "default_power_level_content_override": { + "private_chat": { + "events": { + "m.room.avatar": 50, + "m.room.canonical_alias": 50, + "m.room.encryption": 999, + "m.room.history_visibility": 100, + "m.room.name": 50, + "m.room.power_levels": 100, + "m.room.server_acl": 100, + "m.room.tombstone": 100, + }, + "events_default": 0, + }, + } + }, + ) + def test_config_override_blocks_encrypted_room(self) -> None: + # Given the server has config for private_chats, + + # When I attempt to create an encrypted private_chat room + channel = self.make_request( + "POST", + "/createRoom", + '{"creation_content": {"m.federate": false},"name": "Secret Private Room","preset": "private_chat","initial_state": [{"type": "m.room.encryption","state_key": "","content": {"algorithm": "m.megolm.v1.aes-sha2"}}]}', + ) + + # Then I am not allowed because the required power level is unattainable + self.assertEqual(HTTPStatus.FORBIDDEN, channel.code, msg=channel.result["body"]) + self.assertEqual( + "You cannot create an encrypted room. " + + "user_level (100) < send_level (999)", + channel.json_body["error"], + ) + class RoomInitialSyncTestCase(RoomBase): """Tests /rooms/$room_id/initialSync.""" From 195b6a298d509518bf16d5a421d706ecb2ccdce6 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 7 Jun 2023 11:45:16 -0500 Subject: [PATCH 53/75] Remove redundant `room_memberships` join to find participating servers in a room (#15732) Spawning from https://github.com/matrix-org/synapse/pull/15731 --- changelog.d/15732.doc | 1 + docs/usage/administration/admin_faq.md | 5 ++--- 2 files changed, 3 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15732.doc diff --git a/changelog.d/15732.doc b/changelog.d/15732.doc new file mode 100644 index 000000000000..b0e8639df78d --- /dev/null +++ b/changelog.d/15732.doc @@ -0,0 +1 @@ +Simplify query to find participating servers in a room. diff --git a/docs/usage/administration/admin_faq.md b/docs/usage/administration/admin_faq.md index 28c3dd53a5f4..5c9ee7d0aa6f 100644 --- a/docs/usage/administration/admin_faq.md +++ b/docs/usage/administration/admin_faq.md @@ -27,9 +27,8 @@ What servers are currently participating in this room? 
Run this sql query on your db: ```sql SELECT DISTINCT split_part(state_key, ':', 2) - FROM current_state_events AS c - INNER JOIN room_memberships AS m USING (room_id, event_id) - WHERE room_id = '!cURbafjkfsMDVwdRDQ:matrix.org' AND membership = 'join'; +FROM current_state_events +WHERE room_id = '!cURbafjkfsMDVwdRDQ:matrix.org' AND membership = 'join'; ``` What users are registered on my server? From e536f02f68135a8494f80ded75d1a53b98cbcb8d Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 7 Jun 2023 11:47:01 -0500 Subject: [PATCH 54/75] Remove superfluous `room_memberships` join from background update (#15733) Spawning from https://github.com/matrix-org/synapse/pull/15731 --- changelog.d/15733.misc | 1 + synapse/storage/databases/main/roommember.py | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 changelog.d/15733.misc diff --git a/changelog.d/15733.misc b/changelog.d/15733.misc new file mode 100644 index 000000000000..3ae7be3c27d1 --- /dev/null +++ b/changelog.d/15733.misc @@ -0,0 +1 @@ +Remove superfluous `room_memberships` join from background update. diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index 1b8ec67f5487..582875c91a5b 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -1460,7 +1460,6 @@ def add_membership_profile_txn(txn: LoggingTransaction) -> int: SELECT stream_ordering, event_id, events.room_id, event_json.json FROM events INNER JOIN event_json USING (event_id) - INNER JOIN room_memberships USING (event_id) WHERE ? <= stream_ordering AND stream_ordering < ? AND type = 'm.room.member' ORDER BY stream_ordering DESC From d162aecaac52fb467822e319e4c3c5b216c33ca9 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Wed, 7 Jun 2023 18:12:23 +0100 Subject: [PATCH 55/75] Quick & dirty metric for background update status (#15740) * Quick & dirty metric for background update status * Changelog * Remove debug Co-authored-by: Mathieu Velten * Actually write to _aborted --------- Co-authored-by: Mathieu Velten --- changelog.d/15740.feature | 1 + synapse/metrics/__init__.py | 2 ++ synapse/storage/background_updates.py | 30 +++++++++++++++++++++++++++ synapse/storage/database.py | 8 ++++++- 4 files changed, 40 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15740.feature diff --git a/changelog.d/15740.feature b/changelog.d/15740.feature new file mode 100644 index 000000000000..fed342ea55d8 --- /dev/null +++ b/changelog.d/15740.feature @@ -0,0 +1 @@ +Expose a metric reporting the database background update status. diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 8ce58872293a..39fc629937a8 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -77,6 +77,8 @@ def collect() -> Iterable[Metric]: @attr.s(slots=True, hash=True, auto_attribs=True) class LaterGauge(Collector): + """A Gauge which periodically calls a user-provided callback to produce metrics.""" + name: str desc: str labels: Optional[Sequence[str]] = attr.ib(hash=False) diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index ca085ef8000f..edc97a9d6105 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging +from enum import IntEnum from types import TracebackType from typing import ( TYPE_CHECKING, @@ -136,6 +137,15 @@ def total_items_per_ms(self) -> Optional[float]: return float(self.total_item_count) / float(self.total_duration_ms) +class UpdaterStatus(IntEnum): + # Use negative values for error conditions. + ABORTED = -1 + DISABLED = 0 + NOT_STARTED = 1 + RUNNING_UPDATE = 2 + COMPLETE = 3 + + class BackgroundUpdater: """Background updates are updates to the database that run in the background. Each update processes a batch of data at once. We attempt to @@ -158,11 +168,16 @@ def __init__(self, hs: "HomeServer", database: "DatabasePool"): self._background_update_performance: Dict[str, BackgroundUpdatePerformance] = {} self._background_update_handlers: Dict[str, _BackgroundUpdateHandler] = {} + # TODO: all these bool flags make me feel icky---can we combine into a status + # enum? self._all_done = False # Whether we're currently running updates self._running = False + # Marker to be set if we abort and halt all background updates. + self._aborted = False + # Whether background updates are enabled. This allows us to # enable/disable background updates via the admin API. self.enabled = True @@ -175,6 +190,20 @@ def __init__(self, hs: "HomeServer", database: "DatabasePool"): self.sleep_duration_ms = hs.config.background_updates.sleep_duration_ms self.sleep_enabled = hs.config.background_updates.sleep_enabled + def get_status(self) -> UpdaterStatus: + """An integer summarising the updater status. Used as a metric.""" + if self._aborted: + return UpdaterStatus.ABORTED + # TODO: a status for "have seen at least one failure, but haven't aborted yet". + if not self.enabled: + return UpdaterStatus.DISABLED + + if self._all_done: + return UpdaterStatus.COMPLETE + if self._running: + return UpdaterStatus.RUNNING_UPDATE + return UpdaterStatus.NOT_STARTED + def register_update_controller_callbacks( self, on_update: ON_UPDATE_CALLBACK, @@ -296,6 +325,7 @@ async def run_background_updates(self, sleep: bool) -> None: except Exception: back_to_back_failures += 1 if back_to_back_failures >= 5: + self._aborted = True raise RuntimeError( "5 back-to-back background update failures; aborting." 
) diff --git a/synapse/storage/database.py b/synapse/storage/database.py index bdaa508dbe12..10fa6c4802ee 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -54,7 +54,7 @@ current_context, make_deferred_yieldable, ) -from synapse.metrics import register_threadpool +from synapse.metrics import LaterGauge, register_threadpool from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage.background_updates import BackgroundUpdater from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine @@ -547,6 +547,12 @@ def __init__( self._db_pool = make_pool(hs.get_reactor(), database_config, engine) self.updates = BackgroundUpdater(hs, self) + LaterGauge( + "synapse_background_update_status", + "Background update status", + [], + self.updates.get_status, + ) self._previous_txn_total_time = 0.0 self._current_txn_total_time = 0.0 From c485ed1c5a4c62ae555531cfd001a5e5f8bc2e44 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 8 Jun 2023 13:14:40 +0100 Subject: [PATCH 56/75] Clear event caches when we purge history (#15609) This should help a little with #13476 --------- Co-authored-by: Patrick Cloke --- changelog.d/15609.bugfix | 1 + synapse/storage/_base.py | 31 ++++ synapse/storage/databases/main/cache.py | 134 ++++++++++++++++++ .../storage/databases/main/events_worker.py | 9 ++ .../storage/databases/main/purge_events.py | 8 +- synapse/util/caches/lrucache.py | 2 +- tests/handlers/test_sync.py | 2 +- tests/rest/client/test_read_marker.py | 3 - .../databases/main/test_events_worker.py | 8 +- 9 files changed, 184 insertions(+), 14 deletions(-) create mode 100644 changelog.d/15609.bugfix diff --git a/changelog.d/15609.bugfix b/changelog.d/15609.bugfix new file mode 100644 index 000000000000..b5a990cfec1e --- /dev/null +++ b/changelog.d/15609.bugfix @@ -0,0 +1 @@ +Correctly clear caches when we delete a room. diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 481fec72fe1b..fe4a76341137 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -86,9 +86,14 @@ def _invalidate_state_caches( room_id: Room where state changed members_changed: The user_ids of members that have changed """ + + # XXX: If you add something to this function make sure you add it to + # `_invalidate_state_caches_all` as well. + # If there were any membership changes, purge the appropriate caches. for host in {get_domain_from_id(u) for u in members_changed}: self._attempt_to_invalidate_cache("is_host_joined", (room_id, host)) + self._attempt_to_invalidate_cache("is_host_invited", (room_id, host)) if members_changed: self._attempt_to_invalidate_cache("get_users_in_room", (room_id,)) self._attempt_to_invalidate_cache("get_current_hosts_in_room", (room_id,)) @@ -117,6 +122,32 @@ def _invalidate_state_caches( self._attempt_to_invalidate_cache("get_room_summary", (room_id,)) self._attempt_to_invalidate_cache("get_partial_current_state_ids", (room_id,)) + def _invalidate_state_caches_all(self, room_id: str) -> None: + """Invalidates caches that are based on the current state, but does + not stream invalidations down replication. + + Same as `_invalidate_state_caches`, except that works when we don't know + which memberships have changed. 
+
+        Args:
+            room_id: Room where state changed
+        """
+        self._attempt_to_invalidate_cache("get_partial_current_state_ids", (room_id,))
+        self._attempt_to_invalidate_cache("get_users_in_room", (room_id,))
+        self._attempt_to_invalidate_cache("is_host_invited", None)
+        self._attempt_to_invalidate_cache("is_host_joined", None)
+        self._attempt_to_invalidate_cache("get_current_hosts_in_room", (room_id,))
+        self._attempt_to_invalidate_cache("get_users_in_room_with_profiles", (room_id,))
+        self._attempt_to_invalidate_cache("get_number_joined_users_in_room", (room_id,))
+        self._attempt_to_invalidate_cache("get_local_users_in_room", (room_id,))
+        self._attempt_to_invalidate_cache("does_pair_of_users_share_a_room", None)
+        self._attempt_to_invalidate_cache("get_user_in_room_with_profile", None)
+        self._attempt_to_invalidate_cache(
+            "get_rooms_for_user_with_stream_ordering", None
+        )
+        self._attempt_to_invalidate_cache("get_rooms_for_user", None)
+        self._attempt_to_invalidate_cache("get_room_summary", (room_id,))
+
     def _attempt_to_invalidate_cache(
         self, cache_name: str, key: Optional[Collection[Any]]
     ) -> bool:
diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py
index 46fa0a73f9e4..6e1c7d681fe5 100644
--- a/synapse/storage/databases/main/cache.py
+++ b/synapse/storage/databases/main/cache.py
@@ -46,6 +46,12 @@
 # based on the current state when notifying workers over replication.
 CURRENT_STATE_CACHE_NAME = "cs_cache_fake"
 
+# As above, but for invalidating event caches on history deletion
+PURGE_HISTORY_CACHE_NAME = "ph_cache_fake"
+
+# As above, but for invalidating room caches on room deletion
+DELETE_ROOM_CACHE_NAME = "dr_cache_fake"
+
 
 class CacheInvalidationWorkerStore(SQLBaseStore):
     def __init__(
@@ -175,6 +181,23 @@ def process_replication_rows(
             room_id = row.keys[0]
             members_changed = set(row.keys[1:])
             self._invalidate_state_caches(room_id, members_changed)
+        elif row.cache_func == PURGE_HISTORY_CACHE_NAME:
+            if row.keys is None:
+                raise Exception(
+                    "Can't send an 'invalidate all' for 'purge history' cache"
+                )
+
+            room_id = row.keys[0]
+            self._invalidate_caches_for_room_events(room_id)
+        elif row.cache_func == DELETE_ROOM_CACHE_NAME:
+            if row.keys is None:
+                raise Exception(
+                    "Can't send an 'invalidate all' for 'delete room' cache"
+                )
+
+            room_id = row.keys[0]
+            self._invalidate_caches_for_room_events(room_id)
+            self._invalidate_caches_for_room(room_id)
         else:
             self._attempt_to_invalidate_cache(row.cache_func, row.keys)
 
@@ -226,6 +249,9 @@ def _invalidate_caches_for_event(
         relates_to: Optional[str],
         backfilled: bool,
     ) -> None:
+        # XXX: If you add something to this function make sure you add it to
+        # `_invalidate_caches_for_room_events` as well.
+
         # This invalidates any local in-memory cached event objects, the original
         # process triggering the invalidation is responsible for clearing any external
         # cached objects.
@@ -271,6 +297,106 @@ def _invalidate_caches_for_event(
         self._attempt_to_invalidate_cache("get_thread_participated", (relates_to,))
         self._attempt_to_invalidate_cache("get_threads", (room_id,))
 
+    def _invalidate_caches_for_room_events_and_stream(
+        self, txn: LoggingTransaction, room_id: str
+    ) -> None:
+        """Invalidate caches associated with events in a room, and stream to
+        replication.
+
+        Used when we delete events in a room, but don't know which events we've
+        deleted.
+        """
+
+        self._send_invalidation_to_replication(txn, PURGE_HISTORY_CACHE_NAME, [room_id])
+        txn.call_after(self._invalidate_caches_for_room_events, room_id)
+
+    def _invalidate_caches_for_room_events(self, room_id: str) -> None:
+        """Invalidate caches associated with events in a room.
+
+        Used when we delete events in a room, but don't know which events we've
+        deleted.
+        """
+
+        self._invalidate_local_get_event_cache_all()  # type: ignore[attr-defined]
+
+        self._attempt_to_invalidate_cache("have_seen_event", (room_id,))
+        self._attempt_to_invalidate_cache("get_latest_event_ids_in_room", (room_id,))
+        self._attempt_to_invalidate_cache(
+            "get_unread_event_push_actions_by_room_for_user", (room_id,)
+        )
+
+        self._attempt_to_invalidate_cache("_get_membership_from_event_id", None)
+        self._attempt_to_invalidate_cache("get_relations_for_event", None)
+        self._attempt_to_invalidate_cache("get_applicable_edit", None)
+        self._attempt_to_invalidate_cache("get_thread_id", None)
+        self._attempt_to_invalidate_cache("get_thread_id_for_receipts", None)
+        self._attempt_to_invalidate_cache("get_invited_rooms_for_local_user", None)
+        self._attempt_to_invalidate_cache(
+            "get_rooms_for_user_with_stream_ordering", None
+        )
+        self._attempt_to_invalidate_cache("get_rooms_for_user", None)
+        self._attempt_to_invalidate_cache("get_references_for_event", None)
+        self._attempt_to_invalidate_cache("get_thread_summary", None)
+        self._attempt_to_invalidate_cache("get_thread_participated", None)
+        self._attempt_to_invalidate_cache("get_threads", (room_id,))
+
+        self._attempt_to_invalidate_cache("_get_state_group_for_event", None)
+
+        self._attempt_to_invalidate_cache("get_event_ordering", None)
+        self._attempt_to_invalidate_cache("is_partial_state_event", None)
+        self._attempt_to_invalidate_cache("_get_joined_profile_from_event_id", None)
+
+    def _invalidate_caches_for_room_and_stream(
+        self, txn: LoggingTransaction, room_id: str
+    ) -> None:
+        """Invalidate caches associated with rooms, and stream to replication.
+
+        Used when we delete rooms.
+        """
+
+        self._send_invalidation_to_replication(txn, DELETE_ROOM_CACHE_NAME, [room_id])
+        txn.call_after(self._invalidate_caches_for_room, room_id)
+
+    def _invalidate_caches_for_room(self, room_id: str) -> None:
+        """Invalidate caches associated with rooms.
+
+        Used when we delete rooms.
+        """
+
+        # If we've deleted the room then we also need to purge all event caches.
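+        # The event caches are keyed on event ID rather than room ID, so we
+        # can't pick out just this room's entries; the helper below therefore
+        # clears the local in-memory event caches wholesale before the
+        # room-scoped caches are invalidated individually.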
+ self._invalidate_caches_for_room_events(room_id) + + self._attempt_to_invalidate_cache("get_account_data_for_room", None) + self._attempt_to_invalidate_cache("get_account_data_for_room_and_type", None) + self._attempt_to_invalidate_cache("get_aliases_for_room", (room_id,)) + self._attempt_to_invalidate_cache("get_latest_event_ids_in_room", (room_id,)) + self._attempt_to_invalidate_cache("_get_forward_extremeties_for_room", None) + self._attempt_to_invalidate_cache( + "get_unread_event_push_actions_by_room_for_user", (room_id,) + ) + self._attempt_to_invalidate_cache( + "_get_linearized_receipts_for_room", (room_id,) + ) + self._attempt_to_invalidate_cache("is_room_blocked", (room_id,)) + self._attempt_to_invalidate_cache("get_retention_policy_for_room", (room_id,)) + self._attempt_to_invalidate_cache( + "_get_partial_state_servers_at_join", (room_id,) + ) + self._attempt_to_invalidate_cache("is_partial_state_room", (room_id,)) + self._attempt_to_invalidate_cache("get_invited_rooms_for_local_user", None) + self._attempt_to_invalidate_cache( + "get_current_hosts_in_room_ordered", (room_id,) + ) + self._attempt_to_invalidate_cache("did_forget", None) + self._attempt_to_invalidate_cache("get_forgotten_rooms_for_user", None) + self._attempt_to_invalidate_cache("_get_membership_from_event_id", None) + self._attempt_to_invalidate_cache("get_room_version_id", (room_id,)) + + # And delete state caches. + + self._invalidate_state_caches_all(room_id) + async def invalidate_cache_and_stream( self, cache_name: str, keys: Tuple[Any, ...] ) -> None: @@ -377,6 +503,14 @@ def _send_invalidation_to_replication( "Can't stream invalidate all with magic current state cache" ) + if cache_name == PURGE_HISTORY_CACHE_NAME and keys is None: + raise Exception( + "Can't stream invalidate all with magic purge history cache" + ) + + if cache_name == DELETE_ROOM_CACHE_NAME and keys is None: + raise Exception("Can't stream invalidate all with magic delete room cache") + if isinstance(self.database_engine, PostgresEngine): assert self._cache_id_gen is not None diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index a39bc9097439..d93ffc4efa75 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -903,6 +903,15 @@ def _invalidate_local_get_event_cache(self, event_id: str) -> None: self._event_ref.pop(event_id, None) self._current_event_fetches.pop(event_id, None) + def _invalidate_local_get_event_cache_all(self) -> None: + """Clears the in-memory get event caches. + + Used when we purge room history. + """ + self._get_event_cache.clear() + self._event_ref.clear() + self._current_event_fetches.clear() + async def _get_events_from_cache( self, events: Iterable[str], update_metrics: bool = True ) -> Dict[str, EventCacheEntry]: diff --git a/synapse/storage/databases/main/purge_events.py b/synapse/storage/databases/main/purge_events.py index efbd3e75d99e..9773c1fcd28a 100644 --- a/synapse/storage/databases/main/purge_events.py +++ b/synapse/storage/databases/main/purge_events.py @@ -308,6 +308,8 @@ def _purge_history_txn( logger.info("[purge] done") + self._invalidate_caches_for_room_events_and_stream(txn, room_id) + return referenced_state_groups async def purge_room(self, room_id: str) -> List[int]: @@ -485,10 +487,6 @@ def _purge_room_txn(self, txn: LoggingTransaction, room_id: str) -> List[int]: # index on them. 
In any case we should be clearing out 'stream' tables
         # periodically anyway (#5888)
 
-        # TODO: we could probably usefully do a bunch more cache invalidation here
-
-        # XXX: as with purge_history, this is racy, but no worse than other races
-        #      that already exist.
-        self._invalidate_cache_and_stream(txn, self.have_seen_event, (room_id,))
+        self._invalidate_caches_for_room_and_stream(txn, room_id)
 
         return state_groups
diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index ed0da17227d3..6137c85e1051 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -862,5 +862,5 @@ def invalidate_local(self, key: KT) -> None:
     async def contains(self, key: KT) -> bool:
         return self._lru_cache.contains(key)
 
-    async def clear(self) -> None:
+    def clear(self) -> None:
         self._lru_cache.clear()
diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py
index 0d9a3de92a5d..9f035a02dc69 100644
--- a/tests/handlers/test_sync.py
+++ b/tests/handlers/test_sync.py
@@ -163,7 +163,7 @@ def test_unknown_room_version(self) -> None:
         # Blow away caches (supported room versions can only change due to a restart).
         self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()
         self.store.get_rooms_for_user.invalidate_all()
-        self.get_success(self.store._get_event_cache.clear())
+        self.store._get_event_cache.clear()
         self.store._event_ref.clear()
 
         # The rooms should be excluded from the sync response.
diff --git a/tests/rest/client/test_read_marker.py b/tests/rest/client/test_read_marker.py
index 0eedcdb476b4..5cdd5694a04a 100644
--- a/tests/rest/client/test_read_marker.py
+++ b/tests/rest/client/test_read_marker.py
@@ -131,9 +131,6 @@ def send_message() -> str:
         event = self.get_success(self.store.get_event(event_id_1, allow_none=True))
         assert event is None
 
-        # TODO See https://github.com/matrix-org/synapse/issues/13476
-        self.store.get_event_ordering.invalidate_all()
-
         # Test moving the read marker to a newer event
         event_id_2 = send_message()
         channel = self.make_request(
diff --git a/tests/storage/databases/main/test_events_worker.py b/tests/storage/databases/main/test_events_worker.py
index 9606ecc43b6b..788500e38f2d 100644
--- a/tests/storage/databases/main/test_events_worker.py
+++ b/tests/storage/databases/main/test_events_worker.py
@@ -188,7 +188,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.event_id = res["event_id"]
 
         # Reset the event cache so the tests start with it empty
-        self.get_success(self.store._get_event_cache.clear())
+        self.store._get_event_cache.clear()
 
     def test_simple(self) -> None:
         """Test that we cache events that we pull from the DB."""
@@ -205,7 +205,7 @@ def test_event_ref(self) -> None:
         """
 
         # Reset the event cache
-        self.get_success(self.store._get_event_cache.clear())
+        self.store._get_event_cache.clear()
 
         with LoggingContext("test") as ctx:
             # We keep hold of the event even though we never use it.
@@ -215,7 +215,7 @@ def test_event_ref(self) -> None:
         self.assertEqual(ctx.get_resource_usage().evt_db_fetch_count, 1)
 
         # Reset the event cache
-        self.get_success(self.store._get_event_cache.clear())
+        self.store._get_event_cache.clear()
 
         with LoggingContext("test") as ctx:
             self.get_success(self.store.get_event(self.event_id))
@@ -390,7 +390,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.event_id = res["event_id"]
 
         # Reset the event cache so the tests start with it empty
-        self.get_success(self.store._get_event_cache.clear())
+        self.store._get_event_cache.clear()
 
     @contextmanager
     def blocking_get_event_calls(

From d84e66144dc12dacf71c987a2ba802dd59c0b68e Mon Sep 17 00:00:00 2001
From: Shay
Date: Fri, 9 Jun 2023 00:00:46 -0700
Subject: [PATCH 57/75] Allow for the configuration of max request retries and min/max retry delays in the matrix federation client (#12504)

Co-authored-by: Mathieu Velten
Co-authored-by: Erik Johnston
---
 changelog.d/12504.misc                     |  1 +
 .../configuration/config_documentation.md  | 26 +++++++++++++++++++
 synapse/config/federation.py               | 10 +++++++
 synapse/http/matrixfederationclient.py     | 21 ++++++++------
 tests/http/test_matrixfederationclient.py  | 20 +++++++++++++-
 5 files changed, 68 insertions(+), 10 deletions(-)
 create mode 100644 changelog.d/12504.misc

diff --git a/changelog.d/12504.misc b/changelog.d/12504.misc
new file mode 100644
index 000000000000..0bebaa213d9e
--- /dev/null
+++ b/changelog.d/12504.misc
@@ -0,0 +1 @@
+Allow for the configuration of max request retries and min/max retry delays in the matrix federation client.
diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
index 0cf6e075ff11..8426de04179b 100644
--- a/docs/usage/configuration/config_documentation.md
+++ b/docs/usage/configuration/config_documentation.md
@@ -1196,6 +1196,32 @@ Example configuration:
 allow_device_name_lookup_over_federation: true
 ```
 ---
+### `federation`
+
+The federation section defines some sub-options related to federation.
+
+The following options configure the timeout and retry logic for a single request,
+independently of the others.
+The short retry algorithm is used when something or someone is waiting for the
+response, while the long retry algorithm is used for requests that happen in the
+background, like sending a federation transaction.
+
+* `client_timeout`: timeout for federation requests in seconds. Defaults to 60s.
+* `max_short_retry_delay`: maximum delay to be used for the short retry algo in seconds. Defaults to 2s.
+* `max_long_retry_delay`: maximum delay to be used for the long retry algo in seconds. Defaults to 60s.
+* `max_short_retries`: maximum number of retries for the short retry algo. Defaults to 3 attempts.
+* `max_long_retries`: maximum number of retries for the long retry algo. Defaults to 10 attempts.
+
+Example configuration:
+```yaml
+federation:
+  client_timeout: 180
+  max_short_retry_delay: 7
+  max_long_retry_delay: 100
+  max_short_retries: 5
+  max_long_retries: 20
+```
+---
 ## Caching
 
 Options related to caching.
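
For intuition, here is a minimal sketch of the delay schedule these caps produce, assuming the back-off shape used in the client changes below (long retries grow as `4**n`, short retries as `0.5 * 2**(n - 1)`, each capped by the configured maximum and then jittered by a uniform 0.8-1.4 factor). The helper names and the 1-based attempt numbering are illustrative assumptions, not part of Synapse:

```python
import random


def long_retry_delay(n: int, max_long_retry_delay: float = 60.0) -> float:
    """Approximate delay (seconds) before the n-th long retry, n starting at 1."""
    return min(4.0**n, max_long_retry_delay) * random.uniform(0.8, 1.4)


def short_retry_delay(n: int, max_short_retry_delay: float = 2.0) -> float:
    """Approximate delay (seconds) before the n-th short retry, n starting at 1."""
    return min(0.5 * 2.0 ** (n - 1), max_short_retry_delay) * random.uniform(0.8, 1.4)


# With the defaults, long retries back off roughly 4s, 16s, 60s, 60s, ... and
# short retries roughly 0.5s, 1s, 2s (before jitter). Raising
# `max_long_retry_delay` therefore only affects the later attempts, once the
# exponential curve crosses the cap.
for n in range(1, 5):
    print(f"long retry {n}: ~{long_retry_delay(n):.1f}s")
```
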
diff --git a/synapse/config/federation.py b/synapse/config/federation.py index 336fca578aa1..d21f7fd02a5e 100644 --- a/synapse/config/federation.py +++ b/synapse/config/federation.py @@ -22,6 +22,8 @@ class FederationConfig(Config): section = "federation" def read_config(self, config: JsonDict, **kwargs: Any) -> None: + federation_config = config.setdefault("federation", {}) + # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist: Optional[dict] = None federation_domain_whitelist = config.get("federation_domain_whitelist", None) @@ -49,5 +51,13 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: "allow_device_name_lookup_over_federation", False ) + # Allow for the configuration of timeout, max request retries + # and min/max retry delays in the matrix federation client. + self.client_timeout = federation_config.get("client_timeout", 60) + self.max_long_retry_delay = federation_config.get("max_long_retry_delay", 60) + self.max_short_retry_delay = federation_config.get("max_short_retry_delay", 2) + self.max_long_retries = federation_config.get("max_long_retries", 10) + self.max_short_retries = federation_config.get("max_short_retries", 3) + _METRICS_FOR_DOMAINS_SCHEMA = {"type": "array", "items": {"type": "string"}} diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index abb5ae581521..ed36825b671c 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -95,8 +95,6 @@ ) -MAX_LONG_RETRIES = 10 -MAX_SHORT_RETRIES = 3 MAXINT = sys.maxsize @@ -406,7 +404,12 @@ def __init__( self.clock = hs.get_clock() self._store = hs.get_datastores().main self.version_string_bytes = hs.version_string.encode("ascii") - self.default_timeout = 60 + self.default_timeout = hs.config.federation.client_timeout + + self.max_long_retry_delay = hs.config.federation.max_long_retry_delay + self.max_short_retry_delay = hs.config.federation.max_short_retry_delay + self.max_long_retries = hs.config.federation.max_long_retries + self.max_short_retries = hs.config.federation.max_short_retries self._cooperator = Cooperator(scheduler=_make_scheduler(self.reactor)) @@ -583,9 +586,9 @@ async def _send_request( # XXX: Would be much nicer to retry only at the transaction-layer # (once we have reliable transactions in place) if long_retries: - retries_left = MAX_LONG_RETRIES + retries_left = self.max_long_retries else: - retries_left = MAX_SHORT_RETRIES + retries_left = self.max_short_retries url_bytes = request.uri url_str = url_bytes.decode("ascii") @@ -730,12 +733,12 @@ async def _send_request( if retries_left and not timeout: if long_retries: - delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left) - delay = min(delay, 60) + delay = 4 ** (self.max_long_retries + 1 - retries_left) + delay = min(delay, self.max_long_retry_delay) delay *= random.uniform(0.8, 1.4) else: - delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left) - delay = min(delay, 2) + delay = 0.5 * 2 ** (self.max_short_retries - retries_left) + delay = min(delay, self.max_short_retry_delay) delay *= random.uniform(0.8, 1.4) logger.debug( diff --git a/tests/http/test_matrixfederationclient.py b/tests/http/test_matrixfederationclient.py index 0dfc03ce50f4..8565f8ac64ad 100644 --- a/tests/http/test_matrixfederationclient.py +++ b/tests/http/test_matrixfederationclient.py @@ -40,7 +40,7 @@ from synapse.util import Clock from tests.server import FakeTransport -from tests.unittest import HomeserverTestCase +from tests.unittest import 
HomeserverTestCase, override_config def check_logcontext(context: LoggingContextOrSentinel) -> None: @@ -640,3 +640,21 @@ def test_build_auth_headers_rejects_falsey_destinations(self) -> None: self.cl.build_auth_headers( b"", b"GET", b"https://example.com", destination_is=b"" ) + + @override_config( + { + "federation": { + "client_timeout": 180, + "max_long_retry_delay": 100, + "max_short_retry_delay": 7, + "max_long_retries": 20, + "max_short_retries": 5, + } + } + ) + def test_configurable_retry_and_delay_values(self) -> None: + self.assertEqual(self.cl.default_timeout, 180) + self.assertEqual(self.cl.max_long_retry_delay, 100) + self.assertEqual(self.cl.max_short_retry_delay, 7) + self.assertEqual(self.cl.max_long_retries, 20) + self.assertEqual(self.cl.max_short_retries, 5) From 373c0c7ff7cf55b5f46aba43f4c4f9bba5c79c0e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 9 Jun 2023 15:00:30 +0100 Subject: [PATCH 58/75] Speed up typechecking CI (#15752) By restoring the rust cache before installing the project. --- .github/workflows/tests.yml | 8 ++++---- changelog.d/15752.misc | 1 + 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15752.misc diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index cf1899b580e8..02a4be3a24a1 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -92,6 +92,10 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 + - name: Install Rust + uses: dtolnay/rust-toolchain@1.58.1 + - uses: Swatinem/rust-cache@v2 + - name: Setup Poetry uses: matrix-org/setup-python-poetry@v1 with: @@ -103,10 +107,6 @@ jobs: # To make CI green, err towards caution and install the project. install-project: "true" - - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 - - uses: Swatinem/rust-cache@v2 - # Cribbed from # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17 - name: Restore/persist mypy's cache diff --git a/changelog.d/15752.misc b/changelog.d/15752.misc new file mode 100644 index 000000000000..7e373b12750b --- /dev/null +++ b/changelog.d/15752.misc @@ -0,0 +1 @@ +Speed up typechecking CI. From fcc3ca37e1b404981d9a0d6f2708e14407775b97 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 9 Jun 2023 15:39:49 -0500 Subject: [PATCH 59/75] Backfill in the background if we're doing it "just because" (#15710) Fix https://github.com/matrix-org/synapse/issues/15702 --- changelog.d/15710.feature | 1 + synapse/handlers/federation.py | 18 ++++++++++++++---- 2 files changed, 15 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15710.feature diff --git a/changelog.d/15710.feature b/changelog.d/15710.feature new file mode 100644 index 000000000000..fe77a2fef6e2 --- /dev/null +++ b/changelog.d/15710.feature @@ -0,0 +1 @@ +Speed up `/messages` by backfilling in the background when there are no backward extremities where we are directly paginating. diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 57d6b70cff48..b7b5e2102036 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -320,14 +320,21 @@ async def _maybe_backfill_inner( str(len(sorted_backfill_points)), ) - # If we have no backfill points lower than the `current_depth` then - # either we can a) bail or b) still attempt to backfill. We opt to try - # backfilling anyway just in case we do get relevant events. 
+        # If we have no backfill points lower than the `current_depth` then either we
+        # can a) bail or b) still attempt to backfill. We opt to try backfilling anyway
+        # just in case we do get relevant events. This is good for eventual consistency's
+        # sake but we don't need to block the client for something that is just as
+        # likely not to return anything relevant so we backfill in the background. The
+        # only way this could return something relevant is if we discover a new branch
+        # of history that extends all the way back to where we are currently paginating
+        # and it's within the 100 events that are returned from `/backfill`.
         if not sorted_backfill_points and current_depth != MAX_DEPTH:
             logger.debug(
                 "_maybe_backfill_inner: all backfill points are *after* current depth. Trying again with later backfill points."
             )
-            return await self._maybe_backfill_inner(
+            run_as_background_process(
+                "_maybe_backfill_inner_anyway_with_max_depth",
+                self._maybe_backfill_inner,
                 room_id=room_id,
                 # We use `MAX_DEPTH` so that we find all backfill points next
                 # time (all events are below the `MAX_DEPTH`)
@@ -338,6 +345,9 @@ async def _maybe_backfill_inner(
                 # overall otherwise the smaller one will throw off the results.
                 processing_start_time=None,
             )
+            # We return `False` because we're backfilling in the background and there
+            # are no new events immediately for the caller to know about yet.
+            return False
 
             # Even after recursing with `MAX_DEPTH`, we didn't find any
             # backward extremities to backfill from.

From 4f2bd6be695c83007ebd6f817b74c5a97cf01e4a Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 12 Jun 2023 09:17:04 +0100
Subject: [PATCH 60/75] Bump types-pyopenssl from 23.1.0.2 to 23.2.0.0 (#15766)

Bumps [types-pyopenssl](https://github.com/python/typeshed) from 23.1.0.2 to 23.2.0.0.
- [Commits](https://github.com/python/typeshed/commits)

---
updated-dependencies:
- dependency-name: types-pyopenssl
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 poetry.lock | 166 +++-------------------------------------------------
 1 file changed, 7 insertions(+), 159 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index 1f5cb3a3a85e..228fccac9cc7 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,10 +1,9 @@
-# This file is automatically @generated by Poetry and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
 
 [[package]]
 name = "alabaster"
 version = "0.7.13"
 description = "A configurable sidebar-enabled Sphinx theme"
-category = "dev"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -16,7 +15,6 @@ files = [
 name = "astroid"
 version = "2.15.0"
 description = "An abstract syntax tree for Python with inference support."
-category = "dev"
 optional = false
 python-versions = ">=3.7.2"
 files = [
@@ -36,7 +34,6 @@ wrapt = [
 name = "attrs"
 version = "22.2.0"
 description = "Classes Without Boilerplate"
-category = "main"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -55,7 +52,6 @@ tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy
 name = "authlib"
 version = "1.2.0"
 description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
-category = "main" optional = true python-versions = "*" files = [ @@ -70,7 +66,6 @@ cryptography = ">=3.2" name = "automat" version = "22.10.0" description = "Self-service finite-state machines for the programmer on the go." -category = "main" optional = false python-versions = "*" files = [ @@ -89,7 +84,6 @@ visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] name = "babel" version = "2.12.1" description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -104,7 +98,6 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} name = "bcrypt" version = "4.0.1" description = "Modern password hashing for your software and your servers" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -139,7 +132,6 @@ typecheck = ["mypy"] name = "beautifulsoup4" version = "4.12.0" description = "Screen-scraping library" -category = "dev" optional = false python-versions = ">=3.6.0" files = [ @@ -158,7 +150,6 @@ lxml = ["lxml"] name = "black" version = "23.3.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -209,7 +200,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bleach" version = "6.0.0" description = "An easy safelist-based HTML-sanitizing tool." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -228,7 +218,6 @@ css = ["tinycss2 (>=1.1.0,<1.2)"] name = "canonicaljson" version = "2.0.0" description = "Canonical JSON" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -240,7 +229,6 @@ files = [ name = "certifi" version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -252,7 +240,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = "*" files = [ @@ -329,7 +316,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -414,7 +400,6 @@ files = [ name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -430,7 +415,6 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} name = "click-default-group" version = "1.2.2" description = "Extends click.Group to invoke a command without explicit subcommand name" -category = "dev" optional = false python-versions = "*" files = [ @@ -444,7 +428,6 @@ click = "*" name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -456,7 +439,6 @@ files = [ name = "commonmark" version = "0.9.1" description = "Python parser for the CommonMark Markdown spec" -category = "dev" optional = false python-versions = "*" files = [ @@ -471,7 +453,6 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] name = "constantly" version = "15.1.0" description = "Symbolic constants in Python" -category = "main" optional = false python-versions = "*" files = [ @@ -483,7 +464,6 @@ files = [ name = "cryptography" version = "40.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -525,7 +505,6 @@ tox = ["tox"] name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" -category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -537,7 +516,6 @@ files = [ name = "deprecated" version = "1.2.13" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -555,7 +533,6 @@ dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version name = "docutils" version = "0.19" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -567,7 +544,6 @@ files = [ name = "elementpath" version = "4.1.0" description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and lxml" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -582,7 +558,6 @@ dev = ["Sphinx", "coverage", "flake8", "lxml", "lxml-stubs", "memory-profiler", name = "furo" version = "2023.5.20" description = "A clean customisable Sphinx documentation theme." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -600,7 +575,6 @@ sphinx-basic-ng = "*" name = "gitdb" version = "4.0.10" description = "Git Object Database" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -615,7 +589,6 @@ smmap = ">=3.0.1,<6" name = "gitpython" version = "3.1.31" description = "GitPython is a Python library used to interact with Git repositories" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -631,7 +604,6 @@ typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\"" name = "hiredis" version = "2.2.3" description = "Python wrapper for hiredis" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -730,7 +702,6 @@ files = [ name = "hyperlink" version = "21.0.0" description = "A featureful, immutable, and correct URL for Python." 
-category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -745,7 +716,6 @@ idna = ">=2.5" name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -757,7 +727,6 @@ files = [ name = "ijson" version = "3.2.0.post0" description = "Iterative JSON parser with standard Python iterator interfaces" -category = "main" optional = false python-versions = "*" files = [ @@ -845,7 +814,6 @@ files = [ name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -857,7 +825,6 @@ files = [ name = "immutabledict" version = "2.2.4" description = "Immutable wrapper around dictionaries (a fork of frozendict)" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -869,7 +836,6 @@ files = [ name = "importlib-metadata" version = "6.6.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -890,7 +856,6 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "importlib-resources" version = "5.12.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -909,7 +874,6 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "incremental" version = "22.10.0" description = "\"A small library that versions your Python projects.\"" -category = "main" optional = false python-versions = "*" files = [ @@ -925,7 +889,6 @@ scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] name = "isort" version = "5.11.5" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -943,7 +906,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "jaeger-client" version = "4.8.0" description = "Jaeger Python OpenTracing Tracer implementation" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -963,7 +925,6 @@ tests = ["codecov", "coverage", "flake8", "flake8-quotes", "flake8-typing-import name = "jaraco-classes" version = "3.2.3" description = "Utility functions for Python class constructs" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -982,7 +943,6 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "jeepney" version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -998,7 +958,6 @@ trio = ["async_generator", "trio"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1016,7 +975,6 @@ i18n = ["Babel (>=2.7)"] name = "jsonschema" version = "4.17.3" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1040,7 +998,6 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "keyring" version = "23.13.1" description = "Store and access your passwords safely." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1065,7 +1022,6 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1111,7 +1067,6 @@ files = [ name = "ldap3" version = "2.9.1" description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library" -category = "main" optional = true python-versions = "*" files = [ @@ -1126,7 +1081,6 @@ pyasn1 = ">=0.4.6" name = "lxml" version = "4.9.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ @@ -1219,7 +1173,6 @@ source = ["Cython (>=0.29.7)"] name = "lxml-stubs" version = "0.4.0" description = "Type annotations for the lxml package" -category = "dev" optional = false python-versions = "*" files = [ @@ -1234,7 +1187,6 @@ test = ["coverage[toml] (==5.2)", "pytest (>=6.0.0)", "pytest-mypy-plugins (==1. name = "markdown-it-py" version = "2.2.0" description = "Python port of markdown-it. Markdown parsing, done right!" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1260,7 +1212,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1320,7 +1271,6 @@ files = [ name = "matrix-common" version = "1.3.0" description = "Common utilities for Synapse, Sydent and Sygnal" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1340,7 +1290,6 @@ test = ["aiounittest", "tox", "twisted"] name = "matrix-synapse-ldap3" version = "0.2.2" description = "An LDAP3 auth provider for Synapse" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1360,7 +1309,6 @@ dev = ["black (==22.3.0)", "flake8 (==4.0.1)", "isort (==5.9.3)", "ldaptor", "ma name = "mdit-py-plugins" version = "0.3.5" description = "Collection of plugins for markdown-it-py" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1380,7 +1328,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1392,7 +1339,6 @@ files = [ name = "more-itertools" version = "9.1.0" description = "More routines for operating on iterables, beyond itertools" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1404,7 +1350,6 @@ files = [ name = "msgpack" version = "1.0.5" description = "MessagePack serializer" -category = "main" optional = false python-versions = "*" files = [ @@ -1477,7 +1422,6 @@ files = [ name = "mypy" version = "1.0.1" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1525,7 +1469,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1537,7 +1480,6 @@ files = [ name = "mypy-zope" version = "0.9.1" description = "Plugin for mypy to support zope interfaces" -category = "dev" optional = false python-versions = "*" files = [ @@ -1557,7 +1499,6 @@ test = ["lxml", "pytest (>=4.6)", "pytest-cov"] name = "myst-parser" version = "1.0.0" description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1584,7 +1525,6 @@ testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4, name = "netaddr" version = "0.8.0" description = "A network address manipulation library for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1596,7 +1536,6 @@ files = [ name = "opentracing" version = "2.4.0" description = "OpenTracing API for Python. See documentation at http://opentracing.io" -category = "main" optional = true python-versions = "*" files = [ @@ -1610,7 +1549,6 @@ tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pyte name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1622,7 +1560,6 @@ files = [ name = "parameterized" version = "0.9.0" description = "Parameterized testing with any Python test framework" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1637,7 +1574,6 @@ dev = ["jinja2"] name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1649,7 +1585,6 @@ files = [ name = "phonenumbers" version = "8.13.11" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." -category = "main" optional = false python-versions = "*" files = [ @@ -1661,7 +1596,6 @@ files = [ name = "pillow" version = "9.4.0" description = "Python Imaging Library (Fork)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1752,7 +1686,6 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "pkginfo" version = "1.9.6" description = "Query metadata from sdists / bdists / installed packages." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1767,7 +1700,6 @@ testing = ["pytest", "pytest-cov"] name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1779,7 +1711,6 @@ files = [ name = "platformdirs" version = "3.1.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1798,7 +1729,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytes name = "prometheus-client" version = "0.17.0" description = "Python client for the Prometheus monitoring system." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1813,7 +1743,6 @@ twisted = ["twisted"] name = "psycopg2" version = "2.9.6" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -1836,7 +1765,6 @@ files = [ name = "psycopg2cffi" version = "2.9.0" description = ".. image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=master" -category = "main" optional = true python-versions = "*" files = [ @@ -1851,7 +1779,6 @@ six = "*" name = "psycopg2cffi-compat" version = "1.1" description = "A Simple library to enable psycopg2 compatability" -category = "main" optional = true python-versions = "*" files = [ @@ -1865,7 +1792,6 @@ psycopg2 = "*" name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1877,7 +1803,6 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1892,7 +1817,6 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1904,7 +1828,6 @@ files = [ name = "pydantic" version = "1.10.8" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1957,7 +1880,6 @@ email = ["email-validator (>=1.0.3)"] name = "pygithub" version = "1.58.2" description = "Use the full Github API v3" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1975,7 +1897,6 @@ requests = ">=2.14.0" name = "pygments" version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1990,7 +1911,6 @@ plugins = ["importlib-metadata"] name = "pyicu" version = "2.11" description = "Python extension wrapping the ICU C++ API" -category = "main" optional = true python-versions = "*" files = [ @@ -2001,7 +1921,6 @@ files = [ name = "pyjwt" version = "2.6.0" description = "JSON Web Token implementation in Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2022,7 +1941,6 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pymacaroons" version = "0.13.0" description = "Macaroon library for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -2038,7 +1956,6 @@ six = ">=1.8.0" name = "pympler" version = "1.0.1" description = "A development tool to measure, monitor and analyze the memory behavior of Python objects." 
-category = "main" optional = true python-versions = ">=3.6" files = [ @@ -2050,7 +1967,6 @@ files = [ name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2077,7 +1993,6 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pyopenssl" version = "23.1.1" description = "Python wrapper module around the OpenSSL library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2096,7 +2011,6 @@ test = ["flaky", "pretend", "pytest (>=3.0.1)"] name = "pyrsistent" version = "0.19.3" description = "Persistent/Functional/Immutable data structures" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2133,7 +2047,6 @@ files = [ name = "pysaml2" version = "7.3.1" description = "Python implementation of SAML Version 2 Standard" -category = "main" optional = true python-versions = ">=3.6.2,<4.0.0" files = [ @@ -2159,7 +2072,6 @@ s2repoze = ["paste", "repoze.who", "zope.interface"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2174,7 +2086,6 @@ six = ">=1.5" name = "pytz" version = "2022.7.1" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -2186,7 +2097,6 @@ files = [ name = "pywin32-ctypes" version = "0.2.0" description = "" -category = "dev" optional = false python-versions = "*" files = [ @@ -2198,7 +2108,6 @@ files = [ name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2248,7 +2157,6 @@ files = [ name = "readme-renderer" version = "37.3" description = "readme_renderer is a library for rendering \"readme\" descriptions for Warehouse" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2268,7 +2176,6 @@ md = ["cmarkgfm (>=0.8.0)"] name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2290,7 +2197,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-toolbelt" version = "0.10.1" description = "A utility belt for advanced users of python-requests" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2305,7 +2211,6 @@ requests = ">=2.0.1,<3.0.0" name = "rfc3986" version = "2.0.0" description = "Validating URI References per RFC 3986" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2320,7 +2225,6 @@ idna2008 = ["idna"] name = "rich" version = "13.3.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -2340,7 +2244,6 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "ruff" version = "0.0.265" description = "An extremely fast Python linter, written in Rust." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2367,7 +2270,6 @@ files = [ name = "secretstorage" version = "3.3.3" description = "Python bindings to FreeDesktop.org Secret Service API" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2383,7 +2285,6 @@ jeepney = ">=0.6" name = "semantic-version" version = "2.10.0" description = "A library implementing the 'SemVer' scheme." -category = "main" optional = false python-versions = ">=2.7" files = [ @@ -2399,7 +2300,6 @@ doc = ["Sphinx", "sphinx-rtd-theme"] name = "sentry-sdk" version = "1.25.0" description = "Python client for Sentry (https://sentry.io)" -category = "main" optional = true python-versions = "*" files = [ @@ -2442,7 +2342,6 @@ tornado = ["tornado (>=5)"] name = "service-identity" version = "21.1.0" description = "Service identity verification for pyOpenSSL & cryptography." -category = "main" optional = false python-versions = "*" files = [ @@ -2467,7 +2366,6 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] name = "setuptools" version = "67.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2484,7 +2382,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "setuptools-rust" version = "1.6.0" description = "Setuptools Rust extension plugin" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2501,7 +2398,6 @@ typing-extensions = ">=3.7.4.3" name = "signedjson" version = "1.1.4" description = "Sign JSON with Ed25519 signatures" -category = "main" optional = false python-versions = "*" files = [ @@ -2523,7 +2419,6 @@ dev = ["typing-extensions (>=3.5)"] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -2535,7 +2430,6 @@ files = [ name = "smmap" version = "5.0.0" description = "A pure Python implementation of a sliding window memory map manager" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2547,7 +2441,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" optional = false python-versions = "*" files = [ @@ -2559,7 +2452,6 @@ files = [ name = "sortedcontainers" version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -category = "main" optional = false python-versions = "*" files = [ @@ -2571,7 +2463,6 @@ files = [ name = "soupsieve" version = "2.4" description = "A modern CSS selector implementation for Beautiful Soup." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2583,7 +2474,6 @@ files = [ name = "sphinx" version = "6.2.1" description = "Python documentation generator" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2619,7 +2509,6 @@ test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] name = "sphinx-autodoc2" version = "0.4.2" description = "Analyse a python project and create documentation for it." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2642,7 +2531,6 @@ testing = ["pytest", "pytest-cov", "pytest-regressions", "sphinx (>=4.0.0)"] name = "sphinx-basic-ng" version = "1.0.0b1" description = "A modern skeleton for Sphinx themes." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2660,7 +2548,6 @@ docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-ta name = "sphinxcontrib-applehelp" version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2676,7 +2563,6 @@ test = ["pytest"] name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2692,7 +2578,6 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2708,7 +2593,6 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2723,7 +2607,6 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2739,7 +2622,6 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2755,7 +2637,6 @@ test = ["pytest"] name = "systemd-python" version = "235" description = "Python interface for libsystemd" -category = "main" optional = true python-versions = "*" files = [ @@ -2766,7 +2647,6 @@ files = [ name = "threadloop" version = "1.0.2" description = "Tornado IOLoop Backed Concurrent Futures" -category = "main" optional = true python-versions = "*" files = [ @@ -2781,7 +2661,6 @@ tornado = "*" name = "thrift" version = "0.16.0" description = "Python bindings for the Apache Thrift RPC system" -category = "main" optional = true python-versions = "*" files = [ @@ -2800,7 +2679,6 @@ twisted = ["twisted"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2812,7 +2690,6 @@ files = [ name = "tornado" version = "6.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -category = "main" optional = true python-versions = ">= 3.7" files = [ @@ -2833,7 +2710,6 @@ files = [ name = "towncrier" version = "22.12.0" description = "Building newsfiles for your project." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2856,7 +2732,6 @@ dev = ["furo", "packaging", "sphinx (>=5)", "twisted"] name = "treq" version = "22.2.0" description = "High-level Twisted HTTP Client API" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2879,7 +2754,6 @@ docs = ["sphinx (>=1.4.8)"] name = "twine" version = "4.0.2" description = "Collection of utilities for publishing packages on PyPI" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2902,7 +2776,6 @@ urllib3 = ">=1.26.0" name = "twisted" version = "22.10.0" description = "An asynchronous networking framework written in Python" -category = "main" optional = false python-versions = ">=3.7.1" files = [ @@ -2944,7 +2817,6 @@ windows-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0. name = "twisted-iocpsupport" version = "1.0.2" description = "An extension for use in the twisted I/O Completion Ports reactor." -category = "main" optional = false python-versions = "*" files = [ @@ -2966,7 +2838,6 @@ files = [ name = "txredisapi" version = "1.4.9" description = "non-blocking redis client for python" -category = "main" optional = true python-versions = "*" files = [ @@ -2982,7 +2853,6 @@ twisted = "*" name = "typed-ast" version = "1.5.4" description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3016,7 +2886,6 @@ files = [ name = "types-bleach" version = "6.0.0.3" description = "Typing stubs for bleach" -category = "dev" optional = false python-versions = "*" files = [ @@ -3028,7 +2897,6 @@ files = [ name = "types-commonmark" version = "0.9.2.3" description = "Typing stubs for commonmark" -category = "dev" optional = false python-versions = "*" files = [ @@ -3040,7 +2908,6 @@ files = [ name = "types-jsonschema" version = "4.17.0.8" description = "Typing stubs for jsonschema" -category = "dev" optional = false python-versions = "*" files = [ @@ -3052,7 +2919,6 @@ files = [ name = "types-netaddr" version = "0.8.0.8" description = "Typing stubs for netaddr" -category = "dev" optional = false python-versions = "*" files = [ @@ -3064,7 +2930,6 @@ files = [ name = "types-opentracing" version = "2.4.10.4" description = "Typing stubs for opentracing" -category = "dev" optional = false python-versions = "*" files = [ @@ -3076,7 +2941,6 @@ files = [ name = "types-pillow" version = "9.5.0.4" description = "Typing stubs for Pillow" -category = "dev" optional = false python-versions = "*" files = [ @@ -3088,7 +2952,6 @@ files = [ name = "types-psycopg2" version = "2.9.21.10" description = "Typing stubs for psycopg2" -category = "dev" optional = false python-versions = "*" files = [ @@ -3098,14 +2961,13 @@ files = [ [[package]] name = "types-pyopenssl" -version = "23.1.0.2" +version = "23.2.0.0" description = "Typing stubs for pyOpenSSL" -category = "dev" optional = false python-versions = "*" files = [ - {file = "types-pyOpenSSL-23.1.0.2.tar.gz", hash = "sha256:20b80971b86240e8432a1832bd8124cea49c3088c7bfc77dfd23be27ffe4a517"}, - {file = "types_pyOpenSSL-23.1.0.2-py3-none-any.whl", hash = "sha256:b050641aeff6dfebf231ad719bdac12d53b8ee818d4afb67b886333484629957"}, + {file = "types-pyOpenSSL-23.2.0.0.tar.gz", hash = "sha256:43e307e8dfb3a7a8208a19874ca060305f460c529d4eaca8a2669ea89499f244"}, + {file = "types_pyOpenSSL-23.2.0.0-py3-none-any.whl", hash = "sha256:ba803a99440b0c2e9ab4e197084aeefc55bdfe8a580d367b2aa4210810a21240"}, ] [package.dependencies] 
@@ -3115,7 +2977,6 @@ cryptography = ">=35.0.0" name = "types-pyyaml" version = "6.0.12.10" description = "Typing stubs for PyYAML" -category = "dev" optional = false python-versions = "*" files = [ @@ -3127,7 +2988,6 @@ files = [ name = "types-requests" version = "2.31.0.1" description = "Typing stubs for requests" -category = "dev" optional = false python-versions = "*" files = [ @@ -3142,7 +3002,6 @@ types-urllib3 = "*" name = "types-setuptools" version = "67.8.0.0" description = "Typing stubs for setuptools" -category = "dev" optional = false python-versions = "*" files = [ @@ -3154,7 +3013,6 @@ files = [ name = "types-urllib3" version = "1.26.25.8" description = "Typing stubs for urllib3" -category = "dev" optional = false python-versions = "*" files = [ @@ -3166,7 +3024,6 @@ files = [ name = "typing-extensions" version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3178,7 +3035,6 @@ files = [ name = "unpaddedbase64" version = "2.1.0" description = "Encode and decode Base64 without \"=\" padding" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -3190,7 +3046,6 @@ files = [ name = "urllib3" version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -3207,7 +3062,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" -category = "main" optional = false python-versions = "*" files = [ @@ -3219,7 +3073,6 @@ files = [ name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
-category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -3304,7 +3157,6 @@ files = [ name = "xmlschema" version = "2.2.2" description = "An XML Schema validator and decoder" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -3324,7 +3176,6 @@ docs = ["Sphinx", "elementpath (>=4.0.0,<5.0.0)", "jinja2", "sphinx-rtd-theme"] name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3340,7 +3191,6 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more name = "zope-event" version = "4.6" description = "Very basic event publishing system" -category = "dev" optional = false python-versions = "*" files = [ @@ -3359,7 +3209,6 @@ test = ["zope.testrunner"] name = "zope-interface" version = "6.0" description = "Interfaces for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3407,7 +3256,6 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] name = "zope-schema" version = "7.0.1" description = "zope.interface extension for defining data schemas" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3425,18 +3273,18 @@ docs = ["Sphinx", "repoze.sphinx.autointerface"] test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"] [extras] -all = ["matrix-synapse-ldap3", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pysaml2", "authlib", "lxml", "sentry-sdk", "jaeger-client", "opentracing", "txredisapi", "hiredis", "Pympler", "pyicu"] +all = ["Pympler", "authlib", "hiredis", "jaeger-client", "lxml", "matrix-synapse-ldap3", "opentracing", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pyicu", "pysaml2", "sentry-sdk", "txredisapi"] cache-memory = ["Pympler"] jwt = ["authlib"] matrix-synapse-ldap3 = ["matrix-synapse-ldap3"] oidc = ["authlib"] opentracing = ["jaeger-client", "opentracing"] postgres = ["psycopg2", "psycopg2cffi", "psycopg2cffi-compat"] -redis = ["txredisapi", "hiredis"] +redis = ["hiredis", "txredisapi"] saml2 = ["pysaml2"] sentry = ["sentry-sdk"] systemd = ["systemd-python"] -test = ["parameterized", "idna"] +test = ["idna", "parameterized"] url-preview = ["lxml"] user-search = ["pyicu"] From 046e7e494a11f9a23c0dcd2defae595b35a37579 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 09:17:40 +0100 Subject: [PATCH 61/75] Bump phonenumbers from 8.13.11 to 8.13.13 (#15763) Bumps [phonenumbers](https://github.com/daviddrysdale/python-phonenumbers) from 8.13.11 to 8.13.13. - [Commits](https://github.com/daviddrysdale/python-phonenumbers/compare/v8.13.11...v8.13.13) --- updated-dependencies: - dependency-name: phonenumbers dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 228fccac9cc7..d726407c58ff 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1583,13 +1583,13 @@ files = [ [[package]] name = "phonenumbers" -version = "8.13.11" +version = "8.13.13" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." 
optional = false python-versions = "*" files = [ - {file = "phonenumbers-8.13.11-py2.py3-none-any.whl", hash = "sha256:107469114fd297258a485bdf8238d0522cb392db1257faf2bf23384ecbdb0e8a"}, - {file = "phonenumbers-8.13.11.tar.gz", hash = "sha256:3e3274d88cab3609b55ff5b93417075dbca2d13064f103fbf562e0ea1dda0f9a"}, + {file = "phonenumbers-8.13.13-py2.py3-none-any.whl", hash = "sha256:55657adb607484aba6d56270b8a1f9b302f35496076e6c02051d06ed366374d9"}, + {file = "phonenumbers-8.13.13.tar.gz", hash = "sha256:4bdf8c989aff0cdb105aef170ad2c21f14b4537bcb32cf349f1f710df992a40a"}, ] [[package]] From aad7e2d0c18ee7ba87bef3750da3c962acc2fd95 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 09:19:01 +0100 Subject: [PATCH 62/75] Bump sentry-sdk from 1.25.0 to 1.25.1 (#15764) Bumps [sentry-sdk](https://github.com/getsentry/sentry-python) from 1.25.0 to 1.25.1. - [Release notes](https://github.com/getsentry/sentry-python/releases) - [Changelog](https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md) - [Commits](https://github.com/getsentry/sentry-python/compare/1.25.0...1.25.1) --- updated-dependencies: - dependency-name: sentry-sdk dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index d726407c58ff..f2221680a8f5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2298,13 +2298,13 @@ doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "sentry-sdk" -version = "1.25.0" +version = "1.25.1" description = "Python client for Sentry (https://sentry.io)" optional = true python-versions = "*" files = [ - {file = "sentry-sdk-1.25.0.tar.gz", hash = "sha256:5be3296fc574fa8a4d9b213b4dcf8c8d0246c08f8bd78315c6286f386c37555a"}, - {file = "sentry_sdk-1.25.0-py2.py3-none-any.whl", hash = "sha256:fe85cf5d0b3d0aa3480df689f9f6dc487de783defb0a95043368375dc893645e"}, + {file = "sentry-sdk-1.25.1.tar.gz", hash = "sha256:aa796423eb6a2f4a8cd7a5b02ba6558cb10aab4ccdc0537f63a47b038c520c38"}, + {file = "sentry_sdk-1.25.1-py2.py3-none-any.whl", hash = "sha256:79afb7c896014038e358401ad1d36889f97a129dfa8031c49b3f238cd1aa3935"}, ] [package.dependencies] From 0aa731cb6f1e145cf399a948e14f77d4e3720190 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 09:19:43 +0100 Subject: [PATCH 63/75] Bump pydantic from 1.10.8 to 1.10.9 (#15762) Bumps [pydantic](https://github.com/pydantic/pydantic) from 1.10.8 to 1.10.9. - [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v1.10.8...v1.10.9) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 74 ++++++++++++++++++++++++++--------------------------- 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/poetry.lock b/poetry.lock index f2221680a8f5..5e4168289433 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1826,47 +1826,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.8" +version = "1.10.9" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"}, - {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"}, - {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"}, - {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"}, - {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"}, - {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"}, - {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"}, - {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"}, - {file = "pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"}, - {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"}, + {file = "pydantic-1.10.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e692dec4a40bfb40ca530e07805b1208c1de071a18d26af4a2a0d79015b352ca"}, + {file = "pydantic-1.10.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c52eb595db83e189419bf337b59154bdcca642ee4b2a09e5d7797e41ace783f"}, + {file = "pydantic-1.10.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939328fd539b8d0edf244327398a667b6b140afd3bf7e347cf9813c736211896"}, + {file = "pydantic-1.10.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b48d3d634bca23b172f47f2335c617d3fcb4b3ba18481c96b7943a4c634f5c8d"}, + {file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f0b7628fb8efe60fe66fd4adadd7ad2304014770cdc1f4934db41fe46cc8825f"}, + {file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:e1aa5c2410769ca28aa9a7841b80d9d9a1c5f223928ca8bec7e7c9a34d26b1d4"}, + {file = "pydantic-1.10.9-cp310-cp310-win_amd64.whl", hash = "sha256:eec39224b2b2e861259d6f3c8b6290d4e0fbdce147adb797484a42278a1a486f"}, + {file = "pydantic-1.10.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d111a21bbbfd85c17248130deac02bbd9b5e20b303338e0dbe0faa78330e37e0"}, + {file = "pydantic-1.10.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e9aec8627a1a6823fc62fb96480abe3eb10168fd0d859ee3d3b395105ae19a7"}, + {file = "pydantic-1.10.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07293ab08e7b4d3c9d7de4949a0ea571f11e4557d19ea24dd3ae0c524c0c334d"}, + {file = "pydantic-1.10.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee829b86ce984261d99ff2fd6e88f2230068d96c2a582f29583ed602ef3fc2c"}, + {file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b466a23009ff5cdd7076eb56aca537c745ca491293cc38e72bf1e0e00de5b91"}, + {file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7847ca62e581e6088d9000f3c497267868ca2fa89432714e21a4fb33a04d52e8"}, + {file = "pydantic-1.10.9-cp311-cp311-win_amd64.whl", hash = "sha256:7845b31959468bc5b78d7b95ec52fe5be32b55d0d09983a877cca6aedc51068f"}, + {file = "pydantic-1.10.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:517a681919bf880ce1dac7e5bc0c3af1e58ba118fd774da2ffcd93c5f96eaece"}, + {file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67195274fd27780f15c4c372f4ba9a5c02dad6d50647b917b6a92bf00b3d301a"}, + {file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2196c06484da2b3fded1ab6dbe182bdabeb09f6318b7fdc412609ee2b564c49a"}, + {file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6257bb45ad78abacda13f15bde5886efd6bf549dd71085e64b8dcf9919c38b60"}, + {file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3283b574b01e8dbc982080d8287c968489d25329a463b29a90d4157de4f2baaf"}, + {file = "pydantic-1.10.9-cp37-cp37m-win_amd64.whl", hash = "sha256:5f8bbaf4013b9a50e8100333cc4e3fa2f81214033e05ac5aa44fa24a98670a29"}, + {file = "pydantic-1.10.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9cd67fb763248cbe38f0593cd8611bfe4b8ad82acb3bdf2b0898c23415a1f82"}, + {file = "pydantic-1.10.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f50e1764ce9353be67267e7fd0da08349397c7db17a562ad036aa7c8f4adfdb6"}, + {file = "pydantic-1.10.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73ef93e5e1d3c8e83f1ff2e7fdd026d9e063c7e089394869a6e2985696693766"}, + {file = "pydantic-1.10.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:128d9453d92e6e81e881dd7e2484e08d8b164da5507f62d06ceecf84bf2e21d3"}, + {file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ad428e92ab68798d9326bb3e5515bc927444a3d71a93b4a2ca02a8a5d795c572"}, + {file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fab81a92f42d6d525dd47ced310b0c3e10c416bbfae5d59523e63ea22f82b31e"}, + {file = "pydantic-1.10.9-cp38-cp38-win_amd64.whl", hash = "sha256:963671eda0b6ba6926d8fc759e3e10335e1dc1b71ff2a43ed2efd6996634dafb"}, + {file = "pydantic-1.10.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:970b1bdc6243ef663ba5c7e36ac9ab1f2bfecb8ad297c9824b542d41a750b298"}, + {file = "pydantic-1.10.9-cp39-cp39-macosx_11_0_arm64.whl", 
hash = "sha256:7e1d5290044f620f80cf1c969c542a5468f3656de47b41aa78100c5baa2b8276"}, + {file = "pydantic-1.10.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83fcff3c7df7adff880622a98022626f4f6dbce6639a88a15a3ce0f96466cb60"}, + {file = "pydantic-1.10.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0da48717dc9495d3a8f215e0d012599db6b8092db02acac5e0d58a65248ec5bc"}, + {file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0a2aabdc73c2a5960e87c3ffebca6ccde88665616d1fd6d3db3178ef427b267a"}, + {file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9863b9420d99dfa9c064042304868e8ba08e89081428a1c471858aa2af6f57c4"}, + {file = "pydantic-1.10.9-cp39-cp39-win_amd64.whl", hash = "sha256:e7c9900b43ac14110efa977be3da28931ffc74c27e96ee89fbcaaf0b0fe338e1"}, + {file = "pydantic-1.10.9-py3-none-any.whl", hash = "sha256:6cafde02f6699ce4ff643417d1a9223716ec25e228ddc3b436fe7e2d25a1f305"}, + {file = "pydantic-1.10.9.tar.gz", hash = "sha256:95c70da2cd3b6ddf3b9645ecaa8d98f3d80c606624b6d245558d202cd23ea3be"}, ] [package.dependencies] From 9e321e0098d069711674371c8c3a3cdc80df0c16 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 09:20:55 +0100 Subject: [PATCH 64/75] Bump pyopenssl from 23.1.1 to 23.2.0 (#15765) Bumps [pyopenssl](https://github.com/pyca/pyopenssl) from 23.1.1 to 23.2.0. - [Changelog](https://github.com/pyca/pyopenssl/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/pyopenssl/compare/23.1.1...23.2.0) --- updated-dependencies: - dependency-name: pyopenssl dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5e4168289433..cf4a89c85ad1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1991,17 +1991,17 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyopenssl" -version = "23.1.1" +version = "23.2.0" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.6" files = [ - {file = "pyOpenSSL-23.1.1-py3-none-any.whl", hash = "sha256:9e0c526404a210df9d2b18cd33364beadb0dc858a739b885677bc65e105d4a4c"}, - {file = "pyOpenSSL-23.1.1.tar.gz", hash = "sha256:841498b9bec61623b1b6c47ebbc02367c07d60e0e195f19790817f10cc8db0b7"}, + {file = "pyOpenSSL-23.2.0-py3-none-any.whl", hash = "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2"}, + {file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"}, ] [package.dependencies] -cryptography = ">=38.0.0,<41" +cryptography = ">=38.0.0,<40.0.0 || >40.0.0,<40.0.1 || >40.0.1,<42" [package.extras] docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] From 42eb4fea1c671bd7a3eacf329c9afc6644081e4f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 09:21:20 +0100 Subject: [PATCH 65/75] Bump serde from 1.0.163 to 1.0.164 (#15760) Bumps [serde](https://github.com/serde-rs/serde) from 1.0.163 to 1.0.164. 
- [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.163...v1.0.164) --- updated-dependencies: - dependency-name: serde dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 08331385c07b..f34a72c26917 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -320,18 +320,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "serde" -version = "1.0.163" +version = "1.0.164" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2" +checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.163" +version = "1.0.164" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e" +checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68" dependencies = [ "proc-macro2", "quote", From 0b104364f9f118be0ec722894650fad9583bf59c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 09:22:21 +0100 Subject: [PATCH 66/75] Bump pyo3-log from 0.8.1 to 0.8.2 (#15759) Bumps [pyo3-log](https://github.com/vorner/pyo3-log) from 0.8.1 to 0.8.2. - [Changelog](https://github.com/vorner/pyo3-log/blob/main/CHANGELOG.md) - [Commits](https://github.com/vorner/pyo3-log/compare/v0.8.1...v0.8.2) --- updated-dependencies: - dependency-name: pyo3-log dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f34a72c26917..4f75452b3e34 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -229,9 +229,9 @@ dependencies = [ [[package]] name = "pyo3-log" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9c8b57fe71fb5dcf38970ebedc2b1531cf1c14b1b9b4c560a182a57e115575c" +checksum = "c94ff6535a6bae58d7d0b85e60d4c53f7f84d0d0aa35d6a28c3f3e70bfe51444" dependencies = [ "arc-swap", "log", From ba97b39881e296f4775b8f6dd18edb98a3dc733f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 12 Jun 2023 14:27:11 +0100 Subject: [PATCH 67/75] Bump minimum supported Rust version (#15768) Important crates such as `log` and `regex` have bumped theirs to 1.60.0 as well. 
--- .github/workflows/tests.yml | 18 +++++++++--------- changelog.d/15768.misc | 1 + docs/upgrade.md | 8 ++++++++ rust/Cargo.toml | 2 +- 4 files changed, 19 insertions(+), 10 deletions(-) create mode 100644 changelog.d/15768.misc diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 02a4be3a24a1..a0d1c24e9075 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -35,7 +35,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 - uses: matrix-org/setup-python-poetry@v1 with: @@ -93,7 +93,7 @@ jobs: uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 - name: Setup Poetry @@ -150,7 +150,7 @@ jobs: with: ref: ${{ github.event.pull_request.head.sha }} - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 - uses: matrix-org/setup-python-poetry@v1 with: @@ -167,7 +167,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 with: components: clippy - uses: Swatinem/rust-cache@v2 @@ -268,7 +268,7 @@ jobs: postgres:${{ matrix.job.postgres-version }} - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 - uses: matrix-org/setup-python-poetry@v1 @@ -308,7 +308,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 # There aren't wheels for some of the older deps, so we need to install @@ -416,7 +416,7 @@ jobs: run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 - name: Run SyTest @@ -556,7 +556,7 @@ jobs: path: synapse - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 - uses: actions/setup-go@v4 @@ -584,7 +584,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@1.58.1 + uses: dtolnay/rust-toolchain@1.60.0 - uses: Swatinem/rust-cache@v2 - run: cargo test diff --git a/changelog.d/15768.misc b/changelog.d/15768.misc new file mode 100644 index 000000000000..bc4b86323cd0 --- /dev/null +++ b/changelog.d/15768.misc @@ -0,0 +1 @@ +Bump minimum supported Rust version to 1.60.0. diff --git a/docs/upgrade.md b/docs/upgrade.md index 49ab00c05760..4cd38b13932a 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -87,6 +87,14 @@ process, for example: wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb ``` +# Upgrading to v1.86.0 + +## Minimum supported Rust version + +The minimum supported Rust version has been increased from v1.58.1 to v1.60.0. +Users building from source will need to ensure their `rustc` version is up to +date. 
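+
+For example, if you installed Rust via `rustup` (one common setup; other
+installation methods will differ), you can update and check your compiler
+version like so:
+
+```
+rustup update
+rustc --version   # should report 1.60.0 or newer
+```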
+ # Upgrading to v1.85.0 diff --git a/rust/Cargo.toml b/rust/Cargo.toml index 533a8cc6776f..3ead01c0521e 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -7,7 +7,7 @@ name = "synapse" version = "0.1.0" edition = "2021" -rust-version = "1.58.1" +rust-version = "1.60.0" [lib] name = "synapse" From 8afc9a4cda9b884bde1f6c87f7cb3087d04418a5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Jun 2023 10:05:13 +0100 Subject: [PATCH 68/75] Bump log from 0.4.18 to 0.4.19 (#15761) Bumps [log](https://github.com/rust-lang/log) from 0.4.18 to 0.4.19. - [Release notes](https://github.com/rust-lang/log/releases) - [Changelog](https://github.com/rust-lang/log/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/log/compare/0.4.18...0.4.19) --- updated-dependencies: - dependency-name: log dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4f75452b3e34..9724af5dca3e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -132,9 +132,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.18" +version = "0.4.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "518ef76f2f87365916b142844c16d8fefd85039bc5699050210a7778ee1cd1de" +checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" [[package]] name = "memchr" From 99c850f79821e12ad1895b9505f8612752deea52 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Jun 2023 10:05:29 +0100 Subject: [PATCH 69/75] Bump regex from 1.7.3 to 1.8.4 (#15769) Bumps [regex](https://github.com/rust-lang/regex) from 1.7.3 to 1.8.4. - [Release notes](https://github.com/rust-lang/regex/releases) - [Changelog](https://github.com/rust-lang/regex/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/regex/compare/1.7.3...1.8.4) --- updated-dependencies: - dependency-name: regex dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9724af5dca3e..9bb8225226f7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "aho-corasick" -version = "0.7.19" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e" +checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" dependencies = [ "memchr", ] @@ -291,9 +291,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.7.3" +version = "1.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d" +checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f" dependencies = [ "aho-corasick", "memchr", @@ -302,9 +302,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.29" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" +checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78" [[package]] name = "ryu" From 9966eb10a3671958992b57d723fab27b57b6faff Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Tue, 13 Jun 2023 14:30:51 +0200 Subject: [PATCH 70/75] 1.86.0rc1 --- CHANGES.md | 69 +++++++++++++++++++++++++++++++++++++++ changelog.d/12504.misc | 1 - changelog.d/14213.misc | 1 - changelog.d/15388.feature | 1 - changelog.d/15450.feature | 1 - changelog.d/15520.feature | 1 - changelog.d/15582.feature | 1 - changelog.d/15609.bugfix | 1 - changelog.d/15649.misc | 1 - changelog.d/15650.misc | 1 - changelog.d/15674.feature | 1 - changelog.d/15675.misc | 1 - changelog.d/15689.misc | 1 - changelog.d/15690.misc | 1 - changelog.d/15694.misc | 1 - changelog.d/15695.bugfix | 1 - changelog.d/15697.misc | 1 - changelog.d/15705.feature | 1 - changelog.d/15709.misc | 1 - changelog.d/15710.feature | 1 - changelog.d/15721.misc | 1 - changelog.d/15723.misc | 1 - changelog.d/15725.misc | 1 - changelog.d/15726.misc | 1 - changelog.d/15729.misc | 1 - changelog.d/15731.misc | 1 - changelog.d/15732.doc | 1 - changelog.d/15733.misc | 1 - changelog.d/15740.feature | 1 - changelog.d/15752.misc | 1 - changelog.d/15768.misc | 1 - debian/changelog | 6 ++++ pyproject.toml | 2 +- 33 files changed, 76 insertions(+), 31 deletions(-) delete mode 100644 changelog.d/12504.misc delete mode 100644 changelog.d/14213.misc delete mode 100644 changelog.d/15388.feature delete mode 100644 changelog.d/15450.feature delete mode 100644 changelog.d/15520.feature delete mode 100644 changelog.d/15582.feature delete mode 100644 changelog.d/15609.bugfix delete mode 100644 changelog.d/15649.misc delete mode 100644 changelog.d/15650.misc delete mode 100644 changelog.d/15674.feature delete mode 100644 changelog.d/15675.misc delete mode 100644 changelog.d/15689.misc delete mode 100644 changelog.d/15690.misc delete mode 100644 changelog.d/15694.misc delete mode 100644 changelog.d/15695.bugfix delete mode 100644 changelog.d/15697.misc delete mode 100644 changelog.d/15705.feature delete mode 100644 changelog.d/15709.misc delete mode 100644 changelog.d/15710.feature delete mode 100644 changelog.d/15721.misc delete mode 100644 changelog.d/15723.misc delete mode 100644 changelog.d/15725.misc 
delete mode 100644 changelog.d/15726.misc delete mode 100644 changelog.d/15729.misc delete mode 100644 changelog.d/15731.misc delete mode 100644 changelog.d/15732.doc delete mode 100644 changelog.d/15733.misc delete mode 100644 changelog.d/15740.feature delete mode 100644 changelog.d/15752.misc delete mode 100644 changelog.d/15768.misc diff --git a/CHANGES.md b/CHANGES.md index 5debbc35b6b0..99c246a3bc2d 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,72 @@ +Synapse 1.86.0rc1 (2023-06-13) +============================== + +Features +-------- + +- Stable support for [MSC3882](https://github.com/matrix-org/matrix-spec-proposals/pull/3882) to allow an existing device/session to generate a login token for use on a new device/session. ([\#15388](https://github.com/matrix-org/synapse/issues/15388)) +- Support resolving a room's [canonical alias](https://spec.matrix.org/v1.7/client-server-api/#mroomcanonical_alias) via the module API. ([\#15450](https://github.com/matrix-org/synapse/issues/15450)) +- Enable support for [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952): intentional mentions. ([\#15520](https://github.com/matrix-org/synapse/issues/15520)) +- Experimental [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) support: delegate auth to an OIDC provider. ([\#15582](https://github.com/matrix-org/synapse/issues/15582)) +- Add Syanpse version deploy annotations to Grafana dashboard which enables easy correlation between behavior changes witnessed in a graph to a certain Synapse version and nail down regressions. ([\#15674](https://github.com/matrix-org/synapse/issues/15674)) +- Add a catch-all * to the supported relation types when redacting an event and its related events. This is an update to [MSC3912](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) implementation. ([\#15705](https://github.com/matrix-org/synapse/issues/15705)) +- Speed up `/messages` by backfilling in the background when there are no backward extremities where we are directly paginating. ([\#15710](https://github.com/matrix-org/synapse/issues/15710)) +- Expose a metric reporting the database background update status. ([\#15740](https://github.com/matrix-org/synapse/issues/15740)) + + +Bugfixes +-------- + +- Correctly clear caches when we delete a room. ([\#15609](https://github.com/matrix-org/synapse/issues/15609)) +- Check permissions for enabling encryption earlier during room creation to avoid creating broken rooms. ([\#15695](https://github.com/matrix-org/synapse/issues/15695)) + + +Improved Documentation +---------------------- + +- Simplify query to find participating servers in a room. ([\#15732](https://github.com/matrix-org/synapse/issues/15732)) + + +Internal Changes +---------------- + +- Allow for the configuration of max request retries and min/max retry delays in the matrix federation client. ([\#12504](https://github.com/matrix-org/synapse/issues/12504)) +- Log when events are (maybe unexpectedly) filtered out of responses in tests. ([\#14213](https://github.com/matrix-org/synapse/issues/14213)) +- Read from column `full_user_id` rather than `user_id` of tables `profiles` and `user_filters`. ([\#15649](https://github.com/matrix-org/synapse/issues/15649)) +- Add support for tracing functions which return `Awaitable`s. ([\#15650](https://github.com/matrix-org/synapse/issues/15650)) +- Cache requests for user's devices over federation. 
([\#15675](https://github.com/matrix-org/synapse/issues/15675)) +- Add fully qualified docker image names to Dockerfiles. ([\#15689](https://github.com/matrix-org/synapse/issues/15689)) +- Remove some unused code. ([\#15690](https://github.com/matrix-org/synapse/issues/15690)) +- Improve type hints. ([\#15694](https://github.com/matrix-org/synapse/issues/15694), [\#15697](https://github.com/matrix-org/synapse/issues/15697)) +- Update docstring and traces on `maybe_backfill()` functions. ([\#15709](https://github.com/matrix-org/synapse/issues/15709)) +- Add context for when/why to use the `long_retries` option when sending Federation requests. ([\#15721](https://github.com/matrix-org/synapse/issues/15721)) +- Removed some unused fields. ([\#15723](https://github.com/matrix-org/synapse/issues/15723)) +- Update federation error to more plainly explain we can only authorize our own membership events. ([\#15725](https://github.com/matrix-org/synapse/issues/15725)) +- Prevent the `latest_deps` and `twisted_trunk` daily GitHub Actions workflows from running on forks of the codebase. ([\#15726](https://github.com/matrix-org/synapse/issues/15726)) +- Improve performance of user directory search. ([\#15729](https://github.com/matrix-org/synapse/issues/15729)) +- Remove redundant table join with `room_memberships` when doing a `is_host_joined()`/`is_host_invited()` call (`membership` is already part of the `current_state_events`). ([\#15731](https://github.com/matrix-org/synapse/issues/15731)) +- Remove superfluous `room_memberships` join from background update. ([\#15733](https://github.com/matrix-org/synapse/issues/15733)) +- Speed up typechecking CI. ([\#15752](https://github.com/matrix-org/synapse/issues/15752)) +- Bump minimum supported Rust version to 1.60.0. ([\#15768](https://github.com/matrix-org/synapse/issues/15768)) + +### Updates to locked dependencies + +* Bump importlib-metadata from 6.1.0 to 6.6.0. ([\#15711](https://github.com/matrix-org/synapse/issues/15711)) +* Bump library/redis from 6-bullseye to 7-bullseye in /docker. ([\#15712](https://github.com/matrix-org/synapse/issues/15712)) +* Bump log from 0.4.18 to 0.4.19. ([\#15761](https://github.com/matrix-org/synapse/issues/15761)) +* Bump phonenumbers from 8.13.11 to 8.13.13. ([\#15763](https://github.com/matrix-org/synapse/issues/15763)) +* Bump pyasn1 from 0.4.8 to 0.5.0. ([\#15713](https://github.com/matrix-org/synapse/issues/15713)) +* Bump pydantic from 1.10.8 to 1.10.9. ([\#15762](https://github.com/matrix-org/synapse/issues/15762)) +* Bump pyo3-log from 0.8.1 to 0.8.2. ([\#15759](https://github.com/matrix-org/synapse/issues/15759)) +* Bump pyopenssl from 23.1.1 to 23.2.0. ([\#15765](https://github.com/matrix-org/synapse/issues/15765)) +* Bump regex from 1.7.3 to 1.8.4. ([\#15769](https://github.com/matrix-org/synapse/issues/15769)) +* Bump sentry-sdk from 1.22.1 to 1.25.0. ([\#15714](https://github.com/matrix-org/synapse/issues/15714)) +* Bump sentry-sdk from 1.25.0 to 1.25.1. ([\#15764](https://github.com/matrix-org/synapse/issues/15764)) +* Bump serde from 1.0.163 to 1.0.164. ([\#15760](https://github.com/matrix-org/synapse/issues/15760)) +* Bump types-jsonschema from 4.17.0.7 to 4.17.0.8. ([\#15716](https://github.com/matrix-org/synapse/issues/15716)) +* Bump types-pyopenssl from 23.1.0.2 to 23.2.0.0. ([\#15766](https://github.com/matrix-org/synapse/issues/15766)) +* Bump types-requests from 2.31.0.0 to 2.31.0.1. 
([\#15715](https://github.com/matrix-org/synapse/issues/15715)) + Synapse 1.85.2 (2023-06-08) =========================== diff --git a/changelog.d/12504.misc b/changelog.d/12504.misc deleted file mode 100644 index 0bebaa213d9e..000000000000 --- a/changelog.d/12504.misc +++ /dev/null @@ -1 +0,0 @@ -Allow for the configuration of max request retries and min/max retry delays in the matrix federation client. diff --git a/changelog.d/14213.misc b/changelog.d/14213.misc deleted file mode 100644 index b0689f3d1574..000000000000 --- a/changelog.d/14213.misc +++ /dev/null @@ -1 +0,0 @@ -Log when events are (maybe unexpectedly) filtered out of responses in tests. diff --git a/changelog.d/15388.feature b/changelog.d/15388.feature deleted file mode 100644 index 6cc55cafa2e1..000000000000 --- a/changelog.d/15388.feature +++ /dev/null @@ -1 +0,0 @@ -Stable support for [MSC3882](https://github.com/matrix-org/matrix-spec-proposals/pull/3882) to allow an existing device/session to generate a login token for use on a new device/session. \ No newline at end of file diff --git a/changelog.d/15450.feature b/changelog.d/15450.feature deleted file mode 100644 index 2102381143ba..000000000000 --- a/changelog.d/15450.feature +++ /dev/null @@ -1 +0,0 @@ -Support resolving a room's [canonical alias](https://spec.matrix.org/v1.7/client-server-api/#mroomcanonical_alias) via the module API. \ No newline at end of file diff --git a/changelog.d/15520.feature b/changelog.d/15520.feature deleted file mode 100644 index f4fd40ab9451..000000000000 --- a/changelog.d/15520.feature +++ /dev/null @@ -1 +0,0 @@ -Enable support for [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952): intentional mentions. diff --git a/changelog.d/15582.feature b/changelog.d/15582.feature deleted file mode 100644 index 00959500a54e..000000000000 --- a/changelog.d/15582.feature +++ /dev/null @@ -1 +0,0 @@ -Experimental [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) support: delegate auth to an OIDC provider. diff --git a/changelog.d/15609.bugfix b/changelog.d/15609.bugfix deleted file mode 100644 index b5a990cfec1e..000000000000 --- a/changelog.d/15609.bugfix +++ /dev/null @@ -1 +0,0 @@ -Correctly clear caches when we delete a room. diff --git a/changelog.d/15649.misc b/changelog.d/15649.misc deleted file mode 100644 index fca38abe0f60..000000000000 --- a/changelog.d/15649.misc +++ /dev/null @@ -1 +0,0 @@ -Read from column `full_user_id` rather than `user_id` of tables `profiles` and `user_filters`. diff --git a/changelog.d/15650.misc b/changelog.d/15650.misc deleted file mode 100644 index 9bbad113e11e..000000000000 --- a/changelog.d/15650.misc +++ /dev/null @@ -1 +0,0 @@ -Add support for tracing functions which return `Awaitable`s. diff --git a/changelog.d/15674.feature b/changelog.d/15674.feature deleted file mode 100644 index 68cf207dc0f6..000000000000 --- a/changelog.d/15674.feature +++ /dev/null @@ -1 +0,0 @@ -Add Syanpse version deploy annotations to Grafana dashboard which enables easy correlation between behavior changes witnessed in a graph to a certain Synapse version and nail down regressions. diff --git a/changelog.d/15675.misc b/changelog.d/15675.misc deleted file mode 100644 index 05538fdbeff9..000000000000 --- a/changelog.d/15675.misc +++ /dev/null @@ -1 +0,0 @@ -Cache requests for user's devices over federation. 
diff --git a/changelog.d/15689.misc b/changelog.d/15689.misc deleted file mode 100644 index 4262cc951507..000000000000 --- a/changelog.d/15689.misc +++ /dev/null @@ -1 +0,0 @@ -Add fully qualified docker image names to Dockerfiles. diff --git a/changelog.d/15690.misc b/changelog.d/15690.misc deleted file mode 100644 index c6c259eb7d14..000000000000 --- a/changelog.d/15690.misc +++ /dev/null @@ -1 +0,0 @@ -Remove some unused code. diff --git a/changelog.d/15694.misc b/changelog.d/15694.misc deleted file mode 100644 index 93ceaeafc9b9..000000000000 --- a/changelog.d/15694.misc +++ /dev/null @@ -1 +0,0 @@ -Improve type hints. diff --git a/changelog.d/15695.bugfix b/changelog.d/15695.bugfix deleted file mode 100644 index 99bf1fe05e34..000000000000 --- a/changelog.d/15695.bugfix +++ /dev/null @@ -1 +0,0 @@ -Check permissions for enabling encryption earlier during room creation to avoid creating broken rooms. diff --git a/changelog.d/15697.misc b/changelog.d/15697.misc deleted file mode 100644 index 93ceaeafc9b9..000000000000 --- a/changelog.d/15697.misc +++ /dev/null @@ -1 +0,0 @@ -Improve type hints. diff --git a/changelog.d/15705.feature b/changelog.d/15705.feature deleted file mode 100644 index e3cbb5a12e28..000000000000 --- a/changelog.d/15705.feature +++ /dev/null @@ -1 +0,0 @@ -Add a catch-all * to the supported relation types when redacting an event and its related events. This is an update to [MSC3912](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) implementation. diff --git a/changelog.d/15709.misc b/changelog.d/15709.misc deleted file mode 100644 index e9ce84a94021..000000000000 --- a/changelog.d/15709.misc +++ /dev/null @@ -1 +0,0 @@ -Update docstring and traces on `maybe_backfill()` functions. diff --git a/changelog.d/15710.feature b/changelog.d/15710.feature deleted file mode 100644 index fe77a2fef6e2..000000000000 --- a/changelog.d/15710.feature +++ /dev/null @@ -1 +0,0 @@ -Speed up `/messages` by backfilling in the background when there are no backward extremities where we are directly paginating. diff --git a/changelog.d/15721.misc b/changelog.d/15721.misc deleted file mode 100644 index f4d892daf970..000000000000 --- a/changelog.d/15721.misc +++ /dev/null @@ -1 +0,0 @@ -Add context for when/why to use the `long_retries` option when sending Federation requests. diff --git a/changelog.d/15723.misc b/changelog.d/15723.misc deleted file mode 100644 index ba331adca7f2..000000000000 --- a/changelog.d/15723.misc +++ /dev/null @@ -1 +0,0 @@ -Removed some unused fields. diff --git a/changelog.d/15725.misc b/changelog.d/15725.misc deleted file mode 100644 index 6c7a8a41d883..000000000000 --- a/changelog.d/15725.misc +++ /dev/null @@ -1 +0,0 @@ -Update federation error to more plainly explain we can only authorize our own membership events. diff --git a/changelog.d/15726.misc b/changelog.d/15726.misc deleted file mode 100644 index 941e541e7766..000000000000 --- a/changelog.d/15726.misc +++ /dev/null @@ -1 +0,0 @@ -Prevent the `latest_deps` and `twisted_trunk` daily GitHub Actions workflows from running on forks of the codebase. \ No newline at end of file diff --git a/changelog.d/15729.misc b/changelog.d/15729.misc deleted file mode 100644 index 394025430535..000000000000 --- a/changelog.d/15729.misc +++ /dev/null @@ -1 +0,0 @@ -Improve performance of user directory search. 
diff --git a/changelog.d/15731.misc b/changelog.d/15731.misc deleted file mode 100644 index 906bc2696254..000000000000 --- a/changelog.d/15731.misc +++ /dev/null @@ -1 +0,0 @@ -Remove redundant table join with `room_memberships` when doing a `is_host_joined()`/`is_host_invited()` call (`membership` is already part of the `current_state_events`). diff --git a/changelog.d/15732.doc b/changelog.d/15732.doc deleted file mode 100644 index b0e8639df78d..000000000000 --- a/changelog.d/15732.doc +++ /dev/null @@ -1 +0,0 @@ -Simplify query to find participating servers in a room. diff --git a/changelog.d/15733.misc b/changelog.d/15733.misc deleted file mode 100644 index 3ae7be3c27d1..000000000000 --- a/changelog.d/15733.misc +++ /dev/null @@ -1 +0,0 @@ -Remove superfluous `room_memberships` join from background update. diff --git a/changelog.d/15740.feature b/changelog.d/15740.feature deleted file mode 100644 index fed342ea55d8..000000000000 --- a/changelog.d/15740.feature +++ /dev/null @@ -1 +0,0 @@ -Expose a metric reporting the database background update status. diff --git a/changelog.d/15752.misc b/changelog.d/15752.misc deleted file mode 100644 index 7e373b12750b..000000000000 --- a/changelog.d/15752.misc +++ /dev/null @@ -1 +0,0 @@ -Speed up typechecking CI. diff --git a/changelog.d/15768.misc b/changelog.d/15768.misc deleted file mode 100644 index bc4b86323cd0..000000000000 --- a/changelog.d/15768.misc +++ /dev/null @@ -1 +0,0 @@ -Bump minimum supported Rust version to 1.60.0. diff --git a/debian/changelog b/debian/changelog index a7503ea60a7a..1c13433c4775 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.86.0~rc1) stable; urgency=medium + + * New Synapse release 1.86.0rc1. + + -- Synapse Packaging team Tue, 13 Jun 2023 14:30:45 +0200 + matrix-synapse-py3 (1.85.2) stable; urgency=medium * New Synapse release 1.85.2. diff --git a/pyproject.toml b/pyproject.toml index d42d7644d80e..6bbbf9500199 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.85.2" +version = "1.86.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0" From 629115836f9d32aad8e2afcf98196753877d70fd Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Tue, 13 Jun 2023 14:38:53 +0200 Subject: [PATCH 71/75] Fix changelog typo --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 99c246a3bc2d..5412581eef25 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -8,7 +8,7 @@ Features - Support resolving a room's [canonical alias](https://spec.matrix.org/v1.7/client-server-api/#mroomcanonical_alias) via the module API. ([\#15450](https://github.com/matrix-org/synapse/issues/15450)) - Enable support for [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952): intentional mentions. ([\#15520](https://github.com/matrix-org/synapse/issues/15520)) - Experimental [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) support: delegate auth to an OIDC provider. ([\#15582](https://github.com/matrix-org/synapse/issues/15582)) -- Add Syanpse version deploy annotations to Grafana dashboard which enables easy correlation between behavior changes witnessed in a graph to a certain Synapse version and nail down regressions. 
([\#15674](https://github.com/matrix-org/synapse/issues/15674)) +- Add Synapse version deploy annotations to Grafana dashboard which enables easy correlation between behavior changes witnessed in a graph to a certain Synapse version and nail down regressions. ([\#15674](https://github.com/matrix-org/synapse/issues/15674)) - Add a catch-all * to the supported relation types when redacting an event and its related events. This is an update to [MSC3912](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) implementation. ([\#15705](https://github.com/matrix-org/synapse/issues/15705)) - Speed up `/messages` by backfilling in the background when there are no backward extremities where we are directly paginating. ([\#15710](https://github.com/matrix-org/synapse/issues/15710)) - Expose a metric reporting the database background update status. ([\#15740](https://github.com/matrix-org/synapse/issues/15740)) From 14f9d9b4520099118f009ae4f4c6b11b779af499 Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Wed, 14 Jun 2023 11:53:55 +0200 Subject: [PATCH 72/75] Fix empty scope when having version mismatch between workers (#15774) --- changelog.d/15774.bugfix | 1 + synapse/types/__init__.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15774.bugfix diff --git a/changelog.d/15774.bugfix b/changelog.d/15774.bugfix new file mode 100644 index 000000000000..c24d6c25e4fc --- /dev/null +++ b/changelog.d/15774.bugfix @@ -0,0 +1 @@ +Fix an error when having workers of different versions running. diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index dfc95e8ebb8a..095be070e0c5 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -177,7 +177,7 @@ def deserialize( user=UserID.from_string(input["user_id"]), access_token_id=input["access_token_id"], is_guest=input["is_guest"], - scope=set(input["scope"]), + scope=set(input.get("scope", [])), shadow_banned=input["shadow_banned"], device_id=input["device_id"], app_service=appservice, From ef0d3d7bd941b497ad8291c58bcc53700e08b999 Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Wed, 14 Jun 2023 11:55:09 +0200 Subject: [PATCH 73/75] Revert "Allow for the configuration of max request retries and min/max retry delays in the matrix federation client (#12504)" This reverts commit d84e66144dc12dacf71c987a2ba802dd59c0b68e. --- CHANGES.md | 1 - .../configuration/config_documentation.md | 26 ------------------- synapse/config/federation.py | 10 ------- synapse/http/matrixfederationclient.py | 21 +++++++-------- tests/http/test_matrixfederationclient.py | 20 +------------- 5 files changed, 10 insertions(+), 68 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 5412581eef25..d89859366479 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -30,7 +30,6 @@ Improved Documentation Internal Changes ---------------- -- Allow for the configuration of max request retries and min/max retry delays in the matrix federation client. ([\#12504](https://github.com/matrix-org/synapse/issues/12504)) - Log when events are (maybe unexpectedly) filtered out of responses in tests. ([\#14213](https://github.com/matrix-org/synapse/issues/14213)) - Read from column `full_user_id` rather than `user_id` of tables `profiles` and `user_filters`. ([\#15649](https://github.com/matrix-org/synapse/issues/15649)) - Add support for tracing functions which return `Awaitable`s. 
From ef0d3d7bd941b497ad8291c58bcc53700e08b999 Mon Sep 17 00:00:00 2001
From: Mathieu Velten
Date: Wed, 14 Jun 2023 11:55:09 +0200
Subject: [PATCH 73/75] Revert "Allow for the configuration of max request
 retries and min/max retry delays in the matrix federation client (#12504)"

This reverts commit d84e66144dc12dacf71c987a2ba802dd59c0b68e.
---
 CHANGES.md                                 |  1 -
 .../configuration/config_documentation.md  | 26 -------------------
 synapse/config/federation.py               | 10 -------
 synapse/http/matrixfederationclient.py     | 21 +++++++--------
 tests/http/test_matrixfederationclient.py  | 20 +-------------
 5 files changed, 10 insertions(+), 68 deletions(-)

diff --git a/CHANGES.md b/CHANGES.md
index 5412581eef25..d89859366479 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -30,7 +30,6 @@ Improved Documentation
 Internal Changes
 ----------------
 
-- Allow for the configuration of max request retries and min/max retry delays in the matrix federation client. ([\#12504](https://github.com/matrix-org/synapse/issues/12504))
 - Log when events are (maybe unexpectedly) filtered out of responses in tests. ([\#14213](https://github.com/matrix-org/synapse/issues/14213))
 - Read from column `full_user_id` rather than `user_id` of tables `profiles` and `user_filters`. ([\#15649](https://github.com/matrix-org/synapse/issues/15649))
 - Add support for tracing functions which return `Awaitable`s. ([\#15650](https://github.com/matrix-org/synapse/issues/15650))
diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
index 8426de04179b..0cf6e075ff11 100644
--- a/docs/usage/configuration/config_documentation.md
+++ b/docs/usage/configuration/config_documentation.md
@@ -1196,32 +1196,6 @@ Example configuration:
 allow_device_name_lookup_over_federation: true
 ```
 ---
-### `federation`
-
-The federation section defines some sub-options related to federation.
-
-The following options are related to configuring timeout and retry logic for one request,
-independently of the others.
-Short retry algorithm is used when something or someone will wait for the request to have an
-answer, while long retry is used for requests that happen in the background,
-like sending a federation transaction.
-
-* `client_timeout`: timeout for the federation requests in seconds. Default to 60s.
-* `max_short_retry_delay`: maximum delay to be used for the short retry algo in seconds. Default to 2s.
-* `max_long_retry_delay`: maximum delay to be used for the short retry algo in seconds. Default to 60s.
-* `max_short_retries`: maximum number of retries for the short retry algo. Default to 3 attempts.
-* `max_long_retries`: maximum number of retries for the long retry algo. Default to 10 attempts.
-
-Example configuration:
-```yaml
-federation:
-  client_timeout: 180
-  max_short_retry_delay: 7
-  max_long_retry_delay: 100
-  max_short_retries: 5
-  max_long_retries: 20
-```
----
 ## Caching
 
 Options related to caching.
diff --git a/synapse/config/federation.py b/synapse/config/federation.py
index d21f7fd02a5e..336fca578aa1 100644
--- a/synapse/config/federation.py
+++ b/synapse/config/federation.py
@@ -22,8 +22,6 @@ class FederationConfig(Config):
     section = "federation"
 
     def read_config(self, config: JsonDict, **kwargs: Any) -> None:
-        federation_config = config.setdefault("federation", {})
-
         # FIXME: federation_domain_whitelist needs sytests
         self.federation_domain_whitelist: Optional[dict] = None
         federation_domain_whitelist = config.get("federation_domain_whitelist", None)
@@ -51,13 +49,5 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None:
             "allow_device_name_lookup_over_federation", False
         )
 
-        # Allow for the configuration of timeout, max request retries
-        # and min/max retry delays in the matrix federation client.
-        self.client_timeout = federation_config.get("client_timeout", 60)
-        self.max_long_retry_delay = federation_config.get("max_long_retry_delay", 60)
-        self.max_short_retry_delay = federation_config.get("max_short_retry_delay", 2)
-        self.max_long_retries = federation_config.get("max_long_retries", 10)
-        self.max_short_retries = federation_config.get("max_short_retries", 3)
-
 
 _METRICS_FOR_DOMAINS_SCHEMA = {"type": "array", "items": {"type": "string"}}
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index ed36825b671c..abb5ae581521 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -95,6 +95,8 @@
 )
 
+MAX_LONG_RETRIES = 10
+MAX_SHORT_RETRIES = 3
 MAXINT = sys.maxsize
 
@@ -404,12 +406,7 @@ def __init__(
         self.clock = hs.get_clock()
         self._store = hs.get_datastores().main
         self.version_string_bytes = hs.version_string.encode("ascii")
-        self.default_timeout = hs.config.federation.client_timeout
-
-        self.max_long_retry_delay = hs.config.federation.max_long_retry_delay
-        self.max_short_retry_delay = hs.config.federation.max_short_retry_delay
-        self.max_long_retries = hs.config.federation.max_long_retries
-        self.max_short_retries = hs.config.federation.max_short_retries
+        self.default_timeout = 60
 
         self._cooperator = Cooperator(scheduler=_make_scheduler(self.reactor))
 
@@ -586,9 +583,9 @@ async def _send_request(
        # XXX: Would be much nicer to retry only at the transaction-layer
        # (once we have reliable transactions in place)
        if long_retries:
-            retries_left = self.max_long_retries
+            retries_left = MAX_LONG_RETRIES
        else:
-            retries_left = self.max_short_retries
+            retries_left = MAX_SHORT_RETRIES
 
        url_bytes = request.uri
        url_str = url_bytes.decode("ascii")
@@ -733,12 +730,12 @@ async def _send_request(
 
            if retries_left and not timeout:
                if long_retries:
-                    delay = 4 ** (self.max_long_retries + 1 - retries_left)
-                    delay = min(delay, self.max_long_retry_delay)
+                    delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left)
+                    delay = min(delay, 60)
                    delay *= random.uniform(0.8, 1.4)
                else:
-                    delay = 0.5 * 2 ** (self.max_short_retries - retries_left)
-                    delay = min(delay, self.max_short_retry_delay)
+                    delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left)
+                    delay = min(delay, 2)
                    delay *= random.uniform(0.8, 1.4)
 
                logger.debug(
diff --git a/tests/http/test_matrixfederationclient.py b/tests/http/test_matrixfederationclient.py
index 8565f8ac64ad..0dfc03ce50f4 100644
--- a/tests/http/test_matrixfederationclient.py
+++ b/tests/http/test_matrixfederationclient.py
@@ -40,7 +40,7 @@
 from synapse.util import Clock
 
 from tests.server import FakeTransport
-from tests.unittest import HomeserverTestCase, override_config
+from tests.unittest import HomeserverTestCase
 
 
 def check_logcontext(context: LoggingContextOrSentinel) -> None:
@@ -640,21 +640,3 @@ def test_build_auth_headers_rejects_falsey_destinations(self) -> None:
        self.cl.build_auth_headers(
            b"", b"GET", b"https://example.com", destination_is=b""
        )
-
-    @override_config(
-        {
-            "federation": {
-                "client_timeout": 180,
-                "max_long_retry_delay": 100,
-                "max_short_retry_delay": 7,
-                "max_long_retries": 20,
-                "max_short_retries": 5,
-            }
-        }
-    )
-    def test_configurable_retry_and_delay_values(self) -> None:
-        self.assertEqual(self.cl.default_timeout, 180)
-        self.assertEqual(self.cl.max_long_retry_delay, 100)
-        self.assertEqual(self.cl.max_short_retry_delay, 7)
-        self.assertEqual(self.cl.max_long_retries, 20)
-        self.assertEqual(self.cl.max_short_retries, 5)
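
For context on the code the revert restores: the federation client distinguishes a short retry schedule (someone is waiting on the response) from a long one (background work such as sending federation transactions), and both apply multiplicative jitter so that many homeservers do not retry against a struggling peer in lockstep. Here is a standalone sketch of the restored schedule, using the constants and formulas from the hunk above; the helper function itself is mine, not Synapse's.

```python
import random

MAX_LONG_RETRIES = 10
MAX_SHORT_RETRIES = 3


def retry_delay(retries_left: int, long_retries: bool) -> float:
    """Seconds to sleep before the next attempt, per the restored logic."""
    if long_retries:
        # Grows 4s, 16s, then is capped at 60s for the remaining attempts.
        delay = min(4 ** (MAX_LONG_RETRIES + 1 - retries_left), 60)
    else:
        # Grows 0.5s, 1s, 2s across the three short-retry attempts.
        delay = min(0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left), 2)
    # Jitter between x0.8 and x1.4 desynchronises retries across servers.
    return delay * random.uniform(0.8, 1.4)
```
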
From 825c5909de642c9c6494ef464684e29630d197b5 Mon Sep 17 00:00:00 2001
From: Mathieu Velten
Date: Wed, 14 Jun 2023 12:16:41 +0200
Subject: [PATCH 74/75] 1.86.0rc2

---
 CHANGES.md               | 11 +++++++++++
 changelog.d/15774.bugfix |  1 -
 debian/changelog         |  6 ++++++
 pyproject.toml           |  2 +-
 4 files changed, 18 insertions(+), 2 deletions(-)
 delete mode 100644 changelog.d/15774.bugfix

diff --git a/CHANGES.md b/CHANGES.md
index d89859366479..f2f39c3b6eb3 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,6 +1,17 @@
+Synapse 1.86.0rc2 (2023-06-14)
+==============================
+
+Bugfixes
+--------
+
+- Fix an error when having workers of different versions running. ([\#15774](https://github.com/matrix-org/synapse/issues/15774))
+
+
 Synapse 1.86.0rc1 (2023-06-13)
 ==============================
 
+This version was tagged but never released.
+
 Features
 --------
 
diff --git a/changelog.d/15774.bugfix b/changelog.d/15774.bugfix
deleted file mode 100644
index c24d6c25e4fc..000000000000
--- a/changelog.d/15774.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix an error when having workers of different versions running.
diff --git a/debian/changelog b/debian/changelog
index 1c13433c4775..81b71ba342a1 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.86.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.86.0rc2.
+
+ -- Synapse Packaging team  Wed, 14 Jun 2023 12:16:27 +0200
+
 matrix-synapse-py3 (1.86.0~rc1) stable; urgency=medium
 
   * New Synapse release 1.86.0rc1.
diff --git a/pyproject.toml b/pyproject.toml
index 6bbbf9500199..097bd039433f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"
 
 [tool.poetry]
 name = "matrix-synapse"
-version = "1.86.0rc1"
+version = "1.86.0rc2"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors "]
 license = "Apache-2.0"

From 7d3da399dd905d2a05da5a1941e996cbf2380e99 Mon Sep 17 00:00:00 2001
From: Mathieu Velten
Date: Tue, 20 Jun 2023 17:22:50 +0200
Subject: [PATCH 75/75] 1.86.0

---
 CHANGES.md       | 6 ++++++
 debian/changelog | 6 ++++++
 pyproject.toml   | 2 +-
 3 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/CHANGES.md b/CHANGES.md
index f2f39c3b6eb3..ff4126044bd9 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,9 @@
+Synapse 1.86.0 (2023-06-20)
+===========================
+
+No significant changes since 1.86.0rc2.
+
+
 Synapse 1.86.0rc2 (2023-06-14)
 ==============================
 
diff --git a/debian/changelog b/debian/changelog
index 81b71ba342a1..9d057c65ef46 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.86.0) stable; urgency=medium
+
+  * New Synapse release 1.86.0.
+
+ -- Synapse Packaging team  Tue, 20 Jun 2023 17:22:46 +0200
+
 matrix-synapse-py3 (1.86.0~rc2) stable; urgency=medium
 
   * New Synapse release 1.86.0rc2.
diff --git a/pyproject.toml b/pyproject.toml
index 097bd039433f..3626be9797e8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"
 
 [tool.poetry]
 name = "matrix-synapse"
-version = "1.86.0rc2"
+version = "1.86.0"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors "]
 license = "Apache-2.0"
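
A small detail in the release commits above: the Python project version `1.86.0rc2` is written as `1.86.0~rc2` in `debian/changelog`. That is deliberate: dpkg sorts `~` before anything else, including the end of the string, so `1.86.0~rc2` is considered older than `1.86.0` and upgrades cleanly to the final release, mirroring PEP 440 where `1.86.0rc2 < 1.86.0`. A toy sketch of the mapping, for illustration only (this is not Synapse's actual release tooling):

```python
def debian_version(py_version: str) -> str:
    # Hypothetical helper: rewrite a PEP 440 release-candidate suffix into
    # the tilde form that dpkg orders *before* the final release.
    return py_version.replace("rc", "~rc")


assert debian_version("1.86.0rc2") == "1.86.0~rc2"
assert debian_version("1.86.0") == "1.86.0"
# Shell check: dpkg --compare-versions 1.86.0~rc2 lt 1.86.0 && echo older
```
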