From 2920e540bfd263e33fa25a6f6d642a9f2b965c2f Mon Sep 17 00:00:00 2001
From: Patrick Cloke
Date: Tue, 13 Dec 2022 08:43:53 -0500
Subject: [PATCH 01/82] Use the room type from stats in hierarchy response.
 (#14263)

This avoids pulling additional state information (and events) from the
database for each item returned in the hierarchy response.

The room type might be out of date until a background update finishes
running; the worst impact of this would be spaces being treated as rooms
in the hierarchy response. This should self-heal once the background
update finishes.
---
 changelog.d/14263.misc           |  1 +
 synapse/handlers/room_summary.py | 14 +++++---------
 2 files changed, 6 insertions(+), 9 deletions(-)
 create mode 100644 changelog.d/14263.misc

diff --git a/changelog.d/14263.misc b/changelog.d/14263.misc
new file mode 100644
index 000000000000..11d9446a4b1e
--- /dev/null
+++ b/changelog.d/14263.misc
@@ -0,0 +1 @@
+Improve performance of the `/hierarchy` endpoint.
diff --git a/synapse/handlers/room_summary.py b/synapse/handlers/room_summary.py
index 8d08625237bc..c6b869c6f44e 100644
--- a/synapse/handlers/room_summary.py
+++ b/synapse/handlers/room_summary.py
@@ -20,7 +20,6 @@
 import attr

 from synapse.api.constants import (
-    EventContentFields,
     EventTypes,
     HistoryVisibility,
     JoinRules,
@@ -701,13 +700,6 @@ async def _build_room_entry(self, room_id: str, for_federation: bool) -> JsonDic
         # there should always be an entry
         assert stats is not None, "unable to retrieve stats for %s" % (room_id,)

-        current_state_ids = await self._storage_controllers.state.get_current_state_ids(
-            room_id
-        )
-        create_event = await self._store.get_event(
-            current_state_ids[(EventTypes.Create, "")]
-        )
-
         entry = {
             "room_id": stats["room_id"],
             "name": stats["name"],
@@ -720,7 +712,7 @@ async def _build_room_entry(self, room_id: str, for_federation: bool) -> JsonDic
                 stats["history_visibility"] == HistoryVisibility.WORLD_READABLE
             ),
             "guest_can_join": stats["guest_access"] == "can_join",
-            "room_type": create_event.content.get(EventContentFields.ROOM_TYPE),
+            "room_type": stats["room_type"],
         }

         if self._msc3266_enabled:
@@ -730,7 +722,11 @@ async def _build_room_entry(self, room_id: str, for_federation: bool) -> JsonDic
         # Federation requests need to provide additional information so the
         # requested server is able to filter the response appropriately.
         if for_federation:
+            current_state_ids = (
+                await self._storage_controllers.state.get_current_state_ids(room_id)
+            )
             room_version = await self._store.get_room_version(room_id)
+
             if await self._event_auth_handler.has_restricted_join_rules(
                 current_state_ids, room_version
             ):

From 51e7255fbb684245e664ae9e715bfac77b4dd103 Mon Sep 17 00:00:00 2001
From: reivilibre
Date: Tue, 13 Dec 2022 14:19:43 +0000
Subject: [PATCH 02/82] Fix the *MAU Limits* section of the Grafana dashboard
 relying on a specific `job` name for the workers of a Synapse deployment.
 (#14644)

---
 changelog.d/14644.bugfix     |  1 +
 contrib/grafana/synapse.json | 15 ++++++---------
 2 files changed, 7 insertions(+), 9 deletions(-)
 create mode 100644 changelog.d/14644.bugfix

diff --git a/changelog.d/14644.bugfix b/changelog.d/14644.bugfix
new file mode 100644
index 000000000000..711088bb7ed2
--- /dev/null
+++ b/changelog.d/14644.bugfix
@@ -0,0 +1 @@
+Fix the *MAU Limits* section of the Grafana dashboard relying on a specific `job` name for the workers of a Synapse deployment.
\ No newline at end of file
diff --git a/contrib/grafana/synapse.json b/contrib/grafana/synapse.json
index 68705b6e6d26..f09cd6f87c28 100644
--- a/contrib/grafana/synapse.json
+++ b/contrib/grafana/synapse.json
@@ -1008,8 +1008,7 @@
             "mode": "absolute",
             "steps": [
               {
-                "color": "green",
-                "value": null
+                "color": "green"
               },
               {
                 "color": "red",
@@ -1681,8 +1680,7 @@
             "mode": "absolute",
             "steps": [
               {
-                "color": "green",
-                "value": null
+                "color": "green"
               },
               {
                 "color": "red",
@@ -2533,8 +2531,7 @@
             "mode": "absolute",
             "steps": [
               {
-                "color": "green",
-                "value": null
+                "color": "green"
               },
               {
                 "color": "red",
@@ -11296,7 +11293,7 @@
             "uid": "$datasource"
           },
           "editorMode": "code",
-          "expr": "synapse_admin_mau_max{instance=\"$instance\", job=~\"(hhs_)?synapse\"}",
+          "expr": "max(synapse_admin_mau_max{instance=\"$instance\"})",
           "format": "time_series",
           "interval": "",
           "intervalFactor": 1,
@@ -11310,7 +11307,7 @@
             "uid": "$datasource"
          },
           "editorMode": "code",
-          "expr": "synapse_admin_mau_current{instance=\"$instance\", job=~\"(hhs_)?synapse\"}",
+          "expr": "max(synapse_admin_mau_current{instance=\"$instance\"})",
           "hide": false,
           "legendFormat": "Current",
           "range": true,
@@ -12760,6 +12757,6 @@
   "timezone": "",
   "title": "Synapse",
   "uid": "000000012",
-  "version": 149,
+  "version": 150,
   "weekStart": ""
 }
\ No newline at end of file

From d567a8265fd6dc35084398a873eca485bd94cc74 Mon Sep 17 00:00:00 2001
From: David Robertson
Date: Tue, 13 Dec 2022 15:36:27 +0000
Subject: [PATCH 03/82] Declare support for Python 3.11 (#14673)

* Declare support for Python 3.11

* Changelog
---
 changelog.d/14673.doc      | 1 +
 docs/setup/installation.md | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
 create mode 100644 changelog.d/14673.doc

diff --git a/changelog.d/14673.doc b/changelog.d/14673.doc
new file mode 100644
index 000000000000..7baf5f7f38c9
--- /dev/null
+++ b/changelog.d/14673.doc
@@ -0,0 +1 @@
+Declare support for Python 3.11.
diff --git a/docs/setup/installation.md b/docs/setup/installation.md
index 436041f8a8ca..306085e005e2 100644
--- a/docs/setup/installation.md
+++ b/docs/setup/installation.md
@@ -200,7 +200,7 @@ When following this route please make sure that the [Platform-specific prerequis
 System requirements:

 - POSIX-compliant system (tested on Linux & OS X)
-- Python 3.7 or later, up to Python 3.10.
+- Python 3.7 or later, up to Python 3.11.
 - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org

 If building on an uncommon architecture for which pre-built wheels are

From e512b25cd1618941d165b37f0518ec5765a3b23d Mon Sep 17 00:00:00 2001
From: Jeyachandran Rathnam
Date: Wed, 14 Dec 2022 07:02:28 -0500
Subject: [PATCH 04/82] Fix #11308: Remove dependency on jQuery on reCAPTCHA
 page (#14672)

---
 changelog.d/14672.misc               | 1 +
 synapse/res/templates/recaptcha.html | 3 +--
 2 files changed, 2 insertions(+), 2 deletions(-)
 create mode 100644 changelog.d/14672.misc

diff --git a/changelog.d/14672.misc b/changelog.d/14672.misc
new file mode 100644
index 000000000000..b94ebed97152
--- /dev/null
+++ b/changelog.d/14672.misc
@@ -0,0 +1 @@
+Remove dependency on jQuery on reCAPTCHA page.
diff --git a/synapse/res/templates/recaptcha.html b/synapse/res/templates/recaptcha.html
index 8204928cdf53..f00992a24be8 100644
--- a/synapse/res/templates/recaptcha.html
+++ b/synapse/res/templates/recaptcha.html
@@ -3,11 +3,10 @@ {% block header %} - {% endblock %}

From 24a97b3e7144720545df69c321e320c9d35166a6 Mon Sep 17 00:00:00 2001
From: Patrick Cloke
Date: Wed, 14 Dec 2022 09:25:33 -0500
Subject: [PATCH 05/82] Delete event_push_summary_unique_index again. (#14669)

If a Synapse deployment upgraded (from < 1.62.0 to >= 1.70.0) then it is
possible for schema deltas to run before background updates, causing drift
in the database schema, due to:

1. A delta registered a background update to create an index.
2. A delta dropped the above index if it exists (but it won't yet exist,
   since the background job hasn't run).
3. The code assumed the index was dropped.

To fix this we:

1. Cancel the background update which could create the index.
2. Drop the index again.
3. Drop a related index which is dropped by the background update.
---
 changelog.d/14669.bugfix                           |  1 +
 .../databases/main/event_push_actions.py           |  9 -----
 .../main/delta/73/23_fix_thread_index.sql          | 33 +++++++++++++++++++
 3 files changed, 34 insertions(+), 9 deletions(-)
 create mode 100644 changelog.d/14669.bugfix
 create mode 100644 synapse/storage/schema/main/delta/73/23_fix_thread_index.sql

diff --git a/changelog.d/14669.bugfix b/changelog.d/14669.bugfix
new file mode 100644
index 000000000000..bea316b06550
--- /dev/null
+++ b/changelog.d/14669.bugfix
@@ -0,0 +1 @@
+Fix a bug introduced in Synapse 1.70.0 which could cause spurious `UNIQUE constraint failed` errors in the `rotate_notifs` background job.
diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py
index 7ebe34f773ab..3a0c370fde1d 100644
--- a/synapse/storage/databases/main/event_push_actions.py
+++ b/synapse/storage/databases/main/event_push_actions.py
@@ -274,15 +274,6 @@ def __init__(
             self._clear_old_push_actions_staging, 30 * 60 * 1000
         )

-        self.db_pool.updates.register_background_index_update(
-            "event_push_summary_unique_index",
-            index_name="event_push_summary_unique_index",
-            table="event_push_summary",
-            columns=["user_id", "room_id"],
-            unique=True,
-            replaces_index="event_push_summary_user_rm",
-        )
-
         self.db_pool.updates.register_background_index_update(
             "event_push_summary_unique_index2",
             index_name="event_push_summary_unique_index2",
diff --git a/synapse/storage/schema/main/delta/73/23_fix_thread_index.sql b/synapse/storage/schema/main/delta/73/23_fix_thread_index.sql
new file mode 100644
index 000000000000..ec519ceebfb3
--- /dev/null
+++ b/synapse/storage/schema/main/delta/73/23_fix_thread_index.sql
@@ -0,0 +1,33 @@
+/* Copyright 2022 The Matrix.org Foundation C.I.C
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +-- If a Synapse deployment made a large jump in versions (from < 1.62.0 to >= 1.70.0) +-- in a single upgrade then it might be possible for the event_push_summary_unique_index +-- to be created in the background from delta 71/02event_push_summary_unique.sql after +-- delta 73/06thread_notifications_thread_id_idx.sql is executed, causing it to +-- not drop the event_push_summary_unique_index index. +-- +-- See https://github.com/matrix-org/synapse/issues/14641 + +-- Stop the index from being scheduled for creation in the background. +DELETE FROM background_updates WHERE update_name = 'event_push_summary_unique_index'; + +-- The above background job also replaces another index, so ensure that side-effect +-- is applied. +DROP INDEX IF EXISTS event_push_summary_user_rm; + +-- Fix deployments which ran the 73/06thread_notifications_thread_id_idx.sql delta +-- before the event_push_summary_unique_index background job was run. +DROP INDEX IF EXISTS event_push_summary_unique_index; From fb60cb16fe3cf26fbd947eec926cb4b24b8e9fc7 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Wed, 14 Dec 2022 14:47:11 +0000 Subject: [PATCH 06/82] Faster remote room joins: stream the un-partial-stating of events over replication. [rei:frrj/streams/unpsr] (#14545) --- changelog.d/14545.misc | 1 + synapse/handlers/federation_event.py | 2 + synapse/replication/tcp/streams/__init__.py | 7 +- .../replication/tcp/streams/partial_state.py | 28 ++++++ .../storage/databases/main/events_worker.py | 88 +++++++++++++++++++ synapse/storage/databases/main/state.py | 34 +++++-- .../73/22_un_partial_stated_event_stream.sql | 34 +++++++ ...artial_stated_room_stream_seq.sql.postgres | 20 +++++ 8 files changed, 204 insertions(+), 10 deletions(-) create mode 100644 changelog.d/14545.misc create mode 100644 synapse/storage/schema/main/delta/73/22_un_partial_stated_event_stream.sql create mode 100644 synapse/storage/schema/main/delta/73/23_un_partial_stated_room_stream_seq.sql.postgres diff --git a/changelog.d/14545.misc b/changelog.d/14545.misc new file mode 100644 index 000000000000..60b6761a51b3 --- /dev/null +++ b/changelog.d/14545.misc @@ -0,0 +1 @@ +Faster remote room joins: stream the un-partial-stating of events over replication. \ No newline at end of file diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py index 66aca2f8642b..31df7f55cc97 100644 --- a/synapse/handlers/federation_event.py +++ b/synapse/handlers/federation_event.py @@ -610,6 +610,8 @@ async def update_state_for_partial_state_event( self._state_storage_controller.notify_event_un_partial_stated( event.event_id ) + # Notify that there's a new row in the un_partial_stated_events stream. 
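+        # (This wakes the replication streamer so connected workers pick up the new row promptly.)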
+ self._notifier.notify_replication() @trace async def backfill( diff --git a/synapse/replication/tcp/streams/__init__.py b/synapse/replication/tcp/streams/__init__.py index 8575666d9ce1..110f10aab9a5 100644 --- a/synapse/replication/tcp/streams/__init__.py +++ b/synapse/replication/tcp/streams/__init__.py @@ -42,7 +42,10 @@ ) from synapse.replication.tcp.streams.events import EventsStream from synapse.replication.tcp.streams.federation import FederationStream -from synapse.replication.tcp.streams.partial_state import UnPartialStatedRoomStream +from synapse.replication.tcp.streams.partial_state import ( + UnPartialStatedEventStream, + UnPartialStatedRoomStream, +) STREAMS_MAP = { stream.NAME: stream @@ -63,6 +66,7 @@ AccountDataStream, UserSignatureStream, UnPartialStatedRoomStream, + UnPartialStatedEventStream, ) } @@ -83,4 +87,5 @@ "AccountDataStream", "UserSignatureStream", "UnPartialStatedRoomStream", + "UnPartialStatedEventStream", ] diff --git a/synapse/replication/tcp/streams/partial_state.py b/synapse/replication/tcp/streams/partial_state.py index 18f087ffa251..b5a2ae74b685 100644 --- a/synapse/replication/tcp/streams/partial_state.py +++ b/synapse/replication/tcp/streams/partial_state.py @@ -46,3 +46,31 @@ def __init__(self, hs: "HomeServer"): current_token_without_instance(store.get_un_partial_stated_rooms_token), store.get_un_partial_stated_rooms_from_stream, ) + + +@attr.s(slots=True, frozen=True, auto_attribs=True) +class UnPartialStatedEventStreamRow: + # ID of the event that has been un-partial-stated. + event_id: str + + # True iff the rejection status of the event changed as a result of being + # un-partial-stated. + rejection_status_changed: bool + + +class UnPartialStatedEventStream(Stream): + """ + Stream to notify about events becoming un-partial-stated. + """ + + NAME = "un_partial_stated_event" + ROW_TYPE = UnPartialStatedEventStreamRow + + def __init__(self, hs: "HomeServer"): + store = hs.get_datastores().main + super().__init__( + hs.get_instance_name(), + # TODO(faster_joins, multiple writers): we need to account for instance names + current_token_without_instance(store.get_un_partial_stated_events_token), + store.get_un_partial_stated_events_from_stream, + ) diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index 318fd7dc7133..e19b16064b16 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -70,6 +70,7 @@ from synapse.storage.engines import PostgresEngine from synapse.storage.types import Cursor from synapse.storage.util.id_generators import ( + AbstractStreamIdGenerator, AbstractStreamIdTracker, MultiWriterIdGenerator, StreamIdGenerator, @@ -292,6 +293,93 @@ def get_chain_id_txn(txn: Cursor) -> int: id_column="chain_id", ) + self._un_partial_stated_events_stream_id_gen: AbstractStreamIdGenerator + + if isinstance(database.engine, PostgresEngine): + self._un_partial_stated_events_stream_id_gen = MultiWriterIdGenerator( + db_conn=db_conn, + db=database, + stream_name="un_partial_stated_event_stream", + instance_name=hs.get_instance_name(), + tables=[ + ("un_partial_stated_event_stream", "instance_name", "stream_id") + ], + sequence_name="un_partial_stated_event_stream_sequence", + # TODO(faster_joins, multiple writers) Support multiple writers. 
+ writers=["master"], + ) + else: + self._un_partial_stated_events_stream_id_gen = StreamIdGenerator( + db_conn, "un_partial_stated_event_stream", "stream_id" + ) + + def get_un_partial_stated_events_token(self) -> int: + # TODO(faster_joins, multiple writers): This is inappropriate if there are multiple + # writers because workers that don't write often will hold all + # readers up. + return self._un_partial_stated_events_stream_id_gen.get_current_token() + + async def get_un_partial_stated_events_from_stream( + self, instance_name: str, last_id: int, current_id: int, limit: int + ) -> Tuple[List[Tuple[int, Tuple[str, bool]]], int, bool]: + """Get updates for the un-partial-stated events replication stream. + + Args: + instance_name: The writer we want to fetch updates from. Unused + here since there is only ever one writer. + last_id: The token to fetch updates from. Exclusive. + current_id: The token to fetch updates up to. Inclusive. + limit: The requested limit for the number of rows to return. The + function may return more or fewer rows. + + Returns: + A tuple consisting of: the updates, a token to use to fetch + subsequent updates, and whether we returned fewer rows than exists + between the requested tokens due to the limit. + + The token returned can be used in a subsequent call to this + function to get further updatees. + + The updates are a list of 2-tuples of stream ID and the row data + """ + + if last_id == current_id: + return [], current_id, False + + def get_un_partial_stated_events_from_stream_txn( + txn: LoggingTransaction, + ) -> Tuple[List[Tuple[int, Tuple[str, bool]]], int, bool]: + sql = """ + SELECT stream_id, event_id, rejection_status_changed + FROM un_partial_stated_event_stream + WHERE ? < stream_id AND stream_id <= ? AND instance_name = ? + ORDER BY stream_id ASC + LIMIT ? 
+ """ + txn.execute(sql, (last_id, current_id, instance_name, limit)) + updates = [ + ( + row[0], + ( + row[1], + bool(row[2]), + ), + ) + for row in txn + ] + limited = False + upto_token = current_id + if len(updates) >= limit: + upto_token = updates[-1][0] + limited = True + + return updates, upto_token, limited + + return await self.db_pool.runInteraction( + "get_un_partial_stated_events_from_stream", + get_un_partial_stated_events_from_stream_txn, + ) + def process_replication_rows( self, stream_name: str, diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py index c801a93b5b06..f855903c390c 100644 --- a/synapse/storage/databases/main/state.py +++ b/synapse/storage/databases/main/state.py @@ -80,6 +80,7 @@ def __init__( hs: "HomeServer", ): super().__init__(database, db_conn, hs) + self._instance_name: str = hs.get_instance_name() async def get_room_version(self, room_id: str) -> RoomVersion: """Get the room_version of a given room @@ -404,18 +405,21 @@ async def update_state_for_partial_state_event( context: EventContext, ) -> None: """Update the state group for a partial state event""" - await self.db_pool.runInteraction( - "update_state_for_partial_state_event", - self._update_state_for_partial_state_event_txn, - event, - context, - ) + async with self._un_partial_stated_events_stream_id_gen.get_next() as un_partial_state_event_stream_id: + await self.db_pool.runInteraction( + "update_state_for_partial_state_event", + self._update_state_for_partial_state_event_txn, + event, + context, + un_partial_state_event_stream_id, + ) def _update_state_for_partial_state_event_txn( self, txn: LoggingTransaction, event: EventBase, context: EventContext, + un_partial_state_event_stream_id: int, ) -> None: # we shouldn't have any outliers here assert not event.internal_metadata.is_outlier() @@ -436,7 +440,10 @@ def _update_state_for_partial_state_event_txn( # the event may now be rejected where it was not before, or vice versa, # in which case we need to update the rejected flags. 
- if bool(context.rejected) != (event.rejected_reason is not None): + rejection_status_changed = bool(context.rejected) != ( + event.rejected_reason is not None + ) + if rejection_status_changed: self.mark_event_rejected_txn(txn, event.event_id, context.rejected) self.db_pool.simple_delete_one_txn( @@ -445,8 +452,6 @@ def _update_state_for_partial_state_event_txn( keyvalues={"event_id": event.event_id}, ) - # TODO(faster_joins): need to do something about workers here - # https://github.com/matrix-org/synapse/issues/12994 txn.call_after(self.is_partial_state_event.invalidate, (event.event_id,)) txn.call_after( self._get_state_group_for_event.prefill, @@ -454,6 +459,17 @@ def _update_state_for_partial_state_event_txn( state_group, ) + self.db_pool.simple_insert_txn( + txn, + "un_partial_stated_event_stream", + { + "stream_id": un_partial_state_event_stream_id, + "instance_name": self._instance_name, + "event_id": event.event_id, + "rejection_status_changed": rejection_status_changed, + }, + ) + class MainStateBackgroundUpdateStore(RoomMemberWorkerStore): diff --git a/synapse/storage/schema/main/delta/73/22_un_partial_stated_event_stream.sql b/synapse/storage/schema/main/delta/73/22_un_partial_stated_event_stream.sql new file mode 100644 index 000000000000..0e571f78c30f --- /dev/null +++ b/synapse/storage/schema/main/delta/73/22_un_partial_stated_event_stream.sql @@ -0,0 +1,34 @@ +/* Copyright 2022 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- Stream for notifying that an event has become un-partial-stated. +CREATE TABLE un_partial_stated_event_stream( + -- Position in the stream + stream_id BIGINT PRIMARY KEY NOT NULL, + + -- Which instance wrote this entry. + instance_name TEXT NOT NULL, + + -- Which event has been un-partial-stated. + event_id TEXT NOT NULL REFERENCES events(event_id) ON DELETE CASCADE, + + -- true iff the `rejected` status of the event changed when it became + -- un-partial-stated. + rejection_status_changed BOOLEAN NOT NULL +); + +-- We want an index here because of the foreign key constraint: +-- upon deleting an event, the database needs to be able to check here. +CREATE UNIQUE INDEX un_partial_stated_event_stream_room_id ON un_partial_stated_event_stream (event_id); diff --git a/synapse/storage/schema/main/delta/73/23_un_partial_stated_room_stream_seq.sql.postgres b/synapse/storage/schema/main/delta/73/23_un_partial_stated_room_stream_seq.sql.postgres new file mode 100644 index 000000000000..1ec24702f39a --- /dev/null +++ b/synapse/storage/schema/main/delta/73/23_un_partial_stated_room_stream_seq.sql.postgres @@ -0,0 +1,20 @@ +/* Copyright 2022 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+CREATE SEQUENCE IF NOT EXISTS un_partial_stated_event_stream_sequence;
+
+SELECT setval('un_partial_stated_event_stream_sequence', (
+    SELECT COALESCE(MAX(stream_id), 1) FROM un_partial_stated_event_stream
+));

From 4f4d69042345134c040de137a8e1aa108ff71acb Mon Sep 17 00:00:00 2001
From: David Robertson
Date: Wed, 14 Dec 2022 14:52:35 +0000
Subject: [PATCH 07/82] Allow `compute_state_after_events` to use partial
 state (#14676)

* Allow `compute_state_after_events` to use partial state

  if fetching a subset of state that is trusted during a partial join.

* Changelog
---
 changelog.d/14676.misc    |  1 +
 synapse/state/__init__.py | 10 ++++++++--
 2 files changed, 9 insertions(+), 2 deletions(-)
 create mode 100644 changelog.d/14676.misc

diff --git a/changelog.d/14676.misc b/changelog.d/14676.misc
new file mode 100644
index 000000000000..8a41df9c64c3
--- /dev/null
+++ b/changelog.d/14676.misc
@@ -0,0 +1 @@
+Faster joins: make `compute_state_after_events` consistent with other state-fetching functions that take a `StateFilter`.
diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py
index ee5469d5a8e9..fdfb46ab82ad 100644
--- a/synapse/state/__init__.py
+++ b/synapse/state/__init__.py
@@ -202,14 +202,20 @@ async def compute_state_after_events(
             room_id: the room_id containing the given events.
             event_ids: the events whose state should be fetched and resolved.
             await_full_state: if `True`, will block if we do not yet have complete state
-                at the given `event_id`s, regardless of whether `state_filter` is
-                satisfied by partial state.
+                at these events and `state_filter` is not satisfied by partial state.
+                Defaults to `True`.

         Returns:
             the state dict (a mapping from (event_type, state_key) -> event_id) which
             holds the resolution of the states after the given event IDs.
         """
         logger.debug("calling resolve_state_groups from compute_state_after_events")
+        if (
+            await_full_state
+            and state_filter
+            and not state_filter.must_await_full_state(self.hs.is_mine_id)
+        ):
+            await_full_state = False
         ret = await self.resolve_state_groups_for_events(
             room_id, event_ids, await_full_state
         )

From 046320b9b602957b258587a62a63c2508ea31c23 Mon Sep 17 00:00:00 2001
From: Jeremy Kescher
Date: Thu, 15 Dec 2022 04:03:13 +0000
Subject: [PATCH 08/82] Fix missing word in autotune sub-option description
 (#14674)

Fix `target_memory_usage` being used in the description for the actual
`cache_autotune` sub-option `target_cache_memory_usage`.

Signed-off-by: Jeremy Kescher
---
 changelog.d/14674.doc                            | 1 +
 docs/usage/configuration/config_documentation.md | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
 create mode 100644 changelog.d/14674.doc

diff --git a/changelog.d/14674.doc b/changelog.d/14674.doc
new file mode 100644
index 000000000000..df2141781935
--- /dev/null
+++ b/changelog.d/14674.doc
@@ -0,0 +1 @@
+Fix `target_memory_usage` being used in the description for the actual `cache_autotune` sub-option `target_cache_memory_usage`.
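For orientation, the sub-options described in the documentation change below live under the `caches` section of `homeserver.yaml`. A minimal sketch of such a configuration, assuming the sub-options sit under a `cache_autotuning` key and using purely illustrative values:

    caches:
      cache_autotuning:
        max_cache_memory_usage: 1024M
        target_cache_memory_usage: 758M
        min_cache_ttl: 5m
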
diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
index 4d32902fea21..6b8768f45dbf 100644
--- a/docs/usage/configuration/config_documentation.md
+++ b/docs/usage/configuration/config_documentation.md
@@ -1148,7 +1148,7 @@ number of entries that can be stored.
   * `max_cache_memory_usage` sets a ceiling on how much memory the cache can use before caches begin to be continuously evicted.
     They will continue to be evicted until the memory usage drops below the `target_memory_usage`, set in
     the setting below, or until the `min_cache_ttl` is hit. There is no default value for this option.
-  * `target_memory_usage` sets a rough target for the desired memory usage of the caches. There is no default value
+  * `target_cache_memory_usage` sets a rough target for the desired memory usage of the caches. There is no default value
     for this option.
   * `min_cache_ttl` sets a limit under which newer cache entries are not evicted and is only applied when
     caches are actively being evicted/`max_cache_memory_usage` has been exceeded. This is to protect hot caches

From 54c012c5a8722725cf104fa6205f253b5b9b0192 Mon Sep 17 00:00:00 2001
From: Mathieu Velten
Date: Thu, 15 Dec 2022 17:04:23 +0100
Subject: [PATCH 09/82] Make `handle_new_client_event` throw
 `PartialStateConflictError` (#14665)

Calling code is then adapted to retry when needed, so that the error does
not reach clients as a 500.

Signed-off-by: Mathieu Velten
Co-authored-by: Sean Quah <8349537+squahtx@users.noreply.github.com>
---
 changelog.d/14665.misc                |   1 +
 synapse/handlers/federation.py        | 117 ++++++++++-----
 synapse/handlers/message.py           | 202 ++++++++++++++------------
 synapse/handlers/room.py              |  95 +++++++-----
 synapse/handlers/room_batch.py        |   2 +
 synapse/handlers/room_member.py       | 168 +++++++++++++--------
 synapse/util/caches/response_cache.py |  14 +-
 7 files changed, 360 insertions(+), 239 deletions(-)
 create mode 100644 changelog.d/14665.misc

diff --git a/changelog.d/14665.misc b/changelog.d/14665.misc
new file mode 100644
index 000000000000..2b7c96143d1f
--- /dev/null
+++ b/changelog.d/14665.misc
@@ -0,0 +1 @@
+Change `handle_new_client_event` signature so that a 429 does not reach clients on `PartialStateConflictError`, and internally retry when needed instead.
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index b2784d73339d..eca75f1108d1 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -1343,32 +1343,53 @@ async def exchange_third_party_invite( ) EventValidator().validate_builder(builder) - event, context = await self.event_creation_handler.create_new_client_event( - builder=builder - ) - event, context = await self.add_display_name_to_third_party_invite( - room_version_obj, event_dict, event, context - ) + # Try several times, it could fail with PartialStateConflictError + # in send_membership_event, cf comment in except block. + max_retries = 5 + for i in range(max_retries): + try: + ( + event, + context, + ) = await self.event_creation_handler.create_new_client_event( + builder=builder + ) - EventValidator().validate_new(event, self.config) + event, context = await self.add_display_name_to_third_party_invite( + room_version_obj, event_dict, event, context + ) - # We need to tell the transaction queue to send this out, even - # though the sender isn't a local user.
- event.internal_metadata.send_on_behalf_of = self.hs.hostname + EventValidator().validate_new(event, self.config) - try: - validate_event_for_room_version(event) - await self._event_auth_handler.check_auth_rules_from_context(event) - except AuthError as e: - logger.warning("Denying new third party invite %r because %s", event, e) - raise e + # We need to tell the transaction queue to send this out, even + # though the sender isn't a local user. + event.internal_metadata.send_on_behalf_of = self.hs.hostname - await self._check_signature(event, context) + try: + validate_event_for_room_version(event) + await self._event_auth_handler.check_auth_rules_from_context( + event + ) + except AuthError as e: + logger.warning( + "Denying new third party invite %r because %s", event, e + ) + raise e - # We retrieve the room member handler here as to not cause a cyclic dependency - member_handler = self.hs.get_room_member_handler() - await member_handler.send_membership_event(None, event, context) + await self._check_signature(event, context) + + # We retrieve the room member handler here as to not cause a cyclic dependency + member_handler = self.hs.get_room_member_handler() + await member_handler.send_membership_event(None, event, context) + + break + except PartialStateConflictError as e: + # Persisting couldn't happen because the room got un-partial stated + # in the meantime and context needs to be recomputed, so let's do so. + if i == max_retries - 1: + raise e + pass else: destinations = {x.split(":", 1)[-1] for x in (sender_user_id, room_id)} @@ -1400,28 +1421,46 @@ async def on_exchange_third_party_invite_request( room_version_obj, event_dict ) - event, context = await self.event_creation_handler.create_new_client_event( - builder=builder - ) - event, context = await self.add_display_name_to_third_party_invite( - room_version_obj, event_dict, event, context - ) + # Try several times, it could fail with PartialStateConflictError + # in send_membership_event, cf comment in except block. + max_retries = 5 + for i in range(max_retries): + try: + ( + event, + context, + ) = await self.event_creation_handler.create_new_client_event( + builder=builder + ) + event, context = await self.add_display_name_to_third_party_invite( + room_version_obj, event_dict, event, context + ) - try: - validate_event_for_room_version(event) - await self._event_auth_handler.check_auth_rules_from_context(event) - except AuthError as e: - logger.warning("Denying third party invite %r because %s", event, e) - raise e - await self._check_signature(event, context) + try: + validate_event_for_room_version(event) + await self._event_auth_handler.check_auth_rules_from_context(event) + except AuthError as e: + logger.warning("Denying third party invite %r because %s", event, e) + raise e + await self._check_signature(event, context) + + # We need to tell the transaction queue to send this out, even + # though the sender isn't a local user. + event.internal_metadata.send_on_behalf_of = get_domain_from_id( + event.sender + ) - # We need to tell the transaction queue to send this out, even - # though the sender isn't a local user. 
- event.internal_metadata.send_on_behalf_of = get_domain_from_id(event.sender) + # We retrieve the room member handler here as to not cause a cyclic dependency + member_handler = self.hs.get_room_member_handler() + await member_handler.send_membership_event(None, event, context) - # We retrieve the room member handler here as to not cause a cyclic dependency - member_handler = self.hs.get_room_member_handler() - await member_handler.send_membership_event(None, event, context) + break + except PartialStateConflictError as e: + # Persisting couldn't happen because the room got un-partial stated + # in the meantime and context needs to be recomputed, so let's do so. + if i == max_retries - 1: + raise e + pass async def add_display_name_to_third_party_invite( self, diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 845f6833583d..88fc51a4c97e 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -37,7 +37,6 @@ AuthError, Codes, ConsentNotGivenError, - LimitExceededError, NotFoundError, ShadowBanError, SynapseError, @@ -999,60 +998,73 @@ async def create_and_send_nonmember_event( event.internal_metadata.stream_ordering, ) - event, context = await self.create_event( - requester, - event_dict, - txn_id=txn_id, - allow_no_prev_events=allow_no_prev_events, - prev_event_ids=prev_event_ids, - state_event_ids=state_event_ids, - outlier=outlier, - historical=historical, - depth=depth, - ) + # Try several times, it could fail with PartialStateConflictError + # in handle_new_client_event, cf comment in except block. + max_retries = 5 + for i in range(max_retries): + try: + event, context = await self.create_event( + requester, + event_dict, + txn_id=txn_id, + allow_no_prev_events=allow_no_prev_events, + prev_event_ids=prev_event_ids, + state_event_ids=state_event_ids, + outlier=outlier, + historical=historical, + depth=depth, + ) - assert self.hs.is_mine_id(event.sender), "User must be our own: %s" % ( - event.sender, - ) + assert self.hs.is_mine_id(event.sender), "User must be our own: %s" % ( + event.sender, + ) - spam_check_result = await self.spam_checker.check_event_for_spam(event) - if spam_check_result != self.spam_checker.NOT_SPAM: - if isinstance(spam_check_result, tuple): - try: - [code, dict] = spam_check_result - raise SynapseError( - 403, - "This message had been rejected as probable spam", - code, - dict, - ) - except ValueError: - logger.error( - "Spam-check module returned invalid error value. Expecting [code, dict], got %s", - spam_check_result, - ) + spam_check_result = await self.spam_checker.check_event_for_spam(event) + if spam_check_result != self.spam_checker.NOT_SPAM: + if isinstance(spam_check_result, tuple): + try: + [code, dict] = spam_check_result + raise SynapseError( + 403, + "This message had been rejected as probable spam", + code, + dict, + ) + except ValueError: + logger.error( + "Spam-check module returned invalid error value. Expecting [code, dict], got %s", + spam_check_result, + ) - raise SynapseError( - 403, - "This message has been rejected as probable spam", - Codes.FORBIDDEN, - ) + raise SynapseError( + 403, + "This message has been rejected as probable spam", + Codes.FORBIDDEN, + ) - # Backwards compatibility: if the return value is not an error code, it - # means the module returned an error message to be included in the - # SynapseError (which is now deprecated). 
- raise SynapseError( - 403, - spam_check_result, - Codes.FORBIDDEN, + # Backwards compatibility: if the return value is not an error code, it + # means the module returned an error message to be included in the + # SynapseError (which is now deprecated). + raise SynapseError( + 403, + spam_check_result, + Codes.FORBIDDEN, + ) + + ev = await self.handle_new_client_event( + requester=requester, + events_and_context=[(event, context)], + ratelimit=ratelimit, + ignore_shadow_ban=ignore_shadow_ban, ) - ev = await self.handle_new_client_event( - requester=requester, - events_and_context=[(event, context)], - ratelimit=ratelimit, - ignore_shadow_ban=ignore_shadow_ban, - ) + break + except PartialStateConflictError as e: + # Persisting couldn't happen because the room got un-partial stated + # in the meantime and context needs to be recomputed, so let's do so. + if i == max_retries - 1: + raise e + pass # we know it was persisted, so must have a stream ordering assert ev.internal_metadata.stream_ordering @@ -1356,7 +1368,7 @@ async def handle_new_client_event( Raises: ShadowBanError if the requester has been shadow-banned. - SynapseError(503) if attempting to persist a partial state event in + PartialStateConflictError if attempting to persist a partial state event in a room that has been un-partial stated. """ extra_users = extra_users or [] @@ -1418,34 +1430,23 @@ async def handle_new_client_event( # We now persist the event (and update the cache in parallel, since we # don't want to block on it). event, context = events_and_context[0] - try: - result, _ = await make_deferred_yieldable( - gather_results( - ( - run_in_background( - self._persist_events, - requester=requester, - events_and_context=events_and_context, - ratelimit=ratelimit, - extra_users=extra_users, - ), - run_in_background( - self.cache_joined_hosts_for_events, events_and_context - ).addErrback( - log_failure, "cache_joined_hosts_for_event failed" - ), + result, _ = await make_deferred_yieldable( + gather_results( + ( + run_in_background( + self._persist_events, + requester=requester, + events_and_context=events_and_context, + ratelimit=ratelimit, + extra_users=extra_users, ), - consumeErrors=True, - ) - ).addErrback(unwrapFirstError) - except PartialStateConflictError as e: - # The event context needs to be recomputed. - # Turn the error into a 429, as a hint to the client to try again. - logger.info( - "Room %s was un-partial stated while persisting client event.", - event.room_id, + run_in_background( + self.cache_joined_hosts_for_events, events_and_context + ).addErrback(log_failure, "cache_joined_hosts_for_event failed"), + ), + consumeErrors=True, ) - raise LimitExceededError(msg=e.msg, errcode=e.errcode, retry_after_ms=0) + ).addErrback(unwrapFirstError) return result @@ -2012,26 +2013,39 @@ async def _send_dummy_event_for_room(self, room_id: str) -> bool: for user_id in members: requester = create_requester(user_id, authenticated_entity=self.server_name) try: - event, context = await self.create_event( - requester, - { - "type": EventTypes.Dummy, - "content": {}, - "room_id": room_id, - "sender": user_id, - }, - ) + # Try several times, it could fail with PartialStateConflictError + # in handle_new_client_event, cf comment in except block. 
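+                # (A room is un-partial-stated at most once, so one retry would do in principle; 5 is defensive.)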
+ max_retries = 5 + for i in range(max_retries): + try: + event, context = await self.create_event( + requester, + { + "type": EventTypes.Dummy, + "content": {}, + "room_id": room_id, + "sender": user_id, + }, + ) - event.internal_metadata.proactively_send = False + event.internal_metadata.proactively_send = False - # Since this is a dummy-event it is OK if it is sent by a - # shadow-banned user. - await self.handle_new_client_event( - requester, - events_and_context=[(event, context)], - ratelimit=False, - ignore_shadow_ban=True, - ) + # Since this is a dummy-event it is OK if it is sent by a + # shadow-banned user. + await self.handle_new_client_event( + requester, + events_and_context=[(event, context)], + ratelimit=False, + ignore_shadow_ban=True, + ) + + break + except PartialStateConflictError as e: + # Persisting couldn't happen because the room got un-partial stated + # in the meantime and context needs to be recomputed, so let's do so. + if i == max_retries - 1: + raise e + pass return True except AuthError: logger.info( diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index f81241c2b335..572c7b4db344 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -62,6 +62,7 @@ from synapse.handlers.relations import BundledAggregations from synapse.module_api import NOT_SPAM from synapse.rest.admin._base import assert_user_is_admin +from synapse.storage.databases.main.events import PartialStateConflictError from synapse.streams import EventSource from synapse.types import ( JsonDict, @@ -207,46 +208,64 @@ async def upgrade_room( new_room_id = self._generate_room_id() - # Check whether the user has the power level to carry out the upgrade. - # `check_auth_rules_from_context` will check that they are in the room and have - # the required power level to send the tombstone event. - ( - tombstone_event, - tombstone_context, - ) = await self.event_creation_handler.create_event( - requester, - { - "type": EventTypes.Tombstone, - "state_key": "", - "room_id": old_room_id, - "sender": user_id, - "content": { - "body": "This room has been replaced", - "replacement_room": new_room_id, - }, - }, - ) - validate_event_for_room_version(tombstone_event) - await self._event_auth_handler.check_auth_rules_from_context(tombstone_event) + # Try several times, it could fail with PartialStateConflictError + # in _upgrade_room, cf comment in except block. + max_retries = 5 + for i in range(max_retries): + try: + # Check whether the user has the power level to carry out the upgrade. + # `check_auth_rules_from_context` will check that they are in the room and have + # the required power level to send the tombstone event. 
+ ( + tombstone_event, + tombstone_context, + ) = await self.event_creation_handler.create_event( + requester, + { + "type": EventTypes.Tombstone, + "state_key": "", + "room_id": old_room_id, + "sender": user_id, + "content": { + "body": "This room has been replaced", + "replacement_room": new_room_id, + }, + }, + ) + validate_event_for_room_version(tombstone_event) + await self._event_auth_handler.check_auth_rules_from_context( + tombstone_event + ) - # Upgrade the room - # - # If this user has sent multiple upgrade requests for the same room - # and one of them is not complete yet, cache the response and - # return it to all subsequent requests - ret = await self._upgrade_response_cache.wrap( - (old_room_id, user_id), - self._upgrade_room, - requester, - old_room_id, - old_room, # args for _upgrade_room - new_room_id, - new_version, - tombstone_event, - tombstone_context, - ) + # Upgrade the room + # + # If this user has sent multiple upgrade requests for the same room + # and one of them is not complete yet, cache the response and + # return it to all subsequent requests + ret = await self._upgrade_response_cache.wrap( + (old_room_id, user_id), + self._upgrade_room, + requester, + old_room_id, + old_room, # args for _upgrade_room + new_room_id, + new_version, + tombstone_event, + tombstone_context, + ) - return ret + return ret + except PartialStateConflictError as e: + # Clean up the cache so we can retry properly + self._upgrade_response_cache.unset((old_room_id, user_id)) + # Persisting couldn't happen because the room got un-partial stated + # in the meantime and context needs to be recomputed, so let's do so. + if i == max_retries - 1: + raise e + pass + + # This is to satisfy mypy and should never happen + raise PartialStateConflictError() async def _upgrade_room( self, diff --git a/synapse/handlers/room_batch.py b/synapse/handlers/room_batch.py index 411a6fb22fdb..c73d2adaad47 100644 --- a/synapse/handlers/room_batch.py +++ b/synapse/handlers/room_batch.py @@ -375,6 +375,8 @@ async def persist_historical_events( # Events are sorted by (topological_ordering, stream_ordering) # where topological_ordering is just depth. 
for (event, context) in reversed(events_to_persist): + # This call can't raise `PartialStateConflictError` since we forbid + # use of the historical batch API during partial state await self.event_creation_handler.handle_new_client_event( await self.create_requester_for_user_id_from_app_service( event.sender, app_service_requester.app_service diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 0c39e852a12e..d236cc09b526 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -34,6 +34,7 @@ from synapse.handlers.profile import MAX_AVATAR_URL_LEN, MAX_DISPLAYNAME_LEN from synapse.logging import opentracing from synapse.module_api import NOT_SPAM +from synapse.storage.databases.main.events import PartialStateConflictError from synapse.types import ( JsonDict, Requester, @@ -392,60 +393,81 @@ async def _local_membership_update( event_pos = await self.store.get_position_for_event(existing_event_id) return existing_event_id, event_pos.stream - event, context = await self.event_creation_handler.create_event( - requester, - { - "type": EventTypes.Member, - "content": content, - "room_id": room_id, - "sender": requester.user.to_string(), - "state_key": user_id, - # For backwards compatibility: - "membership": membership, - "origin_server_ts": origin_server_ts, - }, - txn_id=txn_id, - allow_no_prev_events=allow_no_prev_events, - prev_event_ids=prev_event_ids, - state_event_ids=state_event_ids, - depth=depth, - require_consent=require_consent, - outlier=outlier, - historical=historical, - ) - - prev_state_ids = await context.get_prev_state_ids( - StateFilter.from_types([(EventTypes.Member, None)]) - ) + # Try several times, it could fail with PartialStateConflictError, + # in handle_new_client_event, cf comment in except block. + max_retries = 5 + for i in range(max_retries): + try: + event, context = await self.event_creation_handler.create_event( + requester, + { + "type": EventTypes.Member, + "content": content, + "room_id": room_id, + "sender": requester.user.to_string(), + "state_key": user_id, + # For backwards compatibility: + "membership": membership, + "origin_server_ts": origin_server_ts, + }, + txn_id=txn_id, + allow_no_prev_events=allow_no_prev_events, + prev_event_ids=prev_event_ids, + state_event_ids=state_event_ids, + depth=depth, + require_consent=require_consent, + outlier=outlier, + historical=historical, + ) - prev_member_event_id = prev_state_ids.get((EventTypes.Member, user_id), None) + prev_state_ids = await context.get_prev_state_ids( + StateFilter.from_types([(EventTypes.Member, None)]) + ) - if event.membership == Membership.JOIN: - newly_joined = True - if prev_member_event_id: - prev_member_event = await self.store.get_event(prev_member_event_id) - newly_joined = prev_member_event.membership != Membership.JOIN - - # Only rate-limit if the user actually joined the room, otherwise we'll end - # up blocking profile updates. 
- if newly_joined and ratelimit: - await self._join_rate_limiter_local.ratelimit(requester) - await self._join_rate_per_room_limiter.ratelimit( - requester, key=room_id, update=False + prev_member_event_id = prev_state_ids.get( + (EventTypes.Member, user_id), None ) - with opentracing.start_active_span("handle_new_client_event"): - result_event = await self.event_creation_handler.handle_new_client_event( - requester, - events_and_context=[(event, context)], - extra_users=[target], - ratelimit=ratelimit, - ) - if event.membership == Membership.LEAVE: - if prev_member_event_id: - prev_member_event = await self.store.get_event(prev_member_event_id) - if prev_member_event.membership == Membership.JOIN: - await self._user_left_room(target, room_id) + if event.membership == Membership.JOIN: + newly_joined = True + if prev_member_event_id: + prev_member_event = await self.store.get_event( + prev_member_event_id + ) + newly_joined = prev_member_event.membership != Membership.JOIN + + # Only rate-limit if the user actually joined the room, otherwise we'll end + # up blocking profile updates. + if newly_joined and ratelimit: + await self._join_rate_limiter_local.ratelimit(requester) + await self._join_rate_per_room_limiter.ratelimit( + requester, key=room_id, update=False + ) + with opentracing.start_active_span("handle_new_client_event"): + result_event = ( + await self.event_creation_handler.handle_new_client_event( + requester, + events_and_context=[(event, context)], + extra_users=[target], + ratelimit=ratelimit, + ) + ) + + if event.membership == Membership.LEAVE: + if prev_member_event_id: + prev_member_event = await self.store.get_event( + prev_member_event_id + ) + if prev_member_event.membership == Membership.JOIN: + await self._user_left_room(target, room_id) + + break + except PartialStateConflictError as e: + # Persisting couldn't happen because the room got un-partial stated + # in the meantime and context needs to be recomputed, so let's do so. + if i == max_retries - 1: + raise e + pass # we know it was persisted, so should have a stream ordering assert result_event.internal_metadata.stream_ordering @@ -1234,6 +1256,8 @@ async def send_membership_event( ratelimit: Whether to rate limit this request. Raises: SynapseError if there was a problem changing the membership. + PartialStateConflictError: if attempting to persist a partial state event in + a room that has been un-partial stated. """ target_user = UserID.from_string(event.state_key) room_id = event.room_id @@ -1863,21 +1887,37 @@ async def _generate_local_out_of_band_leave( list(previous_membership_event.auth_event_ids()) + prev_event_ids ) - event, context = await self.event_creation_handler.create_event( - requester, - event_dict, - txn_id=txn_id, - prev_event_ids=prev_event_ids, - auth_event_ids=auth_event_ids, - outlier=True, - ) - event.internal_metadata.out_of_band_membership = True + # Try several times, it could fail with PartialStateConflictError + # in handle_new_client_event, cf comment in except block. 
+ max_retries = 5 + for i in range(max_retries): + try: + event, context = await self.event_creation_handler.create_event( + requester, + event_dict, + txn_id=txn_id, + prev_event_ids=prev_event_ids, + auth_event_ids=auth_event_ids, + outlier=True, + ) + event.internal_metadata.out_of_band_membership = True + + result_event = ( + await self.event_creation_handler.handle_new_client_event( + requester, + events_and_context=[(event, context)], + extra_users=[UserID.from_string(target_user)], + ) + ) + + break + except PartialStateConflictError as e: + # Persisting couldn't happen because the room got un-partial stated + # in the meantime and context needs to be recomputed, so let's do so. + if i == max_retries - 1: + raise e + pass - result_event = await self.event_creation_handler.handle_new_client_event( - requester, - events_and_context=[(event, context)], - extra_users=[UserID.from_string(target_user)], - ) # we know it was persisted, so must have a stream ordering assert result_event.internal_metadata.stream_ordering diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index a3eb5f741bfc..340e5e914533 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -167,12 +167,10 @@ def on_complete(r: RV) -> RV: # the should_cache bit, we leave it in the cache for now and schedule # its removal later. if self.timeout_sec and context.should_cache: - self.clock.call_later( - self.timeout_sec, self._result_cache.pop, key, None - ) + self.clock.call_later(self.timeout_sec, self.unset, key) else: # otherwise, remove the result immediately. - self._result_cache.pop(key, None) + self.unset(key) return r # make sure we do this *after* adding the entry to result_cache, @@ -181,6 +179,14 @@ def on_complete(r: RV) -> RV: result.addBoth(on_complete) return entry + def unset(self, key: KV) -> None: + """Remove the cached value for this key from the cache, if any. + + Args: + key: key used to remove the cached value + """ + self._result_cache.pop(key, None) + async def wrap( self, key: KV, From 652d1669c5a103b1c20478770c4aaf18849c09a3 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Fri, 16 Dec 2022 06:53:01 -0500 Subject: [PATCH 10/82] Add missing type hints to tests.handlers. (#14680) And do not allow untyped defs in tests.handlers. 
--- changelog.d/14680.misc | 1 + mypy.ini | 5 +- synapse/handlers/auth.py | 2 +- tests/handlers/test_appservice.py | 54 +++---- tests/handlers/test_cas.py | 2 +- tests/handlers/test_directory.py | 27 ++-- tests/handlers/test_e2e_room_keys.py | 76 ++++++---- tests/handlers/test_federation.py | 2 +- tests/handlers/test_federation_event.py | 10 +- tests/handlers/test_message.py | 26 ++-- tests/handlers/test_oidc.py | 48 +++--- tests/handlers/test_password_providers.py | 144 +++++++++--------- tests/handlers/test_presence.py | 100 +++++++------ tests/handlers/test_profile.py | 4 +- tests/handlers/test_receipts.py | 6 +- tests/handlers/test_register.py | 169 +++++++++++++--------- tests/handlers/test_room.py | 6 +- tests/handlers/test_room_summary.py | 76 ++++++---- tests/handlers/test_saml.py | 33 +++-- tests/handlers/test_send_email.py | 29 ++-- tests/handlers/test_stats.py | 74 +++++++--- tests/handlers/test_sync.py | 11 +- 22 files changed, 527 insertions(+), 378 deletions(-) create mode 100644 changelog.d/14680.misc diff --git a/changelog.d/14680.misc b/changelog.d/14680.misc new file mode 100644 index 000000000000..d44571b73149 --- /dev/null +++ b/changelog.d/14680.misc @@ -0,0 +1 @@ +Add missing type hints. diff --git a/mypy.ini b/mypy.ini index 37acf589c90f..1a37414e581c 100644 --- a/mypy.ini +++ b/mypy.ini @@ -95,10 +95,7 @@ disallow_untyped_defs = True [mypy-tests.federation.transport.test_client] disallow_untyped_defs = True -[mypy-tests.handlers.test_sso] -disallow_untyped_defs = True - -[mypy-tests.handlers.test_user_directory] +[mypy-tests.handlers.*] disallow_untyped_defs = True [mypy-tests.metrics.test_background_process_metrics] diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 8b9ef25d296f..30f2d46c3c4a 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -2031,7 +2031,7 @@ def __init__(self) -> None: self.is_3pid_allowed_callbacks: List[IS_3PID_ALLOWED_CALLBACK] = [] # Mapping from login type to login parameters - self._supported_login_types: Dict[str, Iterable[str]] = {} + self._supported_login_types: Dict[str, Tuple[str, ...]] = {} # Mapping from login type to auth checker callbacks self.auth_checker_callbacks: Dict[str, List[CHECK_AUTH_CALLBACK]] = {} diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index 57bfbd77341f..a7495ab21a41 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -31,7 +31,7 @@ from synapse.handlers.appservice import ApplicationServicesHandler from synapse.rest.client import login, receipts, register, room, sendtodevice from synapse.server import HomeServer -from synapse.types import RoomStreamToken +from synapse.types import JsonDict, RoomStreamToken from synapse.util import Clock from synapse.util.stringutils import random_string @@ -44,7 +44,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): """Tests the ApplicationServicesHandler.""" - def setUp(self): + def setUp(self) -> None: self.mock_store = Mock() self.mock_as_api = Mock() self.mock_scheduler = Mock() @@ -61,7 +61,7 @@ def setUp(self): self.handler = ApplicationServicesHandler(hs) self.event_source = hs.get_event_sources() - def test_notify_interested_services(self): + def test_notify_interested_services(self) -> None: interested_service = self._mkservice(is_interested_in_event=True) services = [ self._mkservice(is_interested_in_event=False), @@ -90,7 +90,7 @@ def test_notify_interested_services(self): interested_service, events=[event] ) - def 
test_query_user_exists_unknown_user(self): + def test_query_user_exists_unknown_user(self) -> None: user_id = "@someone:anywhere" services = [self._mkservice(is_interested_in_event=True)] services[0].is_interested_in_user.return_value = True @@ -107,7 +107,7 @@ def test_query_user_exists_unknown_user(self): self.mock_as_api.query_user.assert_called_once_with(services[0], user_id) - def test_query_user_exists_known_user(self): + def test_query_user_exists_known_user(self) -> None: user_id = "@someone:anywhere" services = [self._mkservice(is_interested_in_event=True)] services[0].is_interested_in_user.return_value = True @@ -127,7 +127,7 @@ def test_query_user_exists_known_user(self): "query_user called when it shouldn't have been.", ) - def test_query_room_alias_exists(self): + def test_query_room_alias_exists(self) -> None: room_alias_str = "#foo:bar" room_alias = Mock() room_alias.to_string.return_value = room_alias_str @@ -157,7 +157,7 @@ def test_query_room_alias_exists(self): self.assertEqual(result.room_id, room_id) self.assertEqual(result.servers, servers) - def test_get_3pe_protocols_no_appservices(self): + def test_get_3pe_protocols_no_appservices(self) -> None: self.mock_store.get_app_services.return_value = [] response = self.successResultOf( defer.ensureDeferred(self.handler.get_3pe_protocols("my-protocol")) @@ -165,7 +165,7 @@ def test_get_3pe_protocols_no_appservices(self): self.mock_as_api.get_3pe_protocol.assert_not_called() self.assertEqual(response, {}) - def test_get_3pe_protocols_no_protocols(self): + def test_get_3pe_protocols_no_protocols(self) -> None: service = self._mkservice(False, []) self.mock_store.get_app_services.return_value = [service] response = self.successResultOf( @@ -174,7 +174,7 @@ def test_get_3pe_protocols_no_protocols(self): self.mock_as_api.get_3pe_protocol.assert_not_called() self.assertEqual(response, {}) - def test_get_3pe_protocols_protocol_no_response(self): + def test_get_3pe_protocols_protocol_no_response(self) -> None: service = self._mkservice(False, ["my-protocol"]) self.mock_store.get_app_services.return_value = [service] self.mock_as_api.get_3pe_protocol.return_value = make_awaitable(None) @@ -186,7 +186,7 @@ def test_get_3pe_protocols_protocol_no_response(self): ) self.assertEqual(response, {}) - def test_get_3pe_protocols_select_one_protocol(self): + def test_get_3pe_protocols_select_one_protocol(self) -> None: service = self._mkservice(False, ["my-protocol"]) self.mock_store.get_app_services.return_value = [service] self.mock_as_api.get_3pe_protocol.return_value = make_awaitable( @@ -202,7 +202,7 @@ def test_get_3pe_protocols_select_one_protocol(self): response, {"my-protocol": {"x-protocol-data": 42, "instances": []}} ) - def test_get_3pe_protocols_one_protocol(self): + def test_get_3pe_protocols_one_protocol(self) -> None: service = self._mkservice(False, ["my-protocol"]) self.mock_store.get_app_services.return_value = [service] self.mock_as_api.get_3pe_protocol.return_value = make_awaitable( @@ -218,7 +218,7 @@ def test_get_3pe_protocols_one_protocol(self): response, {"my-protocol": {"x-protocol-data": 42, "instances": []}} ) - def test_get_3pe_protocols_multiple_protocol(self): + def test_get_3pe_protocols_multiple_protocol(self) -> None: service_one = self._mkservice(False, ["my-protocol"]) service_two = self._mkservice(False, ["other-protocol"]) self.mock_store.get_app_services.return_value = [service_one, service_two] @@ -237,11 +237,13 @@ def test_get_3pe_protocols_multiple_protocol(self): }, ) - def 
test_get_3pe_protocols_multiple_info(self): + def test_get_3pe_protocols_multiple_info(self) -> None: service_one = self._mkservice(False, ["my-protocol"]) service_two = self._mkservice(False, ["my-protocol"]) - async def get_3pe_protocol(service, unusedProtocol): + async def get_3pe_protocol( + service: ApplicationService, protocol: str + ) -> Optional[JsonDict]: if service == service_one: return { "x-protocol-data": 42, @@ -276,7 +278,7 @@ async def get_3pe_protocol(service, unusedProtocol): }, ) - def test_notify_interested_services_ephemeral(self): + def test_notify_interested_services_ephemeral(self) -> None: """ Test sending ephemeral events to the appservice handler are scheduled to be pushed out to interested appservices, and that the stream ID is @@ -306,7 +308,7 @@ def test_notify_interested_services_ephemeral(self): 580, ) - def test_notify_interested_services_ephemeral_out_of_order(self): + def test_notify_interested_services_ephemeral_out_of_order(self) -> None: """ Test sending out of order ephemeral events to the appservice handler are ignored. @@ -390,7 +392,7 @@ class ApplicationServicesHandlerSendEventsTestCase(unittest.HomeserverTestCase): receipts.register_servlets, ] - def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.hs = hs # Mock the ApplicationServiceScheduler's _TransactionController's send method so that # we can track any outgoing ephemeral events @@ -417,7 +419,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer): "exclusive_as_user", "password", self.exclusive_as_user_device_id ) - def _notify_interested_services(self): + def _notify_interested_services(self) -> None: # This is normally set in `notify_interested_services` but we need to call the # internal async version so the reactor gets pushed to completion. self.hs.get_application_service_handler().current_max += 1 @@ -443,7 +445,7 @@ def _notify_interested_services(self): ) def test_match_interesting_room_members( self, interesting_user: str, should_notify: bool - ): + ) -> None: """ Test to make sure that a interesting user (local or remote) in the room is notified as expected when someone else in the room sends a message. @@ -512,7 +514,9 @@ def test_match_interesting_room_members( else: self.send_mock.assert_not_called() - def test_application_services_receive_events_sent_by_interesting_local_user(self): + def test_application_services_receive_events_sent_by_interesting_local_user( + self, + ) -> None: """ Test to make sure that a messages sent from a local user can be interesting and picked up by the appservice. @@ -568,7 +572,7 @@ def test_application_services_receive_events_sent_by_interesting_local_user(self self.assertEqual(events[0]["type"], "m.room.message") self.assertEqual(events[0]["sender"], alice) - def test_sending_read_receipt_batches_to_application_services(self): + def test_sending_read_receipt_batches_to_application_services(self) -> None: """Tests that a large batch of read receipts are sent correctly to interested application services. 
""" @@ -644,7 +648,7 @@ def test_sending_read_receipt_batches_to_application_services(self): @unittest.override_config( {"experimental_features": {"msc2409_to_device_messages_enabled": True}} ) - def test_application_services_receive_local_to_device(self): + def test_application_services_receive_local_to_device(self) -> None: """ Test that when a user sends a to-device message to another user that is an application service's user namespace, the @@ -722,7 +726,7 @@ def test_application_services_receive_local_to_device(self): @unittest.override_config( {"experimental_features": {"msc2409_to_device_messages_enabled": True}} ) - def test_application_services_receive_bursts_of_to_device(self): + def test_application_services_receive_bursts_of_to_device(self) -> None: """ Test that when a user sends >100 to-device messages at once, any interested AS's will receive them in separate transactions. @@ -913,7 +917,7 @@ def test_application_service_receives_device_list_updates( experimental_feature_enabled: bool, as_supports_txn_extensions: bool, as_should_receive_device_list_updates: bool, - ): + ) -> None: """ Tests that an application service receives notice of changed device lists for a user, when a user changes their device lists. @@ -1070,7 +1074,7 @@ def _set_up_devices_and_a_room(self) -> str: and a room for the users to talk in. """ - async def preparation(): + async def preparation() -> None: await self._add_otks_for_device(self._sender_user, self._sender_device, 42) await self._add_fallback_key_for_device( self._sender_user, self._sender_device, used=True diff --git a/tests/handlers/test_cas.py b/tests/handlers/test_cas.py index 2b21547d0f55..2733719d8270 100644 --- a/tests/handlers/test_cas.py +++ b/tests/handlers/test_cas.py @@ -199,7 +199,7 @@ def test_required_attributes(self) -> None: ) -def _mock_request(): +def _mock_request() -> Mock: """Returns a mock which will stand in as a SynapseRequest""" mock = Mock( spec=[ diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py index 3b72c4c9d019..90aec484c48c 100644 --- a/tests/handlers/test_directory.py +++ b/tests/handlers/test_directory.py @@ -20,6 +20,7 @@ import synapse.api.errors import synapse.rest.admin from synapse.api.constants import EventTypes +from synapse.events import EventBase from synapse.rest.client import directory, login, room from synapse.server import HomeServer from synapse.types import JsonDict, RoomAlias, create_requester @@ -201,7 +202,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.test_user_tok = self.login("user", "pass") self.helper.join(room=self.room_id, user=self.test_user, tok=self.test_user_tok) - def _create_alias(self, user) -> None: + def _create_alias(self, user: str) -> None: # Create a new alias to this room. 
self.get_success( self.store.create_room_alias_association( @@ -324,7 +325,7 @@ def _add_alias(self, alias: str) -> RoomAlias: ) return room_alias - def _set_canonical_alias(self, content) -> None: + def _set_canonical_alias(self, content: JsonDict) -> None: """Configure the canonical alias state on the room.""" self.helper.send_state( self.room_id, @@ -333,13 +334,15 @@ def _set_canonical_alias(self, content) -> None: tok=self.admin_user_tok, ) - def _get_canonical_alias(self): + def _get_canonical_alias(self) -> EventBase: """Get the canonical alias state of the room.""" - return self.get_success( + result = self.get_success( self._storage_controllers.state.get_current_state_event( self.room_id, EventTypes.CanonicalAlias, "" ) ) + assert result is not None + return result def test_remove_alias(self) -> None: """Removing an alias that is the canonical alias should remove it there too.""" @@ -349,8 +352,8 @@ def test_remove_alias(self) -> None: ) data = self._get_canonical_alias() - self.assertEqual(data["content"]["alias"], self.test_alias) - self.assertEqual(data["content"]["alt_aliases"], [self.test_alias]) + self.assertEqual(data.content["alias"], self.test_alias) + self.assertEqual(data.content["alt_aliases"], [self.test_alias]) # Finally, delete the alias. self.get_success( @@ -360,8 +363,8 @@ def test_remove_alias(self) -> None: ) data = self._get_canonical_alias() - self.assertNotIn("alias", data["content"]) - self.assertNotIn("alt_aliases", data["content"]) + self.assertNotIn("alias", data.content) + self.assertNotIn("alt_aliases", data.content) def test_remove_other_alias(self) -> None: """Removing an alias listed as in alt_aliases should remove it there too.""" @@ -378,9 +381,9 @@ def test_remove_other_alias(self) -> None: ) data = self._get_canonical_alias() - self.assertEqual(data["content"]["alias"], self.test_alias) + self.assertEqual(data.content["alias"], self.test_alias) self.assertEqual( - data["content"]["alt_aliases"], [self.test_alias, other_test_alias] + data.content["alt_aliases"], [self.test_alias, other_test_alias] ) # Delete the second alias. 
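The test_directory.py hunks above pair two changes: `_get_canonical_alias` now narrows its `Optional[EventBase]` result with an `assert` before returning, and its callers switch from `data["content"]` to `data.content`, because `EventBase` is an object rather than a mapping. A minimal, self-contained sketch of that narrowing pattern follows; the stand-in `EventBase` and helper names here are illustrative, not Synapse's real classes.

from typing import Dict, Optional


class EventBase:
    """Illustrative stand-in for synapse.events.EventBase."""

    def __init__(self, content: Dict[str, str]) -> None:
        self.content = content


def get_current_state_event() -> Optional[EventBase]:
    # The real storage call can return None when the state event is absent.
    return EventBase({"alias": "#something:test"})


def get_canonical_alias() -> EventBase:
    result = get_current_state_event()
    # The assert narrows Optional[EventBase] to EventBase for mypy, so the
    # annotated return type holds without a cast.
    assert result is not None
    return result


# Callers use attribute access, not item access: EventBase is not a dict.
assert get_canonical_alias().content["alias"] == "#something:test"

Under `disallow_untyped_defs`, annotating the helper's return type is what forces both the assert and the attribute-access change at the call sites.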
@@ -391,8 +394,8 @@ def test_remove_other_alias(self) -> None: ) data = self._get_canonical_alias() - self.assertEqual(data["content"]["alias"], self.test_alias) - self.assertEqual(data["content"]["alt_aliases"], [self.test_alias]) + self.assertEqual(data.content["alias"], self.test_alias) + self.assertEqual(data.content["alt_aliases"], [self.test_alias]) class TestCreateAliasACL(unittest.HomeserverTestCase): diff --git a/tests/handlers/test_e2e_room_keys.py b/tests/handlers/test_e2e_room_keys.py index 9b7e7a8e9aff..6c0b30de9ed7 100644 --- a/tests/handlers/test_e2e_room_keys.py +++ b/tests/handlers/test_e2e_room_keys.py @@ -17,7 +17,11 @@ import copy from unittest import mock +from twisted.test.proto_helpers import MemoryReactor + from synapse.api.errors import SynapseError +from synapse.server import HomeServer +from synapse.util import Clock from tests import unittest @@ -39,14 +43,14 @@ class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase): - def make_homeserver(self, reactor, clock): + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: return self.setup_test_homeserver(replication_layer=mock.Mock()) - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.handler = hs.get_e2e_room_keys_handler() self.local_user = "@boris:" + hs.hostname - def test_get_missing_current_version_info(self): + def test_get_missing_current_version_info(self) -> None: """Check that we get a 404 if we ask for info about the current version if there is no version. """ @@ -56,7 +60,7 @@ def test_get_missing_current_version_info(self): res = e.value.code self.assertEqual(res, 404) - def test_get_missing_version_info(self): + def test_get_missing_version_info(self) -> None: """Check that we get a 404 if we ask for info about a specific version if it doesn't exist. """ @@ -67,9 +71,9 @@ def test_get_missing_version_info(self): res = e.value.code self.assertEqual(res, 404) - def test_create_version(self): + def test_create_version(self) -> None: """Check that we can create and then retrieve versions.""" - res = self.get_success( + version = self.get_success( self.handler.create_version( self.local_user, { @@ -78,7 +82,7 @@ def test_create_version(self): }, ) ) - self.assertEqual(res, "1") + self.assertEqual(version, "1") # check we can retrieve it as the current version res = self.get_success(self.handler.get_version_info(self.local_user)) @@ -110,7 +114,7 @@ def test_create_version(self): ) # upload a new one... 
- res = self.get_success( + version = self.get_success( self.handler.create_version( self.local_user, { @@ -119,7 +123,7 @@ def test_create_version(self): }, ) ) - self.assertEqual(res, "2") + self.assertEqual(version, "2") # check we can retrieve it as the current version res = self.get_success(self.handler.get_version_info(self.local_user)) @@ -134,7 +138,7 @@ def test_create_version(self): }, ) - def test_update_version(self): + def test_update_version(self) -> None: """Check that we can update versions.""" version = self.get_success( self.handler.create_version( @@ -173,7 +177,7 @@ def test_update_version(self): }, ) - def test_update_missing_version(self): + def test_update_missing_version(self) -> None: """Check that we get a 404 on updating nonexistent versions""" e = self.get_failure( self.handler.update_version( @@ -190,7 +194,7 @@ def test_update_missing_version(self): res = e.value.code self.assertEqual(res, 404) - def test_update_omitted_version(self): + def test_update_omitted_version(self) -> None: """Check that the update succeeds if the version is missing from the body""" version = self.get_success( self.handler.create_version( @@ -227,7 +231,7 @@ def test_update_omitted_version(self): }, ) - def test_update_bad_version(self): + def test_update_bad_version(self) -> None: """Check that we get a 400 if the version in the body doesn't match""" version = self.get_success( self.handler.create_version( @@ -255,7 +259,7 @@ def test_update_bad_version(self): res = e.value.code self.assertEqual(res, 400) - def test_delete_missing_version(self): + def test_delete_missing_version(self) -> None: """Check that we get a 404 on deleting nonexistent versions""" e = self.get_failure( self.handler.delete_version(self.local_user, "1"), SynapseError @@ -263,15 +267,15 @@ def test_delete_missing_version(self): res = e.value.code self.assertEqual(res, 404) - def test_delete_missing_current_version(self): + def test_delete_missing_current_version(self) -> None: """Check that we get a 404 on deleting nonexistent current version""" e = self.get_failure(self.handler.delete_version(self.local_user), SynapseError) res = e.value.code self.assertEqual(res, 404) - def test_delete_version(self): + def test_delete_version(self) -> None: """Check that we can create and then delete versions.""" - res = self.get_success( + version = self.get_success( self.handler.create_version( self.local_user, { @@ -280,7 +284,7 @@ def test_delete_version(self): }, ) ) - self.assertEqual(res, "1") + self.assertEqual(version, "1") # check we can delete it self.get_success(self.handler.delete_version(self.local_user, "1")) @@ -292,7 +296,7 @@ def test_delete_version(self): res = e.value.code self.assertEqual(res, 404) - def test_get_missing_backup(self): + def test_get_missing_backup(self) -> None: """Check that we get a 404 on querying missing backup""" e = self.get_failure( self.handler.get_room_keys(self.local_user, "bogus_version"), SynapseError @@ -300,7 +304,7 @@ def test_get_missing_backup(self): res = e.value.code self.assertEqual(res, 404) - def test_get_missing_room_keys(self): + def test_get_missing_room_keys(self) -> None: """Check we get an empty response from an empty backup""" version = self.get_success( self.handler.create_version( @@ -319,7 +323,7 @@ def test_get_missing_room_keys(self): # TODO: test the locking semantics when uploading room_keys, # although this is probably best done in sytest - def test_upload_room_keys_no_versions(self): + def test_upload_room_keys_no_versions(self) -> None: """Check that 
we get a 404 on uploading keys when no versions are defined""" e = self.get_failure( self.handler.upload_room_keys(self.local_user, "no_version", room_keys), @@ -328,7 +332,7 @@ def test_upload_room_keys_no_versions(self): res = e.value.code self.assertEqual(res, 404) - def test_upload_room_keys_bogus_version(self): + def test_upload_room_keys_bogus_version(self) -> None: """Check that we get a 404 on uploading keys when an nonexistent version is specified """ @@ -350,7 +354,7 @@ def test_upload_room_keys_bogus_version(self): res = e.value.code self.assertEqual(res, 404) - def test_upload_room_keys_wrong_version(self): + def test_upload_room_keys_wrong_version(self) -> None: """Check that we get a 403 on uploading keys for an old version""" version = self.get_success( self.handler.create_version( @@ -380,7 +384,7 @@ def test_upload_room_keys_wrong_version(self): res = e.value.code self.assertEqual(res, 403) - def test_upload_room_keys_insert(self): + def test_upload_room_keys_insert(self) -> None: """Check that we can insert and retrieve keys for a session""" version = self.get_success( self.handler.create_version( @@ -416,7 +420,7 @@ def test_upload_room_keys_insert(self): ) self.assertDictEqual(res, room_keys) - def test_upload_room_keys_merge(self): + def test_upload_room_keys_merge(self) -> None: """Check that we can upload a new room_key for an existing session and have it correctly merged""" version = self.get_success( @@ -449,9 +453,11 @@ def test_upload_room_keys_merge(self): self.handler.upload_room_keys(self.local_user, version, new_room_keys) ) - res = self.get_success(self.handler.get_room_keys(self.local_user, version)) + res_keys = self.get_success( + self.handler.get_room_keys(self.local_user, version) + ) self.assertEqual( - res["rooms"]["!abc:matrix.org"]["sessions"]["c0ff33"]["session_data"], + res_keys["rooms"]["!abc:matrix.org"]["sessions"]["c0ff33"]["session_data"], "SSBBTSBBIEZJU0gK", ) @@ -465,9 +471,12 @@ def test_upload_room_keys_merge(self): self.handler.upload_room_keys(self.local_user, version, new_room_keys) ) - res = self.get_success(self.handler.get_room_keys(self.local_user, version)) + res_keys = self.get_success( + self.handler.get_room_keys(self.local_user, version) + ) self.assertEqual( - res["rooms"]["!abc:matrix.org"]["sessions"]["c0ff33"]["session_data"], "new" + res_keys["rooms"]["!abc:matrix.org"]["sessions"]["c0ff33"]["session_data"], + "new", ) # the etag should NOT be equal now, since the key changed @@ -483,9 +492,12 @@ def test_upload_room_keys_merge(self): self.handler.upload_room_keys(self.local_user, version, new_room_keys) ) - res = self.get_success(self.handler.get_room_keys(self.local_user, version)) + res_keys = self.get_success( + self.handler.get_room_keys(self.local_user, version) + ) self.assertEqual( - res["rooms"]["!abc:matrix.org"]["sessions"]["c0ff33"]["session_data"], "new" + res_keys["rooms"]["!abc:matrix.org"]["sessions"]["c0ff33"]["session_data"], + "new", ) # the etag should be the same since the session did not change @@ -494,7 +506,7 @@ def test_upload_room_keys_merge(self): # TODO: check edge cases as well as the common variations here - def test_delete_room_keys(self): + def test_delete_room_keys(self) -> None: """Check that we can insert and delete keys for a session""" version = self.get_success( self.handler.create_version( diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py index d00c69c22917..cedbb9fafcfa 100644 --- a/tests/handlers/test_federation.py +++ 
b/tests/handlers/test_federation.py @@ -439,7 +439,7 @@ def test_invite_by_user_ratelimit(self) -> None: user_id = self.register_user("kermit", "test") tok = self.login("kermit", "test") - def create_invite(): + def create_invite() -> EventBase: room_id = self.helper.create_room_as(room_creator=user_id, tok=tok) room_version = self.get_success(self.store.get_room_version(room_id)) return event_from_pdu_json( diff --git a/tests/handlers/test_federation_event.py b/tests/handlers/test_federation_event.py index e448cb1901e0..70ea4d15d4d6 100644 --- a/tests/handlers/test_federation_event.py +++ b/tests/handlers/test_federation_event.py @@ -14,6 +14,8 @@ from typing import Optional from unittest import mock +from twisted.test.proto_helpers import MemoryReactor + from synapse.api.errors import AuthError, StoreError from synapse.api.room_versions import RoomVersion from synapse.event_auth import ( @@ -26,8 +28,10 @@ from synapse.logging.context import LoggingContext from synapse.rest import admin from synapse.rest.client import login, room +from synapse.server import HomeServer from synapse.state.v2 import _mainline_sort, _reverse_topological_power_sort from synapse.types import JsonDict +from synapse.util import Clock from tests import unittest from tests.test_utils import event_injection, make_awaitable @@ -40,7 +44,7 @@ class FederationEventHandlerTests(unittest.FederatingHomeserverTestCase): room.register_servlets, ] - def make_homeserver(self, reactor, clock): + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: # mock out the federation transport client self.mock_federation_transport_client = mock.Mock( spec=["get_room_state_ids", "get_room_state", "get_event", "backfill"] @@ -165,7 +169,9 @@ def _test_process_pulled_event_with_missing_state( ) else: - async def get_event(destination: str, event_id: str, timeout=None): + async def get_event( + destination: str, event_id: str, timeout: Optional[int] = None + ) -> JsonDict: self.assertEqual(destination, self.OTHER_SERVER_NAME) self.assertEqual(event_id, prev_event.event_id) return {"pdus": [prev_event.get_pdu_json()]} diff --git a/tests/handlers/test_message.py b/tests/handlers/test_message.py index 99384837d05c..c4727ab917fd 100644 --- a/tests/handlers/test_message.py +++ b/tests/handlers/test_message.py @@ -14,12 +14,16 @@ import logging from typing import Tuple +from twisted.test.proto_helpers import MemoryReactor + from synapse.api.constants import EventTypes from synapse.events import EventBase from synapse.events.snapshot import EventContext from synapse.rest import admin from synapse.rest.client import login, room +from synapse.server import HomeServer from synapse.types import create_requester +from synapse.util import Clock from synapse.util.stringutils import random_string from tests import unittest @@ -35,7 +39,7 @@ class EventCreationTestCase(unittest.HomeserverTestCase): room.register_servlets, ] - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.handler = self.hs.get_event_creation_handler() self._persist_event_storage_controller = ( self.hs.get_storage_controllers().persistence @@ -94,7 +98,7 @@ def _create_duplicate_event(self, txn_id: str) -> Tuple[EventBase, EventContext] ) ) - def test_duplicated_txn_id(self): + def test_duplicated_txn_id(self) -> None: """Test that attempting to handle/persist an event with a transaction ID that has already been persisted correctly returns the old event and does *not* produce duplicate 
messages. @@ -161,7 +165,7 @@ def test_duplicated_txn_id(self): # rather than the new one. self.assertEqual(ret_event1.event_id, ret_event4.event_id) - def test_duplicated_txn_id_one_call(self): + def test_duplicated_txn_id_one_call(self) -> None: """Test that we correctly handle duplicates that we try and persist at the same time. """ @@ -185,7 +189,9 @@ def test_duplicated_txn_id_one_call(self): self.assertEqual(len(events), 2) self.assertEqual(events[0].event_id, events[1].event_id) - def test_when_empty_prev_events_allowed_create_event_with_empty_prev_events(self): + def test_when_empty_prev_events_allowed_create_event_with_empty_prev_events( + self, + ) -> None: """When we set allow_no_prev_events=True, should be able to create a event without any prev_events (only auth_events). """ @@ -214,7 +220,7 @@ def test_when_empty_prev_events_allowed_create_event_with_empty_prev_events(self def test_when_empty_prev_events_not_allowed_reject_event_with_empty_prev_events( self, - ): + ) -> None: """When we set allow_no_prev_events=False, shouldn't be able to create a event without any prev_events even if it has auth_events. Expect an exception to be raised. @@ -245,7 +251,7 @@ def test_when_empty_prev_events_not_allowed_reject_event_with_empty_prev_events( def test_when_empty_prev_events_allowed_reject_event_with_empty_prev_events_and_auth_events( self, - ): + ) -> None: """When we set allow_no_prev_events=True, should be able to create a event without any prev_events or auth_events. Expect an exception to be raised. @@ -277,12 +283,12 @@ class ServerAclValidationTestCase(unittest.HomeserverTestCase): room.register_servlets, ] - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.user_id = self.register_user("tester", "foobar") self.access_token = self.login("tester", "foobar") self.room_id = self.helper.create_room_as(self.user_id, tok=self.access_token) - def test_allow_server_acl(self): + def test_allow_server_acl(self) -> None: """Test that sending an ACL that blocks everyone but ourselves works.""" self.helper.send_state( @@ -293,7 +299,7 @@ def test_allow_server_acl(self): expect_code=200, ) - def test_deny_server_acl_block_outselves(self): + def test_deny_server_acl_block_outselves(self) -> None: """Test that sending an ACL that blocks ourselves does not work.""" self.helper.send_state( self.room_id, @@ -303,7 +309,7 @@ def test_deny_server_acl_block_outselves(self): expect_code=400, ) - def test_deny_redact_server_acl(self): + def test_deny_redact_server_acl(self) -> None: """Test that attempting to redact an ACL is blocked.""" body = self.helper.send_state( diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py index 5955410524c9..49a1842b5ced 100644 --- a/tests/handlers/test_oidc.py +++ b/tests/handlers/test_oidc.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import os -from typing import Any, Dict, Tuple +from typing import Any, Awaitable, ContextManager, Dict, Optional, Tuple from unittest.mock import ANY, Mock, patch from urllib.parse import parse_qs, urlparse @@ -23,7 +23,7 @@ from synapse.handlers.sso import MappingException from synapse.http.site import SynapseRequest from synapse.server import HomeServer -from synapse.types import UserID +from synapse.types import JsonDict, UserID from synapse.util import Clock from synapse.util.macaroons import get_value_from_macaroon from synapse.util.stringutils import random_string @@ -34,6 +34,10 @@ try: import authlib # noqa: F401 + from authlib.oidc.core import UserInfo + from authlib.oidc.discovery import OpenIDProviderMetadata + + from synapse.handlers.oidc import Token, UserAttributeDict HAS_OIDC = True except ImportError: @@ -70,29 +74,37 @@ class TestMappingProvider: @staticmethod - def parse_config(config): - return + def parse_config(config: JsonDict) -> None: + return None - def __init__(self, config): + def __init__(self, config: None): pass - def get_remote_user_id(self, userinfo): + def get_remote_user_id(self, userinfo: "UserInfo") -> str: return userinfo["sub"] - async def map_user_attributes(self, userinfo, token): - return {"localpart": userinfo["username"], "display_name": None} + async def map_user_attributes( + self, userinfo: "UserInfo", token: "Token" + ) -> "UserAttributeDict": + # This is testing not providing the full map. + return {"localpart": userinfo["username"], "display_name": None} # type: ignore[typeddict-item] # Do not include get_extra_attributes to test backwards compatibility paths. class TestMappingProviderExtra(TestMappingProvider): - async def get_extra_attributes(self, userinfo, token): + async def get_extra_attributes( + self, userinfo: "UserInfo", token: "Token" + ) -> JsonDict: return {"phone": userinfo["phone"]} class TestMappingProviderFailures(TestMappingProvider): - async def map_user_attributes(self, userinfo, token, failures): - return { + # Superclass is testing the legacy interface for map_user_attributes. 
+ async def map_user_attributes( # type: ignore[override] + self, userinfo: "UserInfo", token: "Token", failures: int + ) -> "UserAttributeDict": + return { # type: ignore[typeddict-item] "localpart": userinfo["username"] + (str(failures) if failures else ""), "display_name": None, } @@ -161,13 +173,13 @@ def tearDown(self) -> None: self.hs_patcher.stop() return super().tearDown() - def reset_mocks(self): + def reset_mocks(self) -> None: """Reset all the Mocks.""" self.fake_server.reset_mocks() self.render_error.reset_mock() self.complete_sso_login.reset_mock() - def metadata_edit(self, values): + def metadata_edit(self, values: dict) -> ContextManager[Mock]: """Modify the result that will be returned by the well-known query""" metadata = self.fake_server.get_metadata() @@ -196,7 +208,9 @@ def start_authorization( session = self._generate_oidc_session_token(state, nonce, client_redirect_url) return _build_callback_request(code, state, session), grant - def assertRenderedError(self, error, error_description=None): + def assertRenderedError( + self, error: str, error_description: Optional[str] = None + ) -> Tuple[Any, ...]: self.render_error.assert_called_once() args = self.render_error.call_args[0] self.assertEqual(args[1], error) @@ -273,8 +287,8 @@ def test_validate_config(self) -> None: """Provider metadatas are extensively validated.""" h = self.provider - def force_load_metadata(): - async def force_load(): + def force_load_metadata() -> Awaitable[None]: + async def force_load() -> "OpenIDProviderMetadata": return await h.load_metadata(force=True) return get_awaitable_result(force_load()) @@ -1198,7 +1212,7 @@ def _build_callback_request( state: str, session: str, ip_address: str = "10.0.0.1", -): +) -> Mock: """Builds a fake SynapseRequest to mock the browser callback Returns a Mock object which looks like the SynapseRequest we get from a browser diff --git a/tests/handlers/test_password_providers.py b/tests/handlers/test_password_providers.py index 75934b1707f4..0916de64f548 100644 --- a/tests/handlers/test_password_providers.py +++ b/tests/handlers/test_password_providers.py @@ -15,12 +15,13 @@ """Tests for the password_auth_provider interface""" from http import HTTPStatus -from typing import Any, Type, Union +from typing import Any, Dict, List, Optional, Type, Union from unittest.mock import Mock import synapse from synapse.api.constants import LoginType from synapse.api.errors import Codes +from synapse.handlers.account import AccountHandler from synapse.module_api import ModuleApi from synapse.rest.client import account, devices, login, logout, register from synapse.types import JsonDict, UserID @@ -44,13 +45,13 @@ class LegacyPasswordOnlyAuthProvider: """A legacy password_provider which only implements `check_password`.""" @staticmethod - def parse_config(self): + def parse_config(config: JsonDict) -> None: pass - def __init__(self, config, account_handler): + def __init__(self, config: None, account_handler: AccountHandler): pass - def check_password(self, *args): + def check_password(self, *args: str) -> Mock: return mock_password_provider.check_password(*args) @@ -58,16 +59,16 @@ class LegacyCustomAuthProvider: """A legacy password_provider which implements a custom login type.""" @staticmethod - def parse_config(self): + def parse_config(config: JsonDict) -> None: pass - def __init__(self, config, account_handler): + def __init__(self, config: None, account_handler: AccountHandler): pass - def get_supported_login_types(self): + def get_supported_login_types(self) -> 
Dict[str, List[str]]: return {"test.login_type": ["test_field"]} - def check_auth(self, *args): + def check_auth(self, *args: str) -> Mock: return mock_password_provider.check_auth(*args) @@ -75,15 +76,15 @@ class CustomAuthProvider: """A module which registers password_auth_provider callbacks for a custom login type.""" @staticmethod - def parse_config(self): + def parse_config(config: JsonDict) -> None: pass - def __init__(self, config, api: ModuleApi): + def __init__(self, config: None, api: ModuleApi): api.register_password_auth_provider_callbacks( auth_checkers={("test.login_type", ("test_field",)): self.check_auth} ) - def check_auth(self, *args): + def check_auth(self, *args: Any) -> Mock: return mock_password_provider.check_auth(*args) @@ -92,16 +93,16 @@ class LegacyPasswordCustomAuthProvider: as a custom type.""" @staticmethod - def parse_config(self): + def parse_config(config: JsonDict) -> None: pass - def __init__(self, config, account_handler): + def __init__(self, config: None, account_handler: AccountHandler): pass - def get_supported_login_types(self): + def get_supported_login_types(self) -> Dict[str, List[str]]: return {"m.login.password": ["password"], "test.login_type": ["test_field"]} - def check_auth(self, *args): + def check_auth(self, *args: str) -> Mock: return mock_password_provider.check_auth(*args) @@ -110,10 +111,10 @@ class PasswordCustomAuthProvider: as well as a password login""" @staticmethod - def parse_config(self): + def parse_config(config: JsonDict) -> None: pass - def __init__(self, config, api: ModuleApi): + def __init__(self, config: None, api: ModuleApi): api.register_password_auth_provider_callbacks( auth_checkers={ ("test.login_type", ("test_field",)): self.check_auth, @@ -121,10 +122,10 @@ def __init__(self, config, api: ModuleApi): } ) - def check_auth(self, *args): + def check_auth(self, *args: Any) -> Mock: return mock_password_provider.check_auth(*args) - def check_pass(self, *args): + def check_pass(self, *args: str) -> Mock: return mock_password_provider.check_password(*args) @@ -161,16 +162,16 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): CALLBACK_USERNAME = "get_username_for_registration" CALLBACK_DISPLAYNAME = "get_displayname_for_registration" - def setUp(self): + def setUp(self) -> None: # we use a global mock device, so make sure we are starting with a clean slate mock_password_provider.reset_mock() super().setUp() @override_config(legacy_providers_config(LegacyPasswordOnlyAuthProvider)) - def test_password_only_auth_progiver_login_legacy(self): + def test_password_only_auth_progiver_login_legacy(self) -> None: self.password_only_auth_provider_login_test_body() - def password_only_auth_provider_login_test_body(self): + def password_only_auth_provider_login_test_body(self) -> None: # login flows should only have m.login.password flows = self._get_login_flows() self.assertEqual(flows, [{"type": "m.login.password"}] + ADDITIONAL_LOGIN_FLOWS) @@ -201,10 +202,10 @@ def password_only_auth_provider_login_test_body(self): ) @override_config(legacy_providers_config(LegacyPasswordOnlyAuthProvider)) - def test_password_only_auth_provider_ui_auth_legacy(self): + def test_password_only_auth_provider_ui_auth_legacy(self) -> None: self.password_only_auth_provider_ui_auth_test_body() - def password_only_auth_provider_ui_auth_test_body(self): + def password_only_auth_provider_ui_auth_test_body(self) -> None: """UI Auth should delegate correctly to the password provider""" # create the user, otherwise access doesn't work @@ -238,10 
+239,10 @@ def password_only_auth_provider_ui_auth_test_body(self): mock_password_provider.check_password.assert_called_once_with("@u:test", "p") @override_config(legacy_providers_config(LegacyPasswordOnlyAuthProvider)) - def test_local_user_fallback_login_legacy(self): + def test_local_user_fallback_login_legacy(self) -> None: self.local_user_fallback_login_test_body() - def local_user_fallback_login_test_body(self): + def local_user_fallback_login_test_body(self) -> None: """rejected login should fall back to local db""" self.register_user("localuser", "localpass") @@ -255,10 +256,10 @@ def local_user_fallback_login_test_body(self): self.assertEqual("@localuser:test", channel.json_body["user_id"]) @override_config(legacy_providers_config(LegacyPasswordOnlyAuthProvider)) - def test_local_user_fallback_ui_auth_legacy(self): + def test_local_user_fallback_ui_auth_legacy(self) -> None: self.local_user_fallback_ui_auth_test_body() - def local_user_fallback_ui_auth_test_body(self): + def local_user_fallback_ui_auth_test_body(self) -> None: """rejected login should fall back to local db""" self.register_user("localuser", "localpass") @@ -298,10 +299,10 @@ def local_user_fallback_ui_auth_test_body(self): "password_config": {"localdb_enabled": False}, } ) - def test_no_local_user_fallback_login_legacy(self): + def test_no_local_user_fallback_login_legacy(self) -> None: self.no_local_user_fallback_login_test_body() - def no_local_user_fallback_login_test_body(self): + def no_local_user_fallback_login_test_body(self) -> None: """localdb_enabled can block login with the local password""" self.register_user("localuser", "localpass") @@ -320,10 +321,10 @@ def no_local_user_fallback_login_test_body(self): "password_config": {"localdb_enabled": False}, } ) - def test_no_local_user_fallback_ui_auth_legacy(self): + def test_no_local_user_fallback_ui_auth_legacy(self) -> None: self.no_local_user_fallback_ui_auth_test_body() - def no_local_user_fallback_ui_auth_test_body(self): + def no_local_user_fallback_ui_auth_test_body(self) -> None: """localdb_enabled can block ui auth with the local password""" self.register_user("localuser", "localpass") @@ -361,10 +362,10 @@ def no_local_user_fallback_ui_auth_test_body(self): "password_config": {"enabled": False}, } ) - def test_password_auth_disabled_legacy(self): + def test_password_auth_disabled_legacy(self) -> None: self.password_auth_disabled_test_body() - def password_auth_disabled_test_body(self): + def password_auth_disabled_test_body(self) -> None: """password auth doesn't work if it's disabled across the board""" # login flows should be empty flows = self._get_login_flows() @@ -376,14 +377,14 @@ def password_auth_disabled_test_body(self): mock_password_provider.check_password.assert_not_called() @override_config(legacy_providers_config(LegacyCustomAuthProvider)) - def test_custom_auth_provider_login_legacy(self): + def test_custom_auth_provider_login_legacy(self) -> None: self.custom_auth_provider_login_test_body() @override_config(providers_config(CustomAuthProvider)) - def test_custom_auth_provider_login(self): + def test_custom_auth_provider_login(self) -> None: self.custom_auth_provider_login_test_body() - def custom_auth_provider_login_test_body(self): + def custom_auth_provider_login_test_body(self) -> None: # login flows should have the custom flow and m.login.password, since we # haven't disabled local password lookup. 
# (password must come first, because reasons) @@ -424,14 +425,14 @@ def custom_auth_provider_login_test_body(self): ) @override_config(legacy_providers_config(LegacyCustomAuthProvider)) - def test_custom_auth_provider_ui_auth_legacy(self): + def test_custom_auth_provider_ui_auth_legacy(self) -> None: self.custom_auth_provider_ui_auth_test_body() @override_config(providers_config(CustomAuthProvider)) - def test_custom_auth_provider_ui_auth(self): + def test_custom_auth_provider_ui_auth(self) -> None: self.custom_auth_provider_ui_auth_test_body() - def custom_auth_provider_ui_auth_test_body(self): + def custom_auth_provider_ui_auth_test_body(self) -> None: # register the user and log in twice, to get two devices self.register_user("localuser", "localpass") tok1 = self.login("localuser", "localpass") @@ -486,14 +487,14 @@ def custom_auth_provider_ui_auth_test_body(self): ) @override_config(legacy_providers_config(LegacyCustomAuthProvider)) - def test_custom_auth_provider_callback_legacy(self): + def test_custom_auth_provider_callback_legacy(self) -> None: self.custom_auth_provider_callback_test_body() @override_config(providers_config(CustomAuthProvider)) - def test_custom_auth_provider_callback(self): + def test_custom_auth_provider_callback(self) -> None: self.custom_auth_provider_callback_test_body() - def custom_auth_provider_callback_test_body(self): + def custom_auth_provider_callback_test_body(self) -> None: callback = Mock(return_value=make_awaitable(None)) mock_password_provider.check_auth.return_value = make_awaitable( @@ -521,16 +522,16 @@ def custom_auth_provider_callback_test_body(self): "password_config": {"enabled": False}, } ) - def test_custom_auth_password_disabled_legacy(self): + def test_custom_auth_password_disabled_legacy(self) -> None: self.custom_auth_password_disabled_test_body() @override_config( {**providers_config(CustomAuthProvider), "password_config": {"enabled": False}} ) - def test_custom_auth_password_disabled(self): + def test_custom_auth_password_disabled(self) -> None: self.custom_auth_password_disabled_test_body() - def custom_auth_password_disabled_test_body(self): + def custom_auth_password_disabled_test_body(self) -> None: """Test login with a custom auth provider where password login is disabled""" self.register_user("localuser", "localpass") @@ -548,7 +549,7 @@ def custom_auth_password_disabled_test_body(self): "password_config": {"enabled": False, "localdb_enabled": False}, } ) - def test_custom_auth_password_disabled_localdb_enabled_legacy(self): + def test_custom_auth_password_disabled_localdb_enabled_legacy(self) -> None: self.custom_auth_password_disabled_localdb_enabled_test_body() @override_config( @@ -557,10 +558,10 @@ def test_custom_auth_password_disabled_localdb_enabled_legacy(self): "password_config": {"enabled": False, "localdb_enabled": False}, } ) - def test_custom_auth_password_disabled_localdb_enabled(self): + def test_custom_auth_password_disabled_localdb_enabled(self) -> None: self.custom_auth_password_disabled_localdb_enabled_test_body() - def custom_auth_password_disabled_localdb_enabled_test_body(self): + def custom_auth_password_disabled_localdb_enabled_test_body(self) -> None: """Check the localdb_enabled == enabled == False Regression test for https://github.com/matrix-org/synapse/issues/8914: check @@ -583,7 +584,7 @@ def custom_auth_password_disabled_localdb_enabled_test_body(self): "password_config": {"enabled": False}, } ) - def test_password_custom_auth_password_disabled_login_legacy(self): + def 
test_password_custom_auth_password_disabled_login_legacy(self) -> None: self.password_custom_auth_password_disabled_login_test_body() @override_config( @@ -592,10 +593,10 @@ def test_password_custom_auth_password_disabled_login_legacy(self): "password_config": {"enabled": False}, } ) - def test_password_custom_auth_password_disabled_login(self): + def test_password_custom_auth_password_disabled_login(self) -> None: self.password_custom_auth_password_disabled_login_test_body() - def password_custom_auth_password_disabled_login_test_body(self): + def password_custom_auth_password_disabled_login_test_body(self) -> None: """log in with a custom auth provider which implements password, but password login is disabled""" self.register_user("localuser", "localpass") @@ -615,7 +616,7 @@ def password_custom_auth_password_disabled_login_test_body(self): "password_config": {"enabled": False}, } ) - def test_password_custom_auth_password_disabled_ui_auth_legacy(self): + def test_password_custom_auth_password_disabled_ui_auth_legacy(self) -> None: self.password_custom_auth_password_disabled_ui_auth_test_body() @override_config( @@ -624,10 +625,10 @@ def test_password_custom_auth_password_disabled_ui_auth_legacy(self): "password_config": {"enabled": False}, } ) - def test_password_custom_auth_password_disabled_ui_auth(self): + def test_password_custom_auth_password_disabled_ui_auth(self) -> None: self.password_custom_auth_password_disabled_ui_auth_test_body() - def password_custom_auth_password_disabled_ui_auth_test_body(self): + def password_custom_auth_password_disabled_ui_auth_test_body(self) -> None: """UI Auth with a custom auth provider which implements password, but password login is disabled""" # register the user and log in twice via the test login type to get two devices, @@ -689,7 +690,7 @@ def password_custom_auth_password_disabled_ui_auth_test_body(self): "password_config": {"localdb_enabled": False}, } ) - def test_custom_auth_no_local_user_fallback_legacy(self): + def test_custom_auth_no_local_user_fallback_legacy(self) -> None: self.custom_auth_no_local_user_fallback_test_body() @override_config( @@ -698,10 +699,10 @@ def test_custom_auth_no_local_user_fallback_legacy(self): "password_config": {"localdb_enabled": False}, } ) - def test_custom_auth_no_local_user_fallback(self): + def test_custom_auth_no_local_user_fallback(self) -> None: self.custom_auth_no_local_user_fallback_test_body() - def custom_auth_no_local_user_fallback_test_body(self): + def custom_auth_no_local_user_fallback_test_body(self) -> None: """Test login with a custom auth provider where the local db is disabled""" self.register_user("localuser", "localpass") @@ -713,14 +714,16 @@ def custom_auth_no_local_user_fallback_test_body(self): channel = self._send_password_login("localuser", "localpass") self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.result) - def test_on_logged_out(self): + def test_on_logged_out(self) -> None: """Tests that the on_logged_out callback is called when the user logs out.""" self.register_user("rin", "password") tok = self.login("rin", "password") self.called = False - async def on_logged_out(user_id, device_id, access_token): + async def on_logged_out( + user_id: str, device_id: Optional[str], access_token: str + ) -> None: self.called = True on_logged_out = Mock(side_effect=on_logged_out) @@ -738,7 +741,7 @@ async def on_logged_out(user_id, device_id, access_token): on_logged_out.assert_called_once() self.assertTrue(self.called) - def test_username(self): + def 
test_username(self) -> None: """Tests that the get_username_for_registration callback can define the username of a user when registering. """ @@ -763,7 +766,7 @@ def test_username(self): mxid = channel.json_body["user_id"] self.assertEqual(UserID.from_string(mxid).localpart, username + "-foo") - def test_username_uia(self): + def test_username_uia(self) -> None: """Tests that the get_username_for_registration callback is only called at the end of the UIA flow. """ @@ -782,7 +785,7 @@ def test_username_uia(self): # Set some email configuration so the test doesn't fail because of its absence. @override_config({"email": {"notif_from": "noreply@test"}}) - def test_3pid_allowed(self): + def test_3pid_allowed(self) -> None: """Tests that an is_3pid_allowed_callbacks forbidding a 3PID makes Synapse refuse to bind the new 3PID, and that one allowing a 3PID makes Synapse accept to bind the 3PID. Also checks that the module is passed a boolean indicating whether the @@ -791,7 +794,7 @@ def test_3pid_allowed(self): self._test_3pid_allowed("rin", False) self._test_3pid_allowed("kitay", True) - def test_displayname(self): + def test_displayname(self) -> None: """Tests that the get_displayname_for_registration callback can define the display name of a user when registering. """ @@ -820,7 +823,7 @@ def test_displayname(self): self.assertEqual(display_name, username + "-foo") - def test_displayname_uia(self): + def test_displayname_uia(self) -> None: """Tests that the get_displayname_for_registration callback is only called at the end of the UIA flow. """ @@ -841,7 +844,7 @@ def test_displayname_uia(self): # Check that the callback has been called. m.assert_called_once() - def _test_3pid_allowed(self, username: str, registration: bool): + def _test_3pid_allowed(self, username: str, registration: bool) -> None: """Tests that the "is_3pid_allowed" module callback is called correctly, using either /register or /account URLs depending on the arguments. @@ -907,7 +910,7 @@ def _setup_get_name_for_registration(self, callback_name: str) -> Mock: client is trying to register. 
""" - async def callback(uia_results, params): + async def callback(uia_results: JsonDict, params: JsonDict) -> str: self.assertIn(LoginType.DUMMY, uia_results) username = params["username"] return username + "-foo" @@ -950,12 +953,13 @@ def _get_login_flows(self) -> JsonDict: def _send_password_login(self, user: str, password: str) -> FakeChannel: return self._send_login(type="m.login.password", user=user, password=password) - def _send_login(self, type, user, **params) -> FakeChannel: - params.update({"identifier": {"type": "m.id.user", "user": user}, "type": type}) + def _send_login(self, type: str, user: str, **extra_params: str) -> FakeChannel: + params = {"identifier": {"type": "m.id.user", "user": user}, "type": type} + params.update(extra_params) channel = self.make_request("POST", "/_matrix/client/r0/login", params) return channel - def _start_delete_device_session(self, access_token, device_id) -> str: + def _start_delete_device_session(self, access_token: str, device_id: str) -> str: """Make an initial delete device request, and return the UI Auth session ID""" channel = self._delete_device(access_token, device_id) self.assertEqual(channel.code, 401) diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index 584e7b89712c..19f5322317a1 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -12,12 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Optional +from typing import Optional, cast from unittest.mock import Mock, call from parameterized import parameterized from signedjson.key import generate_signing_key +from twisted.test.proto_helpers import MemoryReactor + from synapse.api.constants import EventTypes, Membership, PresenceState from synapse.api.presence import UserPresenceState from synapse.api.room_versions import KNOWN_ROOM_VERSIONS @@ -35,7 +37,9 @@ ) from synapse.rest import admin from synapse.rest.client import room -from synapse.types import UserID, get_domain_from_id +from synapse.server import HomeServer +from synapse.types import JsonDict, UserID, get_domain_from_id +from synapse.util import Clock from tests import unittest from tests.replication._base import BaseMultiWorkerStreamTestCase @@ -44,10 +48,12 @@ class PresenceUpdateTestCase(unittest.HomeserverTestCase): servlets = [admin.register_servlets] - def prepare(self, reactor, clock, homeserver): + def prepare( + self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer + ) -> None: self.store = homeserver.get_datastores().main - def test_offline_to_online(self): + def test_offline_to_online(self) -> None: wheel_timer = Mock() user_id = "@foo:bar" now = 5000000 @@ -85,7 +91,7 @@ def test_offline_to_online(self): any_order=True, ) - def test_online_to_online(self): + def test_online_to_online(self) -> None: wheel_timer = Mock() user_id = "@foo:bar" now = 5000000 @@ -128,7 +134,7 @@ def test_online_to_online(self): any_order=True, ) - def test_online_to_online_last_active_noop(self): + def test_online_to_online_last_active_noop(self) -> None: wheel_timer = Mock() user_id = "@foo:bar" now = 5000000 @@ -173,7 +179,7 @@ def test_online_to_online_last_active_noop(self): any_order=True, ) - def test_online_to_online_last_active(self): + def test_online_to_online_last_active(self) -> None: wheel_timer = Mock() user_id = "@foo:bar" now = 5000000 @@ -210,7 +216,7 @@ def test_online_to_online_last_active(self): any_order=True, ) - def test_remote_ping_timer(self): + def 
test_remote_ping_timer(self) -> None: wheel_timer = Mock() user_id = "@foo:bar" now = 5000000 @@ -244,7 +250,7 @@ def test_remote_ping_timer(self): any_order=True, ) - def test_online_to_offline(self): + def test_online_to_offline(self) -> None: wheel_timer = Mock() user_id = "@foo:bar" now = 5000000 @@ -266,7 +272,7 @@ def test_online_to_offline(self): self.assertEqual(wheel_timer.insert.call_count, 0) - def test_online_to_idle(self): + def test_online_to_idle(self) -> None: wheel_timer = Mock() user_id = "@foo:bar" now = 5000000 @@ -300,7 +306,7 @@ def test_online_to_idle(self): any_order=True, ) - def test_persisting_presence_updates(self): + def test_persisting_presence_updates(self) -> None: """Tests that the latest presence state for each user is persisted correctly""" # Create some test users and presence states for them presence_states = [] @@ -322,7 +328,7 @@ def test_persisting_presence_updates(self): self.get_success(self.store.update_presence(presence_states)) # Check that each update is present in the database - db_presence_states = self.get_success( + db_presence_states_raw = self.get_success( self.store.get_all_presence_updates( instance_name="master", last_id=0, @@ -332,7 +338,7 @@ def test_persisting_presence_updates(self): ) # Extract presence update user ID and state information into lists of tuples - db_presence_states = [(ps[0], ps[1]) for _, ps in db_presence_states[0]] + db_presence_states = [(ps[0], ps[1]) for _, ps in db_presence_states_raw[0]] presence_states_compare = [(ps.user_id, ps.state) for ps in presence_states] # Compare what we put into the storage with what we got out. @@ -343,7 +349,7 @@ def test_persisting_presence_updates(self): class PresenceTimeoutTestCase(unittest.TestCase): """Tests different timers and that the timer does not change `status_msg` of user.""" - def test_idle_timer(self): + def test_idle_timer(self) -> None: user_id = "@foo:bar" status_msg = "I'm here!" now = 5000000 @@ -363,7 +369,7 @@ def test_idle_timer(self): self.assertEqual(new_state.state, PresenceState.UNAVAILABLE) self.assertEqual(new_state.status_msg, status_msg) - def test_busy_no_idle(self): + def test_busy_no_idle(self) -> None: """ Tests that a user setting their presence to busy but idling doesn't turn their presence state into unavailable. @@ -387,7 +393,7 @@ def test_busy_no_idle(self): self.assertEqual(new_state.state, PresenceState.BUSY) self.assertEqual(new_state.status_msg, status_msg) - def test_sync_timeout(self): + def test_sync_timeout(self) -> None: user_id = "@foo:bar" status_msg = "I'm here!" now = 5000000 @@ -407,7 +413,7 @@ def test_sync_timeout(self): self.assertEqual(new_state.state, PresenceState.OFFLINE) self.assertEqual(new_state.status_msg, status_msg) - def test_sync_online(self): + def test_sync_online(self) -> None: user_id = "@foo:bar" status_msg = "I'm here!" now = 5000000 @@ -429,7 +435,7 @@ def test_sync_online(self): self.assertEqual(new_state.state, PresenceState.ONLINE) self.assertEqual(new_state.status_msg, status_msg) - def test_federation_ping(self): + def test_federation_ping(self) -> None: user_id = "@foo:bar" status_msg = "I'm here!" 
now = 5000000 @@ -448,7 +454,7 @@ def test_federation_ping(self): self.assertIsNotNone(new_state) self.assertEqual(state, new_state) - def test_no_timeout(self): + def test_no_timeout(self) -> None: user_id = "@foo:bar" now = 5000000 @@ -464,7 +470,7 @@ def test_no_timeout(self): self.assertIsNone(new_state) - def test_federation_timeout(self): + def test_federation_timeout(self) -> None: user_id = "@foo:bar" status_msg = "I'm here!" now = 5000000 @@ -487,7 +493,7 @@ def test_federation_timeout(self): self.assertEqual(new_state.state, PresenceState.OFFLINE) self.assertEqual(new_state.status_msg, status_msg) - def test_last_active(self): + def test_last_active(self) -> None: user_id = "@foo:bar" status_msg = "I'm here!" now = 5000000 @@ -508,15 +514,15 @@ def test_last_active(self): class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase): - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.presence_handler = hs.get_presence_handler() self.clock = hs.get_clock() - def test_external_process_timeout(self): + def test_external_process_timeout(self) -> None: """Test that if an external process doesn't update the records for a while we time out their syncing users presence. """ - process_id = 1 + process_id = "1" user_id = "@test:server" # Notify handler that a user is now syncing. @@ -544,7 +550,7 @@ def test_external_process_timeout(self): ) self.assertEqual(state.state, PresenceState.OFFLINE) - def test_user_goes_offline_by_timeout_status_msg_remain(self): + def test_user_goes_offline_by_timeout_status_msg_remain(self) -> None: """Test that if a user doesn't update the records for a while users presence goes `OFFLINE` because of timeout and `status_msg` remains. """ @@ -576,7 +582,7 @@ def test_user_goes_offline_by_timeout_status_msg_remain(self): self.assertEqual(state.state, PresenceState.OFFLINE) self.assertEqual(state.status_msg, status_msg) - def test_user_goes_offline_manually_with_no_status_msg(self): + def test_user_goes_offline_manually_with_no_status_msg(self) -> None: """Test that if a user change presence manually to `OFFLINE` and no status is set, that `status_msg` is `None`. """ @@ -601,7 +607,7 @@ def test_user_goes_offline_manually_with_no_status_msg(self): self.assertEqual(state.state, PresenceState.OFFLINE) self.assertEqual(state.status_msg, None) - def test_user_goes_offline_manually_with_status_msg(self): + def test_user_goes_offline_manually_with_status_msg(self) -> None: """Test that if a user change presence manually to `OFFLINE` and a status is set, that `status_msg` appears. """ @@ -618,7 +624,7 @@ def test_user_goes_offline_manually_with_status_msg(self): user_id, PresenceState.OFFLINE, "And now here." ) - def test_user_reset_online_with_no_status(self): + def test_user_reset_online_with_no_status(self) -> None: """Test that if a user set again the presence manually and no status is set, that `status_msg` is `None`. """ @@ -644,7 +650,7 @@ def test_user_reset_online_with_no_status(self): self.assertEqual(state.state, PresenceState.ONLINE) self.assertEqual(state.status_msg, None) - def test_set_presence_with_status_msg_none(self): + def test_set_presence_with_status_msg_none(self) -> None: """Test that if a user set again the presence manually and status is `None`, that `status_msg` is `None`. 
""" @@ -659,7 +665,7 @@ def test_set_presence_with_status_msg_none(self): # Mark user as online and `status_msg = None` self._set_presencestate_with_status_msg(user_id, PresenceState.ONLINE, None) - def test_set_presence_from_syncing_not_set(self): + def test_set_presence_from_syncing_not_set(self) -> None: """Test that presence is not set by syncing if affect_presence is false""" user_id = "@test:server" status_msg = "I'm here!" @@ -680,7 +686,7 @@ def test_set_presence_from_syncing_not_set(self): # and status message should still be the same self.assertEqual(state.status_msg, status_msg) - def test_set_presence_from_syncing_is_set(self): + def test_set_presence_from_syncing_is_set(self) -> None: """Test that presence is set by syncing if affect_presence is true""" user_id = "@test:server" status_msg = "I'm here!" @@ -699,7 +705,7 @@ def test_set_presence_from_syncing_is_set(self): # we should now be online self.assertEqual(state.state, PresenceState.ONLINE) - def test_set_presence_from_syncing_keeps_status(self): + def test_set_presence_from_syncing_keeps_status(self) -> None: """Test that presence set by syncing retains status message""" user_id = "@test:server" status_msg = "I'm here!" @@ -726,7 +732,9 @@ def test_set_presence_from_syncing_keeps_status(self): }, } ) - def test_set_presence_from_syncing_keeps_busy(self, test_with_workers: bool): + def test_set_presence_from_syncing_keeps_busy( + self, test_with_workers: bool + ) -> None: """Test that presence set by syncing doesn't affect busy status Args: @@ -767,7 +775,7 @@ def test_set_presence_from_syncing_keeps_busy(self, test_with_workers: bool): def _set_presencestate_with_status_msg( self, user_id: str, state: str, status_msg: Optional[str] - ): + ) -> None: """Set a PresenceState and status_msg and check the result. 
Args: @@ -790,14 +798,14 @@ def _set_presencestate_with_status_msg( class PresenceFederationQueueTestCase(unittest.HomeserverTestCase): - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.presence_handler = hs.get_presence_handler() self.clock = hs.get_clock() self.instance_name = hs.get_instance_name() self.queue = self.presence_handler.get_federation_queue() - def test_send_and_get(self): + def test_send_and_get(self) -> None: state1 = UserPresenceState.default("@user1:test") state2 = UserPresenceState.default("@user2:test") state3 = UserPresenceState.default("@user3:test") @@ -834,7 +842,7 @@ def test_send_and_get(self): self.assertFalse(limited) self.assertCountEqual(rows, []) - def test_send_and_get_split(self): + def test_send_and_get_split(self) -> None: state1 = UserPresenceState.default("@user1:test") state2 = UserPresenceState.default("@user2:test") state3 = UserPresenceState.default("@user3:test") @@ -877,7 +885,7 @@ def test_send_and_get_split(self): self.assertCountEqual(rows, expected_rows) - def test_clear_queue_all(self): + def test_clear_queue_all(self) -> None: state1 = UserPresenceState.default("@user1:test") state2 = UserPresenceState.default("@user2:test") state3 = UserPresenceState.default("@user3:test") @@ -921,7 +929,7 @@ def test_clear_queue_all(self): self.assertCountEqual(rows, expected_rows) - def test_partially_clear_queue(self): + def test_partially_clear_queue(self) -> None: state1 = UserPresenceState.default("@user1:test") state2 = UserPresenceState.default("@user2:test") state3 = UserPresenceState.default("@user3:test") @@ -982,7 +990,7 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase): servlets = [room.register_servlets] - def make_homeserver(self, reactor, clock): + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: hs = self.setup_test_homeserver( "server", federation_http_client=None, @@ -990,14 +998,14 @@ def make_homeserver(self, reactor, clock): ) return hs - def default_config(self): + def default_config(self) -> JsonDict: config = super().default_config() # Enable federation sending on the main process. config["federation_sender_instances"] = None return config - def prepare(self, reactor, clock, hs): - self.federation_sender = hs.get_federation_sender() + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.federation_sender = cast(Mock, hs.get_federation_sender()) self.event_builder_factory = hs.get_event_builder_factory() self.federation_event_handler = hs.get_federation_event_handler() self.presence_handler = hs.get_presence_handler() @@ -1013,7 +1021,7 @@ def prepare(self, reactor, clock, hs): # random key to use. self.random_signing_key = generate_signing_key("ver") - def test_remote_joins(self): + def test_remote_joins(self) -> None: # We advance time to something that isn't 0, as we use 0 as a special # value. self.reactor.advance(1000000000000) @@ -1061,7 +1069,7 @@ def test_remote_joins(self): destinations={"server3"}, states=[expected_state] ) - def test_remote_gets_presence_when_local_user_joins(self): + def test_remote_gets_presence_when_local_user_joins(self) -> None: # We advance time to something that isn't 0, as we use 0 as a special # value. 
self.reactor.advance(1000000000000) @@ -1110,7 +1118,7 @@ def test_remote_gets_presence_when_local_user_joins(self): destinations={"server2", "server3"}, states=[expected_state] ) - def _add_new_user(self, room_id, user_id): + def _add_new_user(self, room_id: str, user_id: str) -> None: """Add new user to the room by creating an event and poking the federation API.""" hostname = get_domain_from_id(user_id) diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index 675aa023acec..7c174782da36 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -332,7 +332,7 @@ def test_avatar_constraint_mime_type(self) -> None: @unittest.override_config( {"server_name": "test:8888", "allowed_avatar_mimetypes": ["image/png"]} ) - def test_avatar_constraint_on_local_server_with_port(self): + def test_avatar_constraint_on_local_server_with_port(self) -> None: """Test that avatar metadata is correctly fetched when the media is on a local server and the server has an explicit port. @@ -376,7 +376,7 @@ def test_check_avatar_on_remote_server(self, remote_server_name: str) -> None: self.get_success(self.handler.check_avatar_size_and_mime_type(remote_mxc)) ) - def _setup_local_files(self, names_and_props: Dict[str, Dict[str, Any]]): + def _setup_local_files(self, names_and_props: Dict[str, Dict[str, Any]]) -> None: """Stores metadata about files in the database. Args: diff --git a/tests/handlers/test_receipts.py b/tests/handlers/test_receipts.py index b55238650c6a..f60400ff8d0f 100644 --- a/tests/handlers/test_receipts.py +++ b/tests/handlers/test_receipts.py @@ -15,14 +15,18 @@ from copy import deepcopy from typing import List +from twisted.test.proto_helpers import MemoryReactor + from synapse.api.constants import EduTypes, ReceiptTypes +from synapse.server import HomeServer from synapse.types import JsonDict +from synapse.util import Clock from tests import unittest class ReceiptsTestCase(unittest.HomeserverTestCase): - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.event_source = hs.get_event_sources().sources.receipt def test_filters_out_private_receipt(self) -> None: diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 765df75d914f..b9332d97dcdc 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -12,8 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import Any, Collection, List, Optional, Tuple from unittest.mock import Mock +from twisted.test.proto_helpers import MemoryReactor + from synapse.api.auth import Auth from synapse.api.constants import UserTypes from synapse.api.errors import ( @@ -22,8 +25,18 @@ ResourceLimitError, SynapseError, ) +from synapse.module_api import ModuleApi +from synapse.server import HomeServer from synapse.spam_checker_api import RegistrationBehaviour -from synapse.types import RoomAlias, RoomID, UserID, create_requester +from synapse.types import ( + JsonDict, + Requester, + RoomAlias, + RoomID, + UserID, + create_requester, +) +from synapse.util import Clock from tests.test_utils import make_awaitable from tests.unittest import override_config @@ -33,94 +46,98 @@ class TestSpamChecker: - def __init__(self, config, api): + def __init__(self, config: None, api: ModuleApi): api.register_spam_checker_callbacks( check_registration_for_spam=self.check_registration_for_spam, ) @staticmethod - def parse_config(config): - return config + def parse_config(config: JsonDict) -> None: + return None async def check_registration_for_spam( self, - email_threepid, - username, - request_info, - auth_provider_id, - ): + email_threepid: Optional[dict], + username: Optional[str], + request_info: Collection[Tuple[str, str]], + auth_provider_id: Optional[str], + ) -> RegistrationBehaviour: pass class DenyAll(TestSpamChecker): async def check_registration_for_spam( self, - email_threepid, - username, - request_info, - auth_provider_id, - ): + email_threepid: Optional[dict], + username: Optional[str], + request_info: Collection[Tuple[str, str]], + auth_provider_id: Optional[str], + ) -> RegistrationBehaviour: return RegistrationBehaviour.DENY class BanAll(TestSpamChecker): async def check_registration_for_spam( self, - email_threepid, - username, - request_info, - auth_provider_id, - ): + email_threepid: Optional[dict], + username: Optional[str], + request_info: Collection[Tuple[str, str]], + auth_provider_id: Optional[str], + ) -> RegistrationBehaviour: return RegistrationBehaviour.SHADOW_BAN class BanBadIdPUser(TestSpamChecker): async def check_registration_for_spam( - self, email_threepid, username, request_info, auth_provider_id=None - ): + self, + email_threepid: Optional[dict], + username: Optional[str], + request_info: Collection[Tuple[str, str]], + auth_provider_id: Optional[str] = None, + ) -> RegistrationBehaviour: # Reject any user coming from CAS and whose username contains profanity - if auth_provider_id == "cas" and "flimflob" in username: + if auth_provider_id == "cas" and username and "flimflob" in username: return RegistrationBehaviour.DENY return RegistrationBehaviour.ALLOW class TestLegacyRegistrationSpamChecker: - def __init__(self, config, api): + def __init__(self, config: None, api: ModuleApi): pass async def check_registration_for_spam( self, - email_threepid, - username, - request_info, - ): + email_threepid: Optional[dict], + username: Optional[str], + request_info: Collection[Tuple[str, str]], + ) -> RegistrationBehaviour: pass class LegacyAllowAll(TestLegacyRegistrationSpamChecker): async def check_registration_for_spam( self, - email_threepid, - username, - request_info, - ): + email_threepid: Optional[dict], + username: Optional[str], + request_info: Collection[Tuple[str, str]], + ) -> RegistrationBehaviour: return RegistrationBehaviour.ALLOW class LegacyDenyAll(TestLegacyRegistrationSpamChecker): async def check_registration_for_spam( self, - email_threepid, - username, - request_info, 
- ): + email_threepid: Optional[dict], + username: Optional[str], + request_info: Collection[Tuple[str, str]], + ) -> RegistrationBehaviour: return RegistrationBehaviour.DENY class RegistrationTestCase(unittest.HomeserverTestCase): """Tests the RegistrationHandler.""" - def make_homeserver(self, reactor, clock): + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: hs_config = self.default_config() # some of the tests rely on us having a user consent version @@ -145,7 +162,7 @@ def make_homeserver(self, reactor, clock): return hs - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.handler = self.hs.get_registration_handler() self.store = self.hs.get_datastores().main self.lots_of_users = 100 @@ -153,7 +170,7 @@ def prepare(self, reactor, clock, hs): self.requester = create_requester("@requester:test") - def test_user_is_created_and_logged_in_if_doesnt_exist(self): + def test_user_is_created_and_logged_in_if_doesnt_exist(self) -> None: frank = UserID.from_string("@frank:test") user_id = frank.to_string() requester = create_requester(user_id) @@ -164,7 +181,7 @@ def test_user_is_created_and_logged_in_if_doesnt_exist(self): self.assertIsInstance(result_token, str) self.assertGreater(len(result_token), 20) - def test_if_user_exists(self): + def test_if_user_exists(self) -> None: store = self.hs.get_datastores().main frank = UserID.from_string("@frank:test") self.get_success( @@ -180,12 +197,12 @@ def test_if_user_exists(self): self.assertTrue(result_token is not None) @override_config({"limit_usage_by_mau": False}) - def test_mau_limits_when_disabled(self): + def test_mau_limits_when_disabled(self) -> None: # Ensure does not throw exception self.get_success(self.get_or_create_user(self.requester, "a", "display_name")) @override_config({"limit_usage_by_mau": True}) - def test_get_or_create_user_mau_not_blocked(self): + def test_get_or_create_user_mau_not_blocked(self) -> None: self.store.count_monthly_users = Mock( return_value=make_awaitable(self.hs.config.server.max_mau_value - 1) ) @@ -193,7 +210,7 @@ def test_get_or_create_user_mau_not_blocked(self): self.get_success(self.get_or_create_user(self.requester, "c", "User")) @override_config({"limit_usage_by_mau": True}) - def test_get_or_create_user_mau_blocked(self): + def test_get_or_create_user_mau_blocked(self) -> None: self.store.get_monthly_active_count = Mock( return_value=make_awaitable(self.lots_of_users) ) @@ -211,7 +228,7 @@ def test_get_or_create_user_mau_blocked(self): ) @override_config({"limit_usage_by_mau": True}) - def test_register_mau_blocked(self): + def test_register_mau_blocked(self) -> None: self.store.get_monthly_active_count = Mock( return_value=make_awaitable(self.lots_of_users) ) @@ -229,7 +246,7 @@ def test_register_mau_blocked(self): @override_config( {"auto_join_rooms": ["#room:test"], "auto_join_rooms_for_guests": False} ) - def test_auto_join_rooms_for_guests(self): + def test_auto_join_rooms_for_guests(self) -> None: user_id = self.get_success( self.handler.register_user(localpart="jeff", make_guest=True), ) @@ -237,7 +254,7 @@ def test_auto_join_rooms_for_guests(self): self.assertEqual(len(rooms), 0) @override_config({"auto_join_rooms": ["#room:test"]}) - def test_auto_create_auto_join_rooms(self): + def test_auto_create_auto_join_rooms(self) -> None: room_alias_str = "#room:test" user_id = self.get_success(self.handler.register_user(localpart="jeff")) rooms = 
self.get_success(self.store.get_rooms_for_user(user_id)) @@ -249,7 +266,7 @@ def test_auto_create_auto_join_rooms(self): self.assertEqual(len(rooms), 1) @override_config({"auto_join_rooms": []}) - def test_auto_create_auto_join_rooms_with_no_rooms(self): + def test_auto_create_auto_join_rooms_with_no_rooms(self) -> None: frank = UserID.from_string("@frank:test") user_id = self.get_success(self.handler.register_user(frank.localpart)) self.assertEqual(user_id, frank.to_string()) @@ -257,7 +274,7 @@ def test_auto_create_auto_join_rooms_with_no_rooms(self): self.assertEqual(len(rooms), 0) @override_config({"auto_join_rooms": ["#room:another"]}) - def test_auto_create_auto_join_where_room_is_another_domain(self): + def test_auto_create_auto_join_where_room_is_another_domain(self) -> None: frank = UserID.from_string("@frank:test") user_id = self.get_success(self.handler.register_user(frank.localpart)) self.assertEqual(user_id, frank.to_string()) @@ -267,13 +284,13 @@ def test_auto_create_auto_join_where_room_is_another_domain(self): @override_config( {"auto_join_rooms": ["#room:test"], "autocreate_auto_join_rooms": False} ) - def test_auto_create_auto_join_where_auto_create_is_false(self): + def test_auto_create_auto_join_where_auto_create_is_false(self) -> None: user_id = self.get_success(self.handler.register_user(localpart="jeff")) rooms = self.get_success(self.store.get_rooms_for_user(user_id)) self.assertEqual(len(rooms), 0) @override_config({"auto_join_rooms": ["#room:test"]}) - def test_auto_create_auto_join_rooms_when_user_is_not_a_real_user(self): + def test_auto_create_auto_join_rooms_when_user_is_not_a_real_user(self) -> None: room_alias_str = "#room:test" self.store.is_real_user = Mock(return_value=make_awaitable(False)) user_id = self.get_success(self.handler.register_user(localpart="support")) @@ -284,7 +301,7 @@ def test_auto_create_auto_join_rooms_when_user_is_not_a_real_user(self): self.get_failure(directory_handler.get_association(room_alias), SynapseError) @override_config({"auto_join_rooms": ["#room:test"]}) - def test_auto_create_auto_join_rooms_when_user_is_the_first_real_user(self): + def test_auto_create_auto_join_rooms_when_user_is_the_first_real_user(self) -> None: room_alias_str = "#room:test" self.store.count_real_users = Mock(return_value=make_awaitable(1)) @@ -299,7 +316,9 @@ def test_auto_create_auto_join_rooms_when_user_is_the_first_real_user(self): self.assertEqual(len(rooms), 1) @override_config({"auto_join_rooms": ["#room:test"]}) - def test_auto_create_auto_join_rooms_when_user_is_not_the_first_real_user(self): + def test_auto_create_auto_join_rooms_when_user_is_not_the_first_real_user( + self, + ) -> None: self.store.count_real_users = Mock(return_value=make_awaitable(2)) self.store.is_real_user = Mock(return_value=make_awaitable(True)) user_id = self.get_success(self.handler.register_user(localpart="real")) @@ -312,7 +331,7 @@ def test_auto_create_auto_join_rooms_when_user_is_not_the_first_real_user(self): "autocreate_auto_join_rooms_federated": False, } ) - def test_auto_create_auto_join_rooms_federated(self): + def test_auto_create_auto_join_rooms_federated(self) -> None: """ Auto-created rooms that are private require an invite to go to the user (instead of directly joining it). 
@@ -339,7 +358,7 @@ def test_auto_create_auto_join_rooms_federated(self): @override_config( {"auto_join_rooms": ["#room:test"], "auto_join_mxid_localpart": "support"} ) - def test_auto_join_mxid_localpart(self): + def test_auto_join_mxid_localpart(self) -> None: """ Ensure the user still needs up in the room created by a different user. """ @@ -376,7 +395,7 @@ def test_auto_join_mxid_localpart(self): "auto_join_mxid_localpart": "support", } ) - def test_auto_create_auto_join_room_preset(self): + def test_auto_create_auto_join_room_preset(self) -> None: """ Auto-created rooms that are private require an invite to go to the user (instead of directly joining it). @@ -416,7 +435,7 @@ def test_auto_create_auto_join_room_preset(self): "auto_join_mxid_localpart": "support", } ) - def test_auto_create_auto_join_room_preset_guest(self): + def test_auto_create_auto_join_room_preset_guest(self) -> None: """ Auto-created rooms that are private require an invite to go to the user (instead of directly joining it). @@ -454,7 +473,7 @@ def test_auto_create_auto_join_room_preset_guest(self): "auto_join_mxid_localpart": "support", } ) - def test_auto_create_auto_join_room_preset_invalid_permissions(self): + def test_auto_create_auto_join_room_preset_invalid_permissions(self) -> None: """ Auto-created rooms that are private require an invite, check that registration doesn't completely break if the inviter doesn't have proper @@ -525,7 +544,7 @@ def test_auto_create_auto_join_room_preset_invalid_permissions(self): "auto_join_rooms": ["#room:test"], }, ) - def test_auto_create_auto_join_where_no_consent(self): + def test_auto_create_auto_join_where_no_consent(self) -> None: """Test to ensure that the first user is not auto-joined to a room if they have not given general consent. """ @@ -550,19 +569,19 @@ def test_auto_create_auto_join_where_no_consent(self): rooms = self.get_success(self.store.get_rooms_for_user(user_id)) self.assertEqual(len(rooms), 1) - def test_register_support_user(self): + def test_register_support_user(self) -> None: user_id = self.get_success( self.handler.register_user(localpart="user", user_type=UserTypes.SUPPORT) ) d = self.store.is_support_user(user_id) self.assertTrue(self.get_success(d)) - def test_register_not_support_user(self): + def test_register_not_support_user(self) -> None: user_id = self.get_success(self.handler.register_user(localpart="user")) d = self.store.is_support_user(user_id) self.assertFalse(self.get_success(d)) - def test_invalid_user_id_length(self): + def test_invalid_user_id_length(self) -> None: invalid_user_id = "x" * 256 self.get_failure( self.handler.register_user(localpart=invalid_user_id), SynapseError @@ -577,7 +596,7 @@ def test_invalid_user_id_length(self): ] } ) - def test_spam_checker_deny(self): + def test_spam_checker_deny(self) -> None: """A spam checker can deny registration, which results in an error.""" self.get_failure(self.handler.register_user(localpart="user"), SynapseError) @@ -590,7 +609,7 @@ def test_spam_checker_deny(self): ] } ) - def test_spam_checker_legacy_allow(self): + def test_spam_checker_legacy_allow(self) -> None: """Tests that a legacy spam checker implementing the legacy 3-arg version of the check_registration_for_spam callback is correctly called. 
@@ -610,7 +629,7 @@ def test_spam_checker_legacy_allow(self): ] } ) - def test_spam_checker_legacy_deny(self): + def test_spam_checker_legacy_deny(self) -> None: """Tests that a legacy spam checker implementing the legacy 3-arg version of the check_registration_for_spam callback is correctly called. @@ -630,7 +649,7 @@ def test_spam_checker_legacy_deny(self): ] } ) - def test_spam_checker_shadow_ban(self): + def test_spam_checker_shadow_ban(self) -> None: """A spam checker can choose to shadow-ban a user, which allows registration to succeed.""" user_id = self.get_success(self.handler.register_user(localpart="user")) @@ -660,7 +679,7 @@ def test_spam_checker_shadow_ban(self): ] } ) - def test_spam_checker_receives_sso_type(self): + def test_spam_checker_receives_sso_type(self) -> None: """Test rejecting registration based on SSO type""" f = self.get_failure( self.handler.register_user(localpart="bobflimflob", auth_provider_id="cas"), @@ -678,8 +697,12 @@ def test_spam_checker_receives_sso_type(self): ) async def get_or_create_user( - self, requester, localpart, displayname, password_hash=None - ): + self, + requester: Requester, + localpart: str, + displayname: Optional[str], + password_hash: Optional[str] = None, + ) -> Tuple[str, str]: """Creates a new user if the user does not exist, else revokes all previous access tokens and generates a new one. @@ -734,13 +757,15 @@ async def get_or_create_user( class RemoteAutoJoinTestCase(unittest.HomeserverTestCase): """Tests auto-join on remote rooms.""" - def make_homeserver(self, reactor, clock): + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.room_id = "!roomid:remotetest" - async def update_membership(*args, **kwargs): + async def update_membership(*args: Any, **kwargs: Any) -> None: pass - async def lookup_room_alias(*args, **kwargs): + async def lookup_room_alias( + *args: Any, **kwargs: Any + ) -> Tuple[RoomID, List[str]]: return RoomID.from_string(self.room_id), ["remotetest"] self.room_member_handler = Mock(spec=["update_membership", "lookup_room_alias"]) @@ -750,12 +775,12 @@ async def lookup_room_alias(*args, **kwargs): hs = self.setup_test_homeserver(room_member_handler=self.room_member_handler) return hs - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.handler = self.hs.get_registration_handler() self.store = self.hs.get_datastores().main @override_config({"auto_join_rooms": ["#room:remotetest"]}) - def test_auto_create_auto_join_remote_room(self): + def test_auto_create_auto_join_remote_room(self) -> None: """Tests that we don't attempt to create remote rooms, and that we don't attempt to invite ourselves to rooms we're not in.""" diff --git a/tests/handlers/test_room.py b/tests/handlers/test_room.py index fcde5dab7272..df95490d3b47 100644 --- a/tests/handlers/test_room.py +++ b/tests/handlers/test_room.py @@ -14,7 +14,7 @@ class EncryptedByDefaultTestCase(unittest.HomeserverTestCase): ] @override_config({"encryption_enabled_by_default_for_room_type": "all"}) - def test_encrypted_by_default_config_option_all(self): + def test_encrypted_by_default_config_option_all(self) -> None: """Tests that invite-only and non-invite-only rooms have encryption enabled by default when the config option encryption_enabled_by_default_for_room_type is "all". 
""" @@ -45,7 +45,7 @@ def test_encrypted_by_default_config_option_all(self): self.assertEqual(event_content, {"algorithm": RoomEncryptionAlgorithms.DEFAULT}) @override_config({"encryption_enabled_by_default_for_room_type": "invite"}) - def test_encrypted_by_default_config_option_invite(self): + def test_encrypted_by_default_config_option_invite(self) -> None: """Tests that only new, invite-only rooms have encryption enabled by default when the config option encryption_enabled_by_default_for_room_type is "invite". """ @@ -76,7 +76,7 @@ def test_encrypted_by_default_config_option_invite(self): ) @override_config({"encryption_enabled_by_default_for_room_type": "off"}) - def test_encrypted_by_default_config_option_off(self): + def test_encrypted_by_default_config_option_off(self) -> None: """Tests that neither new invite-only nor non-invite-only rooms have encryption enabled by default when the config option encryption_enabled_by_default_for_room_type is "off". diff --git a/tests/handlers/test_room_summary.py b/tests/handlers/test_room_summary.py index aa650756e40b..d907fcaf04bd 100644 --- a/tests/handlers/test_room_summary.py +++ b/tests/handlers/test_room_summary.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Iterable, List, Optional, Tuple +from typing import Any, Dict, Iterable, List, Optional, Set, Tuple from unittest import mock from twisted.internet.defer import ensureDeferred +from twisted.test.proto_helpers import MemoryReactor from synapse.api.constants import ( EventContentFields, @@ -34,11 +35,14 @@ from synapse.rest.client import login, room from synapse.server import HomeServer from synapse.types import JsonDict, UserID, create_requester +from synapse.util import Clock from tests import unittest -def _create_event(room_id: str, order: Optional[Any] = None, origin_server_ts: int = 0): +def _create_event( + room_id: str, order: Optional[Any] = None, origin_server_ts: int = 0 +) -> mock.Mock: result = mock.Mock(name=room_id) result.room_id = room_id result.content = {} @@ -48,40 +52,40 @@ def _create_event(room_id: str, order: Optional[Any] = None, origin_server_ts: i return result -def _order(*events): +def _order(*events: mock.Mock) -> List[mock.Mock]: return sorted(events, key=_child_events_comparison_key) class TestSpaceSummarySort(unittest.TestCase): - def test_no_order_last(self): + def test_no_order_last(self) -> None: """An event with no ordering is placed behind those with an ordering.""" ev1 = _create_event("!abc:test") ev2 = _create_event("!xyz:test", "xyz") self.assertEqual([ev2, ev1], _order(ev1, ev2)) - def test_order(self): + def test_order(self) -> None: """The ordering should be used.""" ev1 = _create_event("!abc:test", "xyz") ev2 = _create_event("!xyz:test", "abc") self.assertEqual([ev2, ev1], _order(ev1, ev2)) - def test_order_origin_server_ts(self): + def test_order_origin_server_ts(self) -> None: """Origin server is a tie-breaker for ordering.""" ev1 = _create_event("!abc:test", origin_server_ts=10) ev2 = _create_event("!xyz:test", origin_server_ts=30) self.assertEqual([ev1, ev2], _order(ev1, ev2)) - def test_order_room_id(self): + def test_order_room_id(self) -> None: """Room ID is a final tie-breaker for ordering.""" ev1 = _create_event("!abc:test") ev2 = _create_event("!xyz:test") self.assertEqual([ev1, ev2], _order(ev1, ev2)) - def test_invalid_ordering_type(self): + def 
test_invalid_ordering_type(self) -> None: """Invalid orderings are considered the same as missing.""" ev1 = _create_event("!abc:test", 1) ev2 = _create_event("!xyz:test", "xyz") @@ -97,7 +101,7 @@ def test_invalid_ordering_type(self): ev1 = _create_event("!abc:test", True) self.assertEqual([ev2, ev1], _order(ev1, ev2)) - def test_invalid_ordering_value(self): + def test_invalid_ordering_value(self) -> None: """Invalid orderings are considered the same as missing.""" ev1 = _create_event("!abc:test", "foo\n") ev2 = _create_event("!xyz:test", "xyz") @@ -115,7 +119,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase): login.register_servlets, ] - def prepare(self, reactor, clock, hs: HomeServer): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.hs = hs self.handler = self.hs.get_room_summary_handler() @@ -223,7 +227,7 @@ def _poke_fed_invite(self, room_id: str, from_user: str) -> None: fed_handler.on_invite_request(fed_hostname, event, RoomVersions.V6) ) - def test_simple_space(self): + def test_simple_space(self) -> None: """Test a simple space with a single room.""" # The result should have the space and the room in it, along with a link # from space -> room. @@ -234,7 +238,7 @@ def test_simple_space(self): ) self._assert_hierarchy(result, expected) - def test_large_space(self): + def test_large_space(self) -> None: """Test a space with a large number of rooms.""" rooms = [self.room] # Make at least 51 rooms that are part of the space. @@ -260,7 +264,7 @@ def test_large_space(self): result["rooms"] += result2["rooms"] self._assert_hierarchy(result, expected) - def test_visibility(self): + def test_visibility(self) -> None: """A user not in a space cannot inspect it.""" user2 = self.register_user("user2", "pass") token2 = self.login("user2", "pass") @@ -380,7 +384,7 @@ def test_room_hierarchy_cache_sharing(self) -> None: self._assert_hierarchy(result2, [(self.space, [self.room])]) def _create_room_with_join_rule( - self, join_rule: str, room_version: Optional[str] = None, **extra_content + self, join_rule: str, room_version: Optional[str] = None, **extra_content: Any ) -> str: """Create a room with the given join rule and add it to the space.""" room_id = self.helper.create_room_as( @@ -403,7 +407,7 @@ def _create_room_with_join_rule( self._add_child(self.space, room_id, self.token) return room_id - def test_filtering(self): + def test_filtering(self) -> None: """ Rooms should be properly filtered to only include rooms the user has access to. """ @@ -476,7 +480,7 @@ def test_filtering(self): ) self._assert_hierarchy(result, expected) - def test_complex_space(self): + def test_complex_space(self) -> None: """ Create a "complex" space to see how it handles things like loops and subspaces. 
""" @@ -516,7 +520,7 @@ def test_complex_space(self): ) self._assert_hierarchy(result, expected) - def test_pagination(self): + def test_pagination(self) -> None: """Test simple pagination works.""" room_ids = [] for i in range(1, 10): @@ -553,7 +557,7 @@ def test_pagination(self): self._assert_hierarchy(result, expected) self.assertNotIn("next_batch", result) - def test_invalid_pagination_token(self): + def test_invalid_pagination_token(self) -> None: """An invalid pagination token, or changing other parameters, shoudl be rejected.""" room_ids = [] for i in range(1, 10): @@ -604,7 +608,7 @@ def test_invalid_pagination_token(self): SynapseError, ) - def test_max_depth(self): + def test_max_depth(self) -> None: """Create a deep tree to test the max depth against.""" spaces = [self.space] rooms = [self.room] @@ -659,7 +663,7 @@ def test_max_depth(self): ] self._assert_hierarchy(result, expected) - def test_unknown_room_version(self): + def test_unknown_room_version(self) -> None: """ If a room with an unknown room version is encountered it should not cause the entire summary to skip. @@ -685,7 +689,7 @@ def test_unknown_room_version(self): ) self._assert_hierarchy(result, expected) - def test_fed_complex(self): + def test_fed_complex(self) -> None: """ Return data over federation and ensure that it is handled properly. """ @@ -722,7 +726,9 @@ def test_fed_complex(self): "world_readable": True, } - async def summarize_remote_room_hierarchy(_self, room, suggested_only): + async def summarize_remote_room_hierarchy( + _self: Any, room: Any, suggested_only: bool + ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: return requested_room_entry, {subroom: child_room}, set() # Add a room to the space which is on another server. @@ -744,7 +750,7 @@ async def summarize_remote_room_hierarchy(_self, room, suggested_only): ) self._assert_hierarchy(result, expected) - def test_fed_filtering(self): + def test_fed_filtering(self) -> None: """ Rooms returned over federation should be properly filtered to only include rooms the user has access to. @@ -853,7 +859,9 @@ def test_fed_filtering(self): ], ) - async def summarize_remote_room_hierarchy(_self, room, suggested_only): + async def summarize_remote_room_hierarchy( + _self: Any, room: Any, suggested_only: bool + ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: return subspace_room_entry, dict(children_rooms), set() # Add a room to the space which is on another server. @@ -892,7 +900,7 @@ async def summarize_remote_room_hierarchy(_self, room, suggested_only): ) self._assert_hierarchy(result, expected) - def test_fed_invited(self): + def test_fed_invited(self) -> None: """ A room which the user was invited to should be included in the response. @@ -915,7 +923,9 @@ def test_fed_invited(self): }, ) - async def summarize_remote_room_hierarchy(_self, room, suggested_only): + async def summarize_remote_room_hierarchy( + _self: Any, room: Any, suggested_only: bool + ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: return fed_room_entry, {}, set() # Add a room to the space which is on another server. @@ -936,7 +946,7 @@ async def summarize_remote_room_hierarchy(_self, room, suggested_only): ) self._assert_hierarchy(result, expected) - def test_fed_caching(self): + def test_fed_caching(self) -> None: """ Federation `/hierarchy` responses should be cached. 
""" @@ -1023,7 +1033,7 @@ class RoomSummaryTestCase(unittest.HomeserverTestCase): login.register_servlets, ] - def prepare(self, reactor, clock, hs: HomeServer): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.hs = hs self.handler = self.hs.get_room_summary_handler() @@ -1040,12 +1050,12 @@ def prepare(self, reactor, clock, hs: HomeServer): tok=self.token, ) - def test_own_room(self): + def test_own_room(self) -> None: """Test a simple room created by the requester.""" result = self.get_success(self.handler.get_room_summary(self.user, self.room)) self.assertEqual(result.get("room_id"), self.room) - def test_visibility(self): + def test_visibility(self) -> None: """A user not in a private room cannot get its summary.""" user2 = self.register_user("user2", "pass") token2 = self.login("user2", "pass") @@ -1093,7 +1103,7 @@ def test_visibility(self): result = self.get_success(self.handler.get_room_summary(user2, self.room)) self.assertEqual(result.get("room_id"), self.room) - def test_fed(self): + def test_fed(self) -> None: """ Return data over federation and ensure that it is handled properly. """ @@ -1105,7 +1115,9 @@ def test_fed(self): {"room_id": fed_room, "world_readable": True}, ) - async def summarize_remote_room_hierarchy(_self, room, suggested_only): + async def summarize_remote_room_hierarchy( + _self: Any, room: Any, suggested_only: bool + ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: return requested_room_entry, {}, set() with mock.patch( diff --git a/tests/handlers/test_saml.py b/tests/handlers/test_saml.py index a0f84e29403d..9b1b8b9f1301 100644 --- a/tests/handlers/test_saml.py +++ b/tests/handlers/test_saml.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Dict, Optional +from typing import Any, Dict, Optional, Set, Tuple from unittest.mock import Mock import attr @@ -20,7 +20,9 @@ from twisted.test.proto_helpers import MemoryReactor from synapse.api.errors import RedirectException +from synapse.module_api import ModuleApi from synapse.server import HomeServer +from synapse.types import JsonDict from synapse.util import Clock from tests.test_utils import simple_async_mock @@ -29,6 +31,7 @@ # Check if we have the dependencies to run the tests. 
try: import saml2.config + import saml2.response from saml2.sigver import SigverError has_saml2 = True @@ -56,31 +59,39 @@ class FakeAuthnResponse: class TestMappingProvider: - def __init__(self, config, module): + def __init__(self, config: None, module: ModuleApi): pass @staticmethod - def parse_config(config): - return + def parse_config(config: JsonDict) -> None: + return None @staticmethod - def get_saml_attributes(config): + def get_saml_attributes(config: None) -> Tuple[Set[str], Set[str]]: return {"uid"}, {"displayName"} - def get_remote_user_id(self, saml_response, client_redirect_url): + def get_remote_user_id( + self, saml_response: "saml2.response.AuthnResponse", client_redirect_url: str + ) -> str: return saml_response.ava["uid"] def saml_response_to_user_attributes( - self, saml_response, failures, client_redirect_url - ): + self, + saml_response: "saml2.response.AuthnResponse", + failures: int, + client_redirect_url: str, + ) -> dict: localpart = saml_response.ava["username"] + (str(failures) if failures else "") return {"mxid_localpart": localpart, "displayname": None} class TestRedirectMappingProvider(TestMappingProvider): def saml_response_to_user_attributes( - self, saml_response, failures, client_redirect_url - ): + self, + saml_response: "saml2.response.AuthnResponse", + failures: int, + client_redirect_url: str, + ) -> dict: raise RedirectException(b"https://custom-saml-redirect/") @@ -347,7 +358,7 @@ def test_attribute_requirements(self) -> None: ) -def _mock_request(): +def _mock_request() -> Mock: """Returns a mock which will stand in as a SynapseRequest""" mock = Mock( spec=[ diff --git a/tests/handlers/test_send_email.py b/tests/handlers/test_send_email.py index da4bf8b5829a..8b6e4a40b620 100644 --- a/tests/handlers/test_send_email.py +++ b/tests/handlers/test_send_email.py @@ -13,7 +13,7 @@ # limitations under the License. 
-from typing import List, Tuple +from typing import Callable, List, Tuple from zope.interface import implementer @@ -28,20 +28,27 @@ @implementer(interfaces.IMessageDelivery) class _DummyMessageDelivery: - def __init__(self): + def __init__(self) -> None: # (recipient, message) tuples self.messages: List[Tuple[smtp.Address, bytes]] = [] - def receivedHeader(self, helo, origin, recipients): + def receivedHeader( + self, + helo: Tuple[bytes, bytes], + origin: smtp.Address, + recipients: List[smtp.User], + ) -> None: return None - def validateFrom(self, helo, origin): + def validateFrom( + self, helo: Tuple[bytes, bytes], origin: smtp.Address + ) -> smtp.Address: return origin - def record_message(self, recipient: smtp.Address, message: bytes): + def record_message(self, recipient: smtp.Address, message: bytes) -> None: self.messages.append((recipient, message)) - def validateTo(self, user: smtp.User): + def validateTo(self, user: smtp.User) -> Callable[[], interfaces.IMessageSMTP]: return lambda: _DummyMessage(self, user) @@ -56,20 +63,20 @@ def __init__(self, delivery: _DummyMessageDelivery, user: smtp.User): self._user = user self._buffer: List[bytes] = [] - def lineReceived(self, line): + def lineReceived(self, line: bytes) -> None: self._buffer.append(line) - def eomReceived(self): + def eomReceived(self) -> "defer.Deferred[bytes]": message = b"\n".join(self._buffer) + b"\n" self._delivery.record_message(self._user.dest, message) return defer.succeed(b"saved") - def connectionLost(self): + def connectionLost(self) -> None: pass class SendEmailHandlerTestCase(HomeserverTestCase): - def test_send_email(self): + def test_send_email(self) -> None: """Happy-path test that we can send email to a non-TLS server.""" h = self.hs.get_send_email_handler() d = ensureDeferred( @@ -119,7 +126,7 @@ def test_send_email(self): }, } ) - def test_send_email_force_tls(self): + def test_send_email_force_tls(self) -> None: """Happy-path test that we can send email to an Implicit TLS server.""" h = self.hs.get_send_email_handler() d = ensureDeferred( diff --git a/tests/handlers/test_stats.py b/tests/handlers/test_stats.py index 05f9ec3c5153..f1a50c5bcb26 100644 --- a/tests/handlers/test_stats.py +++ b/tests/handlers/test_stats.py @@ -12,9 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import Any, Dict, List, Optional + +from twisted.test.proto_helpers import MemoryReactor + from synapse.rest import admin from synapse.rest.client import login, room +from synapse.server import HomeServer from synapse.storage.databases.main import stats +from synapse.util import Clock from tests import unittest @@ -32,11 +38,11 @@ class StatsRoomTests(unittest.HomeserverTestCase): login.register_servlets, ] - def prepare(self, reactor, clock, hs): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.store = hs.get_datastores().main self.handler = self.hs.get_stats_handler() - def _add_background_updates(self): + def _add_background_updates(self) -> None: """ Add the background updates we need to run. 
""" @@ -63,12 +69,14 @@ def _add_background_updates(self): ) ) - async def get_all_room_state(self): + async def get_all_room_state(self) -> List[Dict[str, Any]]: return await self.store.db_pool.simple_select_list( "room_stats_state", None, retcols=("name", "topic", "canonical_alias") ) - def _get_current_stats(self, stats_type, stat_id): + def _get_current_stats( + self, stats_type: str, stat_id: str + ) -> Optional[Dict[str, Any]]: table, id_col = stats.TYPE_TO_TABLE[stats_type] cols = list(stats.ABSOLUTE_STATS_FIELDS[stats_type]) @@ -82,13 +90,13 @@ def _get_current_stats(self, stats_type, stat_id): ) ) - def _perform_background_initial_update(self): + def _perform_background_initial_update(self) -> None: # Do the initial population of the stats via the background update self._add_background_updates() self.wait_for_background_updates() - def test_initial_room(self): + def test_initial_room(self) -> None: """ The background updates will build the table from scratch. """ @@ -125,7 +133,7 @@ def test_initial_room(self): self.assertEqual(len(r), 1) self.assertEqual(r[0]["topic"], "foo") - def test_create_user(self): + def test_create_user(self) -> None: """ When we create a user, it should have statistics already ready. """ @@ -134,12 +142,12 @@ def test_create_user(self): u1stats = self._get_current_stats("user", u1) - self.assertIsNotNone(u1stats) + assert u1stats is not None # not in any rooms by default self.assertEqual(u1stats["joined_rooms"], 0) - def test_create_room(self): + def test_create_room(self) -> None: """ When we create a room, it should have statistics already ready. """ @@ -153,8 +161,8 @@ def test_create_room(self): r2 = self.helper.create_room_as(u1, tok=u1token, is_public=False) r2stats = self._get_current_stats("room", r2) - self.assertIsNotNone(r1stats) - self.assertIsNotNone(r2stats) + assert r1stats is not None + assert r2stats is not None self.assertEqual( r1stats["current_state_events"], EXPT_NUM_STATE_EVTS_IN_FRESH_PUBLIC_ROOM @@ -171,7 +179,9 @@ def test_create_room(self): self.assertEqual(r2stats["invited_members"], 0) self.assertEqual(r2stats["banned_members"], 0) - def test_updating_profile_information_does_not_increase_joined_members_count(self): + def test_updating_profile_information_does_not_increase_joined_members_count( + self, + ) -> None: """ Check that the joined_members count does not increase when a user changes their profile information (which is done by sending another join membership event into @@ -186,6 +196,7 @@ def test_updating_profile_information_does_not_increase_joined_members_count(sel # Get the current room stats r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None # Send a profile update into the room new_profile = {"displayname": "bob"} @@ -195,6 +206,7 @@ def test_updating_profile_information_does_not_increase_joined_members_count(sel # Get the new room stats r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None # Ensure that the user count did not changed self.assertEqual(r1stats_post["joined_members"], r1stats_ante["joined_members"]) @@ -202,7 +214,7 @@ def test_updating_profile_information_does_not_increase_joined_members_count(sel r1stats_post["local_users_in_room"], r1stats_ante["local_users_in_room"] ) - def test_send_state_event_nonoverwriting(self): + def test_send_state_event_nonoverwriting(self) -> None: """ When we send a non-overwriting state event, it increments current_state_events """ @@ -218,19 +230,21 @@ def test_send_state_event_nonoverwriting(self): ) 
r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None self.helper.send_state( r1, "cat.hissing", {"value": False}, tok=u1token, state_key="moggy" ) r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None self.assertEqual( r1stats_post["current_state_events"] - r1stats_ante["current_state_events"], 1, ) - def test_join_first_time(self): + def test_join_first_time(self) -> None: """ When a user joins a room for the first time, current_state_events and joined_members should increase by exactly 1. @@ -246,10 +260,12 @@ def test_join_first_time(self): u2token = self.login("u2", "pass") r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None self.helper.join(r1, u2, tok=u2token) r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None self.assertEqual( r1stats_post["current_state_events"] - r1stats_ante["current_state_events"], @@ -259,7 +275,7 @@ def test_join_first_time(self): r1stats_post["joined_members"] - r1stats_ante["joined_members"], 1 ) - def test_join_after_leave(self): + def test_join_after_leave(self) -> None: """ When a user joins a room after being previously left, joined_members should increase by exactly 1. @@ -280,10 +296,12 @@ def test_join_after_leave(self): self.helper.leave(r1, u2, tok=u2token) r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None self.helper.join(r1, u2, tok=u2token) r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None self.assertEqual( r1stats_post["current_state_events"] - r1stats_ante["current_state_events"], @@ -296,7 +314,7 @@ def test_join_after_leave(self): r1stats_post["left_members"] - r1stats_ante["left_members"], -1 ) - def test_invited(self): + def test_invited(self) -> None: """ When a user invites another user, current_state_events and invited_members should increase by exactly 1. @@ -311,10 +329,12 @@ def test_invited(self): u2 = self.register_user("u2", "pass") r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None self.helper.invite(r1, u1, u2, tok=u1token) r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None self.assertEqual( r1stats_post["current_state_events"] - r1stats_ante["current_state_events"], @@ -324,7 +344,7 @@ def test_invited(self): r1stats_post["invited_members"] - r1stats_ante["invited_members"], +1 ) - def test_join_after_invite(self): + def test_join_after_invite(self) -> None: """ When a user joins a room after being invited and joined_members should increase by exactly 1. @@ -344,10 +364,12 @@ def test_join_after_invite(self): self.helper.invite(r1, u1, u2, tok=u1token) r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None self.helper.join(r1, u2, tok=u2token) r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None self.assertEqual( r1stats_post["current_state_events"] - r1stats_ante["current_state_events"], @@ -360,7 +382,7 @@ def test_join_after_invite(self): r1stats_post["invited_members"] - r1stats_ante["invited_members"], -1 ) - def test_left(self): + def test_left(self) -> None: """ When a user leaves a room after joining and left_members should increase by exactly 1. 
@@ -380,10 +402,12 @@ def test_left(self): self.helper.join(r1, u2, tok=u2token) r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None self.helper.leave(r1, u2, tok=u2token) r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None self.assertEqual( r1stats_post["current_state_events"] - r1stats_ante["current_state_events"], @@ -396,7 +420,7 @@ def test_left(self): r1stats_post["joined_members"] - r1stats_ante["joined_members"], -1 ) - def test_banned(self): + def test_banned(self) -> None: """ When a user is banned from a room after joining and left_members should increase by exactly 1. @@ -416,10 +440,12 @@ def test_banned(self): self.helper.join(r1, u2, tok=u2token) r1stats_ante = self._get_current_stats("room", r1) + assert r1stats_ante is not None self.helper.change_membership(r1, u1, u2, "ban", tok=u1token) r1stats_post = self._get_current_stats("room", r1) + assert r1stats_post is not None self.assertEqual( r1stats_post["current_state_events"] - r1stats_ante["current_state_events"], @@ -432,7 +458,7 @@ def test_banned(self): r1stats_post["joined_members"] - r1stats_ante["joined_members"], -1 ) - def test_initial_background_update(self): + def test_initial_background_update(self) -> None: """ Test that statistics can be generated by the initial background update handler. @@ -462,6 +488,9 @@ def test_initial_background_update(self): r1stats = self._get_current_stats("room", r1) u1stats = self._get_current_stats("user", u1) + assert r1stats is not None + assert u1stats is not None + self.assertEqual(r1stats["joined_members"], 1) self.assertEqual( r1stats["current_state_events"], EXPT_NUM_STATE_EVTS_IN_FRESH_PUBLIC_ROOM @@ -469,7 +498,7 @@ def test_initial_background_update(self): self.assertEqual(u1stats["joined_rooms"], 1) - def test_incomplete_stats(self): + def test_incomplete_stats(self) -> None: """ This tests that we track incomplete statistics. 
@@ -533,8 +562,11 @@ def test_incomplete_stats(self): self.wait_for_background_updates() r1stats_complete = self._get_current_stats("room", r1) + assert r1stats_complete is not None u1stats_complete = self._get_current_stats("user", u1) + assert u1stats_complete is not None u2stats_complete = self._get_current_stats("user", u2) + assert u2stats_complete is not None # now we make our assertions diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index ab5c101eb708..0d9a3de92a5d 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -14,6 +14,8 @@ from typing import Optional from unittest.mock import MagicMock, Mock, patch +from twisted.test.proto_helpers import MemoryReactor + from synapse.api.constants import EventTypes, JoinRules from synapse.api.errors import Codes, ResourceLimitError from synapse.api.filtering import Filtering @@ -23,6 +25,7 @@ from synapse.rest.client import knock, login, room from synapse.server import HomeServer from synapse.types import UserID, create_requester +from synapse.util import Clock import tests.unittest import tests.utils @@ -39,7 +42,7 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): room.register_servlets, ] - def prepare(self, reactor, clock, hs: HomeServer): + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.sync_handler = self.hs.get_sync_handler() self.store = self.hs.get_datastores().main @@ -47,7 +50,7 @@ def prepare(self, reactor, clock, hs: HomeServer): # modify its config instead of the hs' self.auth_blocking = self.hs.get_auth_blocking() - def test_wait_for_sync_for_user_auth_blocking(self): + def test_wait_for_sync_for_user_auth_blocking(self) -> None: user_id1 = "@user1:test" user_id2 = "@user2:test" sync_config = generate_sync_config(user_id1) @@ -82,7 +85,7 @@ def test_wait_for_sync_for_user_auth_blocking(self): ) self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED) - def test_unknown_room_version(self): + def test_unknown_room_version(self) -> None: """ A room with an unknown room version should not break sync (and should be excluded). """ @@ -186,7 +189,7 @@ def test_unknown_room_version(self): self.assertNotIn(invite_room, [r.room_id for r in result.invited]) self.assertNotIn(knock_room, [r.room_id for r in result.knocked]) - def test_ban_wins_race_with_join(self): + def test_ban_wins_race_with_join(self) -> None: """Rooms shouldn't appear under "joined" if a join loses a race to a ban. A complicated edge case. Imagine the following scenario: From 864c3f85b0c420f755a064a3c50a45716db3f8af Mon Sep 17 00:00:00 2001 From: reivilibre Date: Fri, 16 Dec 2022 13:04:54 +0000 Subject: [PATCH 11/82] Improve type annotations for the helper methods on a `CachedFunction`. (#14685) --- changelog.d/14685.misc | 1 + synapse/util/caches/descriptors.py | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/14685.misc diff --git a/changelog.d/14685.misc b/changelog.d/14685.misc new file mode 100644 index 000000000000..3ba22701000b --- /dev/null +++ b/changelog.d/14685.misc @@ -0,0 +1 @@ +Improve type annotations for the helper methods on a `CachedFunction`. 
\ No newline at end of file diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 72227359b9a4..81df71a0c514 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -53,9 +53,9 @@ class CachedFunction(Generic[F]): - invalidate: Any = None - invalidate_all: Any = None - prefill: Any = None + invalidate: Callable[[Tuple[Any, ...]], None] + invalidate_all: Callable[[], None] + prefill: Callable[[Tuple[Any, ...], Any], None] cache: Any = None num_args: Any = None From 3aeca2588b79111a48a6083c88efc4d68a2cea19 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Fri, 16 Dec 2022 08:53:28 -0500 Subject: [PATCH 12/82] Add missing type hints to tests.config. (#14681) --- changelog.d/14681.misc | 1 + mypy.ini | 4 +- synapse/config/cache.py | 4 +- synapse/util/caches/lrucache.py | 9 +--- tests/config/test___main__.py | 6 +-- tests/config/test_background_update.py | 4 +- tests/config/test_base.py | 10 ++--- tests/config/test_cache.py | 57 ++++++++++++------------ tests/config/test_database.py | 2 +- tests/config/test_generate.py | 8 ++-- tests/config/test_load.py | 12 ++--- tests/config/test_ratelimiting.py | 2 +- tests/config/test_registration_config.py | 4 +- tests/config/test_room_directory.py | 4 +- tests/config/test_server.py | 18 ++++---- tests/config/test_tls.py | 53 +++++++++++++--------- tests/config/test_util.py | 2 +- tests/config/utils.py | 11 ++--- 18 files changed, 108 insertions(+), 103 deletions(-) create mode 100644 changelog.d/14681.misc diff --git a/changelog.d/14681.misc b/changelog.d/14681.misc new file mode 100644 index 000000000000..d44571b73149 --- /dev/null +++ b/changelog.d/14681.misc @@ -0,0 +1 @@ +Add missing type hints. diff --git a/mypy.ini b/mypy.ini index 1a37414e581c..80fbcdfeabf0 100644 --- a/mypy.ini +++ b/mypy.ini @@ -36,8 +36,6 @@ exclude = (?x) |tests/api/test_ratelimiting.py |tests/app/test_openid_listener.py |tests/appservice/test_scheduler.py - |tests/config/test_cache.py - |tests/config/test_tls.py |tests/crypto/test_keyring.py |tests/events/test_presence_router.py |tests/events/test_utils.py @@ -89,7 +87,7 @@ disallow_untyped_defs = False [mypy-tests.*] disallow_untyped_defs = False -[mypy-tests.config.test_api] +[mypy-tests.config.*] disallow_untyped_defs = True [mypy-tests.federation.transport.test_client] diff --git a/synapse/config/cache.py b/synapse/config/cache.py index eb4194a5a91b..015b2a138e85 100644 --- a/synapse/config/cache.py +++ b/synapse/config/cache.py @@ -16,7 +16,7 @@ import os import re import threading -from typing import Any, Callable, Dict, Optional +from typing import Any, Callable, Dict, Mapping, Optional import attr @@ -94,7 +94,7 @@ def add_resizable_cache( class CacheConfig(Config): section = "caches" - _environ = os.environ + _environ: Mapping[str, str] = os.environ event_cache_size: int cache_factors: Dict[str, float] diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index dcf0eac3bf08..452d5d04c1c0 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -788,26 +788,21 @@ def __len__(self) -> int: def __contains__(self, key: KT) -> bool: return self.contains(key) - def set_cache_factor(self, factor: float) -> bool: + def set_cache_factor(self, factor: float) -> None: """ Set the cache factor for this individual cache. This will trigger a resize if it changes, which may require evicting items from the cache. - - Returns: - Whether the cache changed size or not. 
""" if not self.apply_cache_factor_from_config: - return False + return new_size = int(self._original_max_size * factor) if new_size != self.max_size: self.max_size = new_size if self._on_resize: self._on_resize() - return True - return False def __del__(self) -> None: # We're about to be deleted, so we make sure to clear up all the nodes diff --git a/tests/config/test___main__.py b/tests/config/test___main__.py index b1c73d36124f..cb5d4b05c366 100644 --- a/tests/config/test___main__.py +++ b/tests/config/test___main__.py @@ -17,15 +17,15 @@ class ConfigMainFileTestCase(ConfigFileTestCase): - def test_executes_without_an_action(self): + def test_executes_without_an_action(self) -> None: self.generate_config() main(["", "-c", self.config_file]) - def test_read__error_if_key_not_found(self): + def test_read__error_if_key_not_found(self) -> None: self.generate_config() with self.assertRaises(SystemExit): main(["", "read", "foo.bar.hello", "-c", self.config_file]) - def test_read__passes_if_key_found(self): + def test_read__passes_if_key_found(self) -> None: self.generate_config() main(["", "read", "server.server_name", "-c", self.config_file]) diff --git a/tests/config/test_background_update.py b/tests/config/test_background_update.py index 0c32c1ca299e..e4bad2ba6e5f 100644 --- a/tests/config/test_background_update.py +++ b/tests/config/test_background_update.py @@ -22,7 +22,7 @@ class BackgroundUpdateConfigTestCase(HomeserverTestCase): # Tests that the default values in the config are correctly loaded. Note that the default # values are loaded when the corresponding config options are commented out, which is why there isn't # a config specified here. - def test_default_configuration(self): + def test_default_configuration(self) -> None: background_updater = BackgroundUpdater( self.hs, self.hs.get_datastores().main.db_pool ) @@ -46,7 +46,7 @@ def test_default_configuration(self): """ ) ) - def test_custom_configuration(self): + def test_custom_configuration(self) -> None: background_updater = BackgroundUpdater( self.hs, self.hs.get_datastores().main.db_pool ) diff --git a/tests/config/test_base.py b/tests/config/test_base.py index 6a52f862f488..3fbfe6c1da2d 100644 --- a/tests/config/test_base.py +++ b/tests/config/test_base.py @@ -24,13 +24,13 @@ class BaseConfigTestCase(unittest.TestCase): - def setUp(self): + def setUp(self) -> None: # The root object needs a server property with a public_baseurl. root = Mock() root.server.public_baseurl = "http://test" self.config = Config(root) - def test_loading_missing_templates(self): + def test_loading_missing_templates(self) -> None: # Use a temporary directory that exists on the system, but that isn't likely to # contain template files with tempfile.TemporaryDirectory() as tmp_dir: @@ -50,7 +50,7 @@ def test_loading_missing_templates(self): "Template file did not contain our test string", ) - def test_loading_custom_templates(self): + def test_loading_custom_templates(self) -> None: # Use a temporary directory that exists on the system with tempfile.TemporaryDirectory() as tmp_dir: # Create a temporary bogus template file @@ -79,7 +79,7 @@ def test_loading_custom_templates(self): "Template file did not contain our test string", ) - def test_multiple_custom_template_directories(self): + def test_multiple_custom_template_directories(self) -> None: """Tests that directories are searched in the right order if multiple custom template directories are provided. 
""" @@ -137,7 +137,7 @@ def test_multiple_custom_template_directories(self): for td in tempdirs: td.cleanup() - def test_loading_template_from_nonexistent_custom_directory(self): + def test_loading_template_from_nonexistent_custom_directory(self) -> None: with self.assertRaises(ConfigError): self.config.read_templates( ["some_filename.html"], ("a_nonexistent_directory",) diff --git a/tests/config/test_cache.py b/tests/config/test_cache.py index d2b3c299e354..96f66af328dd 100644 --- a/tests/config/test_cache.py +++ b/tests/config/test_cache.py @@ -13,26 +13,27 @@ # limitations under the License. from synapse.config.cache import CacheConfig, add_resizable_cache +from synapse.types import JsonDict from synapse.util.caches.lrucache import LruCache from tests.unittest import TestCase class CacheConfigTests(TestCase): - def setUp(self): + def setUp(self) -> None: # Reset caches before each test since there's global state involved. self.config = CacheConfig() self.config.reset() - def tearDown(self): + def tearDown(self) -> None: # Also reset the caches after each test to leave state pristine. self.config.reset() - def test_individual_caches_from_environ(self): + def test_individual_caches_from_environ(self) -> None: """ Individual cache factors will be loaded from the environment. """ - config = {} + config: JsonDict = {} self.config._environ = { "SYNAPSE_CACHE_FACTOR_SOMETHING_OR_OTHER": "2", "SYNAPSE_NOT_CACHE": "BLAH", @@ -42,15 +43,15 @@ def test_individual_caches_from_environ(self): self.assertEqual(dict(self.config.cache_factors), {"something_or_other": 2.0}) - def test_config_overrides_environ(self): + def test_config_overrides_environ(self) -> None: """ Individual cache factors defined in the environment will take precedence over those in the config. """ - config = {"caches": {"per_cache_factors": {"foo": 2, "bar": 3}}} + config: JsonDict = {"caches": {"per_cache_factors": {"foo": 2, "bar": 3}}} self.config._environ = { "SYNAPSE_CACHE_FACTOR_SOMETHING_OR_OTHER": "2", - "SYNAPSE_CACHE_FACTOR_FOO": 1, + "SYNAPSE_CACHE_FACTOR_FOO": "1", } self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() @@ -60,104 +61,104 @@ def test_config_overrides_environ(self): {"foo": 1.0, "bar": 3.0, "something_or_other": 2.0}, ) - def test_individual_instantiated_before_config_load(self): + def test_individual_instantiated_before_config_load(self) -> None: """ If a cache is instantiated before the config is read, it will be given the default cache size in the interim, and then resized once the config is loaded. """ - cache = LruCache(100) + cache: LruCache = LruCache(100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 50) - config = {"caches": {"per_cache_factors": {"foo": 3}}} + config: JsonDict = {"caches": {"per_cache_factors": {"foo": 3}}} self.config.read_config(config) self.config.resize_all_caches() self.assertEqual(cache.max_size, 300) - def test_individual_instantiated_after_config_load(self): + def test_individual_instantiated_after_config_load(self) -> None: """ If a cache is instantiated after the config is read, it will be immediately resized to the correct size given the per_cache_factor if there is one. 
""" - config = {"caches": {"per_cache_factors": {"foo": 2}}} + config: JsonDict = {"caches": {"per_cache_factors": {"foo": 2}}} self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() - cache = LruCache(100) + cache: LruCache = LruCache(100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 200) - def test_global_instantiated_before_config_load(self): + def test_global_instantiated_before_config_load(self) -> None: """ If a cache is instantiated before the config is read, it will be given the default cache size in the interim, and then resized to the new default cache size once the config is loaded. """ - cache = LruCache(100) + cache: LruCache = LruCache(100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 50) - config = {"caches": {"global_factor": 4}} + config: JsonDict = {"caches": {"global_factor": 4}} self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() self.assertEqual(cache.max_size, 400) - def test_global_instantiated_after_config_load(self): + def test_global_instantiated_after_config_load(self) -> None: """ If a cache is instantiated after the config is read, it will be immediately resized to the correct size given the global factor if there is no per-cache factor. """ - config = {"caches": {"global_factor": 1.5}} + config: JsonDict = {"caches": {"global_factor": 1.5}} self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() - cache = LruCache(100) + cache: LruCache = LruCache(100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 150) - def test_cache_with_asterisk_in_name(self): + def test_cache_with_asterisk_in_name(self) -> None: """Some caches have asterisks in their name, test that they are set correctly.""" - config = { + config: JsonDict = { "caches": { "per_cache_factors": {"*cache_a*": 5, "cache_b": 6, "cache_c": 2} } } self.config._environ = { "SYNAPSE_CACHE_FACTOR_CACHE_A": "2", - "SYNAPSE_CACHE_FACTOR_CACHE_B": 3, + "SYNAPSE_CACHE_FACTOR_CACHE_B": "3", } self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() - cache_a = LruCache(100) + cache_a: LruCache = LruCache(100) add_resizable_cache("*cache_a*", cache_resize_callback=cache_a.set_cache_factor) self.assertEqual(cache_a.max_size, 200) - cache_b = LruCache(100) + cache_b: LruCache = LruCache(100) add_resizable_cache("*Cache_b*", cache_resize_callback=cache_b.set_cache_factor) self.assertEqual(cache_b.max_size, 300) - cache_c = LruCache(100) + cache_c: LruCache = LruCache(100) add_resizable_cache("*cache_c*", cache_resize_callback=cache_c.set_cache_factor) self.assertEqual(cache_c.max_size, 200) - def test_apply_cache_factor_from_config(self): + def test_apply_cache_factor_from_config(self) -> None: """Caches can disable applying cache factor updates, mainly used by event cache size. 
""" - config = {"caches": {"event_cache_size": "10k"}} + config: JsonDict = {"caches": {"event_cache_size": "10k"}} self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() - cache = LruCache( + cache: LruCache = LruCache( max_size=self.config.event_cache_size, apply_cache_factor_from_config=False, ) diff --git a/tests/config/test_database.py b/tests/config/test_database.py index 9eca10bbe9b6..240277bcc6b0 100644 --- a/tests/config/test_database.py +++ b/tests/config/test_database.py @@ -20,7 +20,7 @@ class DatabaseConfigTestCase(unittest.TestCase): - def test_database_configured_correctly(self): + def test_database_configured_correctly(self) -> None: conf = yaml.safe_load( DatabaseConfig().generate_config_section(data_dir_path="/data_dir_path") ) diff --git a/tests/config/test_generate.py b/tests/config/test_generate.py index fdfbb0e38e9c..3a023669320f 100644 --- a/tests/config/test_generate.py +++ b/tests/config/test_generate.py @@ -25,14 +25,14 @@ class ConfigGenerationTestCase(unittest.TestCase): - def setUp(self): + def setUp(self) -> None: self.dir = tempfile.mkdtemp() self.file = os.path.join(self.dir, "homeserver.yaml") - def tearDown(self): + def tearDown(self) -> None: shutil.rmtree(self.dir) - def test_generate_config_generates_files(self): + def test_generate_config_generates_files(self) -> None: with redirect_stdout(StringIO()): HomeServerConfig.load_or_generate_config( "", @@ -56,7 +56,7 @@ def test_generate_config_generates_files(self): os.path.join(os.getcwd(), "homeserver.log"), ) - def assert_log_filename_is(self, log_config_file, expected): + def assert_log_filename_is(self, log_config_file: str, expected: str) -> None: with open(log_config_file) as f: config = f.read() # find the 'filename' line diff --git a/tests/config/test_load.py b/tests/config/test_load.py index 69a4e9413b26..fcbe79cc7a28 100644 --- a/tests/config/test_load.py +++ b/tests/config/test_load.py @@ -21,14 +21,14 @@ class ConfigLoadingFileTestCase(ConfigFileTestCase): - def test_load_fails_if_server_name_missing(self): + def test_load_fails_if_server_name_missing(self) -> None: self.generate_config_and_remove_lines_containing("server_name") with self.assertRaises(ConfigError): HomeServerConfig.load_config("", ["-c", self.config_file]) with self.assertRaises(ConfigError): HomeServerConfig.load_or_generate_config("", ["-c", self.config_file]) - def test_generates_and_loads_macaroon_secret_key(self): + def test_generates_and_loads_macaroon_secret_key(self) -> None: self.generate_config() with open(self.config_file) as f: @@ -58,7 +58,7 @@ def test_generates_and_loads_macaroon_secret_key(self): "was: %r" % (config2.key.macaroon_secret_key,) ) - def test_load_succeeds_if_macaroon_secret_key_missing(self): + def test_load_succeeds_if_macaroon_secret_key_missing(self) -> None: self.generate_config_and_remove_lines_containing("macaroon") config1 = HomeServerConfig.load_config("", ["-c", self.config_file]) config2 = HomeServerConfig.load_config("", ["-c", self.config_file]) @@ -73,7 +73,7 @@ def test_load_succeeds_if_macaroon_secret_key_missing(self): config1.key.macaroon_secret_key, config3.key.macaroon_secret_key ) - def test_disable_registration(self): + def test_disable_registration(self) -> None: self.generate_config() self.add_lines_to_config( ["enable_registration: true", "disable_registration: true"] @@ -93,7 +93,7 @@ def test_disable_registration(self): assert config3 is not None self.assertTrue(config3.registration.enable_registration) - def 
test_stats_enabled(self): + def test_stats_enabled(self) -> None: self.generate_config_and_remove_lines_containing("enable_metrics") self.add_lines_to_config(["enable_metrics: true"]) @@ -101,7 +101,7 @@ def test_stats_enabled(self): config = HomeServerConfig.load_config("", ["-c", self.config_file]) self.assertFalse(config.metrics.metrics_flags.known_servers) - def test_depreciated_identity_server_flag_throws_error(self): + def test_depreciated_identity_server_flag_throws_error(self) -> None: self.generate_config() # Needed to ensure that actual key/value pair added below don't end up on a line with a comment self.add_lines_to_config([" "]) diff --git a/tests/config/test_ratelimiting.py b/tests/config/test_ratelimiting.py index 1b63e1adfd36..f12147eaa000 100644 --- a/tests/config/test_ratelimiting.py +++ b/tests/config/test_ratelimiting.py @@ -18,7 +18,7 @@ class RatelimitConfigTestCase(TestCase): - def test_parse_rc_federation(self): + def test_parse_rc_federation(self) -> None: config_dict = default_config("test") config_dict["rc_federation"] = { "window_size": 20000, diff --git a/tests/config/test_registration_config.py b/tests/config/test_registration_config.py index 33d7b70e32ed..f6869d7f0645 100644 --- a/tests/config/test_registration_config.py +++ b/tests/config/test_registration_config.py @@ -21,7 +21,7 @@ class RegistrationConfigTestCase(ConfigFileTestCase): - def test_session_lifetime_must_not_be_exceeded_by_smaller_lifetimes(self): + def test_session_lifetime_must_not_be_exceeded_by_smaller_lifetimes(self) -> None: """ session_lifetime should logically be larger than, or at least as large as, all the different token lifetimes. @@ -91,7 +91,7 @@ def test_session_lifetime_must_not_be_exceeded_by_smaller_lifetimes(self): "", ) - def test_refuse_to_start_if_open_registration_and_no_verification(self): + def test_refuse_to_start_if_open_registration_and_no_verification(self) -> None: self.generate_config() self.add_lines_to_config( [ diff --git a/tests/config/test_room_directory.py b/tests/config/test_room_directory.py index db745815eff2..297ab377921a 100644 --- a/tests/config/test_room_directory.py +++ b/tests/config/test_room_directory.py @@ -20,7 +20,7 @@ class RoomDirectoryConfigTestCase(unittest.TestCase): - def test_alias_creation_acl(self): + def test_alias_creation_acl(self) -> None: config = yaml.safe_load( """ alias_creation_rules: @@ -78,7 +78,7 @@ def test_alias_creation_acl(self): ) ) - def test_room_publish_acl(self): + def test_room_publish_acl(self) -> None: config = yaml.safe_load( """ alias_creation_rules: [] diff --git a/tests/config/test_server.py b/tests/config/test_server.py index 1f27a5470189..41a3fb0b6db6 100644 --- a/tests/config/test_server.py +++ b/tests/config/test_server.py @@ -21,7 +21,7 @@ class ServerConfigTestCase(unittest.TestCase): - def test_is_threepid_reserved(self): + def test_is_threepid_reserved(self) -> None: user1 = {"medium": "email", "address": "user1@example.com"} user2 = {"medium": "email", "address": "user2@example.com"} user3 = {"medium": "email", "address": "user3@example.com"} @@ -32,7 +32,7 @@ def test_is_threepid_reserved(self): self.assertFalse(is_threepid_reserved(config, user3)) self.assertFalse(is_threepid_reserved(config, user1_msisdn)) - def test_unsecure_listener_no_listeners_open_private_ports_false(self): + def test_unsecure_listener_no_listeners_open_private_ports_false(self) -> None: conf = yaml.safe_load( ServerConfig().generate_config_section( "CONFDIR", "/data_dir_path", "che.org", False, None @@ -52,7 +52,7 @@ def 
test_unsecure_listener_no_listeners_open_private_ports_false(self): self.assertEqual(conf["listeners"], expected_listeners) - def test_unsecure_listener_no_listeners_open_private_ports_true(self): + def test_unsecure_listener_no_listeners_open_private_ports_true(self) -> None: conf = yaml.safe_load( ServerConfig().generate_config_section( "CONFDIR", "/data_dir_path", "che.org", True, None @@ -71,7 +71,7 @@ def test_unsecure_listener_no_listeners_open_private_ports_true(self): self.assertEqual(conf["listeners"], expected_listeners) - def test_listeners_set_correctly_open_private_ports_false(self): + def test_listeners_set_correctly_open_private_ports_false(self) -> None: listeners = [ { "port": 8448, @@ -95,7 +95,7 @@ def test_listeners_set_correctly_open_private_ports_false(self): self.assertEqual(conf["listeners"], listeners) - def test_listeners_set_correctly_open_private_ports_true(self): + def test_listeners_set_correctly_open_private_ports_true(self) -> None: listeners = [ { "port": 8448, @@ -131,14 +131,14 @@ def test_listeners_set_correctly_open_private_ports_true(self): class GenerateIpSetTestCase(unittest.TestCase): - def test_empty(self): + def test_empty(self) -> None: ip_set = generate_ip_set(()) self.assertFalse(ip_set) ip_set = generate_ip_set((), ()) self.assertFalse(ip_set) - def test_generate(self): + def test_generate(self) -> None: """Check adding IPv4 and IPv6 addresses.""" # IPv4 address ip_set = generate_ip_set(("1.2.3.4",)) @@ -160,7 +160,7 @@ def test_generate(self): ip_set = generate_ip_set(("1.2.3.4", "::1.2.3.4")) self.assertEqual(len(ip_set.iter_cidrs()), 4) - def test_extra(self): + def test_extra(self) -> None: """Extra IP addresses are treated the same.""" ip_set = generate_ip_set((), ("1.2.3.4",)) self.assertEqual(len(ip_set.iter_cidrs()), 4) @@ -172,7 +172,7 @@ def test_extra(self): ip_set = generate_ip_set(("1.2.3.4",), ("1.2.3.4",)) self.assertEqual(len(ip_set.iter_cidrs()), 4) - def test_bad_value(self): + def test_bad_value(self) -> None: """An error should be raised if a bad value is passed in.""" with self.assertRaises(ConfigError): generate_ip_set(("not-an-ip",)) diff --git a/tests/config/test_tls.py b/tests/config/test_tls.py index 9ba5781573fc..7510fc464333 100644 --- a/tests/config/test_tls.py +++ b/tests/config/test_tls.py @@ -13,13 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import cast + import idna from OpenSSL import SSL from synapse.config._base import Config, RootConfig +from synapse.config.homeserver import HomeServerConfig from synapse.config.tls import ConfigError, TlsConfig -from synapse.crypto.context_factory import FederationPolicyForHTTPS +from synapse.crypto.context_factory import ( + FederationPolicyForHTTPS, + SSLClientConnectionCreator, +) +from synapse.types import JsonDict from tests.unittest import TestCase @@ -27,7 +34,7 @@ class FakeServer(Config): section = "server" - def has_tls_listener(self): + def has_tls_listener(self) -> bool: return False @@ -36,21 +43,21 @@ class TestConfig(RootConfig): class TLSConfigTests(TestCase): - def test_tls_client_minimum_default(self): + def test_tls_client_minimum_default(self) -> None: """ The default client TLS version is 1.0. 
""" - config = {} + config: JsonDict = {} t = TestConfig() t.tls.read_config(config, config_dir_path="", data_dir_path="") self.assertEqual(t.tls.federation_client_minimum_tls_version, "1") - def test_tls_client_minimum_set(self): + def test_tls_client_minimum_set(self) -> None: """ The default client TLS version can be set to 1.0, 1.1, and 1.2. """ - config = {"federation_client_minimum_tls_version": 1} + config: JsonDict = {"federation_client_minimum_tls_version": 1} t = TestConfig() t.tls.read_config(config, config_dir_path="", data_dir_path="") self.assertEqual(t.tls.federation_client_minimum_tls_version, "1") @@ -76,7 +83,7 @@ def test_tls_client_minimum_set(self): t.tls.read_config(config, config_dir_path="", data_dir_path="") self.assertEqual(t.tls.federation_client_minimum_tls_version, "1.2") - def test_tls_client_minimum_1_point_3_missing(self): + def test_tls_client_minimum_1_point_3_missing(self) -> None: """ If TLS 1.3 support is missing and it's configured, it will raise a ConfigError. @@ -88,7 +95,7 @@ def test_tls_client_minimum_1_point_3_missing(self): self.addCleanup(setattr, SSL, "SSL.OP_NO_TLSv1_3", OP_NO_TLSv1_3) assert not hasattr(SSL, "OP_NO_TLSv1_3") - config = {"federation_client_minimum_tls_version": 1.3} + config: JsonDict = {"federation_client_minimum_tls_version": 1.3} t = TestConfig() with self.assertRaises(ConfigError) as e: t.tls.read_config(config, config_dir_path="", data_dir_path="") @@ -100,7 +107,7 @@ def test_tls_client_minimum_1_point_3_missing(self): ), ) - def test_tls_client_minimum_1_point_3_exists(self): + def test_tls_client_minimum_1_point_3_exists(self) -> None: """ If TLS 1.3 support exists and it's configured, it will be settable. """ @@ -110,20 +117,20 @@ def test_tls_client_minimum_1_point_3_exists(self): self.addCleanup(lambda: delattr(SSL, "OP_NO_TLSv1_3")) assert hasattr(SSL, "OP_NO_TLSv1_3") - config = {"federation_client_minimum_tls_version": 1.3} + config: JsonDict = {"federation_client_minimum_tls_version": 1.3} t = TestConfig() t.tls.read_config(config, config_dir_path="", data_dir_path="") self.assertEqual(t.tls.federation_client_minimum_tls_version, "1.3") - def test_tls_client_minimum_set_passed_through_1_2(self): + def test_tls_client_minimum_set_passed_through_1_2(self) -> None: """ The configured TLS version is correctly configured by the ContextFactory. """ - config = {"federation_client_minimum_tls_version": 1.2} + config: JsonDict = {"federation_client_minimum_tls_version": 1.2} t = TestConfig() t.tls.read_config(config, config_dir_path="", data_dir_path="") - cf = FederationPolicyForHTTPS(t) + cf = FederationPolicyForHTTPS(cast(HomeServerConfig, t)) options = _get_ssl_context_options(cf._verify_ssl_context) # The context has had NO_TLSv1_1 and NO_TLSv1_0 set, but not NO_TLSv1_2 @@ -131,15 +138,15 @@ def test_tls_client_minimum_set_passed_through_1_2(self): self.assertNotEqual(options & SSL.OP_NO_TLSv1_1, 0) self.assertEqual(options & SSL.OP_NO_TLSv1_2, 0) - def test_tls_client_minimum_set_passed_through_1_0(self): + def test_tls_client_minimum_set_passed_through_1_0(self) -> None: """ The configured TLS version is correctly configured by the ContextFactory. 
""" - config = {"federation_client_minimum_tls_version": 1} + config: JsonDict = {"federation_client_minimum_tls_version": 1} t = TestConfig() t.tls.read_config(config, config_dir_path="", data_dir_path="") - cf = FederationPolicyForHTTPS(t) + cf = FederationPolicyForHTTPS(cast(HomeServerConfig, t)) options = _get_ssl_context_options(cf._verify_ssl_context) # The context has not had any of the NO_TLS set. @@ -147,11 +154,11 @@ def test_tls_client_minimum_set_passed_through_1_0(self): self.assertEqual(options & SSL.OP_NO_TLSv1_1, 0) self.assertEqual(options & SSL.OP_NO_TLSv1_2, 0) - def test_whitelist_idna_failure(self): + def test_whitelist_idna_failure(self) -> None: """ The federation certificate whitelist will not allow IDNA domain names. """ - config = { + config: JsonDict = { "federation_certificate_verification_whitelist": [ "example.com", "*.ドメイン.テスト", @@ -163,11 +170,11 @@ def test_whitelist_idna_failure(self): ) self.assertIn("IDNA domain names", str(e)) - def test_whitelist_idna_result(self): + def test_whitelist_idna_result(self) -> None: """ The federation certificate whitelist will match on IDNA encoded names. """ - config = { + config: JsonDict = { "federation_certificate_verification_whitelist": [ "example.com", "*.xn--eckwd4c7c.xn--zckzah", @@ -176,14 +183,16 @@ def test_whitelist_idna_result(self): t = TestConfig() t.tls.read_config(config, config_dir_path="", data_dir_path="") - cf = FederationPolicyForHTTPS(t) + cf = FederationPolicyForHTTPS(cast(HomeServerConfig, t)) # Not in the whitelist opts = cf.get_options(b"notexample.com") + assert isinstance(opts, SSLClientConnectionCreator) self.assertTrue(opts._verifier._verify_certs) # Caught by the wildcard opts = cf.get_options(idna.encode("テスト.ドメイン.テスト")) + assert isinstance(opts, SSLClientConnectionCreator) self.assertFalse(opts._verifier._verify_certs) @@ -191,4 +200,4 @@ def _get_ssl_context_options(ssl_context: SSL.Context) -> int: """get the options bits from an openssl context object""" # the OpenSSL.SSL.Context wrapper doesn't expose get_options, so we have to # use the low-level interface - return SSL._lib.SSL_CTX_get_options(ssl_context._context) + return SSL._lib.SSL_CTX_get_options(ssl_context._context) # type: ignore[attr-defined] diff --git a/tests/config/test_util.py b/tests/config/test_util.py index 3d4929daacf0..7073654832e1 100644 --- a/tests/config/test_util.py +++ b/tests/config/test_util.py @@ -21,7 +21,7 @@ class ValidateConfigTestCase(TestCase): """Test cases for synapse.config._util.validate_config""" - def test_bad_object_in_array(self): + def test_bad_object_in_array(self) -> None: """malformed objects within an array should be validated correctly""" # consider a structure: diff --git a/tests/config/utils.py b/tests/config/utils.py index 94c18a052ba4..4c0e8a064a6c 100644 --- a/tests/config/utils.py +++ b/tests/config/utils.py @@ -17,19 +17,20 @@ import unittest from contextlib import redirect_stdout from io import StringIO +from typing import List from synapse.config.homeserver import HomeServerConfig class ConfigFileTestCase(unittest.TestCase): - def setUp(self): + def setUp(self) -> None: self.dir = tempfile.mkdtemp() self.config_file = os.path.join(self.dir, "homeserver.yaml") - def tearDown(self): + def tearDown(self) -> None: shutil.rmtree(self.dir) - def generate_config(self): + def generate_config(self) -> None: with redirect_stdout(StringIO()): HomeServerConfig.load_or_generate_config( "", @@ -43,7 +44,7 @@ def generate_config(self): ], ) - def 
generate_config_and_remove_lines_containing(self, needle): + def generate_config_and_remove_lines_containing(self, needle: str) -> None: self.generate_config() with open(self.config_file) as f: @@ -52,7 +53,7 @@ def generate_config_and_remove_lines_containing(self, needle): with open(self.config_file, "w") as f: f.write("".join(contents)) - def add_lines_to_config(self, lines): + def add_lines_to_config(self, lines: List[str]) -> None: with open(self.config_file, "a") as f: for line in lines: f.write(line + "\n") From 9205249be7bde191adacefadf35f98943346c716 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Dec 2022 14:10:29 +0000 Subject: [PATCH 13/82] Bump JasonEtco/create-an-issue from 2.8.1 to 2.8.2 (#14693) * Bump JasonEtco/create-an-issue from 2.8.1 to 2.8.2 Bumps [JasonEtco/create-an-issue](https://github.com/JasonEtco/create-an-issue) from 2.8.1 to 2.8.2. - [Release notes](https://github.com/JasonEtco/create-an-issue/releases) - [Commits](https://github.com/JasonEtco/create-an-issue/compare/77399b6110ef82b94c1c9f9f615acf9e604f7f56...3a8ba796516b57db8cb2ee6dfc65bc76cd39d56d) --- updated-dependencies: - dependency-name: JasonEtco/create-an-issue dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- .github/workflows/latest_deps.yml | 2 +- .github/workflows/twisted_trunk.yml | 2 +- changelog.d/14693.misc | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/14693.misc diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index 4bc4266c4d2a..e5e4e8da77cc 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -208,7 +208,7 @@ jobs: steps: - uses: actions/checkout@v3 - - uses: JasonEtco/create-an-issue@77399b6110ef82b94c1c9f9f615acf9e604f7f56 # v2.5.0, 2020-12-06 + - uses: JasonEtco/create-an-issue@3a8ba796516b57db8cb2ee6dfc65bc76cd39d56d # v2.8.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index 262b17a20db5..b08222f289be 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -174,7 +174,7 @@ jobs: steps: - uses: actions/checkout@v3 - - uses: JasonEtco/create-an-issue@77399b6110ef82b94c1c9f9f615acf9e604f7f56 # v2.5.0, 2020-12-06 + - uses: JasonEtco/create-an-issue@3a8ba796516b57db8cb2ee6dfc65bc76cd39d56d # v2.8.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: diff --git a/changelog.d/14693.misc b/changelog.d/14693.misc new file mode 100644 index 000000000000..86771f41b2f8 --- /dev/null +++ b/changelog.d/14693.misc @@ -0,0 +1 @@ +Bump JasonEtco/create-an-issue from 2.8.1 to 2.8.2. From 51abfe56258526cca47804212025f056b75e0c7b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Dec 2022 14:11:15 +0000 Subject: [PATCH 14/82] Bump blake2 from 0.10.5 to 0.10.6 (#14695) * Bump blake2 from 0.10.5 to 0.10.6 Bumps [blake2](https://github.com/RustCrypto/hashes) from 0.10.5 to 0.10.6. 
- [Release notes](https://github.com/RustCrypto/hashes/releases) - [Commits](https://github.com/RustCrypto/hashes/compare/blake2-v0.10.5...blake2-v0.10.6) --- updated-dependencies: - dependency-name: blake2 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- Cargo.lock | 4 ++-- changelog.d/14695.misc | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/14695.misc diff --git a/Cargo.lock b/Cargo.lock index 6e97fb8fb10f..8abbaeee387b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -37,9 +37,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "blake2" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b12e5fd123190ce1c2e559308a94c9bacad77907d4c6005d9e58fe1a0689e55e" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" dependencies = [ "digest", ] diff --git a/changelog.d/14695.misc b/changelog.d/14695.misc new file mode 100644 index 000000000000..57e08498be2d --- /dev/null +++ b/changelog.d/14695.misc @@ -0,0 +1 @@ +Bump blake2 from 0.10.5 to 0.10.6. From 9c89707b56908e755de546c7d390637cecb53159 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Dec 2022 14:14:12 +0000 Subject: [PATCH 15/82] Bump hiredis from 2.0.0 to 2.1.0 (#14699) * Bump hiredis from 2.0.0 to 2.1.0 Bumps [hiredis](https://github.com/redis/hiredis-py) from 2.0.0 to 2.1.0. - [Release notes](https://github.com/redis/hiredis-py/releases) - [Changelog](https://github.com/redis/hiredis-py/blob/master/CHANGELOG.md) - [Commits](https://github.com/redis/hiredis-py/compare/v2.0.0...v2.1.0) --- updated-dependencies: - dependency-name: hiredis dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/14699.misc | 1 + poetry.lock | 133 ++++++++++++++++++++++++++++------------- 2 files changed, 91 insertions(+), 43 deletions(-) create mode 100644 changelog.d/14699.misc diff --git a/changelog.d/14699.misc b/changelog.d/14699.misc new file mode 100644 index 000000000000..d73ef25634b8 --- /dev/null +++ b/changelog.d/14699.misc @@ -0,0 +1 @@ +Bump hiredis from 2.0.0 to 2.1.0. 
diff --git a/poetry.lock b/poetry.lock index 6fd4bd5ba574..a870d8af5709 100644 --- a/poetry.lock +++ b/poetry.lock @@ -318,11 +318,11 @@ typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\"" [[package]] name = "hiredis" -version = "2.0.0" +version = "2.1.0" description = "Python wrapper for hiredis" category = "main" optional = true -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "hyperlink" @@ -1866,47 +1866,94 @@ gitpython = [ {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, ] hiredis = [ - {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"}, - {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26"}, - {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea"}, - {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99"}, - {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05"}, - {file = "hiredis-2.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a"}, - {file = "hiredis-2.0.0-cp36-cp36m-win32.whl", hash = "sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63"}, - {file = "hiredis-2.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6"}, - {file = "hiredis-2.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79"}, - {file = "hiredis-2.0.0-cp37-cp37m-win32.whl", hash = "sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc"}, - {file = "hiredis-2.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a"}, - {file = "hiredis-2.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = 
"sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048"}, - {file = "hiredis-2.0.0-cp38-cp38-win32.whl", hash = "sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426"}, - {file = "hiredis-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581"}, - {file = "hiredis-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e"}, - {file = "hiredis-2.0.0-cp39-cp39-win32.whl", hash = "sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d"}, - {file = "hiredis-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9"}, - {file = "hiredis-2.0.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54"}, - {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27"}, - {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d"}, - {file = "hiredis-2.0.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163"}, - {file = "hiredis-2.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a"}, - {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux1_x86_64.whl", hash = "sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87"}, - {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41"}, - {file = "hiredis-2.0.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0"}, - {file = "hiredis-2.0.0.tar.gz", hash = "sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a"}, + {file = "hiredis-2.1.0-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:7b339a7542a3f6a10b3bbc157e4abc9bae9628e2df7faf5f8a32f730014719ae"}, + {file = "hiredis-2.1.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:dd82370c2f9f804ec617b95d25edb0fd04882251afb2ecdf08b9ced0c3aa4bcc"}, + {file = "hiredis-2.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:92077511d3a62109d5d11bf584e41264a993ae3c77c72de63c1f741b7809bacb"}, + {file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6544c7807cbb75bc6ae9ab85773b4413edbcd55342e9e3d7d3f159f677f7428"}, + {file = 
"hiredis-2.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8181d73f25943fbdca904154e51b845317103cee08116cfae258f96927ce1e74"}, + {file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:040f861e4e43daa9287f3a85979542f9c7ee8cfab695fa662f3b6186c6f7d5e8"}, + {file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef5ae8c1af82a8000742003cb16a6fa6c57919abb861ab214dcb27db8573ee64"}, + {file = "hiredis-2.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b9aa1b0ec46dec5b05dcec22e50bbd4af33da121fca83bd2601dc60c79183f9"}, + {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c53c36a630a6c6fd9dfe439f4266e564ca58995015a780c1d964567ebf328466"}, + {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05aab35210bd7fbd7bd066efb2a42eb5c2878c2c137a9cff597204be2c07475b"}, + {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e6097e1cef647c665f71cd0e58346389580db98365e804f7a9ad5d96e66b7150"}, + {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:32f98370efed38088d000df2eb2c8ed43d93d99bbf4a0a740e15eb4a887cc23f"}, + {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b85276ed57e0aee8910b48383a38a299851935ba134460bad394988c750985fe"}, + {file = "hiredis-2.1.0-cp310-cp310-win32.whl", hash = "sha256:bd9d99606008a8cfa6b9e950abaa35f5b87496f03e63b73197d02b0fe7ecb6d3"}, + {file = "hiredis-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:6a8e796c94b7b8c63c99757d6ec2075069e4c362dfb0f130aaf874422bea3e7d"}, + {file = "hiredis-2.1.0-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:e7bb5cab604fc45b45cee40e84e84d9e30eeb34c571a3784392ae658273bbd23"}, + {file = "hiredis-2.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e0d4b074ff5ebba00933da27a06f3752b8af2448a6aa9dc895d5279f43011530"}, + {file = "hiredis-2.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f0c2dbaffd4a9e8df04731a012c8a67b7517abec7e53bb12c3cd749865c63428"}, + {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c19151e79b36e0d849899a21fc10539aa1903af94b31754bddab1bea876cd508"}, + {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08ec41519a533f5cd1f1f8bd1797929358117c8e4570b679b469f768b45b7dbf"}, + {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f0db3667fa8abbd37ac66385b460841029033bfc1ba8d7e5b3ff1e01d3346a"}, + {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f592d1522b5981890b34b0b814f4bfa4a68b23ee90f538aac321d17e8bf859c8"}, + {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dddd2be67de25a62b3bf871f091181c13da3b32186d4be6af49dadbf6fdc266d"}, + {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4ee8f6d0774cd6179c625688201e961a2d03da212230adaa2193cfb7a04f9169"}, + {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5000942ffb6b6410ccbc87089c15fde5f48bd205664ee8b3067e6b2fb5689485"}, + {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:21e0017b8f50abd13b4c4c4218c7dfd5a42623e3255b460dfa5f70b45c4e7c3e"}, + {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = 
"sha256:40b55fb46fcc78b04190176c0ae28bfa3cc7f418fca9df06c037028af5942b6a"}, + {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:24a55169a7f0bd9458935ac644bf8191f127c8aa50cdd70c0b87928cc515cae5"}, + {file = "hiredis-2.1.0-cp311-cp311-win32.whl", hash = "sha256:bb60f79e8c1eb5971b10fd256764ea0c89c4ad2d55ac4379981f678f349411f2"}, + {file = "hiredis-2.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:b223668844f26034759a6c24a72f0bb8e4fb64a43b27e2f3e8378639eaac1661"}, + {file = "hiredis-2.1.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:7f7e7d91d6533fcb1939d467cf8bfb98640edf715897959f31ae83f5ad29aed3"}, + {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531d1d3955244831b69272b993e16f93489ce2dadfdf800ac856dc2d9a43d353"}, + {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66ffcbfc4db52dd87cdfd53bda45881ab3ab07c80ec43244fd8d70ee69d42c01"}, + {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:023b3b3ac410d6cfdb45ee943b8c528c90379f31419a1fd229888aa2b965732d"}, + {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c972385a0647120d4b0fe0e9567257cad7b2577b9f1315815713c571af0e778d"}, + {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32893825426e73d57b3290b68110dd76229945e6c79b08a37795f536501935c4"}, + {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:262148f9b616c0cdd0f2c6bda45cd0f1ce6ce2d1974efd296b85b44e5c7567c2"}, + {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d601c27b9599fe52cade3096351f92f665e527d29af8d3e29353a76bfcf5615"}, + {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d248acc7d7713c1b3d48ed8ea67d6ba43b104aa67d63078846a3590adbab6b73"}, + {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:969ffe37a8980a6e5404993ccfe605a40fa6732fa6d7b26a1a718c9121197002"}, + {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:288d5d0566d3cbcd800e46c7a547428d321842898b8c7de037a7e78b5644e88a"}, + {file = "hiredis-2.1.0-cp37-cp37m-win32.whl", hash = "sha256:06cb776d3cd3cbec86010f1bab6895ee16af8036aae8c3594a5e96c24f0f83a5"}, + {file = "hiredis-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6766376dc43ef186113422ecacec0ece0d4b12c0e5f4b556669e639b20ccabb1"}, + {file = "hiredis-2.1.0-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:41afba30304adcbe1c93fc8272a7169b7fc4e4d3d470ad8babd391678a519d76"}, + {file = "hiredis-2.1.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6df0115f8b0766cd3d12416e2e2e914efed5b1a1a27605c9f37bc92de086877a"}, + {file = "hiredis-2.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d7d7078f3b841ad86e35459e9f1a49db6d793b796a25fe866333166196d9fec"}, + {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:835c4cbf8b38c83240b3eb9bd575cd1bfefe5ea5c46cc5ac2bf2d1f47d1fd696"}, + {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:718589c48e97820bdc2a99e2621b5039884cc23199213756054d10cd309ad56c"}, + {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2d96be6917ea8f753691a4674f682dd5e145b70edab28c05aa5552ae873e843"}, + {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b5fe1bb4b1525751f3050337097b3b2bfe445836e59a5a0984928dd0797f9abf"}, + {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91dc73310b92b4aeccffdcd4a762955fe71380f5eaa4e242ee95019e41519101"}, + {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bb858218de60a930a164a991fff001c70b0c3d923d3ae40fef2acf3321126b00"}, + {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:53040c3b3488b52f4609775453fc759262f2885b733150ee2e1d88257fdafed8"}, + {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1c9b7d6d7bf35e1e2217b2847710154b11d25bf86b77bb7e190161f8b89917e"}, + {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:dfbe939fdddbc7b90cab4124f3ddd6391099fb964f6dab3386aa8cf56f37b5ba"}, + {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a51cb4ea466276a845a940931357b4a876f903eabde514ba95e45050e1c2150"}, + {file = "hiredis-2.1.0-cp38-cp38-win32.whl", hash = "sha256:8bce4c687136bf13df76072072b9baadbd52f7d1b143fbbda96387f50e8ebaeb"}, + {file = "hiredis-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:1f94684b13fbbee1239303018d5ea900d786e486cdb130cde3144d53f4e262e4"}, + {file = "hiredis-2.1.0-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:879668ffab582bdffd9f10f6c8797aac055db183f266e3aa3a6438ff0768bc29"}, + {file = "hiredis-2.1.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f1d5a99de0fd02438f251e50ec64936d22d542c8e5d80bdec236f9713eeef334"}, + {file = "hiredis-2.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab622bcddcf334b4b1fc4b22e163e93160e3afdd7feaedd77ac6f258e0c77b68"}, + {file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:964c4f23ff450fb8d73edf06fc7475a4e81a3f9b03a9a04a907ec81c84052fcf"}, + {file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9f8b8daef346ffc0268d7086c213ab24c2a3fcbd4249eacfbb3635602c79d20"}, + {file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e2039cdaa2e6656eae4a2e2537ed77e27f29b7487b97ce7ae6a3cb88d01b968"}, + {file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43d3168da0a81fa0a9e4bc6e14316beac8e5f1b439ca5cc5af7f9a558cfba741"}, + {file = "hiredis-2.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0021ba034b74c5006f62e4cfdd79d04c7c720731eda256ce29d769ac6483adc3"}, + {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:39a1bb45bcd698baf70ad4e9a94af164525bf053caea7df3777172d20d69538a"}, + {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c1b636b05777536a83b4cced157cbdc2d0012d494a9ec2f7b7e07c54296cd773"}, + {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:58a7ceb71f967fcc1878fb64666a12fbc5f243ab00d0653d3752a811941d8261"}, + {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c5263c676dc4d55202e7ca0429b949fc6ba7c0dd3a3a2b80538593ab27d82836"}, + {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b5879d13025b04903ddf71921812db27fe1156a0952ad253014354d72463aaa9"}, + {file = "hiredis-2.1.0-cp39-cp39-win32.whl", hash = "sha256:9259f637d77544ffeb97acb0a87fdd192a8aced7a2fbd7439160dbee8341d446"}, + {file = "hiredis-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fb818b6e0981e16dfdfc9e507c9842f8d210e6ecaf3edb8ac3039dbd24768839"}, + 
{file = "hiredis-2.1.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:648d4648bf6b3dcc418a974df143b2f96627ab8b50bda23a57759c273880ecfb"}, + {file = "hiredis-2.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:654949cfc0bc76a5292b6ac111113b2eafb0739e0496495368981ea2e80bf4ec"}, + {file = "hiredis-2.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2a98b835c2088998a47da51b1b3661b587b2d4b3305d03fc9893888cc2aa54"}, + {file = "hiredis-2.1.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7222bd9243387d778245619d0ac62d35cf72ee746ec0efb7b9b230ae3e0c3a39"}, + {file = "hiredis-2.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:778f6de73c3abd67d447a3442f89e7d43a8de1eb5093f416af14dddc1d5c9cb5"}, + {file = "hiredis-2.1.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c4cfb61fe642f30a22789055847004393bc65b5686988c64191e379ea4ccd069"}, + {file = "hiredis-2.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03b6bef7eb50415aca87200a511d66a2fd69f1fcc75cfe1408e1201cbe28ddfb"}, + {file = "hiredis-2.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3195e13a700f6ff35894c4920fcce8f6c2b01cdbc01f76fe567753c495849e9b"}, + {file = "hiredis-2.1.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19f724405c808a89db422ed1010caab80a16d3e5b49632356ae7912513b6d58e"}, + {file = "hiredis-2.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8ecebeff966b412138b0cd105d7572f8d5e65e96355af699863890f8370707e6"}, + {file = "hiredis-2.1.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4f34eefaf164bf43b29ccc809c168248eb95001837ed0e9e3279891f57ae2fab"}, + {file = "hiredis-2.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11fad16beb9d623ea423c9129bab0e392ea4c84363d61c125f679be3d029442f"}, + {file = "hiredis-2.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c763eb9a1414c4d665945c70ae2ef74a843600667b0069fe90e2aabc78e5411"}, + {file = "hiredis-2.1.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edb7f156a8f8a1999574f27bda67dd2bff2d5b180bb6aed996a1792cafbcc668"}, + {file = "hiredis-2.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e057d5545189d4c9e22ae0f7dc283ea0a225f56999511022c062cce7f9589d69"}, ] hyperlink = [ {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, From adbf0cffc4a0995d655e0abc1ca29650b08b480a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Dec 2022 14:15:29 +0000 Subject: [PATCH 16/82] Bump sentry-sdk from 1.11.1 to 1.12.0 (#14701) * Bump sentry-sdk from 1.11.1 to 1.12.0 Bumps [sentry-sdk](https://github.com/getsentry/sentry-python) from 1.11.1 to 1.12.0. - [Release notes](https://github.com/getsentry/sentry-python/releases) - [Changelog](https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md) - [Commits](https://github.com/getsentry/sentry-python/compare/1.11.1...1.12.0) --- updated-dependencies: - dependency-name: sentry-sdk dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/14701.misc | 1 + poetry.lock | 7 ++++--- 2 files changed, 5 insertions(+), 3 deletions(-) create mode 100644 changelog.d/14701.misc diff --git a/changelog.d/14701.misc b/changelog.d/14701.misc new file mode 100644 index 000000000000..05c89d5948f4 --- /dev/null +++ b/changelog.d/14701.misc @@ -0,0 +1 @@ +Bump sentry-sdk from 1.11.1 to 1.12.0. diff --git a/poetry.lock b/poetry.lock index a870d8af5709..3142ce506fc7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1070,7 +1070,7 @@ doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "sentry-sdk" -version = "1.11.1" +version = "1.12.0" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = true @@ -1091,6 +1091,7 @@ falcon = ["falcon (>=1.4)"] fastapi = ["fastapi (>=0.79.0)"] flask = ["blinker (>=1.1)", "flask (>=0.11)"] httpx = ["httpx (>=0.16.0)"] +opentelemetry = ["opentelemetry-distro (>=0.350b0)"] pure-eval = ["asttokens", "executing", "pure-eval"] pymongo = ["pymongo (>=3.1)"] pyspark = ["pyspark (>=2.4.4)"] @@ -2610,8 +2611,8 @@ semantic-version = [ {file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"}, ] sentry-sdk = [ - {file = "sentry-sdk-1.11.1.tar.gz", hash = "sha256:675f6279b6bb1fea09fd61751061f9a90dca3b5929ef631dd50dc8b3aeb245e9"}, - {file = "sentry_sdk-1.11.1-py2.py3-none-any.whl", hash = "sha256:8b4ff696c0bdcceb3f70bbb87a57ba84fd3168b1332d493fcd16c137f709578c"}, + {file = "sentry-sdk-1.12.0.tar.gz", hash = "sha256:dc0fe6ef2f77a3853b399c75c97d87be7666098817c1c314f8fcdf68a6865914"}, + {file = "sentry_sdk-1.12.0-py2.py3-none-any.whl", hash = "sha256:3c9bc64025976842c1103cd75d45cff94a7c0cc48fa07770d07a09d2ab8dac30"}, ] service-identity = [ {file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"}, From 2888d7ec83b33b3ce848d9219c921ffe0b88ffbf Mon Sep 17 00:00:00 2001 From: reivilibre Date: Mon, 19 Dec 2022 14:57:51 +0000 Subject: [PATCH 17/82] Faster remote room joins: invalidate caches and unblock requests when receiving un-partial-stated event notifications over replication. [rei:frrj/streams/unpsr] (#14546) --- changelog.d/14546.misc | 1 + synapse/replication/tcp/client.py | 14 +++++++++- .../storage/databases/main/events_worker.py | 27 ++++++++++--------- synapse/storage/databases/main/state.py | 18 ++++++++++++- 4 files changed, 46 insertions(+), 14 deletions(-) create mode 100644 changelog.d/14546.misc diff --git a/changelog.d/14546.misc b/changelog.d/14546.misc new file mode 100644 index 000000000000..60b6761a51b3 --- /dev/null +++ b/changelog.d/14546.misc @@ -0,0 +1 @@ +Faster remote room joins: stream the un-partial-stating of events over replication. 
\ No newline at end of file diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py index b4dad47b45ad..658d89210d31 100644 --- a/synapse/replication/tcp/client.py +++ b/synapse/replication/tcp/client.py @@ -36,6 +36,7 @@ TagAccountDataStream, ToDeviceStream, TypingStream, + UnPartialStatedEventStream, UnPartialStatedRoomStream, ) from synapse.replication.tcp.streams.events import ( @@ -43,7 +44,10 @@ EventsStreamEventRow, EventsStreamRow, ) -from synapse.replication.tcp.streams.partial_state import UnPartialStatedRoomStreamRow +from synapse.replication.tcp.streams.partial_state import ( + UnPartialStatedEventStreamRow, + UnPartialStatedRoomStreamRow, +) from synapse.types import PersistedEventPosition, ReadReceipt, StreamKeyType, UserID from synapse.util.async_helpers import Linearizer, timeout_deferred from synapse.util.metrics import Measure @@ -247,6 +251,14 @@ async def on_rdata( self._state_storage_controller.notify_room_un_partial_stated( row.room_id ) + elif stream_name == UnPartialStatedEventStream.NAME: + for row in rows: + assert isinstance(row, UnPartialStatedEventStreamRow) + + # Wake up any tasks waiting for the event to be un-partial-stated. + self._state_storage_controller.notify_event_un_partial_stated( + row.event_id + ) await self._presence_handler.process_replication_rows( stream_name, instance_name, token, rows diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index e19b16064b16..761b15a8150a 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -59,8 +59,9 @@ run_as_background_process, wrap_as_background_process, ) -from synapse.replication.tcp.streams import BackfillStream +from synapse.replication.tcp.streams import BackfillStream, UnPartialStatedEventStream from synapse.replication.tcp.streams.events import EventsStream +from synapse.replication.tcp.streams.partial_state import UnPartialStatedEventStreamRow from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause from synapse.storage.database import ( DatabasePool, @@ -391,6 +392,16 @@ def process_replication_rows( self._stream_id_gen.advance(instance_name, token) elif stream_name == BackfillStream.NAME: self._backfill_id_gen.advance(instance_name, -token) + elif stream_name == UnPartialStatedEventStream.NAME: + for row in rows: + assert isinstance(row, UnPartialStatedEventStreamRow) + + self.is_partial_state_event.invalidate((row.event_id,)) + + if row.rejection_status_changed: + # If the partial-stated event became rejected or unrejected + # when it wasn't before, we need to invalidate this cache. + self._invalidate_local_get_event_cache(row.event_id) super().process_replication_rows(stream_name, instance_name, token, rows) @@ -2380,6 +2391,9 @@ def mark_event_rejected_txn( This can happen, for example, when resyncing state during a faster join. + It is the caller's responsibility to ensure that other workers are + sent a notification so that they call `_invalidate_local_get_event_cache()`. + Args: txn: event_id: ID of event to update @@ -2418,14 +2432,3 @@ def mark_event_rejected_txn( ) self.invalidate_get_event_cache_after_txn(txn, event_id) - - # TODO(faster_joins): invalidate the cache on workers. Ideally we'd just - # call '_send_invalidation_to_replication', but we actually need the other - # end to call _invalidate_local_get_event_cache() rather than (just) - # _get_event_cache.invalidate(). 
- # - # One solution might be to (somehow) get the workers to call - # _invalidate_caches_for_event() (though that will invalidate more than - # strictly necessary). - # - # https://github.com/matrix-org/synapse/issues/12994 diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py index f855903c390c..f32cbb2decd8 100644 --- a/synapse/storage/databases/main/state.py +++ b/synapse/storage/databases/main/state.py @@ -14,7 +14,7 @@ # limitations under the License. import collections.abc import logging -from typing import TYPE_CHECKING, Collection, Dict, Iterable, Optional, Set, Tuple +from typing import TYPE_CHECKING, Any, Collection, Dict, Iterable, Optional, Set, Tuple import attr @@ -24,6 +24,8 @@ from synapse.events import EventBase from synapse.events.snapshot import EventContext from synapse.logging.opentracing import trace +from synapse.replication.tcp.streams import UnPartialStatedEventStream +from synapse.replication.tcp.streams.partial_state import UnPartialStatedEventStreamRow from synapse.storage._base import SQLBaseStore from synapse.storage.database import ( DatabasePool, @@ -82,6 +84,20 @@ def __init__( super().__init__(database, db_conn, hs) self._instance_name: str = hs.get_instance_name() + def process_replication_rows( + self, + stream_name: str, + instance_name: str, + token: int, + rows: Iterable[Any], + ) -> None: + if stream_name == UnPartialStatedEventStream.NAME: + for row in rows: + assert isinstance(row, UnPartialStatedEventStreamRow) + self._get_state_group_for_event.invalidate((row.event_id,)) + + super().process_replication_rows(stream_name, instance_name, token, rows) + async def get_room_version(self, room_id: str) -> RoomVersion: """Get the room_version of a given room Raises: From 4f1eba469c7d3cf3d94eb67cdd6548c480104714 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Dec 2022 16:00:05 +0000 Subject: [PATCH 18/82] Bump types-setuptools from 65.6.0.1 to 65.6.0.2 (#14702) * Bump types-setuptools from 65.6.0.1 to 65.6.0.2 Bumps [types-setuptools](https://github.com/python/typeshed) from 65.6.0.1 to 65.6.0.2. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-setuptools dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: reivilibre --- changelog.d/14702.misc | 1 + poetry.lock | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/14702.misc diff --git a/changelog.d/14702.misc b/changelog.d/14702.misc new file mode 100644 index 000000000000..17c0485f1217 --- /dev/null +++ b/changelog.d/14702.misc @@ -0,0 +1 @@ +Bump types-setuptools from 65.6.0.1 to 65.6.0.2. 
diff --git a/poetry.lock b/poetry.lock index 3142ce506fc7..276388902f5d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1481,7 +1481,7 @@ types-urllib3 = "<1.27" [[package]] name = "types-setuptools" -version = "65.6.0.1" +version = "65.6.0.2" description = "Typing stubs for setuptools" category = "dev" optional = false @@ -2871,8 +2871,8 @@ types-requests = [ {file = "types_requests-2.28.11.5-py3-none-any.whl", hash = "sha256:091d4a5a33c1b4f20d8b1b952aa8fa27a6e767c44c3cf65e56580df0b05fd8a9"}, ] types-setuptools = [ - {file = "types-setuptools-65.6.0.1.tar.gz", hash = "sha256:a03cf72f336929c9405f485dd90baef31a401776675f785f69a5a519f0b099ca"}, - {file = "types_setuptools-65.6.0.1-py3-none-any.whl", hash = "sha256:c957599502195ab98e90f0560466fa963f6a23373905e6d4e1772dbfaf1e44b7"}, + {file = "types-setuptools-65.6.0.2.tar.gz", hash = "sha256:ad60ccf01d626de9762224448f36c13e0660e863afd6dc11d979b3739a6c7d24"}, + {file = "types_setuptools-65.6.0.2-py3-none-any.whl", hash = "sha256:2c2b4f756f79778074ce2d21f745aa737b12160d9f8dfa274f47a7287c7a2fee"}, ] types-urllib3 = [ {file = "types-urllib3-1.26.10.tar.gz", hash = "sha256:a26898f530e6c3f43f25b907f2b884486868ffd56a9faa94cbf9b3eb6e165d6a"}, From 08a881dba42a8ba1765327822edba9d1ea03be07 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Dec 2022 16:00:27 +0000 Subject: [PATCH 19/82] Bump types-jsonschema from 4.17.0.1 to 4.17.0.2 (#14700) * Bump types-jsonschema from 4.17.0.1 to 4.17.0.2 Bumps [types-jsonschema](https://github.com/python/typeshed) from 4.17.0.1 to 4.17.0.2. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-jsonschema dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: reivilibre --- changelog.d/14700.misc | 1 + poetry.lock | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/14700.misc diff --git a/changelog.d/14700.misc b/changelog.d/14700.misc new file mode 100644 index 000000000000..253eb1721dcb --- /dev/null +++ b/changelog.d/14700.misc @@ -0,0 +1 @@ +Bump types-jsonschema from 4.17.0.1 to 4.17.0.2. 
diff --git a/poetry.lock b/poetry.lock index 276388902f5d..1412cfc32643 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1419,7 +1419,7 @@ python-versions = "*" [[package]] name = "types-jsonschema" -version = "4.17.0.1" +version = "4.17.0.2" description = "Typing stubs for jsonschema" category = "dev" optional = false @@ -2843,8 +2843,8 @@ types-ipaddress = [ {file = "types_ipaddress-1.0.8-py3-none-any.whl", hash = "sha256:4933b74da157ba877b1a705d64f6fa7742745e9ffd65e51011f370c11ebedb55"}, ] types-jsonschema = [ - {file = "types-jsonschema-4.17.0.1.tar.gz", hash = "sha256:62625d492e4930411a431909ac32301aeab6180500e70ee222f81d43204cfb3c"}, - {file = "types_jsonschema-4.17.0.1-py3-none-any.whl", hash = "sha256:77badbe3881cbf79ac9561be2be2b1f37ab104b13afd2231840e6dd6e94e63c2"}, + {file = "types-jsonschema-4.17.0.2.tar.gz", hash = "sha256:8b9e1140d4d780f0f19b5cab1b8a3732e8dd5e49dbc1f174cc0b499125ca6f6c"}, + {file = "types_jsonschema-4.17.0.2-py3-none-any.whl", hash = "sha256:8fd2f9aea4da54f9a811baa6963aac10fd680c18baa6237392c079b97d152738"}, ] types-opentracing = [ {file = "types-opentracing-2.4.10.tar.gz", hash = "sha256:6101414f3b6d3b9c10f1c510a261e8439b6c8d67c723d5c2872084697b4580a7"}, From e8884cc282408610489be9bcf217b7dda2d2a27b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Dec 2022 16:01:02 +0000 Subject: [PATCH 20/82] Bump serde from 1.0.150 to 1.0.151 (#14697) * Bump serde from 1.0.150 to 1.0.151 Bumps [serde](https://github.com/serde-rs/serde) from 1.0.150 to 1.0.151. - [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.150...v1.0.151) --- updated-dependencies: - dependency-name: serde dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: reivilibre --- Cargo.lock | 8 ++++---- changelog.d/14697.misc | 1 + 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelog.d/14697.misc diff --git a/Cargo.lock b/Cargo.lock index 8abbaeee387b..f0d2cda33691 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -323,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "serde" -version = "1.0.150" +version = "1.0.151" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e326c9ec8042f1b5da33252c8a37e9ffbd2c9bef0155215b6e6c80c790e05f91" +checksum = "97fed41fc1a24994d044e6db6935e69511a1153b52c15eb42493b26fa87feba0" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.150" +version = "1.0.151" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42a3df25b0713732468deadad63ab9da1f1fd75a48a15024b50363f128db627e" +checksum = "255abe9a125a985c05190d687b320c12f9b1f0b99445e608c21ba0782c719ad8" dependencies = [ "proc-macro2", "quote", diff --git a/changelog.d/14697.misc b/changelog.d/14697.misc new file mode 100644 index 000000000000..514209fcc3c0 --- /dev/null +++ b/changelog.d/14697.misc @@ -0,0 +1 @@ +Bump serde from 1.0.150 to 1.0.151. 
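A note on the replication pattern in [PATCH 17/82] above: when the worker resyncing a partial-state room finishes computing an event's full state, it writes a row to the un-partial-stated event stream, and every other worker reacts to that row by invalidating its local caches for the event and waking any request blocked waiting for full state. Below is a minimal, self-contained sketch of that shape, not Synapse's actual code: `UnPartialStatedEventRow`, `WorkerStore`, `await_full_state`, and the stream name string are illustrative stand-ins for `UnPartialStatedEventStreamRow`, the store mixins, and the notify/wait plumbing in the patch.

import asyncio
from dataclasses import dataclass, field
from typing import Dict, Iterable, Set


@dataclass(frozen=True)
class UnPartialStatedEventRow:
    # Illustrative stand-in for UnPartialStatedEventStreamRow.
    event_id: str
    rejection_status_changed: bool


@dataclass
class WorkerStore:
    # Per-process caches that can go stale while an event still has
    # partial state.
    _partial_state_event_cache: Set[str] = field(default_factory=set)
    _event_cache: Dict[str, object] = field(default_factory=dict)
    # One waiter per event_id that some request is blocked on.
    _waiters: Dict[str, asyncio.Event] = field(default_factory=dict)

    async def await_full_state(self, event_id: str) -> None:
        # Request handlers call this when they cannot make progress until
        # the event has been un-partial-stated.
        await self._waiters.setdefault(event_id, asyncio.Event()).wait()

    def process_replication_rows(
        self, stream_name: str, rows: Iterable[UnPartialStatedEventRow]
    ) -> None:
        # Invoked for each batch of rows arriving over replication.
        if stream_name != "un_partial_stated_event":  # assumed stream name
            return
        for row in rows:
            # Drop the cached "does this event have partial state?" answer.
            self._partial_state_event_cache.discard(row.event_id)

            if row.rejection_status_changed:
                # The resync may have flipped the event between rejected and
                # unrejected, so a cached copy of the event itself is stale
                # as well.
                self._event_cache.pop(row.event_id, None)

            # Wake up anything blocked in await_full_state().
            waiter = self._waiters.pop(row.event_id, None)
            if waiter is not None:
                waiter.set()

The docstring added to `mark_event_rejected_txn` in the same patch spells out the other half of this contract: the writer only fixes up its own caches inside the transaction, and it is the caller's responsibility to send the notification so that remote workers run their local invalidation.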
From a17c4e05904c8b9f155d429ad6f7dae9e1f36acf Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Mon, 19 Dec 2022 16:44:09 +0000 Subject: [PATCH 21/82] Add the `.direnv/` directory to .gitignore (#14707) --- .gitignore | 1 + changelog.d/14707.misc | 1 + 2 files changed, 2 insertions(+) create mode 100644 changelog.d/14707.misc diff --git a/.gitignore b/.gitignore index 15fbfdddf195..2b09bddf18f6 100644 --- a/.gitignore +++ b/.gitignore @@ -36,6 +36,7 @@ __pycache__/ # For direnv users /.envrc +.direnv/ # IDEs /.idea/ diff --git a/changelog.d/14707.misc b/changelog.d/14707.misc new file mode 100644 index 000000000000..38f47a6f307d --- /dev/null +++ b/changelog.d/14707.misc @@ -0,0 +1 @@ +Add `.direnv/` directory to .gitignore to prevent local state generated by the [direnv](https://direnv.net/) development tool from being committed. \ No newline at end of file From 4de951180daf1f1a9192429c83e3920ac47fd4c1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Dec 2022 16:47:56 +0000 Subject: [PATCH 22/82] Bump anyhow from 1.0.66 to 1.0.68 (#14694) * Bump anyhow from 1.0.66 to 1.0.68 Bumps [anyhow](https://github.com/dtolnay/anyhow) from 1.0.66 to 1.0.68. - [Release notes](https://github.com/dtolnay/anyhow/releases) - [Commits](https://github.com/dtolnay/anyhow/compare/1.0.66...1.0.68) --- updated-dependencies: - dependency-name: anyhow dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: reivilibre --- Cargo.lock | 4 ++-- changelog.d/14694.misc | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/14694.misc diff --git a/Cargo.lock b/Cargo.lock index f0d2cda33691..9c8941ae6a4c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -13,9 +13,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.66" +version = "1.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6" +checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61" [[package]] name = "arc-swap" diff --git a/changelog.d/14694.misc b/changelog.d/14694.misc new file mode 100644 index 000000000000..146238d8c50b --- /dev/null +++ b/changelog.d/14694.misc @@ -0,0 +1 @@ +Bump anyhow from 1.0.66 to 1.0.68. From af347e4d69843b870c7f7546f89d0b7c20c66606 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Dec 2022 16:48:06 +0000 Subject: [PATCH 23/82] Bump serde_json from 1.0.89 to 1.0.91 (#14696) * Bump serde_json from 1.0.89 to 1.0.91 Bumps [serde_json](https://github.com/serde-rs/json) from 1.0.89 to 1.0.91. - [Release notes](https://github.com/serde-rs/json/releases) - [Commits](https://github.com/serde-rs/json/compare/v1.0.89...v1.0.91) --- updated-dependencies: - dependency-name: serde_json dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: reivilibre --- Cargo.lock | 4 ++-- changelog.d/14696.misc | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/14696.misc diff --git a/Cargo.lock b/Cargo.lock index 9c8941ae6a4c..c249ec56f709 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -343,9 +343,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.89" +version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "020ff22c755c2ed3f8cf162dbb41a7268d934702f3ed3631656ea597e08fc3db" +checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" dependencies = [ "itoa", "ryu", diff --git a/changelog.d/14696.misc b/changelog.d/14696.misc new file mode 100644 index 000000000000..9849366b9f25 --- /dev/null +++ b/changelog.d/14696.misc @@ -0,0 +1 @@ +Bump serde_json from 1.0.89 to 1.0.91. From 4be998add4bb4671cbbafb477a0af149cfc3d59f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Dec 2022 16:48:20 +0000 Subject: [PATCH 24/82] Bump lxml from 4.9.1 to 4.9.2 (#14698) * Bump lxml from 4.9.1 to 4.9.2 Bumps [lxml](https://github.com/lxml/lxml) from 4.9.1 to 4.9.2. - [Release notes](https://github.com/lxml/lxml/releases) - [Changelog](https://github.com/lxml/lxml/blob/master/CHANGES.txt) - [Commits](https://github.com/lxml/lxml/compare/lxml-4.9.1...lxml-4.9.2) --- updated-dependencies: - dependency-name: lxml dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: reivilibre --- changelog.d/14698.misc | 1 + poetry.lock | 147 +++++++++++++++++++++-------------------- 2 files changed, 77 insertions(+), 71 deletions(-) create mode 100644 changelog.d/14698.misc diff --git a/changelog.d/14698.misc b/changelog.d/14698.misc new file mode 100644 index 000000000000..2e2072183ef6 --- /dev/null +++ b/changelog.d/14698.misc @@ -0,0 +1 @@ +Bump lxml from 4.9.1 to 4.9.2. diff --git a/poetry.lock b/poetry.lock index 1412cfc32643..3fbad339a320 100644 --- a/poetry.lock +++ b/poetry.lock @@ -501,7 +501,7 @@ pyasn1 = ">=0.4.6" [[package]] name = "lxml" -version = "4.9.1" +version = "4.9.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
category = "main" optional = true @@ -2068,76 +2068,81 @@ ldap3 = [ {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"}, ] lxml = [ - {file = "lxml-4.9.1-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc"}, - {file = "lxml-4.9.1-cp27-cp27m-win32.whl", hash = "sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3"}, - {file = "lxml-4.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d"}, - {file = "lxml-4.9.1-cp310-cp310-win32.whl", hash = "sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7"}, - {file = "lxml-4.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3"}, - {file = "lxml-4.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318"}, - {file = "lxml-4.9.1-cp35-cp35m-win32.whl", hash = "sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7"}, - {file = "lxml-4.9.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4"}, - {file = "lxml-4.9.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = 
"sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391"}, - {file = "lxml-4.9.1-cp36-cp36m-win32.whl", hash = "sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e"}, - {file = "lxml-4.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7"}, - {file = "lxml-4.9.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785"}, - {file = "lxml-4.9.1-cp37-cp37m-win32.whl", hash = "sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a"}, - {file = "lxml-4.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e"}, - {file = "lxml-4.9.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21"}, - 
{file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387"}, - {file = "lxml-4.9.1-cp38-cp38-win32.whl", hash = "sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94"}, - {file = "lxml-4.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345"}, - {file = "lxml-4.9.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd"}, - {file = "lxml-4.9.1-cp39-cp39-win32.whl", hash = "sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb"}, - {file = "lxml-4.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0645e934e940107e2fdbe7c5b6fb8ec6232444260752598bc4d09511bd056c0b"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2"}, - {file = 
"lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0f3f0059891d3254c7b5fb935330d6db38d6519ecd238ca4fce93c234b4a0f73"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9"}, - {file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"}, + {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"}, + {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"}, + {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"}, + {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"}, + {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"}, + {file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"}, + {file = 
"lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"}, + {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"}, + {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, + {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, + {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, + {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, + {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"}, + {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"}, + {file = "lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"}, + {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"}, + {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"}, + {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"}, + {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"}, + {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"}, + {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"}, + {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"}, ] markupsafe = [ {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c"}, From ec656be48044419107ee670b88c89cc4f510060e Mon Sep 17 00:00:00 2001 From: reivilibre Date: Wed, 21 Dec 2022 12:28:13 +0000 Subject: [PATCH 25/82] Revert update of hiredis in Poetry lockfile: revert from 2.1.0 to 2.0.0. (#14718) * Revert "Bump hiredis from 2.0.0 to 2.1.0 (#14699)" This reverts commit 9c89707b56908e755de546c7d390637cecb53159. * Newsfile Signed-off-by: Olivier Wilkinson (reivilibre) Signed-off-by: Olivier Wilkinson (reivilibre) --- changelog.d/14699.misc | 1 - changelog.d/14718.misc | 1 + poetry.lock | 133 +++++++++++++---------------------------- 3 files changed, 44 insertions(+), 91 deletions(-) delete mode 100644 changelog.d/14699.misc create mode 100644 changelog.d/14718.misc diff --git a/changelog.d/14699.misc b/changelog.d/14699.misc deleted file mode 100644 index d73ef25634b8..000000000000 --- a/changelog.d/14699.misc +++ /dev/null @@ -1 +0,0 @@ -Bump hiredis from 2.0.0 to 2.1.0. diff --git a/changelog.d/14718.misc b/changelog.d/14718.misc new file mode 100644 index 000000000000..cda3ededd16e --- /dev/null +++ b/changelog.d/14718.misc @@ -0,0 +1 @@ +Revert update of hiredis in Poetry lockfile: revert from 2.1.0 to 2.0.0. 
\ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 3fbad339a320..9a9a141a14b7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -318,11 +318,11 @@ typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\"" [[package]] name = "hiredis" -version = "2.1.0" +version = "2.0.0" description = "Python wrapper for hiredis" category = "main" optional = true -python-versions = ">=3.7" +python-versions = ">=3.6" [[package]] name = "hyperlink" @@ -1867,94 +1867,47 @@ gitpython = [ {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, ] hiredis = [ - {file = "hiredis-2.1.0-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:7b339a7542a3f6a10b3bbc157e4abc9bae9628e2df7faf5f8a32f730014719ae"}, - {file = "hiredis-2.1.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:dd82370c2f9f804ec617b95d25edb0fd04882251afb2ecdf08b9ced0c3aa4bcc"}, - {file = "hiredis-2.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:92077511d3a62109d5d11bf584e41264a993ae3c77c72de63c1f741b7809bacb"}, - {file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6544c7807cbb75bc6ae9ab85773b4413edbcd55342e9e3d7d3f159f677f7428"}, - {file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8181d73f25943fbdca904154e51b845317103cee08116cfae258f96927ce1e74"}, - {file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:040f861e4e43daa9287f3a85979542f9c7ee8cfab695fa662f3b6186c6f7d5e8"}, - {file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef5ae8c1af82a8000742003cb16a6fa6c57919abb861ab214dcb27db8573ee64"}, - {file = "hiredis-2.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b9aa1b0ec46dec5b05dcec22e50bbd4af33da121fca83bd2601dc60c79183f9"}, - {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c53c36a630a6c6fd9dfe439f4266e564ca58995015a780c1d964567ebf328466"}, - {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05aab35210bd7fbd7bd066efb2a42eb5c2878c2c137a9cff597204be2c07475b"}, - {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e6097e1cef647c665f71cd0e58346389580db98365e804f7a9ad5d96e66b7150"}, - {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:32f98370efed38088d000df2eb2c8ed43d93d99bbf4a0a740e15eb4a887cc23f"}, - {file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b85276ed57e0aee8910b48383a38a299851935ba134460bad394988c750985fe"}, - {file = "hiredis-2.1.0-cp310-cp310-win32.whl", hash = "sha256:bd9d99606008a8cfa6b9e950abaa35f5b87496f03e63b73197d02b0fe7ecb6d3"}, - {file = "hiredis-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:6a8e796c94b7b8c63c99757d6ec2075069e4c362dfb0f130aaf874422bea3e7d"}, - {file = "hiredis-2.1.0-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:e7bb5cab604fc45b45cee40e84e84d9e30eeb34c571a3784392ae658273bbd23"}, - {file = "hiredis-2.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e0d4b074ff5ebba00933da27a06f3752b8af2448a6aa9dc895d5279f43011530"}, - {file = "hiredis-2.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f0c2dbaffd4a9e8df04731a012c8a67b7517abec7e53bb12c3cd749865c63428"}, - {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c19151e79b36e0d849899a21fc10539aa1903af94b31754bddab1bea876cd508"}, - {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08ec41519a533f5cd1f1f8bd1797929358117c8e4570b679b469f768b45b7dbf"}, - {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f0db3667fa8abbd37ac66385b460841029033bfc1ba8d7e5b3ff1e01d3346a"}, - {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f592d1522b5981890b34b0b814f4bfa4a68b23ee90f538aac321d17e8bf859c8"}, - {file = "hiredis-2.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dddd2be67de25a62b3bf871f091181c13da3b32186d4be6af49dadbf6fdc266d"}, - {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4ee8f6d0774cd6179c625688201e961a2d03da212230adaa2193cfb7a04f9169"}, - {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5000942ffb6b6410ccbc87089c15fde5f48bd205664ee8b3067e6b2fb5689485"}, - {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:21e0017b8f50abd13b4c4c4218c7dfd5a42623e3255b460dfa5f70b45c4e7c3e"}, - {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:40b55fb46fcc78b04190176c0ae28bfa3cc7f418fca9df06c037028af5942b6a"}, - {file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:24a55169a7f0bd9458935ac644bf8191f127c8aa50cdd70c0b87928cc515cae5"}, - {file = "hiredis-2.1.0-cp311-cp311-win32.whl", hash = "sha256:bb60f79e8c1eb5971b10fd256764ea0c89c4ad2d55ac4379981f678f349411f2"}, - {file = "hiredis-2.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:b223668844f26034759a6c24a72f0bb8e4fb64a43b27e2f3e8378639eaac1661"}, - {file = "hiredis-2.1.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:7f7e7d91d6533fcb1939d467cf8bfb98640edf715897959f31ae83f5ad29aed3"}, - {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531d1d3955244831b69272b993e16f93489ce2dadfdf800ac856dc2d9a43d353"}, - {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66ffcbfc4db52dd87cdfd53bda45881ab3ab07c80ec43244fd8d70ee69d42c01"}, - {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:023b3b3ac410d6cfdb45ee943b8c528c90379f31419a1fd229888aa2b965732d"}, - {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c972385a0647120d4b0fe0e9567257cad7b2577b9f1315815713c571af0e778d"}, - {file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32893825426e73d57b3290b68110dd76229945e6c79b08a37795f536501935c4"}, - {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:262148f9b616c0cdd0f2c6bda45cd0f1ce6ce2d1974efd296b85b44e5c7567c2"}, - {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d601c27b9599fe52cade3096351f92f665e527d29af8d3e29353a76bfcf5615"}, - {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d248acc7d7713c1b3d48ed8ea67d6ba43b104aa67d63078846a3590adbab6b73"}, - {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:969ffe37a8980a6e5404993ccfe605a40fa6732fa6d7b26a1a718c9121197002"}, - {file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:288d5d0566d3cbcd800e46c7a547428d321842898b8c7de037a7e78b5644e88a"}, - {file = 
"hiredis-2.1.0-cp37-cp37m-win32.whl", hash = "sha256:06cb776d3cd3cbec86010f1bab6895ee16af8036aae8c3594a5e96c24f0f83a5"}, - {file = "hiredis-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6766376dc43ef186113422ecacec0ece0d4b12c0e5f4b556669e639b20ccabb1"}, - {file = "hiredis-2.1.0-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:41afba30304adcbe1c93fc8272a7169b7fc4e4d3d470ad8babd391678a519d76"}, - {file = "hiredis-2.1.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6df0115f8b0766cd3d12416e2e2e914efed5b1a1a27605c9f37bc92de086877a"}, - {file = "hiredis-2.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d7d7078f3b841ad86e35459e9f1a49db6d793b796a25fe866333166196d9fec"}, - {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:835c4cbf8b38c83240b3eb9bd575cd1bfefe5ea5c46cc5ac2bf2d1f47d1fd696"}, - {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:718589c48e97820bdc2a99e2621b5039884cc23199213756054d10cd309ad56c"}, - {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2d96be6917ea8f753691a4674f682dd5e145b70edab28c05aa5552ae873e843"}, - {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5fe1bb4b1525751f3050337097b3b2bfe445836e59a5a0984928dd0797f9abf"}, - {file = "hiredis-2.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91dc73310b92b4aeccffdcd4a762955fe71380f5eaa4e242ee95019e41519101"}, - {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bb858218de60a930a164a991fff001c70b0c3d923d3ae40fef2acf3321126b00"}, - {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:53040c3b3488b52f4609775453fc759262f2885b733150ee2e1d88257fdafed8"}, - {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1c9b7d6d7bf35e1e2217b2847710154b11d25bf86b77bb7e190161f8b89917e"}, - {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:dfbe939fdddbc7b90cab4124f3ddd6391099fb964f6dab3386aa8cf56f37b5ba"}, - {file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a51cb4ea466276a845a940931357b4a876f903eabde514ba95e45050e1c2150"}, - {file = "hiredis-2.1.0-cp38-cp38-win32.whl", hash = "sha256:8bce4c687136bf13df76072072b9baadbd52f7d1b143fbbda96387f50e8ebaeb"}, - {file = "hiredis-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:1f94684b13fbbee1239303018d5ea900d786e486cdb130cde3144d53f4e262e4"}, - {file = "hiredis-2.1.0-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:879668ffab582bdffd9f10f6c8797aac055db183f266e3aa3a6438ff0768bc29"}, - {file = "hiredis-2.1.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f1d5a99de0fd02438f251e50ec64936d22d542c8e5d80bdec236f9713eeef334"}, - {file = "hiredis-2.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab622bcddcf334b4b1fc4b22e163e93160e3afdd7feaedd77ac6f258e0c77b68"}, - {file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:964c4f23ff450fb8d73edf06fc7475a4e81a3f9b03a9a04a907ec81c84052fcf"}, - {file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9f8b8daef346ffc0268d7086c213ab24c2a3fcbd4249eacfbb3635602c79d20"}, - {file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e2039cdaa2e6656eae4a2e2537ed77e27f29b7487b97ce7ae6a3cb88d01b968"}, - {file = 
"hiredis-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43d3168da0a81fa0a9e4bc6e14316beac8e5f1b439ca5cc5af7f9a558cfba741"}, - {file = "hiredis-2.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0021ba034b74c5006f62e4cfdd79d04c7c720731eda256ce29d769ac6483adc3"}, - {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:39a1bb45bcd698baf70ad4e9a94af164525bf053caea7df3777172d20d69538a"}, - {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c1b636b05777536a83b4cced157cbdc2d0012d494a9ec2f7b7e07c54296cd773"}, - {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:58a7ceb71f967fcc1878fb64666a12fbc5f243ab00d0653d3752a811941d8261"}, - {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c5263c676dc4d55202e7ca0429b949fc6ba7c0dd3a3a2b80538593ab27d82836"}, - {file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b5879d13025b04903ddf71921812db27fe1156a0952ad253014354d72463aaa9"}, - {file = "hiredis-2.1.0-cp39-cp39-win32.whl", hash = "sha256:9259f637d77544ffeb97acb0a87fdd192a8aced7a2fbd7439160dbee8341d446"}, - {file = "hiredis-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fb818b6e0981e16dfdfc9e507c9842f8d210e6ecaf3edb8ac3039dbd24768839"}, - {file = "hiredis-2.1.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:648d4648bf6b3dcc418a974df143b2f96627ab8b50bda23a57759c273880ecfb"}, - {file = "hiredis-2.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:654949cfc0bc76a5292b6ac111113b2eafb0739e0496495368981ea2e80bf4ec"}, - {file = "hiredis-2.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2a98b835c2088998a47da51b1b3661b587b2d4b3305d03fc9893888cc2aa54"}, - {file = "hiredis-2.1.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7222bd9243387d778245619d0ac62d35cf72ee746ec0efb7b9b230ae3e0c3a39"}, - {file = "hiredis-2.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:778f6de73c3abd67d447a3442f89e7d43a8de1eb5093f416af14dddc1d5c9cb5"}, - {file = "hiredis-2.1.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c4cfb61fe642f30a22789055847004393bc65b5686988c64191e379ea4ccd069"}, - {file = "hiredis-2.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03b6bef7eb50415aca87200a511d66a2fd69f1fcc75cfe1408e1201cbe28ddfb"}, - {file = "hiredis-2.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3195e13a700f6ff35894c4920fcce8f6c2b01cdbc01f76fe567753c495849e9b"}, - {file = "hiredis-2.1.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19f724405c808a89db422ed1010caab80a16d3e5b49632356ae7912513b6d58e"}, - {file = "hiredis-2.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8ecebeff966b412138b0cd105d7572f8d5e65e96355af699863890f8370707e6"}, - {file = "hiredis-2.1.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4f34eefaf164bf43b29ccc809c168248eb95001837ed0e9e3279891f57ae2fab"}, - {file = "hiredis-2.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11fad16beb9d623ea423c9129bab0e392ea4c84363d61c125f679be3d029442f"}, - {file = "hiredis-2.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c763eb9a1414c4d665945c70ae2ef74a843600667b0069fe90e2aabc78e5411"}, - {file = 
"hiredis-2.1.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edb7f156a8f8a1999574f27bda67dd2bff2d5b180bb6aed996a1792cafbcc668"}, - {file = "hiredis-2.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e057d5545189d4c9e22ae0f7dc283ea0a225f56999511022c062cce7f9589d69"}, + {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a"}, + {file = "hiredis-2.0.0-cp36-cp36m-win32.whl", hash = "sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63"}, + {file = "hiredis-2.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6"}, + {file = "hiredis-2.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79"}, + {file = "hiredis-2.0.0-cp37-cp37m-win32.whl", hash = "sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc"}, + {file = "hiredis-2.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a"}, + {file = "hiredis-2.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048"}, + {file = "hiredis-2.0.0-cp38-cp38-win32.whl", hash = 
"sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426"}, + {file = "hiredis-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581"}, + {file = "hiredis-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e"}, + {file = "hiredis-2.0.0-cp39-cp39-win32.whl", hash = "sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d"}, + {file = "hiredis-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux1_x86_64.whl", hash = "sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0"}, + {file = "hiredis-2.0.0.tar.gz", hash = "sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a"}, ] hyperlink = [ {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, From b624e010f1e5e008fe244af19b75f0b7d9b6f4db Mon Sep 17 00:00:00 2001 From: "Olivier Wilkinson (reivilibre)" Date: Wed, 21 Dec 2022 12:28:55 +0000 Subject: [PATCH 26/82] (remove no-op changelog entry) --- changelog.d/14718.misc | 1 - 1 file changed, 1 deletion(-) delete mode 100644 changelog.d/14718.misc diff --git a/changelog.d/14718.misc b/changelog.d/14718.misc deleted file mode 100644 index cda3ededd16e..000000000000 --- a/changelog.d/14718.misc +++ /dev/null @@ -1 +0,0 @@ -Revert update of hiredis in Poetry lockfile: revert from 2.1.0 to 2.0.0. \ No newline at end of file From 5831bed45061d9be237cf9e97812c2b73a18fea9 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 21 Dec 2022 12:29:19 -0500 Subject: [PATCH 27/82] Bump minimum PyYAML to 3.13. 
(#14720) PyYAML 3.13 fixes some issues with Python 3.7 compatibility and was released in 2018. --- changelog.d/14720.misc | 1 + pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/14720.misc diff --git a/changelog.d/14720.misc b/changelog.d/14720.misc new file mode 100644 index 000000000000..0defc0155090 --- /dev/null +++ b/changelog.d/14720.misc @@ -0,0 +1 @@ +Bump minimum PyYAML to 3.13. diff --git a/pyproject.toml b/pyproject.toml index 21bc11da88c0..3281441534bc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -136,7 +136,7 @@ Twisted = {extras = ["tls"], version = ">=18.9.0"} treq = ">=15.1" # Twisted has required pyopenssl 16.0 since about Twisted 16.6. pyOpenSSL = ">=16.0.0" -PyYAML = ">=3.11" +PyYAML = ">=3.13" pyasn1 = ">=0.1.9" pyasn1-modules = ">=0.0.7" bcrypt = ">=3.1.7" From 7010a3d0151b88b3a9a7451201eaf9c5bbe48d64 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 21 Dec 2022 13:05:21 -0500 Subject: [PATCH 28/82] Switch to ruff instead of flake8. (#14633) ruff is a flake8-compatible Python linter written in Rust. It supports the flake8 plugins that we use and is significantly faster in testing. --- .flake8 | 18 ---- .github/workflows/tests.yml | 2 +- changelog.d/14633.misc | 1 + poetry.lock | 119 +++++++------------------- pyproject.toml | 46 +++++++++- scripts-dev/lint.sh | 5 +- stubs/frozendict.pyi | 2 + stubs/icu.pyi | 2 + stubs/sortedcontainers/sorteddict.pyi | 2 + stubs/sortedcontainers/sortedlist.pyi | 2 + stubs/sortedcontainers/sortedset.pyi | 2 + synapse/config/_base.pyi | 2 + 12 files changed, 87 insertions(+), 116 deletions(-) delete mode 100644 .flake8 create mode 100644 changelog.d/14633.misc diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 4c6a4d5843e2..000000000000 --- a/.flake8 +++ /dev/null @@ -1,18 +0,0 @@ -# TODO: incorporate this into pyproject.toml if flake8 supports it in the future. -# See https://github.com/PyCQA/flake8/issues/234 -[flake8] -# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes -# for error codes. The ones we ignore are: -# W503: line break before binary operator -# W504: line break after binary operator -# E203: whitespace before ':' (which is contrary to pep8?) -# E731: do not assign a lambda expression, use a def -# E501: Line too long (black enforces this for us) -# -# flake8-bugbear runs extra checks. Its error codes are described at -# https://github.com/PyCQA/flake8-bugbear#list-of-warnings -# B019: Use of functools.lru_cache or functools.cache on methods can lead to memory leaks -# B023: Functions defined inside a loop must not use variables redefined in the loop -# B024: Abstract base class with no abstract method. - -ignore=W503,W504,E203,E731,E501,B019,B023,B024 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f07655d982fb..5a0c0a0d65c9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -53,7 +53,7 @@ jobs: - run: scripts-dev/check_schema_delta.py --force-colors lint: - uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1" + uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v2" with: typechecking-extras: "all" diff --git a/changelog.d/14633.misc b/changelog.d/14633.misc new file mode 100644 index 000000000000..def187b12b48 --- /dev/null +++ b/changelog.d/14633.misc @@ -0,0 +1 @@ +Use [ruff](https://github.com/charliermarsh/ruff/) instead of flake8. 
diff --git a/poetry.lock b/poetry.lock index 9a9a141a14b7..c83cad3e1a79 100644 --- a/poetry.lock +++ b/poetry.lock @@ -244,47 +244,6 @@ python-versions = ">=3.7" [package.extras] dev = ["Sphinx", "coverage", "flake8", "lxml", "memory-profiler", "mypy (==0.910)", "tox", "xmlschema (>=1.8.0)"] -[[package]] -name = "flake8" -version = "5.0.4" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = ">=3.6.1" - -[package.dependencies] -importlib-metadata = {version = ">=1.1.0,<4.3", markers = "python_version < \"3.8\""} -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.9.0,<2.10.0" -pyflakes = ">=2.5.0,<2.6.0" - -[[package]] -name = "flake8-bugbear" -version = "22.12.6" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -attrs = ">=19.2.0" -flake8 = ">=3.0.0" - -[package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] - -[[package]] -name = "flake8-comprehensions" -version = "3.10.1" -description = "A flake8 plugin to help you write better list/set/dict comprehensions." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -flake8 = ">=3.0,<3.2.0 || >3.2.0" -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} - [[package]] name = "frozendict" version = "2.3.4" @@ -553,14 +512,6 @@ Twisted = ">=15.1.0" [package.extras] dev = ["black (==22.3.0)", "flake8 (==4.0.1)", "isort (==5.9.3)", "ldaptor", "matrix-synapse", "mypy (==0.910)", "tox", "types-setuptools"] -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.6" - [[package]] name = "msgpack" version = "1.0.4" @@ -770,14 +721,6 @@ python-versions = "*" [package.dependencies] pyasn1 = ">=0.4.6,<0.5.0" -[[package]] -name = "pycodestyle" -version = "2.9.1" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=3.6" - [[package]] name = "pycparser" version = "2.21" @@ -801,14 +744,6 @@ typing-extensions = ">=4.1.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] -[[package]] -name = "pyflakes" -version = "2.5.0" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=3.6" - [[package]] name = "pygithub" version = "1.57" @@ -1044,6 +979,14 @@ typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9 [package.extras] jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] +[[package]] +name = "ruff" +version = "0.0.189" +description = "An extremely fast Python linter, written in Rust." 
+category = "dev" +optional = false +python-versions = ">=3.7" + [[package]] name = "secretstorage" version = "3.3.1" @@ -1635,7 +1578,7 @@ user-search = ["pyicu"] [metadata] lock-version = "1.1" python-versions = "^3.7.1" -content-hash = "f20007013f33bc35a01e412c48adc62a936030f3074e06286674c5ad7f44d300" +content-hash = "d20b6aea682a74e6a161080bb459e73160b8eb79526f5d17a525639ac3fe3e9e" [metadata.files] attrs = [ @@ -1827,18 +1770,6 @@ elementpath = [ {file = "elementpath-2.5.0-py3-none-any.whl", hash = "sha256:2a432775e37a19e4362443078130a7dbfc457d7d093cd421c03958d9034cc08b"}, {file = "elementpath-2.5.0.tar.gz", hash = "sha256:3a27aaf3399929fccda013899cb76d3ff111734abf4281e5f9d3721ba0b9ffa3"}, ] -flake8 = [ - {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, - {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, -] -flake8-bugbear = [ - {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"}, - {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"}, -] -flake8-comprehensions = [ - {file = "flake8-comprehensions-3.10.1.tar.gz", hash = "sha256:412052ac4a947f36b891143430fef4859705af11b2572fbb689f90d372cf26ab"}, - {file = "flake8_comprehensions-3.10.1-py3-none-any.whl", hash = "sha256:d763de3c74bc18a79c039a7ec732e0a1985b0c79309ceb51e56401ad0a2cd44e"}, -] frozendict = [ {file = "frozendict-2.3.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a3b32d47282ae0098b9239a6d53ec539da720258bd762d62191b46f2f87c5fc"}, {file = "frozendict-2.3.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84c9887179a245a66a50f52afa08d4d92ae0f269839fab82285c70a0fa0dd782"}, @@ -2046,6 +1977,7 @@ lxml = [ {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, + {file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"}, {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, @@ -2055,6 +1987,7 @@ lxml = [ {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, + {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"}, {file = 
"lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, @@ -2147,10 +2080,6 @@ matrix-synapse-ldap3 = [ {file = "matrix-synapse-ldap3-0.2.2.tar.gz", hash = "sha256:b388d95693486eef69adaefd0fd9e84463d52fe17b0214a00efcaa669b73cb74"}, {file = "matrix_synapse_ldap3-0.2.2-py3-none-any.whl", hash = "sha256:66ee4c85d7952c6c27fd04c09cdfdf4847b8e8b7d6a7ada6ba1100013bda060f"}, ] -mccabe = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] msgpack = [ {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4ab251d229d10498e9a2f3b1e68ef64cb393394ec477e3370c457f9430ce9250"}, {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:112b0f93202d7c0fef0b7810d465fde23c746a2d482e1e2de2aafd2ce1492c88"}, @@ -2370,10 +2299,6 @@ pyasn1-modules = [ {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"}, {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"}, ] -pycodestyle = [ - {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, - {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, -] pycparser = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, @@ -2416,10 +2341,6 @@ pydantic = [ {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"}, {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, ] -pyflakes = [ - {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, - {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, -] pygithub = [ {file = "PyGithub-1.57-py3-none-any.whl", hash = "sha256:5822febeac2391f1306c55a99af2bc8f86c8bf82ded000030cd02c18f31b731f"}, {file = "PyGithub-1.57.tar.gz", hash = "sha256:c273f252b278fb81f1769505cc6921bdb6791e1cebd6ac850cc97dad13c31ff3"}, @@ -2560,6 +2481,24 @@ rich = [ {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"}, {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, ] +ruff = [ + {file = "ruff-0.0.189-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:07c947b42d3c5efc6761214acdb6b71a49b833ad9fb9b320454244a6fe01f212"}, + {file = "ruff-0.0.189-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = 
"sha256:76e6161d021bde5738bf9d123ae445cb3a22fa60f14958ce64961d8af16141a0"}, + {file = "ruff-0.0.189-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c27f51e5b48cd483459cdd1c95a6bd989adcf7653ccc440ca437f4993fe4b812"}, + {file = "ruff-0.0.189-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e89f488a16ce2b21d940fc6271ed161affec788955f7b41761a9693a92e994bb"}, + {file = "ruff-0.0.189-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fee593d8d470811c316ff2eb0124ac74668a3d637ab3fb237aa3fa8561fb89aa"}, + {file = "ruff-0.0.189-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bc3a73683a5b3b4b7bf951bbd4aa7d79b993c8c2e608a68de120c342ebe510f2"}, + {file = "ruff-0.0.189-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5d73877558651f48c86d958afe0f662b6c3639990c230a6b9d82ac6093484db"}, + {file = "ruff-0.0.189-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d1e6e9813f59ba54e7cb6f28c1f2a9a756197f6e321bd68519afe57f8522fce"}, + {file = "ruff-0.0.189-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d177090cf03004b14814b0aad530758f5186d391250afb737570edd55beabc6"}, + {file = "ruff-0.0.189-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:48de3253856a0a85f9b53a0ca1982946c7fd343c796cdc76ece0ae359d5b71b5"}, + {file = "ruff-0.0.189-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e935bb5a213030de312ad00df477f38c78ac97af58b0e6a4ae5762705a5113da"}, + {file = "ruff-0.0.189-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bdb8173d6efff96e0cc5fe38f5fc4daa0d28fb11553482b9989d372fdafc7708"}, + {file = "ruff-0.0.189-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:14486fd8632bc4c7f926137a9c6a8c45993ff6667ddb7a88192c369c3afd86e9"}, + {file = "ruff-0.0.189-py3-none-win32.whl", hash = "sha256:e281080e2ed04f01275b3df5baa0afe2802ab145349298e24700cdd09c0afddc"}, + {file = "ruff-0.0.189-py3-none-win_amd64.whl", hash = "sha256:c552ff0b0587a5e13f935131d2a19782c0baf8b59175cf3160a76545fbdbdd76"}, + {file = "ruff-0.0.189.tar.gz", hash = "sha256:90a3031461ed83686ff78f96e58d28cdee835110c51bdfa0968a2d5892610c71"}, +] secretstorage = [ {file = "SecretStorage-3.3.1-py3-none-any.whl", hash = "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f"}, {file = "SecretStorage-3.3.1.tar.gz", hash = "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195"}, diff --git a/pyproject.toml b/pyproject.toml index 3281441534bc..37b9ab3a7702 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,46 @@ target-version = ['py37', 'py38', 'py39', 'py310'] # https://black.readthedocs.io/en/stable/usage_and_configuration/file_collection_and_discovery.html#gitignore # Use `extend-exclude` if you want to exclude something in addition to this. +[tool.ruff] +line-length = 88 + +# See https://github.com/charliermarsh/ruff/#pycodestyle +# for error codes. The ones we ignore are: +# E731: do not assign a lambda expression, use a def +# E501: Line too long (black enforces this for us) +# +# See https://github.com/charliermarsh/ruff/#pyflakes +# F401: unused import +# F811: Redefinition of unused +# F821: Undefined name +# +# flake8-bugbear compatible checks. 
Its error codes are described at +# https://github.com/charliermarsh/ruff/#flake8-bugbear +# B019: Use of functools.lru_cache or functools.cache on methods can lead to memory leaks +# B023: Functions defined inside a loop must not use variables redefined in the loop +# B024: Abstract base class with no abstract method. +ignore = [ + "B019", + "B023", + "B024", + "E501", + "E731", + "F401", + "F811", + "F821", +] +select = [ + # pycodestyle checks. + "E", + "W", + # pyflakes checks. + "F", + # flake8-bugbear checks. + "B0", + # flake8-comprehensions checks. + "C4", +] + [tool.isort] line_length = 88 sections = ["FUTURE", "STDLIB", "THIRDPARTY", "TWISTED", "FIRSTPARTY", "TESTS", "LOCALFOLDER"] @@ -274,12 +314,10 @@ all = [ ] [tool.poetry.dev-dependencies] -## We pin black so that our tests don't start failing on new releases. +# We pin black so that our tests don't start failing on new releases. isort = ">=5.10.1" black = ">=22.3.0" -flake8-comprehensions = "*" -flake8-bugbear = ">=21.3.2" -flake8 = "*" +ruff = "0.0.189" # Typechecking mypy = "*" diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh index bf900645b1f7..f6b81013c306 100755 --- a/scripts-dev/lint.sh +++ b/scripts-dev/lint.sh @@ -1,9 +1,8 @@ #!/usr/bin/env bash # # Runs linting scripts over the local Synapse checkout -# isort - sorts import statements # black - opinionated code formatter -# flake8 - lints and finds mistakes +# ruff - lints and finds mistakes set -e @@ -105,6 +104,6 @@ set -x isort "${files[@]}" python3 -m black "${files[@]}" ./scripts-dev/config-lint.sh -flake8 "${files[@]}" +ruff "${files[@]}" ./scripts-dev/check_pydantic_models.py lint mypy diff --git a/stubs/frozendict.pyi b/stubs/frozendict.pyi index 24c6f3af77b1..196dee4461ea 100644 --- a/stubs/frozendict.pyi +++ b/stubs/frozendict.pyi @@ -14,6 +14,8 @@ # Stub for frozendict. +from __future__ import annotations + from typing import Any, Hashable, Iterable, Iterator, Mapping, Tuple, TypeVar, overload _KT = TypeVar("_KT", bound=Hashable) # Key type. diff --git a/stubs/icu.pyi b/stubs/icu.pyi index efeda7938a73..7736df8a9224 100644 --- a/stubs/icu.pyi +++ b/stubs/icu.pyi @@ -14,6 +14,8 @@ # Stub for PyICU. +from __future__ import annotations + class Locale: @staticmethod def getDefault() -> Locale: ... 
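For context on the `ignore` list in the `pyproject.toml` hunk above: B023 is the flake8-bugbear check for closures defined inside a loop that capture the loop variable by reference. The snippet below is not part of this patch; it is a minimal, standard-library-only illustration of the late-binding pitfall that the suppressed check exists to catch.

```python
from typing import Callable, List

callbacks: List[Callable[[], int]] = []
for n in range(3):
    # B023 would flag this lambda: it captures the *variable* `n`, not the
    # value `n` had on this iteration.
    callbacks.append(lambda: n)

print([cb() for cb in callbacks])  # [2, 2, 2] -- every closure sees the final n

# Binding the value as a default argument captures it at definition time:
fixed = [lambda n=n: n for n in range(3)]
print([cb() for cb in fixed])  # [0, 1, 2]
```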
diff --git a/stubs/sortedcontainers/sorteddict.pyi b/stubs/sortedcontainers/sorteddict.pyi index 7c399ab38d5e..81f581b034b8 100644 --- a/stubs/sortedcontainers/sorteddict.pyi +++ b/stubs/sortedcontainers/sorteddict.pyi @@ -2,6 +2,8 @@ # https://github.com/grantjenks/python-sortedcontainers/blob/eea42df1f7bad2792e8da77335ff888f04b9e5ae/sortedcontainers/sorteddict.pyi # (from https://github.com/grantjenks/python-sortedcontainers/pull/107) +from __future__ import annotations + from typing import ( Any, Callable, diff --git a/stubs/sortedcontainers/sortedlist.pyi b/stubs/sortedcontainers/sortedlist.pyi index 403897e3919e..cd4c969849b1 100644 --- a/stubs/sortedcontainers/sortedlist.pyi +++ b/stubs/sortedcontainers/sortedlist.pyi @@ -2,6 +2,8 @@ # https://github.com/grantjenks/python-sortedcontainers/blob/a419ffbd2b1c935b09f11f0971696e537fd0c510/sortedcontainers/sortedlist.pyi # (from https://github.com/grantjenks/python-sortedcontainers/pull/107) +from __future__ import annotations + from typing import ( Any, Callable, diff --git a/stubs/sortedcontainers/sortedset.pyi b/stubs/sortedcontainers/sortedset.pyi index 43c860f4221e..d761c438f792 100644 --- a/stubs/sortedcontainers/sortedset.pyi +++ b/stubs/sortedcontainers/sortedset.pyi @@ -2,6 +2,8 @@ # https://github.com/grantjenks/python-sortedcontainers/blob/d0a225d7fd0fb4c54532b8798af3cbeebf97e2d5/sortedcontainers/sortedset.pyi # (from https://github.com/grantjenks/python-sortedcontainers/pull/107) +from __future__ import annotations + from typing import ( AbstractSet, Any, diff --git a/synapse/config/_base.pyi b/synapse/config/_base.pyi index 01ea2b4dab56..bd265de53613 100644 --- a/synapse/config/_base.pyi +++ b/synapse/config/_base.pyi @@ -1,3 +1,5 @@ +from __future__ import annotations + import argparse from typing import ( Any, From 14abf22dd696129f69cd1a14b2044a4ca6cb1972 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 21 Dec 2022 13:08:20 -0500 Subject: [PATCH 29/82] Update docs about ruff vs. flake8. --- docs/code_style.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/code_style.md b/docs/code_style.md index d65fda62d140..3aa7d0d741ba 100644 --- a/docs/code_style.md +++ b/docs/code_style.md @@ -10,7 +10,7 @@ The necessary tools are: - [black](https://black.readthedocs.io/en/stable/), a source code formatter; - [isort](https://pycqa.github.io/isort/), which organises each file's imports; -- [flake8](https://flake8.pycqa.org/en/latest/), which can spot common errors; and +- [ruff](https://github.com/charliermarsh/ruff), which can spot common errors; and - [mypy](https://mypy.readthedocs.io/en/stable/), a type checker. Install them with: @@ -28,7 +28,7 @@ scripts-dev/lint.sh It's worth noting that modern IDEs and text editors can run these tools automatically on save. It may be worth looking into whether this functionality is supported in your editor for a more convenient -development workflow. It is not, however, recommended to run `flake8` or `mypy` +development workflow. It is not, however, recommended to run `mypy` on save as they take a while and can be very resource intensive. ## General rules From 5c9be9c76021ac54f425f10e8f935532d3197de5 Mon Sep 17 00:00:00 2001 From: Jeyachandran Rathnam Date: Thu, 22 Dec 2022 13:26:37 -0500 Subject: [PATCH 30/82] Check sqlite database file exists before porting. (#14692) To avoid creating an empty SQLite file if the given path is incorrect. 
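The one-line change below relies on SQLite's URI filename syntax: opened with `?mode=rw`, a missing database file is an error rather than something SQLite silently creates. A minimal sketch of that behaviour using the standard-library `sqlite3` module (the paths are placeholders; the port script itself opens its connections through Synapse's database pool):

```python
import sqlite3

# A plain path silently creates an empty database file if none exists.
conn = sqlite3.connect("/tmp/created-if-missing.db")
conn.close()

# A URI with mode=rw refuses to open a file that does not exist.
try:
    sqlite3.connect("file:/path/to/homeserver.db?mode=rw", uri=True)
except sqlite3.OperationalError as exc:
    print(f"refusing to port: database not found ({exc})")
```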
--- changelog.d/14692.misc | 1 + synapse/_scripts/synapse_port_db.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/14692.misc diff --git a/changelog.d/14692.misc b/changelog.d/14692.misc new file mode 100644 index 000000000000..0edac253b7ff --- /dev/null +++ b/changelog.d/14692.misc @@ -0,0 +1 @@ +Check that the SQLite database file exists before porting to PostgreSQL. \ No newline at end of file diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index d850e54e1751..c463b60b2620 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -1307,7 +1307,7 @@ def main() -> None: sqlite_config = { "name": "sqlite3", "args": { - "database": args.sqlite_database, + "database": "file:{}?mode=rw".format(args.sqlite_database), "cp_min": 1, "cp_max": 1, "check_same_thread": False, From a52822d39c866b4d5e6d2a0176f29ae49bf3f8e9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Fri, 23 Dec 2022 14:04:50 +0000 Subject: [PATCH 31/82] Log to-device msgids when we return them over /sync (#14724) --- changelog.d/14724.misc | 1 + synapse/handlers/sync.py | 20 +++++++++++++------- 2 files changed, 14 insertions(+), 7 deletions(-) create mode 100644 changelog.d/14724.misc diff --git a/changelog.d/14724.misc b/changelog.d/14724.misc new file mode 100644 index 000000000000..270e5ed18802 --- /dev/null +++ b/changelog.d/14724.misc @@ -0,0 +1 @@ +If debug logging is enabled, log the `msgid`s of any to-device messages that are returned over `/sync`. diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 7d6a653747b6..4fa480262b03 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -37,6 +37,7 @@ from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.events import EventBase from synapse.handlers.relations import BundledAggregations +from synapse.logging import issue9533_logger from synapse.logging.context import current_context from synapse.logging.opentracing import ( SynapseTags, @@ -1623,13 +1624,18 @@ async def _generate_sync_entry_for_to_device( } ) - logger.debug( - "Returning %d to-device messages between %d and %d (current token: %d)", - len(messages), - since_stream_id, - stream_id, - now_token.to_device_key, - ) + if messages and issue9533_logger.isEnabledFor(logging.DEBUG): + issue9533_logger.debug( + "Returning to-device messages with stream_ids (%d, %d]; now: %d;" + " msgids: %s", + since_stream_id, + stream_id, + now_token.to_device_key, + [ + message["content"].get(EventContentFields.TO_DEVICE_MSGID) + for message in messages + ], + ) sync_result_builder.now_token = now_token.copy_and_replace( StreamKeyType.TO_DEVICE, stream_id ) From 2fb4071c1f74387976666c32823d844f2977c5dc Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Wed, 28 Dec 2022 13:17:51 +0100 Subject: [PATCH 32/82] Move `email` to Server section in config file documentation (#14730) * Move `email` to server in config file documentation * changelog --- changelog.d/14730.doc | 1 + .../configuration/config_documentation.md | 217 +++++++++--------- 2 files changed, 110 insertions(+), 108 deletions(-) create mode 100644 changelog.d/14730.doc diff --git a/changelog.d/14730.doc b/changelog.d/14730.doc new file mode 100644 index 000000000000..6015c7d2833e --- /dev/null +++ b/changelog.d/14730.doc @@ -0,0 +1 @@ +Move `email` to Server section in config file documentation. 
\ No newline at end of file diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 6b8768f45dbf..67e0acc9104f 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -569,6 +569,115 @@ Example configuration: ```yaml delete_stale_devices_after: 1y ``` +--- +### `email` + +Configuration for sending emails from Synapse. + +Server admins can configure custom templates for email content. See +[here](../../templates.md) for more information. + +This setting has the following sub-options: +* `smtp_host`: The hostname of the outgoing SMTP server to use. Defaults to 'localhost'. +* `smtp_port`: The port on the mail server for outgoing SMTP. Defaults to 465 if `force_tls` is true, else 25. + + _Changed in Synapse 1.64.0:_ the default port is now aware of `force_tls`. +* `smtp_user` and `smtp_pass`: Username/password for authentication to the SMTP server. By default, no + authentication is attempted. +* `force_tls`: By default, Synapse connects over plain text and then optionally upgrades + to TLS via STARTTLS. If this option is set to true, TLS is used from the start (Implicit TLS), + and the option `require_transport_security` is ignored. + It is recommended to enable this if supported by your mail server. + + _New in Synapse 1.64.0._ +* `require_transport_security`: Set to true to require TLS transport security for SMTP. + By default, Synapse will connect over plain text, and will then switch to + TLS via STARTTLS *if the SMTP server supports it*. If this option is set, + Synapse will refuse to connect unless the server supports STARTTLS. +* `enable_tls`: By default, if the server supports TLS, it will be used, and the server + must present a certificate that is valid for 'smtp_host'. If this option + is set to false, TLS will not be used. +* `notif_from`: defines the "From" address to use when sending emails. + It must be set if email sending is enabled. The placeholder '%(app)s' will be replaced by the application name, + which is normally set in `app_name`, but may be overridden by the + Matrix client application. Note that the placeholder must be written '%(app)s', including the + trailing 's'. +* `app_name`: `app_name` defines the default value for '%(app)s' in `notif_from` and email + subjects. It defaults to 'Matrix'. +* `enable_notifs`: Set to true to enable sending emails for messages that the user + has missed. Disabled by default. +* `notif_for_new_users`: Set to false to disable automatic subscription to email + notifications for new users. Enabled by default. +* `client_base_url`: Custom URL for client links within the email notifications. By default + links will be based on "https://matrix.to". (This setting used to be called `riot_base_url`; + the old name is still supported for backwards-compatibility but is now deprecated.) +* `validation_token_lifetime`: Configures the time that a validation email will expire after sending. + Defaults to 1h. +* `invite_client_location`: The web client location to direct users to during an invite. This is passed + to the identity server as the `org.matrix.web_client_location` key. Defaults + to unset, giving no guidance to the identity server. +* `subjects`: Subjects to use when sending emails from Synapse. The placeholder '%(app)s' will + be replaced with the value of the `app_name` setting, or by a value dictated by the Matrix client application. 
+ In addition, each subject can use the following placeholders: '%(person)s', which will be replaced by the displayname + of the user(s) that sent the message(s), e.g. "Alice and Bob", and '%(room)s', which will be replaced by the name of the room the + message(s) have been sent to, e.g. "My super room". In addition, emails related to account administration + can use the '%(server_name)s' placeholder, which will be replaced by the value of the + `server_name` setting in your Synapse configuration. + + Here is a list of subjects for notification emails that can be set: + * `message_from_person_in_room`: Subject to use to notify about one message from one or more user(s) in a + room which has a name. Defaults to "[%(app)s] You have a message on %(app)s from %(person)s in the %(room)s room..." + * `message_from_person`: Subject to use to notify about one message from one or more user(s) in a + room which doesn't have a name. Defaults to "[%(app)s] You have a message on %(app)s from %(person)s..." + * `messages_from_person`: Subject to use to notify about multiple messages from one or more users in + a room which doesn't have a name. Defaults to "[%(app)s] You have messages on %(app)s from %(person)s..." + * `messages_in_room`: Subject to use to notify about multiple messages in a room which has a + name. Defaults to "[%(app)s] You have messages on %(app)s in the %(room)s room..." + * `messages_in_room_and_others`: Subject to use to notify about multiple messages in multiple rooms. + Defaults to "[%(app)s] You have messages on %(app)s in the %(room)s room and others..." + * `messages_from_person_and_others`: Subject to use to notify about multiple messages from multiple persons in + multiple rooms. This is similar to the setting above except it's used when + the room in which the notification was triggered has no name. Defaults to + "[%(app)s] You have messages on %(app)s from %(person)s and others..." + * `invite_from_person_to_room`: Subject to use to notify about an invite to a room which has a name. + Defaults to "[%(app)s] %(person)s has invited you to join the %(room)s room on %(app)s..." + * `invite_from_person`: Subject to use to notify about an invite to a room which doesn't have a + name. Defaults to "[%(app)s] %(person)s has invited you to chat on %(app)s..." + * `password_reset`: Subject to use when sending a password reset email. Defaults to "[%(server_name)s] Password reset" + * `email_validation`: Subject to use when sending a verification email to assert an address's + ownership. Defaults to "[%(server_name)s] Validate your email" + +Example configuration: + +```yaml +email: + smtp_host: mail.server + smtp_port: 587 + smtp_user: "exampleusername" + smtp_pass: "examplepassword" + force_tls: true + require_transport_security: true + enable_tls: false + notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>" + app_name: my_branded_matrix_server + enable_notifs: true + notif_for_new_users: false + client_base_url: "http://localhost/riot" + validation_token_lifetime: 15m + invite_client_location: https://app.element.io + + subjects: + message_from_person_in_room: "[%(app)s] You have a message on %(app)s from %(person)s in the %(room)s room..." + message_from_person: "[%(app)s] You have a message on %(app)s from %(person)s..." + messages_from_person: "[%(app)s] You have messages on %(app)s from %(person)s..." + messages_in_room: "[%(app)s] You have messages on %(app)s in the %(room)s room..." 
+ messages_in_room_and_others: "[%(app)s] You have messages on %(app)s in the %(room)s room and others..." + messages_from_person_and_others: "[%(app)s] You have messages on %(app)s from %(person)s and others..." + invite_from_person_to_room: "[%(app)s] %(person)s has invited you to join the %(room)s room on %(app)s..." + invite_from_person: "[%(app)s] %(person)s has invited you to chat on %(app)s..." + password_reset: "[%(server_name)s] Password reset" + email_validation: "[%(server_name)s] Validate your email" +``` ## Homeserver blocking Useful options for Synapse admins. @@ -3259,114 +3368,6 @@ ui_auth: session_timeout: "15s" ``` --- -### `email` - -Configuration for sending emails from Synapse. - -Server admins can configure custom templates for email content. See -[here](../../templates.md) for more information. - -This setting has the following sub-options: -* `smtp_host`: The hostname of the outgoing SMTP server to use. Defaults to 'localhost'. -* `smtp_port`: The port on the mail server for outgoing SMTP. Defaults to 465 if `force_tls` is true, else 25. - - _Changed in Synapse 1.64.0:_ the default port is now aware of `force_tls`. -* `smtp_user` and `smtp_pass`: Username/password for authentication to the SMTP server. By default, no - authentication is attempted. -* `force_tls`: By default, Synapse connects over plain text and then optionally upgrades - to TLS via STARTTLS. If this option is set to true, TLS is used from the start (Implicit TLS), - and the option `require_transport_security` is ignored. - It is recommended to enable this if supported by your mail server. - - _New in Synapse 1.64.0._ -* `require_transport_security`: Set to true to require TLS transport security for SMTP. - By default, Synapse will connect over plain text, and will then switch to - TLS via STARTTLS *if the SMTP server supports it*. If this option is set, - Synapse will refuse to connect unless the server supports STARTTLS. -* `enable_tls`: By default, if the server supports TLS, it will be used, and the server - must present a certificate that is valid for 'smtp_host'. If this option - is set to false, TLS will not be used. -* `notif_from`: defines the "From" address to use when sending emails. - It must be set if email sending is enabled. The placeholder '%(app)s' will be replaced by the application name, - which is normally set in `app_name`, but may be overridden by the - Matrix client application. Note that the placeholder must be written '%(app)s', including the - trailing 's'. -* `app_name`: `app_name` defines the default value for '%(app)s' in `notif_from` and email - subjects. It defaults to 'Matrix'. -* `enable_notifs`: Set to true to enable sending emails for messages that the user - has missed. Disabled by default. -* `notif_for_new_users`: Set to false to disable automatic subscription to email - notifications for new users. Enabled by default. -* `client_base_url`: Custom URL for client links within the email notifications. By default - links will be based on "https://matrix.to". (This setting used to be called `riot_base_url`; - the old name is still supported for backwards-compatibility but is now deprecated.) -* `validation_token_lifetime`: Configures the time that a validation email will expire after sending. - Defaults to 1h. -* `invite_client_location`: The web client location to direct users to during an invite. This is passed - to the identity server as the `org.matrix.web_client_location` key. Defaults - to unset, giving no guidance to the identity server. 
-* `subjects`: Subjects to use when sending emails from Synapse. The placeholder '%(app)s' will - be replaced with the value of the `app_name` setting, or by a value dictated by the Matrix client application. - In addition, each subject can use the following placeholders: '%(person)s', which will be replaced by the displayname - of the user(s) that sent the message(s), e.g. "Alice and Bob", and '%(room)s', which will be replaced by the name of the room the - message(s) have been sent to, e.g. "My super room". In addition, emails related to account administration will - can use the '%(server_name)s' placeholder, which will be replaced by the value of the - `server_name` setting in your Synapse configuration. - - Here is a list of subjects for notification emails that can be set: - * `message_from_person_in_room`: Subject to use to notify about one message from one or more user(s) in a - room which has a name. Defaults to "[%(app)s] You have a message on %(app)s from %(person)s in the %(room)s room..." - * `message_from_person`: Subject to use to notify about one message from one or more user(s) in a - room which doesn't have a name. Defaults to "[%(app)s] You have a message on %(app)s from %(person)s..." - * `messages_from_person`: Subject to use to notify about multiple messages from one or more users in - a room which doesn't have a name. Defaults to "[%(app)s] You have messages on %(app)s from %(person)s..." - * `messages_in_room`: Subject to use to notify about multiple messages in a room which has a - name. Defaults to "[%(app)s] You have messages on %(app)s in the %(room)s room..." - * `messages_in_room_and_others`: Subject to use to notify about multiple messages in multiple rooms. - Defaults to "[%(app)s] You have messages on %(app)s in the %(room)s room and others..." - * `messages_from_person_and_others`: Subject to use to notify about multiple messages from multiple persons in - multiple rooms. This is similar to the setting above except it's used when - the room in which the notification was triggered has no name. Defaults to - "[%(app)s] You have messages on %(app)s from %(person)s and others..." - * `invite_from_person_to_room`: Subject to use to notify about an invite to a room which has a name. - Defaults to "[%(app)s] %(person)s has invited you to join the %(room)s room on %(app)s..." - * `invite_from_person`: Subject to use to notify about an invite to a room which doesn't have a - name. Defaults to "[%(app)s] %(person)s has invited you to chat on %(app)s..." - * `password_reset`: Subject to use when sending a password reset email. Defaults to "[%(server_name)s] Password reset" - * `email_validation`: Subject to use when sending a verification email to assert an address's - ownership. Defaults to "[%(server_name)s] Validate your email" - -Example configuration: -```yaml -email: - smtp_host: mail.server - smtp_port: 587 - smtp_user: "exampleusername" - smtp_pass: "examplepassword" - force_tls: true - require_transport_security: true - enable_tls: false - notif_from: "Your Friendly %(app)s homeserver " - app_name: my_branded_matrix_server - enable_notifs: true - notif_for_new_users: false - client_base_url: "http://localhost/riot" - validation_token_lifetime: 15m - invite_client_location: https://app.element.io - - subjects: - message_from_person_in_room: "[%(app)s] You have a message on %(app)s from %(person)s in the %(room)s room..." - message_from_person: "[%(app)s] You have a message on %(app)s from %(person)s..." 
- messages_from_person: "[%(app)s] You have messages on %(app)s from %(person)s..." - messages_in_room: "[%(app)s] You have messages on %(app)s in the %(room)s room..." - messages_in_room_and_others: "[%(app)s] You have messages on %(app)s in the %(room)s room and others..." - messages_from_person_and_others: "[%(app)s] You have messages on %(app)s from %(person)s and others..." - invite_from_person_to_room: "[%(app)s] %(person)s has invited you to join the %(room)s room on %(app)s..." - invite_from_person: "[%(app)s] %(person)s has invited you to chat on %(app)s..." - password_reset: "[%(server_name)s] Password reset" - email_validation: "[%(server_name)s] Validate your email" -``` ---- ## Push Configuration settings related to push notifications From a4ca770655a6b067468de3d507292ec133fdc5ca Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 28 Dec 2022 08:29:35 -0500 Subject: [PATCH 33/82] Add missing type hints to tests. (#14687) Adds type hints to tests.metrics and tests.crypto. --- changelog.d/14687.misc | 1 + mypy.ini | 6 +- tests/crypto/test_event_signing.py | 6 +- tests/crypto/test_keyring.py | 122 +++++++++++++++++------------ tests/metrics/test_metrics.py | 24 ++++-- 5 files changed, 95 insertions(+), 64 deletions(-) create mode 100644 changelog.d/14687.misc diff --git a/changelog.d/14687.misc b/changelog.d/14687.misc new file mode 100644 index 000000000000..d44571b73149 --- /dev/null +++ b/changelog.d/14687.misc @@ -0,0 +1 @@ +Add missing type hints. diff --git a/mypy.ini b/mypy.ini index 80fbcdfeabf0..013fbbdfc02a 100644 --- a/mypy.ini +++ b/mypy.ini @@ -36,7 +36,6 @@ exclude = (?x) |tests/api/test_ratelimiting.py |tests/app/test_openid_listener.py |tests/appservice/test_scheduler.py - |tests/crypto/test_keyring.py |tests/events/test_presence_router.py |tests/events/test_utils.py |tests/federation/test_federation_catch_up.py @@ -90,13 +89,16 @@ disallow_untyped_defs = False [mypy-tests.config.*] disallow_untyped_defs = True +[mypy-tests.crypto.*] +disallow_untyped_defs = True + [mypy-tests.federation.transport.test_client] disallow_untyped_defs = True [mypy-tests.handlers.*] disallow_untyped_defs = True -[mypy-tests.metrics.test_background_process_metrics] +[mypy-tests.metrics.*] disallow_untyped_defs = True [mypy-tests.push.test_bulk_push_rule_evaluator] diff --git a/tests/crypto/test_event_signing.py b/tests/crypto/test_event_signing.py index 8fa710c9dc35..2b0972eee8e1 100644 --- a/tests/crypto/test_event_signing.py +++ b/tests/crypto/test_event_signing.py @@ -33,12 +33,12 @@ class EventSigningTestCase(unittest.TestCase): - def setUp(self): + def setUp(self) -> None: self.signing_key: SigningKey = decode_signing_key_base64( KEY_ALG, KEY_VER, SIGNING_KEY_SEED ) - def test_sign_minimal(self): + def test_sign_minimal(self) -> None: event_dict = { "event_id": "$0:domain", "origin": "domain", @@ -69,7 +69,7 @@ def test_sign_minimal(self): "aIbygsSdLOFzvdDjww8zUVKCmI02eP9xtyJxc/cLiBA", ) - def test_sign_message(self): + def test_sign_message(self) -> None: event_dict = { "content": {"body": "Here is the message content"}, "event_id": "$0:domain", diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index f7c309cad008..0e8af2da54b5 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import time -from typing import Dict, List +from typing import Any, Dict, List, Optional, cast from unittest.mock import Mock import attr @@ -20,10 +20,11 @@ import signedjson.key import signedjson.sign from signedjson.key import encode_verify_key_base64, get_verify_key -from signedjson.types import SigningKey +from signedjson.types import SigningKey, VerifyKey from twisted.internet import defer from twisted.internet.defer import Deferred, ensureDeferred +from twisted.test.proto_helpers import MemoryReactor from synapse.api.errors import SynapseError from synapse.crypto import keyring @@ -33,11 +34,15 @@ StoreKeyFetcher, ) from synapse.logging.context import ( + ContextRequest, LoggingContext, current_context, make_deferred_yieldable, ) +from synapse.server import HomeServer from synapse.storage.keys import FetchKeyResult +from synapse.types import JsonDict +from synapse.util import Clock from tests import unittest from tests.test_utils import make_awaitable @@ -45,15 +50,15 @@ class MockPerspectiveServer: - def __init__(self): + def __init__(self) -> None: self.server_name = "mock_server" - self.key = signedjson.key.generate_signing_key(0) + self.key = signedjson.key.generate_signing_key("0") - def get_verify_keys(self): + def get_verify_keys(self) -> Dict[str, str]: vk = signedjson.key.get_verify_key(self.key) return {"%s:%s" % (vk.alg, vk.version): encode_verify_key_base64(vk)} - def get_signed_key(self, server_name, verify_key): + def get_signed_key(self, server_name: str, verify_key: VerifyKey) -> JsonDict: key_id = "%s:%s" % (verify_key.alg, verify_key.version) res = { "server_name": server_name, @@ -64,34 +69,36 @@ def get_signed_key(self, server_name, verify_key): self.sign_response(res) return res - def sign_response(self, res): + def sign_response(self, res: JsonDict) -> None: signedjson.sign.sign_json(res, self.server_name, self.key) -@attr.s(slots=True) +@attr.s(slots=True, auto_attribs=True) class FakeRequest: - id = attr.ib() + id: str @logcontext_clean class KeyringTestCase(unittest.HomeserverTestCase): - def check_context(self, val, expected): + def check_context( + self, val: ContextRequest, expected: Optional[ContextRequest] + ) -> ContextRequest: self.assertEqual(getattr(current_context(), "request", None), expected) return val - def test_verify_json_objects_for_server_awaits_previous_requests(self): + def test_verify_json_objects_for_server_awaits_previous_requests(self) -> None: mock_fetcher = Mock() mock_fetcher.get_keys = Mock() kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,)) # a signed object that we are going to try to validate - key1 = signedjson.key.generate_signing_key(1) - json1 = {} + key1 = signedjson.key.generate_signing_key("1") + json1: JsonDict = {} signedjson.sign.sign_json(json1, "server10", key1) # start off a first set of lookups. We make the mock fetcher block until this # deferred completes. 
- first_lookup_deferred = Deferred() + first_lookup_deferred: "Deferred[None]" = Deferred() async def first_lookup_fetch( server_name: str, key_ids: List[str], minimum_valid_until_ts: int @@ -106,8 +113,10 @@ async def first_lookup_fetch( mock_fetcher.get_keys.side_effect = first_lookup_fetch - async def first_lookup(): - with LoggingContext("context_11", request=FakeRequest("context_11")): + async def first_lookup() -> None: + with LoggingContext( + "context_11", request=cast(ContextRequest, FakeRequest("context_11")) + ): res_deferreds = kr.verify_json_objects_for_server( [("server10", json1, 0), ("server11", {}, 0)] ) @@ -144,8 +153,10 @@ async def second_lookup_fetch( mock_fetcher.get_keys.side_effect = second_lookup_fetch second_lookup_state = [0] - async def second_lookup(): - with LoggingContext("context_12", request=FakeRequest("context_12")): + async def second_lookup() -> None: + with LoggingContext( + "context_12", request=cast(ContextRequest, FakeRequest("context_12")) + ): res_deferreds_2 = kr.verify_json_objects_for_server( [ ( @@ -175,10 +186,10 @@ async def second_lookup(): self.get_success(d0) self.get_success(d2) - def test_verify_json_for_server(self): + def test_verify_json_for_server(self) -> None: kr = keyring.Keyring(self.hs) - key1 = signedjson.key.generate_signing_key(1) + key1 = signedjson.key.generate_signing_key("1") r = self.hs.get_datastores().main.store_server_verify_keys( "server9", time.time() * 1000, @@ -186,7 +197,7 @@ def test_verify_json_for_server(self): ) self.get_success(r) - json1 = {} + json1: JsonDict = {} signedjson.sign.sign_json(json1, "server9", key1) # should fail immediately on an unsigned object @@ -198,12 +209,12 @@ def test_verify_json_for_server(self): # self.assertFalse(d.called) self.get_success(d) - def test_verify_for_local_server(self): + def test_verify_for_local_server(self) -> None: """Ensure that locally signed JSON can be verified without fetching keys over federation """ kr = keyring.Keyring(self.hs) - json1 = {} + json1: JsonDict = {} signedjson.sign.sign_json(json1, self.hs.hostname, self.hs.signing_key) # Test that verify_json_for_server succeeds on a object signed by ourselves @@ -216,22 +227,24 @@ def test_verify_for_local_server(self): { "old_signing_keys": { f"{OLD_KEY.alg}:{OLD_KEY.version}": { - "key": encode_verify_key_base64(OLD_KEY.verify_key), + "key": encode_verify_key_base64( + signedjson.key.get_verify_key(OLD_KEY) + ), "expired_ts": 1000, } } } ) - def test_verify_for_local_server_old_key(self): + def test_verify_for_local_server_old_key(self) -> None: """Can also use keys in old_signing_keys for verification""" - json1 = {} + json1: JsonDict = {} signedjson.sign.sign_json(json1, self.hs.hostname, self.OLD_KEY) kr = keyring.Keyring(self.hs) d = kr.verify_json_for_server(self.hs.hostname, json1, 0) self.get_success(d) - def test_verify_for_local_server_unknown_key(self): + def test_verify_for_local_server_unknown_key(self) -> None: """Local keys that we no longer have should be fetched via the fetcher""" # the key we'll sign things with (nb, not known to the Keyring) @@ -253,14 +266,14 @@ async def get_keys( ) # sign the json - json1 = {} + json1: JsonDict = {} signedjson.sign.sign_json(json1, self.hs.hostname, key2) # ... and check we can verify it. 
d = kr.verify_json_for_server(self.hs.hostname, json1, 0) self.get_success(d) - def test_verify_json_for_server_with_null_valid_until_ms(self): + def test_verify_json_for_server_with_null_valid_until_ms(self) -> None: """Tests that we correctly handle key requests for keys we've stored with a null `ts_valid_until_ms` """ @@ -271,15 +284,18 @@ def test_verify_json_for_server_with_null_valid_until_ms(self): self.hs, key_fetchers=(StoreKeyFetcher(self.hs), mock_fetcher) ) - key1 = signedjson.key.generate_signing_key(1) + key1 = signedjson.key.generate_signing_key("1") r = self.hs.get_datastores().main.store_server_verify_keys( "server9", time.time() * 1000, - [("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), None))], + # None is not a valid value in FetchKeyResult, but we're abusing this + # API to insert null values into the database. The nulls get converted + # to 0 when fetched in KeyStore.get_server_verify_keys. + [("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), None))], # type: ignore[arg-type] ) self.get_success(r) - json1 = {} + json1: JsonDict = {} signedjson.sign.sign_json(json1, "server9", key1) # should fail immediately on an unsigned object @@ -304,9 +320,9 @@ def test_verify_json_for_server_with_null_valid_until_ms(self): ) self.get_success(d) - def test_verify_json_dedupes_key_requests(self): + def test_verify_json_dedupes_key_requests(self) -> None: """Two requests for the same key should be deduped.""" - key1 = signedjson.key.generate_signing_key(1) + key1 = signedjson.key.generate_signing_key("1") async def get_keys( server_name: str, key_ids: List[str], minimum_valid_until_ts: int @@ -322,7 +338,7 @@ async def get_keys( mock_fetcher.get_keys = Mock(side_effect=get_keys) kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,)) - json1 = {} + json1: JsonDict = {} signedjson.sign.sign_json(json1, "server1", key1) # the first request should succeed; the second should fail because the key @@ -346,9 +362,9 @@ async def get_keys( # there should have been a single call to the fetcher mock_fetcher.get_keys.assert_called_once() - def test_verify_json_falls_back_to_other_fetchers(self): + def test_verify_json_falls_back_to_other_fetchers(self) -> None: """If the first fetcher cannot provide a recent enough key, we fall back""" - key1 = signedjson.key.generate_signing_key(1) + key1 = signedjson.key.generate_signing_key("1") async def get_keys1( server_name: str, key_ids: List[str], minimum_valid_until_ts: int @@ -372,7 +388,7 @@ async def get_keys2( mock_fetcher2.get_keys = Mock(side_effect=get_keys2) kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher1, mock_fetcher2)) - json1 = {} + json1: JsonDict = {} signedjson.sign.sign_json(json1, "server1", key1) results = kr.verify_json_objects_for_server( @@ -402,12 +418,12 @@ async def get_keys2( @logcontext_clean class ServerKeyFetcherTestCase(unittest.HomeserverTestCase): - def make_homeserver(self, reactor, clock): + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.http_client = Mock() hs = self.setup_test_homeserver(federation_http_client=self.http_client) return hs - def test_get_keys_from_server(self): + def test_get_keys_from_server(self) -> None: # arbitrarily advance the clock a bit self.reactor.advance(100) @@ -431,7 +447,7 @@ def test_get_keys_from_server(self): } signedjson.sign.sign_json(response, SERVER_NAME, testkey) - async def get_json(destination, path, **kwargs): + async def get_json(destination: str, path: str, **kwargs: Any) -> JsonDict: 
self.assertEqual(destination, SERVER_NAME) self.assertEqual(path, "/_matrix/key/v2/server") return response @@ -471,7 +487,7 @@ async def get_json(destination, path, **kwargs): class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): - def make_homeserver(self, reactor, clock): + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.mock_perspective_server = MockPerspectiveServer() self.http_client = Mock() @@ -522,7 +538,9 @@ def expect_outgoing_key_query( Tell the mock http client to expect a perspectives-server key query """ - async def post_json(destination, path, data, **kwargs): + async def post_json( + destination: str, path: str, data: JsonDict, **kwargs: Any + ) -> JsonDict: self.assertEqual(destination, self.mock_perspective_server.server_name) self.assertEqual(path, "/_matrix/key/v2/query") @@ -533,7 +551,7 @@ async def post_json(destination, path, data, **kwargs): self.http_client.post_json.side_effect = post_json - def test_get_keys_from_perspectives(self): + def test_get_keys_from_perspectives(self) -> None: # arbitrarily advance the clock a bit self.reactor.advance(100) @@ -578,7 +596,7 @@ def test_get_keys_from_perspectives(self): bytes(res["key_json"]), canonicaljson.encode_canonical_json(response) ) - def test_get_multiple_keys_from_perspectives(self): + def test_get_multiple_keys_from_perspectives(self) -> None: """Check that we can correctly request multiple keys for the same server""" fetcher = PerspectivesKeyFetcher(self.hs) @@ -606,7 +624,9 @@ def test_get_multiple_keys_from_perspectives(self): VALID_UNTIL_TS, ) - async def post_json(destination, path, data, **kwargs): + async def post_json( + destination: str, path: str, data: JsonDict, **kwargs: str + ) -> JsonDict: self.assertEqual(destination, self.mock_perspective_server.server_name) self.assertEqual(path, "/_matrix/key/v2/query") @@ -648,7 +668,7 @@ async def post_json(destination, path, data, **kwargs): # finally, ensure that only one request was sent self.assertEqual(self.http_client.post_json.call_count, 1) - def test_get_perspectives_own_key(self): + def test_get_perspectives_own_key(self) -> None: """Check that we can get the perspectives server's own keys This is slightly complicated by the fact that the perspectives server may @@ -697,7 +717,7 @@ def test_get_perspectives_own_key(self): bytes(res["key_json"]), canonicaljson.encode_canonical_json(response) ) - def test_invalid_perspectives_responses(self): + def test_invalid_perspectives_responses(self) -> None: """Check that invalid responses from the perspectives server are rejected""" # arbitrarily advance the clock a bit self.reactor.advance(100) @@ -708,12 +728,12 @@ def test_invalid_perspectives_responses(self): testverifykey_id = "ed25519:ver1" VALID_UNTIL_TS = 200 * 1000 - def build_response(): + def build_response() -> dict: return self.build_perspectives_response( SERVER_NAME, testkey, VALID_UNTIL_TS ) - def get_key_from_perspectives(response): + def get_key_from_perspectives(response: JsonDict) -> Dict[str, FetchKeyResult]: fetcher = PerspectivesKeyFetcher(self.hs) self.expect_outgoing_key_query(SERVER_NAME, "key1", response) return self.get_success(fetcher.get_keys(SERVER_NAME, ["key1"], 0)) @@ -737,6 +757,6 @@ def get_key_from_perspectives(response): self.assertEqual(keys, {}, "Expected empty dict with missing origin server sig") -def get_key_id(key): +def get_key_id(key: SigningKey) -> str: """Get the matrix ID tag for a given SigningKey or VerifyKey""" return "%s:%s" % (key.alg, key.version) diff 
--git a/tests/metrics/test_metrics.py b/tests/metrics/test_metrics.py index bddc4228bc92..7c3656d049f1 100644 --- a/tests/metrics/test_metrics.py +++ b/tests/metrics/test_metrics.py @@ -12,6 +12,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from typing import Dict, Tuple + from typing_extensions import Protocol try: @@ -22,6 +24,7 @@ from unittest.mock import patch from pkg_resources import parse_version +from prometheus_client.core import Sample from synapse.app._base import _set_prometheus_client_use_created_metrics from synapse.metrics import REGISTRY, InFlightGauge, generate_latest @@ -30,7 +33,7 @@ from tests import unittest -def get_sample_labels_value(sample): +def get_sample_labels_value(sample: Sample) -> Tuple[Dict[str, str], float]: """Extract the labels and values of a sample. prometheus_client 0.5 changed the sample type to a named tuple with more @@ -48,12 +51,15 @@ def get_sample_labels_value(sample): return sample.labels, sample.value # Otherwise fall back to treating it as a plain 3 tuple. else: - _, labels, value = sample + # In older versions of prometheus_client Sample was a 3-tuple. + labels: Dict[str, str] + value: float + _, labels, value = sample # type: ignore[misc] return labels, value class TestMauLimit(unittest.TestCase): - def test_basic(self): + def test_basic(self) -> None: class MetricEntry(Protocol): foo: int bar: int @@ -62,11 +68,11 @@ class MetricEntry(Protocol): "test1", "", labels=["test_label"], sub_metrics=["foo", "bar"] ) - def handle1(metrics): + def handle1(metrics: MetricEntry) -> None: metrics.foo += 2 metrics.bar = max(metrics.bar, 5) - def handle2(metrics): + def handle2(metrics: MetricEntry) -> None: metrics.foo += 3 metrics.bar = max(metrics.bar, 7) @@ -116,7 +122,9 @@ def handle2(metrics): self.get_metrics_from_gauge(gauge), ) - def get_metrics_from_gauge(self, gauge): + def get_metrics_from_gauge( + self, gauge: InFlightGauge + ) -> Dict[str, Dict[Tuple[str, ...], float]]: results = {} for r in gauge.collect(): @@ -129,7 +137,7 @@ def get_metrics_from_gauge(self, gauge): class BuildInfoTests(unittest.TestCase): - def test_get_build(self): + def test_get_build(self) -> None: """ The synapse_build_info metric reports the OS version, Python version, and Synapse version. @@ -147,7 +155,7 @@ def test_get_build(self): class CacheMetricsTests(unittest.HomeserverTestCase): - def test_cache_metric(self): + def test_cache_metric(self) -> None: """ Caches produce metrics reflecting their state when scraped. """ From 3854d0f94947ddd5a9ee98198af8d7ae839962c9 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 28 Dec 2022 14:48:21 +0100 Subject: [PATCH 34/82] Add a `cached` helper to the module API (#14663) --- changelog.d/14663.feature | 1 + synapse/module_api/__init__.py | 40 +++++++++++++++++++++++++++++++++- 2 files changed, 40 insertions(+), 1 deletion(-) create mode 100644 changelog.d/14663.feature diff --git a/changelog.d/14663.feature b/changelog.d/14663.feature new file mode 100644 index 000000000000..b03f3ee54e33 --- /dev/null +++ b/changelog.d/14663.feature @@ -0,0 +1 @@ +Add a `cached` function to `synapse.module_api` that returns a decorator to cache return values of functions. 
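One nit on the hunk below: the docstring's `def foo('a', 'b')` example is shorthand pseudocode, not valid Python. A runnable sketch of how a module might use the new helper follows; the module and method names here are hypothetical, and only `cached` and `ModuleApi` come from the patch itself.

```python
from synapse.module_api import ModuleApi, cached


class ExampleModule:
    """Hypothetical third-party module using the new `cached` helper."""

    def __init__(self, config: dict, api: ModuleApi):
        self._api = api

    @cached(max_entries=100)
    async def expensive_lookup(self, key: str) -> str:
        # The first call for a given `key` runs this body; later calls are
        # served from the LRU cache until the entry is evicted.
        return key.upper()
```

As the docstring notes, `self` is always ignored when building cache keys, so the decorator is intended for methods like the one above.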
diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 0092a03c59a7..6f4a934b0509 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -18,6 +18,7 @@ TYPE_CHECKING, Any, Callable, + Collection, Dict, Generator, Iterable, @@ -126,7 +127,7 @@ from synapse.types.state import StateFilter from synapse.util import Clock from synapse.util.async_helpers import maybe_awaitable -from synapse.util.caches.descriptors import CachedFunction, cached +from synapse.util.caches.descriptors import CachedFunction, cached as _cached from synapse.util.frozenutils import freeze if TYPE_CHECKING: @@ -136,6 +137,7 @@ T = TypeVar("T") P = ParamSpec("P") +F = TypeVar("F", bound=Callable[..., Any]) """ This package defines the 'stable' API which can be used by extension modules which @@ -185,6 +187,42 @@ class UserIpAndAgent: last_seen: int +def cached( + *, + max_entries: int = 1000, + num_args: Optional[int] = None, + uncached_args: Optional[Collection[str]] = None, +) -> Callable[[F], CachedFunction[F]]: + """Returns a decorator that applies a memoizing cache around the function. This + decorator behaves similarly to functools.lru_cache. + + Example: + + @cached() + def foo(a, b): + ... + + Added in Synapse v1.74.0. + + Args: + max_entries: The maximum number of entries in the cache. If the cache is full + and a new entry is added, the least recently accessed entry will be evicted + from the cache. + num_args: The number of positional arguments (excluding `self`) to use as cache + keys. Defaults to all named args of the function. + uncached_args: A list of argument names to not use as the cache key. (`self` is + always ignored.) Cannot be used with num_args. + + Returns: + A decorator that applies a memoizing cache around the function. + """ + return _cached( + max_entries=max_entries, + num_args=num_args, + uncached_args=uncached_args, + ) + + class ModuleApi: """A proxy object that gets passed to various plugin modules so they can register new users etc if necessary. From 8d20b1ba1eb072c983ae4d5e455b09195dcf6755 Mon Sep 17 00:00:00 2001 From: Vertux Date: Wed, 28 Dec 2022 16:45:28 +0100 Subject: [PATCH 35/82] Broken link "request_id_header" (#14740) * Broken link "request_id_header" The link above leads to a 404 error * Update docs/reverse_proxy.md Co-authored-by: reivilibre --- docs/reverse_proxy.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reverse_proxy.md b/docs/reverse_proxy.md index 48dbc1c58eca..06337e7c0039 100644 --- a/docs/reverse_proxy.md +++ b/docs/reverse_proxy.md @@ -46,7 +46,7 @@ when using a containerized Synapse, as that will prevent it from responding to proxied traffic.) Optionally, you can also set -[`request_id_header`](../usage/configuration/config_documentation.md#listeners) +[`request_id_header`](./usage/configuration/config_documentation.md#listeners) so that the server extracts and re-uses the same request ID format that the reverse proxy is using. From 46993770e56f11a6daa0d338d7f4d87a2b43d0c1 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Wed, 28 Dec 2022 17:23:19 +0000 Subject: [PATCH 36/82] Suppress the update check in the ruff linter.
(#14741) * Suppress update check in ruff * Newsfile Signed-off-by: Olivier Wilkinson (reivilibre) Signed-off-by: Olivier Wilkinson (reivilibre) --- changelog.d/14741.misc | 1 + scripts-dev/lint.sh | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog.d/14741.misc diff --git a/changelog.d/14741.misc b/changelog.d/14741.misc new file mode 100644 index 000000000000..def187b12b48 --- /dev/null +++ b/changelog.d/14741.misc @@ -0,0 +1 @@ +Use [ruff](https://github.com/charliermarsh/ruff/) instead of flake8. diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh index f6b81013c306..2bf58ac5d4a0 100755 --- a/scripts-dev/lint.sh +++ b/scripts-dev/lint.sh @@ -104,6 +104,7 @@ set -x isort "${files[@]}" python3 -m black "${files[@]}" ./scripts-dev/config-lint.sh -ruff "${files[@]}" +# --quiet suppresses the update check. +ruff --quiet "${files[@]}" ./scripts-dev/check_pydantic_models.py lint mypy From 9aaf27b42a429182ac1c4c8c3a5fc883a6143c12 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 28 Dec 2022 17:53:11 +0000 Subject: [PATCH 37/82] Bump towncrier from 22.8.0 to 22.12.0 (#14732) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: reivilibre --- changelog.d/14732.misc | 1 + poetry.lock | 12 ++++++------ 2 files changed, 7 insertions(+), 6 deletions(-) create mode 100644 changelog.d/14732.misc diff --git a/changelog.d/14732.misc b/changelog.d/14732.misc new file mode 100644 index 000000000000..308858e841dc --- /dev/null +++ b/changelog.d/14732.misc @@ -0,0 +1 @@ +Bump towncrier from 22.8.0 to 22.12.0. diff --git a/poetry.lock b/poetry.lock index c83cad3e1a79..4cdd5d85dd84 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1195,7 +1195,7 @@ python-versions = ">= 3.5" [[package]] name = "towncrier" -version = "22.8.0" +version = "22.12.0" description = "Building newsfiles for your project." 
category = "dev" optional = false @@ -1207,10 +1207,10 @@ click-default-group = "*" incremental = "*" jinja2 = "*" setuptools = "*" -tomli = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} [package.extras] -dev = ["packaging"] +dev = ["furo", "packaging", "sphinx (>=5)", "twisted"] [[package]] name = "treq" @@ -1578,7 +1578,7 @@ user-search = ["pyicu"] [metadata] lock-version = "1.1" python-versions = "^3.7.1" -content-hash = "d20b6aea682a74e6a161080bb459e73160b8eb79526f5d17a525639ac3fe3e9e" +content-hash = "a8fc81be719e55ce60792ba0393e35592582e748b99ff79024b977ce6357a13e" [metadata.files] attrs = [ @@ -2660,8 +2660,8 @@ tornado = [ {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, ] towncrier = [ - {file = "towncrier-22.8.0-py2.py3-none-any.whl", hash = "sha256:3b780c3d966e1b26414830aec3d15000654b31e64e024f3e5fd128b4c6eb8f47"}, - {file = "towncrier-22.8.0.tar.gz", hash = "sha256:7d3839b033859b45fb55df82b74cfd702431933c0cc9f287a5a7ea3e05d042cb"}, + {file = "towncrier-22.12.0-py3-none-any.whl", hash = "sha256:9767a899a4d6856950f3598acd9e8f08da2663c49fdcda5ea0f9e6ba2afc8eea"}, + {file = "towncrier-22.12.0.tar.gz", hash = "sha256:9c49d7e75f646a9aea02ae904c0bc1639c8fd14a01292d2b123b8d307564034d"}, ] treq = [ {file = "treq-22.2.0-py3-none-any.whl", hash = "sha256:27d95b07c5c14be3e7b280416139b036087617ad5595be913b1f9b3ce981b9b2"}, From ee0e00a2004cc65627bc6bd13d38665a7105375d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 28 Dec 2022 17:53:18 +0000 Subject: [PATCH 38/82] Bump sentry-sdk from 1.12.0 to 1.12.1 (#14736) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: reivilibre --- changelog.d/14736.misc | 1 + poetry.lock | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/14736.misc diff --git a/changelog.d/14736.misc b/changelog.d/14736.misc new file mode 100644 index 000000000000..458d5accdfb4 --- /dev/null +++ b/changelog.d/14736.misc @@ -0,0 +1 @@ +Bump sentry-sdk from 1.12.0 to 1.12.1. 
diff --git a/poetry.lock b/poetry.lock index 4cdd5d85dd84..ea9369be9b44 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1013,7 +1013,7 @@ doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "sentry-sdk" -version = "1.12.0" +version = "1.12.1" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = true @@ -2508,8 +2508,8 @@ semantic-version = [ {file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"}, ] sentry-sdk = [ - {file = "sentry-sdk-1.12.0.tar.gz", hash = "sha256:dc0fe6ef2f77a3853b399c75c97d87be7666098817c1c314f8fcdf68a6865914"}, - {file = "sentry_sdk-1.12.0-py2.py3-none-any.whl", hash = "sha256:3c9bc64025976842c1103cd75d45cff94a7c0cc48fa07770d07a09d2ab8dac30"}, + {file = "sentry-sdk-1.12.1.tar.gz", hash = "sha256:5bbe4b72de22f9ac1e67f2a4e6efe8fbd595bb59b7b223443f50fe5802a5551c"}, + {file = "sentry_sdk-1.12.1-py2.py3-none-any.whl", hash = "sha256:9f0b960694e2d8bb04db4ba6ac2a645040caef4e762c65937998ff06064f10d6"}, ] service-identity = [ {file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"}, From ba2d38f22dddd3bb5910450424487311732f4090 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 28 Dec 2022 17:53:25 +0000 Subject: [PATCH 39/82] Bump black from 22.10.0 to 22.12.0 (#14735) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: reivilibre --- changelog.d/14735.misc | 1 + poetry.lock | 35 +++++++++++++---------------------- 2 files changed, 14 insertions(+), 22 deletions(-) create mode 100644 changelog.d/14735.misc diff --git a/changelog.d/14735.misc b/changelog.d/14735.misc new file mode 100644 index 000000000000..76b6c1e29d1c --- /dev/null +++ b/changelog.d/14735.misc @@ -0,0 +1 @@ +Bump black from 22.10.0 to 22.12.0. diff --git a/poetry.lock b/poetry.lock index ea9369be9b44..07d9ba218dd1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -52,7 +52,7 @@ typecheck = ["mypy"] [[package]] name = "black" -version = "22.10.0" +version = "22.12.0" description = "The uncompromising code formatter." 
category = "dev" optional = false @@ -1617,27 +1617,18 @@ bcrypt = [ {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, ] black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, ] bleach = [ {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"}, From 8ea6fd8d0bdb325a59b4b09be3dd289ae6767273 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 29 Dec 2022 10:48:39 +0100 Subject: [PATCH 40/82] Bump setuptools from 65.3.0 to 65.5.1 (#14738) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: reivilibre --- changelog.d/14738.misc | 1 + poetry.lock | 10 +++++----- 2 files changed, 6 insertions(+), 5 deletions(-) create mode 100644 changelog.d/14738.misc diff --git a/changelog.d/14738.misc b/changelog.d/14738.misc new file mode 100644 index 000000000000..9530b7075cc2 --- /dev/null +++ b/changelog.d/14738.misc @@ -0,0 +1 @@ +Bump setuptools from 65.3.0 to 65.5.1. 
diff --git a/poetry.lock b/poetry.lock index 07d9ba218dd1..6b5265bf1ceb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1068,15 +1068,15 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setuptools" -version = "65.3.0" +version = "65.5.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -2507,8 +2507,8 @@ service-identity = [ {file = "service_identity-21.1.0-py2.py3-none-any.whl", hash = "sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db"}, ] setuptools = [ - {file = "setuptools-65.3.0-py3-none-any.whl", hash = "sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82"}, - {file = "setuptools-65.3.0.tar.gz", hash = "sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57"}, + {file = "setuptools-65.5.1-py3-none-any.whl", hash = "sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31"}, + {file = "setuptools-65.5.1.tar.gz", hash = "sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"}, ] setuptools-rust = [ {file = "setuptools-rust-1.5.2.tar.gz", hash = "sha256:d8daccb14dc0eae1b6b6eb3ecef79675bd37b4065369f79c35393dd5c55652c7"}, From 368ad7c5c739054a463742fc035a80b23301cd2d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 29 Dec 2022 10:49:30 +0100 Subject: [PATCH 41/82] Bump isort from 5.10.1 to 5.11.4 (#14733) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: reivilibre --- changelog.d/14733.misc | 1 + poetry.lock | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelog.d/14733.misc diff --git a/changelog.d/14733.misc 
b/changelog.d/14733.misc new file mode 100644 index 000000000000..53afc6c00e52 --- /dev/null +++ b/changelog.d/14733.misc @@ -0,0 +1 @@ +Bump isort from 5.10.1 to 5.11.4. diff --git a/poetry.lock b/poetry.lock index 6b5265bf1ceb..845879f7bbfb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -354,11 +354,11 @@ scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] [[package]] name = "isort" -version = "5.10.1" +version = "5.11.4" description = "A Python utility / library to sort Python imports." category = "dev" optional = false -python-versions = ">=3.6.1,<4.0" +python-versions = ">=3.7.0" [package.extras] colors = ["colorama (>=0.4.3,<0.5.0)"] @@ -1916,8 +1916,8 @@ incremental = [ {file = "incremental-21.3.0.tar.gz", hash = "sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57"}, ] isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, + {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"}, + {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"}, ] jaeger-client = [ {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"}, From eb9ae47799912a77a309348611fead94cb269147 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 29 Dec 2022 11:21:56 +0100 Subject: [PATCH 42/82] Bump attrs from 22.1.0 to 22.2.0 (#14734) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: reivilibre --- changelog.d/14734.misc | 1 + poetry.lock | 17 +++++++++-------- 2 files changed, 10 insertions(+), 8 deletions(-) create mode 100644 changelog.d/14734.misc diff --git a/changelog.d/14734.misc b/changelog.d/14734.misc new file mode 100644 index 000000000000..06b24e7d8cc2 --- /dev/null +++ b/changelog.d/14734.misc @@ -0,0 +1 @@ +Bump attrs from 22.1.0 to 22.2.0. 
diff --git a/poetry.lock b/poetry.lock index 845879f7bbfb..8b864dd6f1c6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,16 +1,17 @@ [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] [[package]] name = "authlib" @@ -1582,8 +1583,8 @@ content-hash = "a8fc81be719e55ce60792ba0393e35592582e748b99ff79024b977ce6357a13e [metadata.files] attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, ] authlib = [ {file = "Authlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:4ddf4fd6cfa75c9a460b361d4bd9dac71ffda0be879dbe4292a02e92349ad55a"}, From 044fa1a1de3c954f247a98c0ce8f734c675a5efb Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Thu, 29 Dec 2022 12:18:06 -0500 Subject: [PATCH 43/82] Actually use the picture_claim as configured in OIDC config. (#14751) Previously it was only using the default value ("picture") when fetching the picture from the user info. --- changelog.d/14751.bugfix | 1 + synapse/handlers/oidc.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/14751.bugfix diff --git a/changelog.d/14751.bugfix b/changelog.d/14751.bugfix new file mode 100644 index 000000000000..56ef8522881a --- /dev/null +++ b/changelog.d/14751.bugfix @@ -0,0 +1 @@ +Fix a bug introduced in Synapse 1.73.0 where the `picture_claim` configured under `oidc_providers` was unused (the default value of `"picture"` was used instead). 
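To illustrate the effect of the one-line fix that follows, here is a small self-contained sketch; the claim name, config class, and userinfo payload are invented for the example:

    # Hypothetical userinfo returned by an OIDC provider that exposes the
    # avatar under a non-default claim name.
    userinfo = {"sub": "alice", "avatar_url": "https://cdn.example.com/alice.png"}

    class ExampleOidcConfig:
        # Corresponds to setting `picture_claim: avatar_url` under `oidc_providers`.
        picture_claim = "avatar_url"

    config = ExampleOidcConfig()

    broken = userinfo.get("picture")            # old behaviour: ignores the config, yields None
    fixed = userinfo.get(config.picture_claim)  # new behaviour: honours the configured claim

    assert broken is None
    assert fixed == "https://cdn.example.com/alice.png"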
diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py index 03de6a4ba637..23fb00c9c987 100644 --- a/synapse/handlers/oidc.py +++ b/synapse/handlers/oidc.py @@ -1615,7 +1615,7 @@ def render_template_field(template: Optional[Template]) -> Optional[str]: if email: emails.append(email) - picture = userinfo.get("picture") + picture = userinfo.get(self._config.picture_claim) return UserAttributeDict( localpart=localpart, From c4456114e1a5471bb61cb45605e782263dc8233c Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Sun, 1 Jan 2023 03:40:46 +0000 Subject: [PATCH 44/82] Add experimental support for MSC3391: deleting account data (#14714) --- changelog.d/14714.feature | 1 + .../conf/workers-shared-extra.yaml.j2 | 2 + scripts-dev/complement.sh | 2 +- synapse/config/experimental.py | 3 + synapse/handlers/account_data.py | 111 ++++++++- synapse/replication/http/account_data.py | 92 +++++++- synapse/rest/client/account_data.py | 115 +++++++++ synapse/storage/database.py | 33 ++- .../storage/databases/main/account_data.py | 219 ++++++++++++++++-- 9 files changed, 547 insertions(+), 31 deletions(-) create mode 100644 changelog.d/14714.feature diff --git a/changelog.d/14714.feature b/changelog.d/14714.feature new file mode 100644 index 000000000000..5f3a20b7a733 --- /dev/null +++ b/changelog.d/14714.feature @@ -0,0 +1 @@ +Add experimental support for [MSC3391](https://github.com/matrix-org/matrix-spec-proposals/pull/3391) (removing account data). \ No newline at end of file diff --git a/docker/complement/conf/workers-shared-extra.yaml.j2 b/docker/complement/conf/workers-shared-extra.yaml.j2 index ca640c343be7..cb839fed078d 100644 --- a/docker/complement/conf/workers-shared-extra.yaml.j2 +++ b/docker/complement/conf/workers-shared-extra.yaml.j2 @@ -102,6 +102,8 @@ experimental_features: {% endif %} # Filtering /messages by relation type. msc3874_enabled: true + # Enable removing account data support + msc3391_enabled: true server_notices: system_mxid_localpart: _server diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh index 8741ba3e34b2..51d1bac6183c 100755 --- a/scripts-dev/complement.sh +++ b/scripts-dev/complement.sh @@ -190,7 +190,7 @@ fi extra_test_args=() -test_tags="synapse_blacklist,msc3787,msc3874" +test_tags="synapse_blacklist,msc3787,msc3874,msc3391" # All environment variables starting with PASS_ will be shared. # (The prefix is stripped off before reaching the container.) diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index 573fa0386fa7..0f3870bfe18e 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -136,3 +136,6 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: # Enable room version (and thus applicable push rules from MSC3931/3932) version_id = RoomVersions.MSC1767v10.identifier KNOWN_ROOM_VERSIONS[version_id] = RoomVersions.MSC1767v10 + + # MSC3391: Removing account data. 
+ self.msc3391_enabled = experimental.get("msc3391_enabled", False) diff --git a/synapse/handlers/account_data.py b/synapse/handlers/account_data.py index fc21d5800159..aba7315cf730 100644 --- a/synapse/handlers/account_data.py +++ b/synapse/handlers/account_data.py @@ -17,10 +17,12 @@ from typing import TYPE_CHECKING, Awaitable, Callable, Collection, List, Optional, Tuple from synapse.replication.http.account_data import ( + ReplicationAddRoomAccountDataRestServlet, ReplicationAddTagRestServlet, + ReplicationAddUserAccountDataRestServlet, + ReplicationRemoveRoomAccountDataRestServlet, ReplicationRemoveTagRestServlet, - ReplicationRoomAccountDataRestServlet, - ReplicationUserAccountDataRestServlet, + ReplicationRemoveUserAccountDataRestServlet, ) from synapse.streams import EventSource from synapse.types import JsonDict, StreamKeyType, UserID @@ -41,8 +43,18 @@ def __init__(self, hs: "HomeServer"): self._instance_name = hs.get_instance_name() self._notifier = hs.get_notifier() - self._user_data_client = ReplicationUserAccountDataRestServlet.make_client(hs) - self._room_data_client = ReplicationRoomAccountDataRestServlet.make_client(hs) + self._add_user_data_client = ( + ReplicationAddUserAccountDataRestServlet.make_client(hs) + ) + self._remove_user_data_client = ( + ReplicationRemoveUserAccountDataRestServlet.make_client(hs) + ) + self._add_room_data_client = ( + ReplicationAddRoomAccountDataRestServlet.make_client(hs) + ) + self._remove_room_data_client = ( + ReplicationRemoveRoomAccountDataRestServlet.make_client(hs) + ) self._add_tag_client = ReplicationAddTagRestServlet.make_client(hs) self._remove_tag_client = ReplicationRemoveTagRestServlet.make_client(hs) self._account_data_writers = hs.config.worker.writers.account_data @@ -112,7 +124,7 @@ async def add_account_data_to_room( return max_stream_id else: - response = await self._room_data_client( + response = await self._add_room_data_client( instance_name=random.choice(self._account_data_writers), user_id=user_id, room_id=room_id, @@ -121,15 +133,59 @@ async def add_account_data_to_room( ) return response["max_stream_id"] + async def remove_account_data_for_room( + self, user_id: str, room_id: str, account_data_type: str + ) -> Optional[int]: + """ + Deletes the room account data for the given user and account data type. + + "Deleting" account data merely means setting the content of the account data + to an empty JSON object: {}. + + Args: + user_id: The user ID to remove room account data for. + room_id: The room ID to target. + account_data_type: The account data type to remove. + + Returns: + The maximum stream ID, or None if the room account data item did not exist. + """ + if self._instance_name in self._account_data_writers: + max_stream_id = await self._store.remove_account_data_for_room( + user_id, room_id, account_data_type + ) + if max_stream_id is None: + # The referenced account data did not exist, so no delete occurred. + return None + + self._notifier.on_new_event( + StreamKeyType.ACCOUNT_DATA, max_stream_id, users=[user_id] + ) + + # Notify Synapse modules that the content of the type has changed to an + # empty dictionary. 
+ await self._notify_modules(user_id, room_id, account_data_type, {}) + + return max_stream_id + else: + response = await self._remove_room_data_client( + instance_name=random.choice(self._account_data_writers), + user_id=user_id, + room_id=room_id, + account_data_type=account_data_type, + content={}, + ) + return response["max_stream_id"] + async def add_account_data_for_user( self, user_id: str, account_data_type: str, content: JsonDict ) -> int: """Add some global account_data for a user. Args: - user_id: The user to add a tag for. + user_id: The user to add some account data for. account_data_type: The type of account_data to add. - content: A json object to associate with the tag. + content: The content json dictionary. Returns: The maximum stream ID. @@ -148,7 +204,7 @@ async def add_account_data_for_user( return max_stream_id else: - response = await self._user_data_client( + response = await self._add_user_data_client( instance_name=random.choice(self._account_data_writers), user_id=user_id, account_data_type=account_data_type, @@ -156,6 +212,45 @@ async def add_account_data_for_user( ) return response["max_stream_id"] + async def remove_account_data_for_user( + self, user_id: str, account_data_type: str + ) -> Optional[int]: + """Removes a piece of global account_data for a user. + + Args: + user_id: The user to remove account data for. + account_data_type: The type of account_data to remove. + + Returns: + The maximum stream ID, or None if the room account data item did not exist. + """ + + if self._instance_name in self._account_data_writers: + max_stream_id = await self._store.remove_account_data_for_user( + user_id, account_data_type + ) + if max_stream_id is None: + # The referenced account data did not exist, so no delete occurred. + return None + + self._notifier.on_new_event( + StreamKeyType.ACCOUNT_DATA, max_stream_id, users=[user_id] + ) + + # Notify Synapse modules that the content of the type has changed to an + # empty dictionary. + await self._notify_modules(user_id, None, account_data_type, {}) + + return max_stream_id + else: + response = await self._remove_user_data_client( + instance_name=random.choice(self._account_data_writers), + user_id=user_id, + account_data_type=account_data_type, + content={}, + ) + return response["max_stream_id"] + async def add_tag_to_room( self, user_id: str, room_id: str, tag: str, content: JsonDict ) -> int: diff --git a/synapse/replication/http/account_data.py b/synapse/replication/http/account_data.py index 310f60915324..0edc95977b3a 100644 --- a/synapse/replication/http/account_data.py +++ b/synapse/replication/http/account_data.py @@ -28,7 +28,7 @@ logger = logging.getLogger(__name__) -class ReplicationUserAccountDataRestServlet(ReplicationEndpoint): +class ReplicationAddUserAccountDataRestServlet(ReplicationEndpoint): """Add user account data on the appropriate account data worker. Request format: @@ -49,7 +49,6 @@ def __init__(self, hs: "HomeServer"): super().__init__(hs) self.handler = hs.get_account_data_handler() - self.clock = hs.get_clock() @staticmethod async def _serialize_payload( # type: ignore[override] @@ -73,7 +72,45 @@ async def _handle_request( # type: ignore[override] return 200, {"max_stream_id": max_stream_id} -class ReplicationRoomAccountDataRestServlet(ReplicationEndpoint): +class ReplicationRemoveUserAccountDataRestServlet(ReplicationEndpoint): + """Remove user account data on the appropriate account data worker. 
+ + Request format: + + POST /_synapse/replication/remove_user_account_data/:user_id/:type + + { + "content": { ... }, + } + + """ + + NAME = "remove_user_account_data" + PATH_ARGS = ("user_id", "account_data_type") + CACHE = False + + def __init__(self, hs: "HomeServer"): + super().__init__(hs) + + self.handler = hs.get_account_data_handler() + + @staticmethod + async def _serialize_payload( # type: ignore[override] + user_id: str, account_data_type: str + ) -> JsonDict: + return {} + + async def _handle_request( # type: ignore[override] + self, request: Request, user_id: str, account_data_type: str + ) -> Tuple[int, JsonDict]: + max_stream_id = await self.handler.remove_account_data_for_user( + user_id, account_data_type + ) + + return 200, {"max_stream_id": max_stream_id} + + +class ReplicationAddRoomAccountDataRestServlet(ReplicationEndpoint): """Add room account data on the appropriate account data worker. Request format: @@ -94,7 +131,6 @@ def __init__(self, hs: "HomeServer"): super().__init__(hs) self.handler = hs.get_account_data_handler() - self.clock = hs.get_clock() @staticmethod async def _serialize_payload( # type: ignore[override] @@ -118,6 +154,44 @@ async def _handle_request( # type: ignore[override] return 200, {"max_stream_id": max_stream_id} +class ReplicationRemoveRoomAccountDataRestServlet(ReplicationEndpoint): + """Remove room account data on the appropriate account data worker. + + Request format: + + POST /_synapse/replication/remove_room_account_data/:user_id/:room_id/:account_data_type + + { + "content": { ... }, + } + + """ + + NAME = "remove_room_account_data" + PATH_ARGS = ("user_id", "room_id", "account_data_type") + CACHE = False + + def __init__(self, hs: "HomeServer"): + super().__init__(hs) + + self.handler = hs.get_account_data_handler() + + @staticmethod + async def _serialize_payload( # type: ignore[override] + user_id: str, room_id: str, account_data_type: str, content: JsonDict + ) -> JsonDict: + return {} + + async def _handle_request( # type: ignore[override] + self, request: Request, user_id: str, room_id: str, account_data_type: str + ) -> Tuple[int, JsonDict]: + max_stream_id = await self.handler.remove_account_data_for_room( + user_id, room_id, account_data_type + ) + + return 200, {"max_stream_id": max_stream_id} + + class ReplicationAddTagRestServlet(ReplicationEndpoint): """Add tag on the appropriate account data worker. 
@@ -139,7 +213,6 @@ def __init__(self, hs: "HomeServer"): super().__init__(hs) self.handler = hs.get_account_data_handler() - self.clock = hs.get_clock() @staticmethod async def _serialize_payload( # type: ignore[override] @@ -186,7 +259,6 @@ def __init__(self, hs: "HomeServer"): super().__init__(hs) self.handler = hs.get_account_data_handler() - self.clock = hs.get_clock() @staticmethod async def _serialize_payload(user_id: str, room_id: str, tag: str) -> JsonDict: # type: ignore[override] @@ -206,7 +278,11 @@ async def _handle_request( # type: ignore[override] def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - ReplicationUserAccountDataRestServlet(hs).register(http_server) - ReplicationRoomAccountDataRestServlet(hs).register(http_server) + ReplicationAddUserAccountDataRestServlet(hs).register(http_server) + ReplicationAddRoomAccountDataRestServlet(hs).register(http_server) ReplicationAddTagRestServlet(hs).register(http_server) ReplicationRemoveTagRestServlet(hs).register(http_server) + + if hs.config.experimental.msc3391_enabled: + ReplicationRemoveUserAccountDataRestServlet(hs).register(http_server) + ReplicationRemoveRoomAccountDataRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/account_data.py b/synapse/rest/client/account_data.py index f13970b8980a..e805196fec06 100644 --- a/synapse/rest/client/account_data.py +++ b/synapse/rest/client/account_data.py @@ -41,6 +41,7 @@ class AccountDataServlet(RestServlet): def __init__(self, hs: "HomeServer"): super().__init__() + self._hs = hs self.auth = hs.get_auth() self.store = hs.get_datastores().main self.handler = hs.get_account_data_handler() @@ -54,6 +55,16 @@ async def on_PUT( body = parse_json_object_from_request(request) + # If experimental support for MSC3391 is enabled, then providing an empty dict + # as the value for an account data type should be functionally equivalent to + # calling the DELETE method on the same type. + if self._hs.config.experimental.msc3391_enabled: + if body == {}: + await self.handler.remove_account_data_for_user( + user_id, account_data_type + ) + return 200, {} + await self.handler.add_account_data_for_user(user_id, account_data_type, body) return 200, {} @@ -72,9 +83,48 @@ async def on_GET( if event is None: raise NotFoundError("Account data not found") + # If experimental support for MSC3391 is enabled, then this endpoint should + # return a 404 if the content for an account data type is an empty dict. + if self._hs.config.experimental.msc3391_enabled and event == {}: + raise NotFoundError("Account data not found") + return 200, event +class UnstableAccountDataServlet(RestServlet): + """ + Contains an unstable endpoint for removing user account data, as specified by + MSC3391. If that MSC is accepted, this code should have unstable prefixes removed + and become incorporated into AccountDataServlet above. 
+ """ + + PATTERNS = client_patterns( + "/org.matrix.msc3391/user/(?P[^/]*)" + "/account_data/(?P[^/]*)", + unstable=True, + releases=(), + ) + + def __init__(self, hs: "HomeServer"): + super().__init__() + self.auth = hs.get_auth() + self.handler = hs.get_account_data_handler() + + async def on_DELETE( + self, + request: SynapseRequest, + user_id: str, + account_data_type: str, + ) -> Tuple[int, JsonDict]: + requester = await self.auth.get_user_by_req(request) + if user_id != requester.user.to_string(): + raise AuthError(403, "Cannot delete account data for other users.") + + await self.handler.remove_account_data_for_user(user_id, account_data_type) + + return 200, {} + + class RoomAccountDataServlet(RestServlet): """ PUT /user/{user_id}/rooms/{room_id}/account_data/{account_dataType} HTTP/1.1 @@ -89,6 +139,7 @@ class RoomAccountDataServlet(RestServlet): def __init__(self, hs: "HomeServer"): super().__init__() + self._hs = hs self.auth = hs.get_auth() self.store = hs.get_datastores().main self.handler = hs.get_account_data_handler() @@ -121,6 +172,16 @@ async def on_PUT( Codes.BAD_JSON, ) + # If experimental support for MSC3391 is enabled, then providing an empty dict + # as the value for an account data type should be functionally equivalent to + # calling the DELETE method on the same type. + if self._hs.config.experimental.msc3391_enabled: + if body == {}: + await self.handler.remove_account_data_for_room( + user_id, room_id, account_data_type + ) + return 200, {} + await self.handler.add_account_data_to_room( user_id, room_id, account_data_type, body ) @@ -152,9 +213,63 @@ async def on_GET( if event is None: raise NotFoundError("Room account data not found") + # If experimental support for MSC3391 is enabled, then this endpoint should + # return a 404 if the content for an account data type is an empty dict. + if self._hs.config.experimental.msc3391_enabled and event == {}: + raise NotFoundError("Room account data not found") + return 200, event +class UnstableRoomAccountDataServlet(RestServlet): + """ + Contains an unstable endpoint for removing room account data, as specified by + MSC3391. If that MSC is accepted, this code should have unstable prefixes removed + and become incorporated into RoomAccountDataServlet above. 
+ """ + + PATTERNS = client_patterns( + "/org.matrix.msc3391/user/(?P[^/]*)" + "/rooms/(?P[^/]*)" + "/account_data/(?P[^/]*)", + unstable=True, + releases=(), + ) + + def __init__(self, hs: "HomeServer"): + super().__init__() + self.auth = hs.get_auth() + self.handler = hs.get_account_data_handler() + + async def on_DELETE( + self, + request: SynapseRequest, + user_id: str, + room_id: str, + account_data_type: str, + ) -> Tuple[int, JsonDict]: + requester = await self.auth.get_user_by_req(request) + if user_id != requester.user.to_string(): + raise AuthError(403, "Cannot delete account data for other users.") + + if not RoomID.is_valid(room_id): + raise SynapseError( + 400, + f"{room_id} is not a valid room ID", + Codes.INVALID_PARAM, + ) + + await self.handler.remove_account_data_for_room( + user_id, room_id, account_data_type + ) + + return 200, {} + + def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: AccountDataServlet(hs).register(http_server) RoomAccountDataServlet(hs).register(http_server) + + if hs.config.experimental.msc3391_enabled: + UnstableAccountDataServlet(hs).register(http_server) + UnstableRoomAccountDataServlet(hs).register(http_server) diff --git a/synapse/storage/database.py b/synapse/storage/database.py index 0b29e67b9447..88479a16db0c 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -1762,7 +1762,8 @@ async def simple_select_list( desc: description of the transaction, for logging and metrics Returns: - A list of dictionaries. + A list of dictionaries, one per result row, each a mapping between the + column names from `retcols` and that column's value for the row. """ return await self.runInteraction( desc, @@ -1791,6 +1792,10 @@ def simple_select_list_txn( column names and values to select the rows with, or None to not apply a WHERE clause. retcols: the names of the columns to return + + Returns: + A list of dictionaries, one per result row, each a mapping between the + column names from `retcols` and that column's value for the row. """ if keyvalues: sql = "SELECT %s FROM %s WHERE %s" % ( @@ -1898,6 +1903,19 @@ async def simple_update( updatevalues: Dict[str, Any], desc: str, ) -> int: + """ + Update rows in the given database table. + If the given keyvalues don't match anything, nothing will be updated. + + Args: + table: The database table to update. + keyvalues: A mapping of column name to value to match rows on. + updatevalues: A mapping of column name to value to replace in any matched rows. + desc: description of the transaction, for logging and metrics. + + Returns: + The number of rows that were updated. Will be 0 if no matching rows were found. + """ return await self.runInteraction( desc, self.simple_update_txn, table, keyvalues, updatevalues ) @@ -1909,6 +1927,19 @@ def simple_update_txn( keyvalues: Dict[str, Any], updatevalues: Dict[str, Any], ) -> int: + """ + Update rows in the given database table. + If the given keyvalues don't match anything, nothing will be updated. + + Args: + txn: The database transaction object. + table: The database table to update. + keyvalues: A mapping of column name to value to match rows on. + updatevalues: A mapping of column name to value to replace in any matched rows. + + Returns: + The number of rows that were updated. Will be 0 if no matching rows were found. + """ if keyvalues: where = "WHERE %s" % " AND ".join("%s = ?" 
% k for k in keyvalues.keys()) else: diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py index 07908c41d9ca..e59776f4349f 100644 --- a/synapse/storage/databases/main/account_data.py +++ b/synapse/storage/databases/main/account_data.py @@ -123,7 +123,11 @@ def get_max_account_data_stream_id(self) -> int: async def get_account_data_for_user( self, user_id: str ) -> Tuple[Dict[str, JsonDict], Dict[str, Dict[str, JsonDict]]]: - """Get all the client account_data for a user. + """ + Get all the client account_data for a user. + + If experimental MSC3391 support is enabled, any entries with an empty + content body are excluded; as this means they have been deleted. Args: user_id: The user to get the account_data for. @@ -135,27 +139,48 @@ async def get_account_data_for_user( def get_account_data_for_user_txn( txn: LoggingTransaction, ) -> Tuple[Dict[str, JsonDict], Dict[str, Dict[str, JsonDict]]]: - rows = self.db_pool.simple_select_list_txn( - txn, - "account_data", - {"user_id": user_id}, - ["account_data_type", "content"], - ) + # The 'content != '{}' condition below prevents us from using + # `simple_select_list_txn` here, as it doesn't support conditions + # other than 'equals'. + sql = """ + SELECT account_data_type, content FROM account_data + WHERE user_id = ? + """ + + # If experimental MSC3391 support is enabled, then account data entries + # with an empty content are considered "deleted". So skip adding them to + # the results. + if self.hs.config.experimental.msc3391_enabled: + sql += " AND content != '{}'" + + txn.execute(sql, (user_id,)) + rows = self.db_pool.cursor_to_dict(txn) global_account_data = { row["account_data_type"]: db_to_json(row["content"]) for row in rows } - rows = self.db_pool.simple_select_list_txn( - txn, - "room_account_data", - {"user_id": user_id}, - ["room_id", "account_data_type", "content"], - ) + # The 'content != '{}' condition below prevents us from using + # `simple_select_list_txn` here, as it doesn't support conditions + # other than 'equals'. + sql = """ + SELECT room_id, account_data_type, content FROM room_account_data + WHERE user_id = ? + """ + + # If experimental MSC3391 support is enabled, then account data entries + # with an empty content are considered "deleted". So skip adding them to + # the results. + if self.hs.config.experimental.msc3391_enabled: + sql += " AND content != '{}'" + + txn.execute(sql, (user_id,)) + rows = self.db_pool.cursor_to_dict(txn) by_room: Dict[str, Dict[str, JsonDict]] = {} for row in rows: room_data = by_room.setdefault(row["room_id"], {}) + room_data[row["account_data_type"]] = db_to_json(row["content"]) return global_account_data, by_room @@ -469,6 +494,72 @@ async def add_account_data_to_room( return self._account_data_id_gen.get_current_token() + async def remove_account_data_for_room( + self, user_id: str, room_id: str, account_data_type: str + ) -> Optional[int]: + """Delete the room account data for the user of a given type. + + Args: + user_id: The user to remove account_data for. + room_id: The room ID to scope the request to. + account_data_type: The account data type to delete. + + Returns: + The maximum stream position, or None if there was no matching room account + data to delete. + """ + assert self._can_write_to_account_data + assert isinstance(self._account_data_id_gen, AbstractStreamIdGenerator) + + def _remove_account_data_for_room_txn( + txn: LoggingTransaction, next_id: int + ) -> bool: + """ + Args: + txn: The transaction object. 
+ next_id: The stream_id to update any existing rows to. + + Returns: + True if an entry in room_account_data had its content set to '{}', + otherwise False. This informs callers of whether there actually was an + existing room account data entry to delete, or if the call was a no-op. + """ + # We can't use `simple_update` as it doesn't have the ability to specify + # where clauses other than '=', which we need for `content != '{}'` below. + sql = """ + UPDATE room_account_data + SET stream_id = ?, content = '{}' + WHERE user_id = ? + AND room_id = ? + AND account_data_type = ? + AND content != '{}' + """ + txn.execute( + sql, + (next_id, user_id, room_id, account_data_type), + ) + # Return true if any rows were updated. + return txn.rowcount != 0 + + async with self._account_data_id_gen.get_next() as next_id: + row_updated = await self.db_pool.runInteraction( + "remove_account_data_for_room", + _remove_account_data_for_room_txn, + next_id, + ) + + if not row_updated: + return None + + self._account_data_stream_cache.entity_has_changed(user_id, next_id) + self.get_account_data_for_user.invalidate((user_id,)) + self.get_account_data_for_room.invalidate((user_id, room_id)) + self.get_account_data_for_room_and_type.prefill( + (user_id, room_id, account_data_type), {} + ) + + return self._account_data_id_gen.get_current_token() + async def add_account_data_for_user( self, user_id: str, account_data_type: str, content: JsonDict ) -> int: @@ -569,6 +660,108 @@ def _add_account_data_for_user( self._invalidate_cache_and_stream(txn, self.ignored_by, (ignored_user_id,)) self._invalidate_cache_and_stream(txn, self.ignored_users, (user_id,)) + async def remove_account_data_for_user( + self, + user_id: str, + account_data_type: str, + ) -> Optional[int]: + """ + Delete a single piece of user account data by type. + + A "delete" is performed by updating a potentially existing row in the + "account_data" database table for (user_id, account_data_type) and + setting its content to "{}". + + Args: + user_id: The user ID to modify the account data of. + account_data_type: The type to remove. + + Returns: + The maximum stream position, or None if there was no matching account data + to delete. + """ + assert self._can_write_to_account_data + assert isinstance(self._account_data_id_gen, AbstractStreamIdGenerator) + + def _remove_account_data_for_user_txn( + txn: LoggingTransaction, next_id: int + ) -> bool: + """ + Args: + txn: The transaction object. + next_id: The stream_id to update any existing rows to. + + Returns: + True if an entry in account_data had its content set to '{}', otherwise + False. This informs callers of whether there actually was an existing + account data entry to delete, or if the call was a no-op. + """ + # We can't use `simple_update` as it doesn't have the ability to specify + # where clauses other than '=', which we need for `content != '{}'` below. + sql = """ + UPDATE account_data + SET stream_id = ?, content = '{}' + WHERE user_id = ? + AND account_data_type = ? + AND content != '{}' + """ + txn.execute(sql, (next_id, user_id, account_data_type)) + if txn.rowcount == 0: + # We didn't update any rows. This means that there was no matching room + # account data entry to delete in the first place. + return False + + # Ignored users get denormalized into a separate table as an optimisation. + if account_data_type == AccountDataTypes.IGNORED_USER_LIST: + # If this method was called with the ignored users account data type, we + # simply delete all ignored users. 
+ + # First pull all the users that this user ignores. + previously_ignored_users = set( + self.db_pool.simple_select_onecol_txn( + txn, + table="ignored_users", + keyvalues={"ignorer_user_id": user_id}, + retcol="ignored_user_id", + ) + ) + + # Then delete them from the database. + self.db_pool.simple_delete_txn( + txn, + table="ignored_users", + keyvalues={"ignorer_user_id": user_id}, + ) + + # Invalidate the cache for ignored users which were removed. + for ignored_user_id in previously_ignored_users: + self._invalidate_cache_and_stream( + txn, self.ignored_by, (ignored_user_id,) + ) + + # Invalidate for this user the cache tracking ignored users. + self._invalidate_cache_and_stream(txn, self.ignored_users, (user_id,)) + + return True + + async with self._account_data_id_gen.get_next() as next_id: + row_updated = await self.db_pool.runInteraction( + "remove_account_data_for_user", + _remove_account_data_for_user_txn, + next_id, + ) + + if not row_updated: + return None + + self._account_data_stream_cache.entity_has_changed(user_id, next_id) + self.get_account_data_for_user.invalidate((user_id,)) + self.get_global_account_data_by_type_for_user.prefill( + (user_id, account_data_type), {} + ) + + return self._account_data_id_gen.get_current_token() + async def purge_account_data_for_user(self, user_id: str) -> None: """ Removes ALL the account data for a user. From db1cfe9c80a707995fcad8f3faa839acb247068a Mon Sep 17 00:00:00 2001 From: Nick Mills-Barrett Date: Wed, 4 Jan 2023 11:49:26 +0000 Subject: [PATCH 45/82] Update all stream IDs after processing replication rows (#14723) This creates a new store method, `process_replication_position`, that is called after `process_replication_rows`. Moving stream ID advances into it guarantees that all relevant cache invalidations have been applied before the stream position is advanced. This avoids race conditions where Python switches between threads midway through `process_replication_rows` and, because of class resolution ordering, stream IDs could be advanced before the corresponding caches were invalidated. See this comment/issue for further discussion: https://github.com/matrix-org/synapse/issues/14158#issuecomment-1344048703 --- changelog.d/14723.bugfix | 1 + synapse/replication/tcp/client.py | 3 +++ synapse/storage/_base.py | 17 ++++++++++++++++- synapse/storage/databases/main/account_data.py | 14 ++++++++++---- synapse/storage/databases/main/cache.py | 11 ++++++++--- synapse/storage/databases/main/deviceinbox.py | 7 +++++++ synapse/storage/databases/main/devices.py | 11 +++++++++-- synapse/storage/databases/main/events_worker.py | 15 ++++++++++----- synapse/storage/databases/main/presence.py | 8 +++++++- synapse/storage/databases/main/push_rule.py | 7 +++++++ synapse/storage/databases/main/pusher.py | 6 +++--- synapse/storage/databases/main/receipts.py | 7 +++++++ synapse/storage/databases/main/tags.py | 8 +++++++- 13 files changed, 95 insertions(+), 20 deletions(-) create mode 100644 changelog.d/14723.bugfix diff --git a/changelog.d/14723.bugfix b/changelog.d/14723.bugfix new file mode 100644 index 000000000000..e1f89cee35c8 --- /dev/null +++ b/changelog.d/14723.bugfix @@ -0,0 +1 @@ +Ensure stream IDs are always updated after caches get invalidated with workers. Contributed by Nick @ Beeper (@fizzadar).
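The ordering contract the diffs below establish can be summarised with a small self-contained sketch; the class, cache, and stream-position handling here are simplified stand-ins, not Synapse's real implementations:

    from typing import Any, Dict, Iterable

    class ExampleWorkerStore:
        def __init__(self) -> None:
            self._cache: Dict[Any, Any] = {}  # stands in for a Synapse cache
            self._position = 0                # stands in for a stream ID generator

        def process_replication_rows(
            self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
        ) -> None:
            # Step 1: invalidate caches only; ID generators must not advance here.
            for row in rows:
                self._cache.pop(row, None)

        def process_replication_position(
            self, stream_name: str, instance_name: str, token: int
        ) -> None:
            # Step 2: advance the stream position, called strictly after step 1,
            # so readers never observe a new token alongside stale cache entries.
            self._position = max(self._position, token)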
diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py index 658d89210d31..b5e40da5337e 100644 --- a/synapse/replication/tcp/client.py +++ b/synapse/replication/tcp/client.py @@ -152,6 +152,9 @@ async def on_rdata( rows: a list of Stream.ROW_TYPE objects as returned by Stream.parse_row. """ self.store.process_replication_rows(stream_name, instance_name, token, rows) + # NOTE: this must be called after process_replication_rows to ensure any + # cache invalidations are first handled before any stream ID advances. + self.store.process_replication_position(stream_name, instance_name, token) if self.send_handler: await self.send_handler.process_replication_rows(stream_name, token, rows) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 69abf6fa8741..41d9111019b2 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -57,7 +57,22 @@ def process_replication_rows( # noqa: B027 (no-op by design) token: int, rows: Iterable[Any], ) -> None: - pass + """ + Used by storage classes to invalidate caches based on incoming replication data. This + must not update any ID generators; use `process_replication_position` for that. + """ + + def process_replication_position( # noqa: B027 (no-op by design) + self, + stream_name: str, + instance_name: str, + token: int, + ) -> None: + """ + Used by storage classes to advance ID generators based on incoming replication data. This + is called after process_replication_rows such that caches are invalidated before any token + positions advance. + """ def _invalidate_state_caches( self, room_id: str, members_changed: Collection[str] diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py index e59776f4349f..86032897f53a 100644 --- a/synapse/storage/databases/main/account_data.py +++ b/synapse/storage/databases/main/account_data.py @@ -436,10 +436,7 @@ def process_replication_rows( token: int, rows: Iterable[Any], ) -> None: - if stream_name == TagAccountDataStream.NAME: - self._account_data_id_gen.advance(instance_name, token) - elif stream_name == AccountDataStream.NAME: - self._account_data_id_gen.advance(instance_name, token) + if stream_name == AccountDataStream.NAME: for row in rows: if not row.room_id: self.get_global_account_data_by_type_for_user.invalidate( @@ -454,6 +451,15 @@ def process_replication_rows( super().process_replication_rows(stream_name, instance_name, token, rows) + def process_replication_position( + self, stream_name: str, instance_name: str, token: int + ) -> None: + if stream_name == TagAccountDataStream.NAME: + self._account_data_id_gen.advance(instance_name, token) + elif stream_name == AccountDataStream.NAME: + self._account_data_id_gen.advance(instance_name, token) + super().process_replication_position(stream_name, instance_name, token) + async def add_account_data_to_room( self, user_id: str, room_id: str, account_data_type: str, content: JsonDict ) -> int: diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py index a58668a38036..2179a8bf5922 100644 --- a/synapse/storage/databases/main/cache.py +++ b/synapse/storage/databases/main/cache.py @@ -164,9 +164,6 @@ def process_replication_rows( backfilled=True, ) elif stream_name == CachesStream.NAME: - if self._cache_id_gen: - self._cache_id_gen.advance(instance_name, token) - for row in rows: if row.cache_func == CURRENT_STATE_CACHE_NAME: if row.keys is None: @@ -182,6 +179,14 @@
super().process_replication_rows(stream_name, instance_name, token, rows) + def process_replication_position( + self, stream_name: str, instance_name: str, token: int + ) -> None: + if stream_name == CachesStream.NAME: + if self._cache_id_gen: + self._cache_id_gen.advance(instance_name, token) + super().process_replication_position(stream_name, instance_name, token) + def _process_event_stream_row(self, token: int, row: EventsStreamRow) -> None: data = row.data diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py index 48a54d9cb86b..713be91c5dd8 100644 --- a/synapse/storage/databases/main/deviceinbox.py +++ b/synapse/storage/databases/main/deviceinbox.py @@ -157,6 +157,13 @@ def process_replication_rows( ) return super().process_replication_rows(stream_name, instance_name, token, rows) + def process_replication_position( + self, stream_name: str, instance_name: str, token: int + ) -> None: + if stream_name == ToDeviceStream.NAME: + self._device_inbox_id_gen.advance(instance_name, token) + super().process_replication_position(stream_name, instance_name, token) + def get_to_device_stream_token(self) -> int: return self._device_inbox_id_gen.get_current_token() diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index a5bb4d404e20..db877e3f1374 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -162,14 +162,21 @@ def process_replication_rows( self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any] ) -> None: if stream_name == DeviceListsStream.NAME: - self._device_list_id_gen.advance(instance_name, token) self._invalidate_caches_for_devices(token, rows) elif stream_name == UserSignatureStream.NAME: - self._device_list_id_gen.advance(instance_name, token) for row in rows: self._user_signature_stream_cache.entity_has_changed(row.user_id, token) return super().process_replication_rows(stream_name, instance_name, token, rows) + def process_replication_position( + self, stream_name: str, instance_name: str, token: int + ) -> None: + if stream_name == DeviceListsStream.NAME: + self._device_list_id_gen.advance(instance_name, token) + elif stream_name == UserSignatureStream.NAME: + self._device_list_id_gen.advance(instance_name, token) + super().process_replication_position(stream_name, instance_name, token) + def _invalidate_caches_for_devices( self, token: int, rows: Iterable[DeviceListsStream.DeviceListsStreamRow] ) -> None: diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index 761b15a8150a..d150fa8a943d 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -388,11 +388,7 @@ def process_replication_rows( token: int, rows: Iterable[Any], ) -> None: - if stream_name == EventsStream.NAME: - self._stream_id_gen.advance(instance_name, token) - elif stream_name == BackfillStream.NAME: - self._backfill_id_gen.advance(instance_name, -token) - elif stream_name == UnPartialStatedEventStream.NAME: + if stream_name == UnPartialStatedEventStream.NAME: for row in rows: assert isinstance(row, UnPartialStatedEventStreamRow) @@ -405,6 +401,15 @@ def process_replication_rows( super().process_replication_rows(stream_name, instance_name, token, rows) + def process_replication_position( + self, stream_name: str, instance_name: str, token: int + ) -> None: + if stream_name == EventsStream.NAME: + 
self._stream_id_gen.advance(instance_name, token) + elif stream_name == BackfillStream.NAME: + self._backfill_id_gen.advance(instance_name, -token) + super().process_replication_position(stream_name, instance_name, token) + async def have_censored_event(self, event_id: str) -> bool: """Check if an event has been censored, i.e. if the content of the event has been erased from the database due to a redaction. diff --git a/synapse/storage/databases/main/presence.py b/synapse/storage/databases/main/presence.py index 9769a18a9d0c..7b60815043a6 100644 --- a/synapse/storage/databases/main/presence.py +++ b/synapse/storage/databases/main/presence.py @@ -439,8 +439,14 @@ def process_replication_rows( rows: Iterable[Any], ) -> None: if stream_name == PresenceStream.NAME: - self._presence_id_gen.advance(instance_name, token) for row in rows: self.presence_stream_cache.entity_has_changed(row.user_id, token) self._get_presence_for_user.invalidate((row.user_id,)) return super().process_replication_rows(stream_name, instance_name, token, rows) + + def process_replication_position( + self, stream_name: str, instance_name: str, token: int + ) -> None: + if stream_name == PresenceStream.NAME: + self._presence_id_gen.advance(instance_name, token) + super().process_replication_position(stream_name, instance_name, token) diff --git a/synapse/storage/databases/main/push_rule.py b/synapse/storage/databases/main/push_rule.py index d4c64c46ad44..d4e4b777da95 100644 --- a/synapse/storage/databases/main/push_rule.py +++ b/synapse/storage/databases/main/push_rule.py @@ -154,6 +154,13 @@ def process_replication_rows( self.push_rules_stream_cache.entity_has_changed(row.user_id, token) return super().process_replication_rows(stream_name, instance_name, token, rows) + def process_replication_position( + self, stream_name: str, instance_name: str, token: int + ) -> None: + if stream_name == PushRulesStream.NAME: + self._push_rules_stream_id_gen.advance(instance_name, token) + super().process_replication_position(stream_name, instance_name, token) + @cached(max_entries=5000) async def get_push_rules_for_user(self, user_id: str) -> FilteredPushRules: rows = await self.db_pool.simple_select_list( diff --git a/synapse/storage/databases/main/pusher.py b/synapse/storage/databases/main/pusher.py index 40fd781a6ab1..7f24a3b6ec5e 100644 --- a/synapse/storage/databases/main/pusher.py +++ b/synapse/storage/databases/main/pusher.py @@ -111,12 +111,12 @@ def _decode_pushers_rows(self, rows: Iterable[dict]) -> Iterator[PusherConfig]: def get_pushers_stream_token(self) -> int: return self._pushers_id_gen.get_current_token() - def process_replication_rows( - self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any] + def process_replication_position( + self, stream_name: str, instance_name: str, token: int ) -> None: if stream_name == PushersStream.NAME: self._pushers_id_gen.advance(instance_name, token) - return super().process_replication_rows(stream_name, instance_name, token, rows) + super().process_replication_position(stream_name, instance_name, token) async def get_pushers_by_app_id_and_pushkey( self, app_id: str, pushkey: str diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py index e06725f69c31..86f5bce5f08d 100644 --- a/synapse/storage/databases/main/receipts.py +++ b/synapse/storage/databases/main/receipts.py @@ -588,6 +588,13 @@ def process_replication_rows( return super().process_replication_rows(stream_name, instance_name, token, rows) + def 
process_replication_position( + self, stream_name: str, instance_name: str, token: int + ) -> None: + if stream_name == ReceiptsStream.NAME: + self._receipts_id_gen.advance(instance_name, token) + super().process_replication_position(stream_name, instance_name, token) + def _insert_linearized_receipt_txn( self, txn: LoggingTransaction, diff --git a/synapse/storage/databases/main/tags.py b/synapse/storage/databases/main/tags.py index b0f5de67a30d..e23c927e02f2 100644 --- a/synapse/storage/databases/main/tags.py +++ b/synapse/storage/databases/main/tags.py @@ -300,13 +300,19 @@ def process_replication_rows( rows: Iterable[Any], ) -> None: if stream_name == TagAccountDataStream.NAME: - self._account_data_id_gen.advance(instance_name, token) for row in rows: self.get_tags_for_user.invalidate((row.user_id,)) self._account_data_stream_cache.entity_has_changed(row.user_id, token) super().process_replication_rows(stream_name, instance_name, token, rows) + def process_replication_position( + self, stream_name: str, instance_name: str, token: int + ) -> None: + if stream_name == TagAccountDataStream.NAME: + self._account_data_id_gen.advance(instance_name, token) + super().process_replication_position(stream_name, instance_name, token) + class TagsStore(TagsWorkerStore): pass From 906dfaa2cf5a79ed9c18529b1a370ffd49c0204e Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 4 Jan 2023 08:26:10 -0500 Subject: [PATCH 46/82] Support non-OpenID compliant user info endpoints (#14753) OpenID specifies the format of the user info endpoint and some OAuth 2.0 IdPs do not follow it, e.g. NextCloud and Twitter. This adds subject_template and picture_template options to the default mapping provider for more flexibility in matching those user info responses. --- changelog.d/14753.feature | 1 + .../configuration/config_documentation.md | 18 +++++++++++ synapse/handlers/oidc.py | 31 ++++++++++++++----- 3 files changed, 42 insertions(+), 8 deletions(-) create mode 100644 changelog.d/14753.feature diff --git a/changelog.d/14753.feature b/changelog.d/14753.feature new file mode 100644 index 000000000000..38b4d6af4b38 --- /dev/null +++ b/changelog.d/14753.feature @@ -0,0 +1 @@ +Support non-OpenID compliant userinfo claims for subject and picture. diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 67e0acc9104f..23f9dcbea2c7 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -3098,10 +3098,26 @@ Options for each entry include: For the default provider, the following settings are available: + * `subject_template`: Jinja2 template for a unique identifier for the user. + Defaults to `{{ user.sub }}`, which OpenID Connect compliant providers should provide. + + This replaces and overrides `subject_claim`. + * `subject_claim`: name of the claim containing a unique identifier for the user. Defaults to 'sub', which OpenID Connect compliant providers should provide. + *Deprecated in Synapse v1.75.0.* + + * `picture_template`: Jinja2 template for an url for the user's profile picture. + Defaults to `{{ user.picture }}`, which OpenID Connect compliant providers should + provide and has to refer to a direct image file such as PNG, JPEG, or GIF image file. + + This replaces and overrides `picture_claim`. + + Currently only supported in monolithic (single-process) server configurations + where the media repository runs within the Synapse process. 
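The new options are plain Jinja2 templates rendered against the raw userinfo response. As a rough illustration of the idea, here is a sketch using only the `jinja2` library; the payload and template values are made up and this is not Synapse's actual mapping-provider code:

```python
# Sketch of template-based claim extraction (assumed example values).
from jinja2 import Environment

env = Environment()

# A non-OpenID-compliant provider might return "id" instead of "sub".
userinfo = {"id": "12345", "screen_name": "alice"}

# An operator could then configure e.g. subject_template: "{{ user.id }}".
subject_template = env.from_string("{{ user.id }}")
remote_user_id = subject_template.render(user=userinfo).strip()

assert remote_user_id == "12345"
```

Because the template sees the whole userinfo object, it can also combine or transform claims, which a single `subject_claim` name could not express. The documentation and handler changes follow.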
diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
index 67e0acc9104f..23f9dcbea2c7 100644
--- a/docs/usage/configuration/config_documentation.md
+++ b/docs/usage/configuration/config_documentation.md
@@ -3098,10 +3098,26 @@ Options for each entry include:
 
 For the default provider, the following settings are available:
 
+   * `subject_template`: Jinja2 template for a unique identifier for the user.
+     Defaults to `{{ user.sub }}`, which OpenID Connect compliant providers should provide.
+
+     This replaces and overrides `subject_claim`.
+
    * `subject_claim`: name of the claim containing a unique identifier for the
      user. Defaults to 'sub', which OpenID Connect compliant providers should provide.
 
+     *Deprecated in Synapse v1.75.0.*
+
+   * `picture_template`: Jinja2 template for a URL for the user's profile picture.
+     Defaults to `{{ user.picture }}`, which OpenID Connect compliant providers should
+     provide and has to refer to a direct image file such as PNG, JPEG, or GIF image file.
+
+     This replaces and overrides `picture_claim`.
+
+     Currently only supported in monolithic (single-process) server configurations
+     where the media repository runs within the Synapse process.
+
    * `picture_claim`: name of the claim containing a URL for the user's profile picture.
      Defaults to 'picture', which OpenID Connect compliant providers should provide and
      has to refer to a direct image file such as PNG, JPEG, or GIF image file.
 
@@ -3109,6 +3125,8 @@ Options for each entry include:
      Currently only supported in monolithic (single-process) server configurations
      where the media repository runs within the Synapse process.
 
+     *Deprecated in Synapse v1.75.0.*
+
    * `localpart_template`: Jinja2 template for the localpart of the MXID.
      If this is not set, the user will be prompted to choose their
      own username (see the documentation for the `sso_auth_account_details.html`
diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py
index 23fb00c9c987..24e1cec5b603 100644
--- a/synapse/handlers/oidc.py
+++ b/synapse/handlers/oidc.py
@@ -1520,8 +1520,8 @@ def jinja_finalize(thing: Any) -> Any:
 
 @attr.s(slots=True, frozen=True, auto_attribs=True)
 class JinjaOidcMappingConfig:
-    subject_claim: str
-    picture_claim: str
+    subject_template: Template
+    picture_template: Template
     localpart_template: Optional[Template]
     display_name_template: Optional[Template]
     email_template: Optional[Template]
@@ -1540,8 +1540,23 @@ def __init__(self, config: JinjaOidcMappingConfig):
 
     @staticmethod
     def parse_config(config: dict) -> JinjaOidcMappingConfig:
-        subject_claim = config.get("subject_claim", "sub")
-        picture_claim = config.get("picture_claim", "picture")
+        def parse_template_config_with_claim(
+            option_name: str, default_claim: str
+        ) -> Template:
+            template_name = f"{option_name}_template"
+            template = config.get(template_name)
+            if not template:
+                # Convert the legacy subject_claim into a template.
+                claim = config.get(f"{option_name}_claim", default_claim)
+                template = "{{ user.%s }}" % (claim,)
+
+            try:
+                return env.from_string(template)
+            except Exception as e:
+                raise ConfigError("invalid jinja template", path=[template_name]) from e
+
+        subject_template = parse_template_config_with_claim("subject", "sub")
+        picture_template = parse_template_config_with_claim("picture", "picture")
 
         def parse_template_config(option_name: str) -> Optional[Template]:
             if option_name not in config:
@@ -1574,8 +1589,8 @@ def parse_template_config(option_name: str) -> Optional[Template]:
             raise ConfigError("must be a bool", path=["confirm_localpart"])
 
         return JinjaOidcMappingConfig(
-            subject_claim=subject_claim,
-            picture_claim=picture_claim,
+            subject_template=subject_template,
+            picture_template=picture_template,
             localpart_template=localpart_template,
             display_name_template=display_name_template,
             email_template=email_template,
@@ -1584,7 +1599,7 @@ def parse_template_config(option_name: str) -> Optional[Template]:
         )
 
     def get_remote_user_id(self, userinfo: UserInfo) -> str:
-        return userinfo[self._config.subject_claim]
+        return self._config.subject_template.render(user=userinfo).strip()
 
     async def map_user_attributes(
         self, userinfo: UserInfo, token: Token, failures: int
@@ -1615,7 +1630,7 @@ def render_template_field(template: Optional[Template]) -> Optional[str]:
             if email:
                 emails.append(email)
 
-        picture = userinfo.get(self._config.picture_claim)
+        picture = self._config.picture_template.render(user=userinfo).strip()
 
         return UserAttributeDict(
             localpart=localpart,

From e787fb776cc4ce7cdb923a823d571fa57c3c92cb Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Wed, 4 Jan 2023 16:26:29 +0000
Subject: [PATCH 47/82] Switch to our fork of dh-virtualenv for compatibility
 with Python 3.11 (#14774)

---
 changelog.d/14774.misc         |   1 +
 docker/Dockerfile-dhvirtualenv |   4 +-
 poetry.lock                    | 116 ++++++++++++++++++---------------
 3 files changed, 69 insertions(+), 52 deletions(-)
 create mode 100644 changelog.d/14774.misc

diff --git a/changelog.d/14774.misc b/changelog.d/14774.misc
new file mode 100644
index 000000000000..b6c9f8ca52a8
--- /dev/null
+++ b/changelog.d/14774.misc
@@ -0,0 +1 @@
+Switch to our fork of `dh-virtualenv` to work around an upstream Python 3.11 incompatibility.
\ No newline at end of file
diff --git a/docker/Dockerfile-dhvirtualenv b/docker/Dockerfile-dhvirtualenv
index f3b5b00ce61a..2013732422ce 100644
--- a/docker/Dockerfile-dhvirtualenv
+++ b/docker/Dockerfile-dhvirtualenv
@@ -36,8 +36,10 @@ RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
     wget
 
 # fetch and unpack the package
+# We are temporarily using a fork of dh-virtualenv due to an incompatibility with Python 3.11, which ships with
+# Debian sid. TODO: Switch back to upstream once https://github.com/spotify/dh-virtualenv/pull/354 has merged.
 RUN mkdir /dh-virtualenv
-RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/spotify/dh-virtualenv/archive/refs/tags/1.2.2.tar.gz
+RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/matrix-org/dh-virtualenv/archive/refs/tags/matrixorg-2023010302.tar.gz
 RUN tar -xv --strip-components=1 -C /dh-virtualenv -f /dh-virtualenv.tar.gz
 
 # install its build deps. We do another apt-cache-update here, because we might
diff --git a/poetry.lock b/poetry.lock
index 8b864dd6f1c6..b0aef9835dcf 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -115,7 +115,7 @@ python-versions = ">=3.6"
 
 [[package]]
 name = "cffi"
-version = "1.15.0"
+version = "1.15.1"
 description = "Foreign Function Interface for Python calling C code."
 category = "main"
 optional = false
@@ -1644,56 +1644,70 @@ certifi = [
     {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
 ]
 cffi = [
-    {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"},
-    {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"},
-    {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"},
-    {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"},
-    {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"},
-    {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"},
-    {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"},
-    {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"},
-    {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"},
-    {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"},
-    {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"},
-    {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"},
-    {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"},
-    {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"},
-    {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"},
-    {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"},
-    {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"},
-    {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"},
-    {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"},
-    {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"},
-    {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"},
-    {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"},
-    {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"},
-    {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"},
-    {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"},
+    {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
+    {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
+    {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
+    {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
+    {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
+    {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
+    {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
+    {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
+    {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
+    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
+    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
+    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
+    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
+    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
+    {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
+    {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
+    {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
+    {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
+    {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
+    {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
+    {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
+    {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
+    {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
+    {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
+    {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
+    {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
+    {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
+    {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
+    {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
+    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
+    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
+    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
+    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
+    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
+    {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
+    {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
+    {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
+    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
+    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
+    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
+    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
+    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
+    {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
+    {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
+    {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
+    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
+    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
+    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
+    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
+    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
+    {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
+    {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
+    {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
+    {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
+    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
+    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
+    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
+    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
+    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
+    {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
+    {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
+    {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
+    {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
+    {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
 ]
 charset-normalizer = [
     {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},

From 747f8eb2311099f4f8b6c0c76f3105a941fbb6bb Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 4 Jan 2023 16:46:25 +0000
Subject: [PATCH 48/82] Use env vars in GHA dependabot changelog (#14772)

---
 .github/workflows/dependabot_changelog.yml | 7 +++++--
 changelog.d/14772.misc                     | 1 +
 2 files changed, 6 insertions(+), 2 deletions(-)
 create mode 100644 changelog.d/14772.misc

diff --git a/.github/workflows/dependabot_changelog.yml b/.github/workflows/dependabot_changelog.yml
index b6a29a572298..df47e3dcba43 100644
--- a/.github/workflows/dependabot_changelog.yml
+++ b/.github/workflows/dependabot_changelog.yml
@@ -6,7 +6,7 @@ on:
       - reopened  # For debugging!
 
 permissions:
-  # Needed to be able to push the commit. See 
+  # Needed to be able to push the commit. See
   # https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enable-auto-merge-on-a-pull-request
   # for a similar example
   contents: write
@@ -20,8 +20,11 @@ jobs:
         with:
           ref: ${{ github.event.pull_request.head.ref }}
       - name: Write, commit and push changelog
+        env:
+          PR_TITLE: ${{ github.event.pull_request.title }}
+          PR_NUMBER: ${{ github.event.pull_request.number }}
         run: |
-          echo "${{ github.event.pull_request.title }}." > "changelog.d/${{ github.event.pull_request.number }}".misc
+          echo "${PR_TITLE}." > "changelog.d/${PR_NUMBER}".misc
          git add changelog.d
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git config user.name "GitHub Actions"
diff --git a/changelog.d/14772.misc b/changelog.d/14772.misc
new file mode 100644
index 000000000000..7ead5a920f45
--- /dev/null
+++ b/changelog.d/14772.misc
@@ -0,0 +1 @@
+Change GHA CI job to follow best practices.

From 630d0aeaf607b4016e67895d81b0402a5dfcc769 Mon Sep 17 00:00:00 2001
From: Patrick Cloke
Date: Wed, 4 Jan 2023 14:58:08 -0500
Subject: [PATCH 49/82] Support RFC7636 PKCE in the OAuth 2.0 flow. (#14750)

PKCE can protect against certain attacks and is enabled by default. Support
can be controlled manually by setting the pkce_method of each oidc_providers
entry to 'auto' (default), 'always', or 'never'.

This is required by Twitter OAuth 2.0 support.
---
 changelog.d/14750.feature                  |   1 +
 .../configuration/config_documentation.md  |   7 +-
 synapse/config/oidc.py                     |   6 +
 synapse/handlers/oidc.py                   |  54 ++++++-
 synapse/util/macaroons.py                  |   7 +
 tests/handlers/test_oidc.py                | 152 +++++++++++++++++-
 tests/util/test_macaroons.py               |   1 +
 7 files changed, 212 insertions(+), 16 deletions(-)
 create mode 100644 changelog.d/14750.feature

diff --git a/changelog.d/14750.feature b/changelog.d/14750.feature
new file mode 100644
index 000000000000..cfed64ee80f0
--- /dev/null
+++ b/changelog.d/14750.feature
@@ -0,0 +1 @@
+Support [RFC7636](https://datatracker.ietf.org/doc/html/rfc7636) Proof Key for Code Exchange for OAuth single sign-on.
diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
index 23f9dcbea2c7..ec8403c7e99b 100644
--- a/docs/usage/configuration/config_documentation.md
+++ b/docs/usage/configuration/config_documentation.md
@@ -3053,8 +3053,13 @@ Options for each entry include:
   values are `client_secret_basic` (default), `client_secret_post` and
   `none`.
 
+* `pkce_method`: Whether to use proof key for code exchange when requesting
+  and exchanging the token. Valid values are: `auto`, `always`, or `never`. Defaults
+  to `auto`, which uses PKCE if supported during metadata discovery. Set to `always`
+  to force enable PKCE or `never` to force disable PKCE.
+
 * `scopes`: list of scopes to request. This should normally include the "openid"
-  scope. Defaults to ["openid"].
+  scope. Defaults to `["openid"]`.
 
 * `authorization_endpoint`: the oauth2 authorization endpoint. Required if
   provider discovery is disabled.
diff --git a/synapse/config/oidc.py b/synapse/config/oidc.py
index 0bd83f40100b..df8c42204392 100644
--- a/synapse/config/oidc.py
+++ b/synapse/config/oidc.py
@@ -117,6 +117,7 @@ def oidc_enabled(self) -> bool:
                 # to avoid importing authlib here.
                 "enum": ["client_secret_basic", "client_secret_post", "none"],
             },
+            "pkce_method": {"type": "string", "enum": ["auto", "always", "never"]},
             "scopes": {"type": "array", "items": {"type": "string"}},
             "authorization_endpoint": {"type": "string"},
             "token_endpoint": {"type": "string"},
@@ -289,6 +290,7 @@ def _parse_oidc_config_dict(
         client_secret=oidc_config.get("client_secret"),
         client_secret_jwt_key=client_secret_jwt_key,
         client_auth_method=oidc_config.get("client_auth_method", "client_secret_basic"),
+        pkce_method=oidc_config.get("pkce_method", "auto"),
         scopes=oidc_config.get("scopes", ["openid"]),
         authorization_endpoint=oidc_config.get("authorization_endpoint"),
         token_endpoint=oidc_config.get("token_endpoint"),
@@ -357,6 +359,10 @@ class OidcProviderConfig:
     # 'none'.
     client_auth_method: str
 
+    # Whether to enable PKCE when exchanging the authorization code for a token.
+    # Valid values are 'auto', 'always', and 'never'.
+    pkce_method: str
+
     # list of scopes to request
     scopes: Collection[str]
 
diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py
index 24e1cec5b603..0fc829acf77d 100644
--- a/synapse/handlers/oidc.py
+++ b/synapse/handlers/oidc.py
@@ -36,6 +36,7 @@
 from authlib.jose.errors import InvalidClaimError, JoseError, MissingClaimError
 from authlib.oauth2.auth import ClientAuth
 from authlib.oauth2.rfc6749.parameters import prepare_grant_uri
+from authlib.oauth2.rfc7636.challenge import create_s256_code_challenge
 from authlib.oidc.core import CodeIDToken, UserInfo
 from authlib.oidc.discovery import OpenIDProviderMetadata, get_well_known_url
 from jinja2 import Environment, Template
@@ -475,6 +476,16 @@ def _validate_metadata(self, m: OpenIDProviderMetadata) -> None:
                 )
             )
 
+        # If PKCE support is advertised ensure the wanted method is available.
+        if m.get("code_challenge_methods_supported") is not None:
+            m.validate_code_challenge_methods_supported()
+            if "S256" not in m["code_challenge_methods_supported"]:
+                raise ValueError(
+                    '"S256" not in "code_challenge_methods_supported" ({supported!r})'.format(
+                        supported=m["code_challenge_methods_supported"],
+                    )
+                )
+
         if m.get("response_types_supported") is not None:
             m.validate_response_types_supported()
 
@@ -602,6 +613,11 @@ async def _load_metadata(self) -> OpenIDProviderMetadata:
         if self._config.jwks_uri:
             metadata["jwks_uri"] = self._config.jwks_uri
 
+        if self._config.pkce_method == "always":
+            metadata["code_challenge_methods_supported"] = ["S256"]
+        elif self._config.pkce_method == "never":
+            metadata.pop("code_challenge_methods_supported", None)
+
         self._validate_metadata(metadata)
 
         return metadata
@@ -653,7 +669,7 @@ async def _load_jwks(self) -> JWKS:
 
         return jwk_set
 
-    async def _exchange_code(self, code: str) -> Token:
+    async def _exchange_code(self, code: str, code_verifier: str) -> Token:
         """Exchange an authorization code for a token.
 
         This calls the ``token_endpoint`` with the authorization code we
@@ -666,6 +682,7 @@ async def _exchange_code(self, code: str) -> Token:
 
         Args:
             code: The authorization code we got from the callback.
+            code_verifier: The PKCE code verifier to send, blank if unused.
 
         Returns:
             A dict containing various tokens.
@@ -696,6 +713,8 @@ async def _exchange_code(self, code: str) -> Token:
             "code": code,
             "redirect_uri": self._callback_url,
         }
+        if code_verifier:
+            args["code_verifier"] = code_verifier
         body = urlencode(args, True)
 
         # Fill the body/headers with credentials
@@ -914,11 +933,14 @@ async def handle_redirect_request(
           - ``scope``: the list of scopes set in ``oidc_config.scopes``
           - ``state``: a random string
           - ``nonce``: a random string
+          - ``code_challenge``: an RFC7636 code challenge (if PKCE is supported)
 
-        In addition generating a redirect URL, we are setting a cookie with
-        a signed macaroon token containing the state, the nonce and the
-        client_redirect_url params. Those are then checked when the client
-        comes back from the provider.
+        In addition to generating a redirect URL, we are setting a cookie with
+        a signed macaroon token containing the state, the nonce, the
+        client_redirect_url, and (optionally) the code_verifier params. The state,
+        nonce, and client_redirect_url are then checked when the client comes back
+        from the provider. The code_verifier is passed back to the server during
+        the token exchange and compared to the code_challenge sent in this request.
 
         Args:
             request: the incoming request from the browser.
@@ -935,10 +957,25 @@ async def handle_redirect_request(
 
         state = generate_token()
         nonce = generate_token()
+        code_verifier = ""
 
         if not client_redirect_url:
             client_redirect_url = b""
 
+        metadata = await self.load_metadata()
+
+        # Automatically enable PKCE if it is supported.
+        extra_grant_values = {}
+        if metadata.get("code_challenge_methods_supported"):
+            code_verifier = generate_token(48)
+
+            # Note that we verified the server supports S256 earlier (in
+            # OidcProvider._validate_metadata).
+            extra_grant_values = {
+                "code_challenge_method": "S256",
+                "code_challenge": create_s256_code_challenge(code_verifier),
+            }
+
         cookie = self._macaroon_generaton.generate_oidc_session_token(
             state=state,
             session_data=OidcSessionData(
@@ -946,6 +983,7 @@ async def handle_redirect_request(
                 nonce=nonce,
                 client_redirect_url=client_redirect_url.decode(),
                 ui_auth_session_id=ui_auth_session_id or "",
+                code_verifier=code_verifier,
             ),
         )
 
@@ -966,7 +1004,6 @@ async def handle_redirect_request(
             )
         )
 
-        metadata = await self.load_metadata()
         authorization_endpoint = metadata.get("authorization_endpoint")
         return prepare_grant_uri(
             authorization_endpoint,
@@ -976,6 +1013,7 @@ async def handle_redirect_request(
             scope=self._scopes,
             state=state,
             nonce=nonce,
+            **extra_grant_values,
         )
 
     async def handle_oidc_callback(
@@ -1003,7 +1041,9 @@ async def handle_oidc_callback(
         # Exchange the code with the provider
         try:
             logger.debug("Exchanging OAuth2 code for a token")
-            token = await self._exchange_code(code)
+            token = await self._exchange_code(
+                code, code_verifier=session_data.code_verifier
+            )
         except OidcError as e:
             logger.warning("Could not exchange OAuth2 code: %s", e)
             self._sso_handler.render_error(request, e.error, e.error_description)
diff --git a/synapse/util/macaroons.py b/synapse/util/macaroons.py
index 5df03d3ddcdd..644c341e8cd2 100644
--- a/synapse/util/macaroons.py
+++ b/synapse/util/macaroons.py
@@ -110,6 +110,9 @@ class OidcSessionData:
     ui_auth_session_id: str
     """The session ID of the ongoing UI Auth ("" if this is a login)"""
 
+    code_verifier: str
+    """The random string used in the RFC7636 code challenge ("" if PKCE is not being used)."""
+
 
 class MacaroonGenerator:
     def __init__(self, clock: Clock, location: str, secret_key: bytes):
@@ -187,6 +190,7 @@ def generate_oidc_session_token(
         macaroon.add_first_party_caveat(
             f"ui_auth_session_id = {session_data.ui_auth_session_id}"
         )
+        macaroon.add_first_party_caveat(f"code_verifier = {session_data.code_verifier}")
         macaroon.add_first_party_caveat(f"time < {expiry}")
 
         return macaroon.serialize()
@@ -278,6 +282,7 @@ def verify_oidc_session_token(self, session: bytes, state: str) -> OidcSessionDa
         v.satisfy_general(lambda c: c.startswith("idp_id = "))
         v.satisfy_general(lambda c: c.startswith("client_redirect_url = "))
         v.satisfy_general(lambda c: c.startswith("ui_auth_session_id = "))
+        v.satisfy_general(lambda c: c.startswith("code_verifier = "))
         satisfy_expiry(v, self._clock.time_msec)
 
         v.verify(macaroon, self._secret_key)
@@ -287,11 +292,13 @@ def verify_oidc_session_token(self, session: bytes, state: str) -> OidcSessionDa
         idp_id = get_value_from_macaroon(macaroon, "idp_id")
         client_redirect_url = get_value_from_macaroon(macaroon, "client_redirect_url")
         ui_auth_session_id = get_value_from_macaroon(macaroon, "ui_auth_session_id")
+        code_verifier = get_value_from_macaroon(macaroon, "code_verifier")
 
         return OidcSessionData(
             nonce=nonce,
             idp_id=idp_id,
             client_redirect_url=client_redirect_url,
             ui_auth_session_id=ui_auth_session_id,
+            code_verifier=code_verifier,
         )
 
     def _generate_base_macaroon(self, type: MacaroonType) -> pymacaroons.Macaroon:
diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py
index 49a1842b5ced..adddbd002f50 100644
--- a/tests/handlers/test_oidc.py
+++ b/tests/handlers/test_oidc.py
@@ -396,6 +396,7 @@ def test_redirect_request(self) -> None:
         self.assertEqual(params["client_id"], [CLIENT_ID])
         self.assertEqual(len(params["state"]), 1)
         self.assertEqual(len(params["nonce"]), 1)
+        self.assertNotIn("code_challenge", params)
 
         # Check what is in the cookies
         self.assertEqual(len(req.cookies), 2)  # two cookies
@@ -411,12 +412,117 @@ def test_redirect_request(self) -> None:
         macaroon = pymacaroons.Macaroon.deserialize(cookie)
         state = get_value_from_macaroon(macaroon, "state")
         nonce = get_value_from_macaroon(macaroon, "nonce")
+        code_verifier = get_value_from_macaroon(macaroon, "code_verifier")
         redirect = get_value_from_macaroon(macaroon, "client_redirect_url")
 
         self.assertEqual(params["state"], [state])
         self.assertEqual(params["nonce"], [nonce])
+        self.assertEqual(code_verifier, "")
         self.assertEqual(redirect, "http://client/redirect")
 
+    @override_config({"oidc_config": DEFAULT_CONFIG})
+    def test_redirect_request_with_code_challenge(self) -> None:
+        """The redirect request has the right arguments & generates a valid session cookie."""
+        req = Mock(spec=["cookies"])
+        req.cookies = []
+
+        with self.metadata_edit({"code_challenge_methods_supported": ["S256"]}):
+            url = urlparse(
+                self.get_success(
+                    self.provider.handle_redirect_request(
+                        req, b"http://client/redirect"
+                    )
+                )
+            )
+
+        # Ensure the code_challenge param is added to the redirect.
+        params = parse_qs(url.query)
+        self.assertEqual(len(params["code_challenge"]), 1)
+
+        # Check what is in the cookies
+        self.assertEqual(len(req.cookies), 2)  # two cookies
+        cookie_header = req.cookies[0]
+
+        # The cookie name and path don't really matter, just that it has to be coherent
+        # between the callback & redirect handlers.
+        parts = [p.strip() for p in cookie_header.split(b";")]
+        self.assertIn(b"Path=/_synapse/client/oidc", parts)
+        name, cookie = parts[0].split(b"=")
+        self.assertEqual(name, b"oidc_session")
+
+        # Ensure the code_verifier is set in the cookie.
+        macaroon = pymacaroons.Macaroon.deserialize(cookie)
+        code_verifier = get_value_from_macaroon(macaroon, "code_verifier")
+        self.assertNotEqual(code_verifier, "")
+
+    @override_config({"oidc_config": {**DEFAULT_CONFIG, "pkce_method": "always"}})
+    def test_redirect_request_with_forced_code_challenge(self) -> None:
+        """The redirect request has the right arguments & generates a valid session cookie."""
+        req = Mock(spec=["cookies"])
+        req.cookies = []
+
+        url = urlparse(
+            self.get_success(
+                self.provider.handle_redirect_request(req, b"http://client/redirect")
+            )
+        )
+
+        # Ensure the code_challenge param is added to the redirect.
+        params = parse_qs(url.query)
+        self.assertEqual(len(params["code_challenge"]), 1)
+
+        # Check what is in the cookies
+        self.assertEqual(len(req.cookies), 2)  # two cookies
+        cookie_header = req.cookies[0]
+
+        # The cookie name and path don't really matter, just that it has to be coherent
+        # between the callback & redirect handlers.
+        parts = [p.strip() for p in cookie_header.split(b";")]
+        self.assertIn(b"Path=/_synapse/client/oidc", parts)
+        name, cookie = parts[0].split(b"=")
+        self.assertEqual(name, b"oidc_session")
+
+        # Ensure the code_verifier is set in the cookie.
+        macaroon = pymacaroons.Macaroon.deserialize(cookie)
+        code_verifier = get_value_from_macaroon(macaroon, "code_verifier")
+        self.assertNotEqual(code_verifier, "")
+
+    @override_config({"oidc_config": {**DEFAULT_CONFIG, "pkce_method": "never"}})
+    def test_redirect_request_with_disabled_code_challenge(self) -> None:
+        """The redirect request has the right arguments & generates a valid session cookie."""
+        req = Mock(spec=["cookies"])
+        req.cookies = []
+
+        # The metadata should state that PKCE is enabled.
+        with self.metadata_edit({"code_challenge_methods_supported": ["S256"]}):
+            url = urlparse(
+                self.get_success(
+                    self.provider.handle_redirect_request(
+                        req, b"http://client/redirect"
+                    )
+                )
+            )
+
+        # Ensure the code_challenge param is NOT added to the redirect.
+        params = parse_qs(url.query)
+        self.assertNotIn("code_challenge", params)
+
+        # Check what is in the cookies
+        self.assertEqual(len(req.cookies), 2)  # two cookies
+        cookie_header = req.cookies[0]
+
+        # The cookie name and path don't really matter, just that it has to be coherent
+        # between the callback & redirect handlers.
+        parts = [p.strip() for p in cookie_header.split(b";")]
+        self.assertIn(b"Path=/_synapse/client/oidc", parts)
+        name, cookie = parts[0].split(b"=")
+        self.assertEqual(name, b"oidc_session")
+
+        # Ensure the code_verifier is blank in the cookie.
+        macaroon = pymacaroons.Macaroon.deserialize(cookie)
+        code_verifier = get_value_from_macaroon(macaroon, "code_verifier")
+        self.assertEqual(code_verifier, "")
+
     @override_config({"oidc_config": DEFAULT_CONFIG})
     def test_callback_error(self) -> None:
         """Errors from the provider returned in the callback are displayed."""
@@ -601,7 +707,7 @@ def test_exchange_code(self) -> None:
             payload=token
         )
         code = "code"
-        ret = self.get_success(self.provider._exchange_code(code))
+        ret = self.get_success(self.provider._exchange_code(code, code_verifier=""))
         kwargs = self.fake_server.request.call_args[1]
 
         self.assertEqual(ret, token)
@@ -615,13 +721,34 @@ def test_exchange_code(self) -> None:
         self.assertEqual(args["client_secret"], [CLIENT_SECRET])
         self.assertEqual(args["redirect_uri"], [CALLBACK_URL])
 
+        # Test providing a code verifier.
+        code_verifier = "code_verifier"
+        ret = self.get_success(
+            self.provider._exchange_code(code, code_verifier=code_verifier)
+        )
+        kwargs = self.fake_server.request.call_args[1]
+
+        self.assertEqual(ret, token)
+        self.assertEqual(kwargs["method"], "POST")
+        self.assertEqual(kwargs["uri"], self.fake_server.token_endpoint)
+
+        args = parse_qs(kwargs["data"].decode("utf-8"))
+        self.assertEqual(args["grant_type"], ["authorization_code"])
+        self.assertEqual(args["code"], [code])
+        self.assertEqual(args["client_id"], [CLIENT_ID])
+        self.assertEqual(args["client_secret"], [CLIENT_SECRET])
+        self.assertEqual(args["redirect_uri"], [CALLBACK_URL])
+        self.assertEqual(args["code_verifier"], [code_verifier])
+
         # Test error handling
         self.fake_server.post_token_handler.return_value = FakeResponse.json(
             code=400, payload={"error": "foo", "error_description": "bar"}
         )
         from synapse.handlers.oidc import OidcError
 
-        exc = self.get_failure(self.provider._exchange_code(code), OidcError)
+        exc = self.get_failure(
+            self.provider._exchange_code(code, code_verifier=""), OidcError
+        )
         self.assertEqual(exc.value.error, "foo")
         self.assertEqual(exc.value.error_description, "bar")
 
@@ -629,7 +756,9 @@ def test_exchange_code(self) -> None:
         self.fake_server.post_token_handler.return_value = FakeResponse(
             code=500, body=b"Not JSON"
         )
-        exc = self.get_failure(self.provider._exchange_code(code), OidcError)
+        exc = self.get_failure(
+            self.provider._exchange_code(code, code_verifier=""), OidcError
+        )
         self.assertEqual(exc.value.error, "server_error")
 
         # Internal server error with JSON body
         self.fake_server.post_token_handler.return_value = FakeResponse.json(
@@ -637,21 +766,27 @@ def test_exchange_code(self) -> None:
             code=500, payload={"error": "internal_server_error"}
         )
 
-        exc = self.get_failure(self.provider._exchange_code(code), OidcError)
+        exc = self.get_failure(
+            self.provider._exchange_code(code, code_verifier=""), OidcError
+        )
         self.assertEqual(exc.value.error, "internal_server_error")
 
         # 4xx error without "error" field
         self.fake_server.post_token_handler.return_value = FakeResponse.json(
             code=400, payload={}
         )
-        exc = self.get_failure(self.provider._exchange_code(code), OidcError)
+        exc = self.get_failure(
+            self.provider._exchange_code(code, code_verifier=""), OidcError
+        )
         self.assertEqual(exc.value.error, "server_error")
 
         # 2xx error with "error" field
         self.fake_server.post_token_handler.return_value = FakeResponse.json(
             code=200, payload={"error": "some_error"}
         )
-        exc = self.get_failure(self.provider._exchange_code(code), OidcError)
+        exc = self.get_failure(
+            self.provider._exchange_code(code, code_verifier=""), OidcError
+        )
         self.assertEqual(exc.value.error, "some_error")
 
     @override_config(
@@ -688,7 +823,7 @@ def test_exchange_code_jwt_key(self) -> None:
         # timestamps.
         self.reactor.advance(1000)
         start_time = self.reactor.seconds()
-        ret = self.get_success(self.provider._exchange_code(code))
+        ret = self.get_success(self.provider._exchange_code(code, code_verifier=""))
 
         self.assertEqual(ret, token)
 
@@ -739,7 +874,7 @@ def test_exchange_code_no_auth(self) -> None:
             payload=token
         )
         code = "code"
-        ret = self.get_success(self.provider._exchange_code(code))
+        ret = self.get_success(self.provider._exchange_code(code, code_verifier=""))
 
         self.assertEqual(ret, token)
 
@@ -1203,6 +1338,7 @@ def _generate_oidc_session_token(
                 nonce=nonce,
                 client_redirect_url=client_redirect_url,
                 ui_auth_session_id=ui_auth_session_id,
+                code_verifier="",
             ),
         )
 
diff --git a/tests/util/test_macaroons.py b/tests/util/test_macaroons.py
index f68377a05ac4..e56ec2c86099 100644
--- a/tests/util/test_macaroons.py
+++ b/tests/util/test_macaroons.py
@@ -92,6 +92,7 @@ def test_oidc_session_token(self) -> None:
             nonce="nonce",
             client_redirect_url="https://example.com/",
             ui_auth_session_id="",
+            code_verifier="",
         )
         token = self.macaroon_generator.generate_oidc_session_token(
             state, session_data, duration_in_ms=2 * 60 * 1000
        )

From 44b476b26e50c642059d6110ba3dcbb1eec0b647 Mon Sep 17 00:00:00 2001
From: Patrick Cloke
Date: Wed, 4 Jan 2023 15:00:27 -0500
Subject: [PATCH 50/82] Document how to use Twitter as an OAuth 2.0 provider.
 (#14778)

This also alphabetizes the documentation for the various OpenID providers.
---
 changelog.d/14778.doc |   1 +
 docs/openid.md        | 616 ++++++++++++++++++++++--------------------
 2 files changed, 327 insertions(+), 290 deletions(-)
 create mode 100644 changelog.d/14778.doc

diff --git a/changelog.d/14778.doc b/changelog.d/14778.doc
new file mode 100644
index 000000000000..677f999f8da0
--- /dev/null
+++ b/changelog.d/14778.doc
@@ -0,0 +1 @@
+Document using Twitter as an OAuth 2.0 authentication provider.
diff --git a/docs/openid.md b/docs/openid.md
index e4ad45f306d6..45aa24dd24ae 100644
--- a/docs/openid.md
+++ b/docs/openid.md
@@ -88,98 +88,41 @@ oidc_providers:
       display_name_template: "{{ user.name }}"
 ```
 
-### Dex
-
-[Dex][dex-idp] is a simple, open-source OpenID Connect Provider.
-Although it is designed to help building a full-blown provider with an
-external database, it can be configured with static passwords in a config file.
-
-Follow the [Getting Started guide](https://dexidp.io/docs/getting-started/)
-to install Dex.
-
-Edit `examples/config-dev.yaml` config file from the Dex repo to add a client:
-
-```yaml
-staticClients:
-- id: synapse
-  secret: secret
-  redirectURIs:
-  - '[synapse public baseurl]/_synapse/client/oidc/callback'
-  name: 'Synapse'
-```
-
-Run with `dex serve examples/config-dev.yaml`.
-
-Synapse config:
-
-```yaml
-oidc_providers:
-  - idp_id: dex
-    idp_name: "My Dex server"
-    skip_verification: true # This is needed as Dex is served on an insecure endpoint
-    issuer: "http://127.0.0.1:5556/dex"
-    client_id: "synapse"
-    client_secret: "secret"
-    scopes: ["openid", "profile"]
-    user_mapping_provider:
-      config:
-        localpart_template: "{{ user.name }}"
-        display_name_template: "{{ user.name|capitalize }}"
-```
-### Keycloak
-
-[Keycloak][keycloak-idp] is an opensource IdP maintained by Red Hat.
-
-Keycloak supports OIDC Back-Channel Logout, which sends logout notification to Synapse, so that Synapse users get logged out when they log out from Keycloak.
-This can be optionally enabled by setting `backchannel_logout_enabled` to `true` in the Synapse configuration, and by setting the "Backchannel Logout URL" in Keycloak.
- -Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to install Keycloak and set up a realm. - -1. Click `Clients` in the sidebar and click `Create` - -2. Fill in the fields as below: - -| Field | Value | -|-----------|-----------| -| Client ID | `synapse` | -| Client Protocol | `openid-connect` | +### Apple -3. Click `Save` -4. Fill in the fields as below: +Configuring "Sign in with Apple" (SiWA) requires an Apple Developer account. -| Field | Value | -|-----------|-----------| -| Client ID | `synapse` | -| Enabled | `On` | -| Client Protocol | `openid-connect` | -| Access Type | `confidential` | -| Valid Redirect URIs | `[synapse public baseurl]/_synapse/client/oidc/callback` | -| Backchannel Logout URL (optional) | `[synapse public baseurl]/_synapse/client/oidc/backchannel_logout` | -| Backchannel Logout Session Required (optional) | `On` | +You will need to create a new "Services ID" for SiWA, and create and download a +private key with "SiWA" enabled. -5. Click `Save` -6. On the Credentials tab, update the fields: +As well as the private key file, you will need: + * Client ID: the "identifier" you gave the "Services ID" + * Team ID: a 10-character ID associated with your developer account. + * Key ID: the 10-character identifier for the key. -| Field | Value | -|-------|-------| -| Client Authenticator | `Client ID and Secret` | +[Apple's developer documentation](https://help.apple.com/developer-account/?lang=en#/dev77c875b7e) +has more information on setting up SiWA. -7. Click `Regenerate Secret` -8. Copy Secret +The synapse config will look like this: ```yaml -oidc_providers: - - idp_id: keycloak - idp_name: "My KeyCloak server" - issuer: "https://127.0.0.1:8443/realms/{realm_name}" - client_id: "synapse" - client_secret: "copy secret generated from above" - scopes: ["openid", "profile"] + - idp_id: apple + idp_name: Apple + issuer: "https://appleid.apple.com" + client_id: "your-client-id" # Set to the "identifier" for your "ServicesID" + client_auth_method: "client_secret_post" + client_secret_jwt_key: + key_file: "/path/to/AuthKey_KEYIDCODE.p8" # point to your key file + jwt_header: + alg: ES256 + kid: "KEYIDCODE" # Set to the 10-char Key ID + jwt_payload: + iss: TEAMIDCODE # Set to the 10-char Team ID + scopes: ["name", "email", "openid"] + authorization_endpoint: https://appleid.apple.com/auth/authorize?response_mode=form_post user_mapping_provider: config: - localpart_template: "{{ user.preferred_username }}" - display_name_template: "{{ user.name }}" - backchannel_logout_enabled: true # Optional + email_template: "{{ user.email }}" ``` ### Auth0 @@ -262,123 +205,169 @@ oidc_providers: display_name_template: "{{ user.preferred_username|capitalize }}" # TO BE FILLED: If your users have names in Authentik and you want those in Synapse, this should be replaced with user.name|capitalize. ``` -### LemonLDAP +### Dex -[LemonLDAP::NG][lemonldap] is an open-source IdP solution. +[Dex][dex-idp] is a simple, open-source OpenID Connect Provider. +Although it is designed to help building a full-blown provider with an +external database, it can be configured with static passwords in a config file. -1. Create an OpenID Connect Relying Parties in LemonLDAP::NG -2. 
The parameters are: -- Client ID under the basic menu of the new Relying Parties (`Options > Basic > - Client ID`) -- Client secret (`Options > Basic > Client secret`) -- JWT Algorithm: RS256 within the security menu of the new Relying Parties - (`Options > Security > ID Token signature algorithm` and `Options > Security > - Access Token signature algorithm`) -- Scopes: OpenID, Email and Profile -- Allowed redirection addresses for login (`Options > Basic > Allowed - redirection addresses for login` ) : - `[synapse public baseurl]/_synapse/client/oidc/callback` +Follow the [Getting Started guide](https://dexidp.io/docs/getting-started/) +to install Dex. + +Edit `examples/config-dev.yaml` config file from the Dex repo to add a client: + +```yaml +staticClients: +- id: synapse + secret: secret + redirectURIs: + - '[synapse public baseurl]/_synapse/client/oidc/callback' + name: 'Synapse' +``` + +Run with `dex serve examples/config-dev.yaml`. Synapse config: + ```yaml oidc_providers: - - idp_id: lemonldap - idp_name: lemonldap - discover: true - issuer: "https://auth.example.org/" # TO BE FILLED: replace with your domain - client_id: "your client id" # TO BE FILLED - client_secret: "your client secret" # TO BE FILLED - scopes: - - "openid" - - "profile" - - "email" + - idp_id: dex + idp_name: "My Dex server" + skip_verification: true # This is needed as Dex is served on an insecure endpoint + issuer: "http://127.0.0.1:5556/dex" + client_id: "synapse" + client_secret: "secret" + scopes: ["openid", "profile"] user_mapping_provider: config: - localpart_template: "{{ user.preferred_username }}}" - # TO BE FILLED: If your users have names in LemonLDAP::NG and you want those in Synapse, this should be replaced with user.name|capitalize or any valid filter. - display_name_template: "{{ user.preferred_username|capitalize }}" + localpart_template: "{{ user.name }}" + display_name_template: "{{ user.name|capitalize }}" ``` -### GitHub +### Django OAuth Toolkit -[GitHub][github-idp] is a bit special as it is not an OpenID Connect compliant provider, but -just a regular OAuth2 provider. +[django-oauth-toolkit](https://github.com/jazzband/django-oauth-toolkit) is a +Django application providing out of the box all the endpoints, data and logic +needed to add OAuth2 capabilities to your Django projects. It supports +[OpenID Connect too](https://django-oauth-toolkit.readthedocs.io/en/latest/oidc.html). -The [`/user` API endpoint](https://developer.github.com/v3/users/#get-the-authenticated-user) -can be used to retrieve information on the authenticated user. As the Synapse -login mechanism needs an attribute to uniquely identify users, and that endpoint -does not return a `sub` property, an alternative `subject_claim` has to be set. +Configuration on Django's side: -1. Create a new OAuth application: [https://github.com/settings/applications/new](https://github.com/settings/applications/new). -2. Set the callback URL to `[synapse public baseurl]/_synapse/client/oidc/callback`. +1. Add an application: `https://example.com/admin/oauth2_provider/application/add/` and choose parameters like this: +* `Redirect uris`: `https://synapse.example.com/_synapse/client/oidc/callback` +* `Client type`: `Confidential` +* `Authorization grant type`: `Authorization code` +* `Algorithm`: `HMAC with SHA-2 256` +2. You can [customize the claims](https://django-oauth-toolkit.readthedocs.io/en/latest/oidc.html#customizing-the-oidc-responses) Django gives to synapse (optional): +
+   <details>
+    <summary>Code sample</summary>
+
+    ```python
+    class CustomOAuth2Validator(OAuth2Validator):
+
+        def get_additional_claims(self, request):
+            return {
+                "sub": request.user.email,
+                "email": request.user.email,
+                "first_name": request.user.first_name,
+                "last_name": request.user.last_name,
+            }
+    ```
+   </details>
+Your synapse config is then: ```yaml oidc_providers: - - idp_id: github - idp_name: Github - idp_brand: "github" # optional: styling hint for clients + - idp_id: django_example + idp_name: "Django Example" + issuer: "https://example.com/o/" + client_id: "your-client-id" # CHANGE ME + client_secret: "your-client-secret" # CHANGE ME + scopes: ["openid"] + user_profile_method: "userinfo_endpoint" # needed because oauth-toolkit does not include user information in the authorization response + user_mapping_provider: + config: + localpart_template: "{{ user.email.split('@')[0] }}" + display_name_template: "{{ user.first_name }} {{ user.last_name }}" + email_template: "{{ user.email }}" +``` + +### Facebook + +0. You will need a Facebook developer account. You can register for one + [here](https://developers.facebook.com/async/registration/). +1. On the [apps](https://developers.facebook.com/apps/) page of the developer + console, "Create App", and choose "Build Connected Experiences". +2. Once the app is created, add "Facebook Login" and choose "Web". You don't + need to go through the whole form here. +3. In the left-hand menu, open "Products"/"Facebook Login"/"Settings". + * Add `[synapse public baseurl]/_synapse/client/oidc/callback` as an OAuth Redirect + URL. +4. In the left-hand menu, open "Settings/Basic". Here you can copy the "App ID" + and "App Secret" for use below. + +Synapse config: + +```yaml + - idp_id: facebook + idp_name: Facebook + idp_brand: "facebook" # optional: styling hint for clients discover: false - issuer: "https://github.com/" + issuer: "https://www.facebook.com" client_id: "your-client-id" # TO BE FILLED client_secret: "your-client-secret" # TO BE FILLED - authorization_endpoint: "https://github.com/login/oauth/authorize" - token_endpoint: "https://github.com/login/oauth/access_token" - userinfo_endpoint: "https://api.github.com/user" - scopes: ["read:user"] + scopes: ["openid", "email"] + authorization_endpoint: "https://facebook.com/dialog/oauth" + token_endpoint: "https://graph.facebook.com/v9.0/oauth/access_token" + jwks_uri: "https://www.facebook.com/.well-known/oauth/openid/jwks/" user_mapping_provider: config: - subject_claim: "id" - localpart_template: "{{ user.login }}" display_name_template: "{{ user.name }}" + email_template: "{{ user.email }}" ``` -### Google - -[Google][google-idp] is an OpenID certified authentication and authorisation provider. - -1. Set up a project in the Google API Console (see - [documentation](https://developers.google.com/identity/protocols/oauth2/openid-connect#appsetup)). -3. Add an "OAuth Client ID" for a Web Application under "Credentials". -4. Copy the Client ID and Client Secret, and add the following to your synapse config: - ```yaml - oidc_providers: - - idp_id: google - idp_name: Google - idp_brand: "google" # optional: styling hint for clients - issuer: "https://accounts.google.com/" - client_id: "your-client-id" # TO BE FILLED - client_secret: "your-client-secret" # TO BE FILLED - scopes: ["openid", "profile", "email"] # email is optional, read below - user_mapping_provider: - config: - localpart_template: "{{ user.given_name|lower }}" - display_name_template: "{{ user.name }}" - email_template: "{{ user.email }}" # needs "email" in scopes above - ``` -4. Back in the Google console, add this Authorized redirect URI: `[synapse - public baseurl]/_synapse/client/oidc/callback`. 
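(Illustrative aside, not part of the diff itself: the `user_mapping_provider` templates in these configs, such as `{{ user.name }}` above, are Jinja2 expressions rendered against the provider's userinfo response. A minimal sketch of that rendering, assuming a hypothetical userinfo payload and the Jinja2 semantics used by Synapse's default mapping provider:)

```python
from jinja2 import Environment

# Hypothetical userinfo payload, shaped like a Graph API "me" response.
userinfo = {"name": "Alice Example", "email": "alice@example.com"}

env = Environment()
# The payload is bound to the template name `user`; Jinja2 attribute access
# falls back to dict lookup, so `user.name` reads userinfo["name"].
display_name = env.from_string("{{ user.name }}").render(user=userinfo)
email = env.from_string("{{ user.email }}").render(user=userinfo)
assert display_name == "Alice Example" and email == "alice@example.com"
```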
+Relevant documents: + * [Manually Build a Login Flow](https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow) + * [Using Facebook's Graph API](https://developers.facebook.com/docs/graph-api/using-graph-api/) + * [Reference to the User endpoint](https://developers.facebook.com/docs/graph-api/reference/user) -### Twitch +Facebook do have an [OIDC discovery endpoint](https://www.facebook.com/.well-known/openid-configuration), +but it has a `response_types_supported` which excludes "code" (which we rely on, and +is even mentioned in their [documentation](https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow#login)), +so we have to disable discovery and configure the URIs manually. -1. Setup a developer account on [Twitch](https://dev.twitch.tv/) -2. Obtain the OAuth 2.0 credentials by [creating an app](https://dev.twitch.tv/console/apps/) -3. Add this OAuth Redirect URL: `[synapse public baseurl]/_synapse/client/oidc/callback` +### GitHub + +[GitHub][github-idp] is a bit special as it is not an OpenID Connect compliant provider, but +just a regular OAuth2 provider. + +The [`/user` API endpoint](https://developer.github.com/v3/users/#get-the-authenticated-user) +can be used to retrieve information on the authenticated user. As the Synapse +login mechanism needs an attribute to uniquely identify users, and that endpoint +does not return a `sub` property, an alternative `subject_claim` has to be set. + +1. Create a new OAuth application: [https://github.com/settings/applications/new](https://github.com/settings/applications/new). +2. Set the callback URL to `[synapse public baseurl]/_synapse/client/oidc/callback`. Synapse config: ```yaml oidc_providers: - - idp_id: twitch - idp_name: Twitch - issuer: "https://id.twitch.tv/oauth2/" + - idp_id: github + idp_name: Github + idp_brand: "github" # optional: styling hint for clients + discover: false + issuer: "https://github.com/" client_id: "your-client-id" # TO BE FILLED client_secret: "your-client-secret" # TO BE FILLED - client_auth_method: "client_secret_post" + authorization_endpoint: "https://github.com/login/oauth/authorize" + token_endpoint: "https://github.com/login/oauth/access_token" + userinfo_endpoint: "https://api.github.com/user" + scopes: ["read:user"] user_mapping_provider: config: - localpart_template: "{{ user.preferred_username }}" + subject_claim: "id" + localpart_template: "{{ user.login }}" display_name_template: "{{ user.name }}" ``` @@ -407,50 +396,6 @@ oidc_providers: display_name_template: '{{ user.name }}' ``` -### Facebook - -0. You will need a Facebook developer account. You can register for one - [here](https://developers.facebook.com/async/registration/). -1. On the [apps](https://developers.facebook.com/apps/) page of the developer - console, "Create App", and choose "Build Connected Experiences". -2. Once the app is created, add "Facebook Login" and choose "Web". You don't - need to go through the whole form here. -3. In the left-hand menu, open "Products"/"Facebook Login"/"Settings". - * Add `[synapse public baseurl]/_synapse/client/oidc/callback` as an OAuth Redirect - URL. -4. In the left-hand menu, open "Settings/Basic". Here you can copy the "App ID" - and "App Secret" for use below. 
- -Synapse config: - -```yaml - - idp_id: facebook - idp_name: Facebook - idp_brand: "facebook" # optional: styling hint for clients - discover: false - issuer: "https://www.facebook.com" - client_id: "your-client-id" # TO BE FILLED - client_secret: "your-client-secret" # TO BE FILLED - scopes: ["openid", "email"] - authorization_endpoint: "https://facebook.com/dialog/oauth" - token_endpoint: "https://graph.facebook.com/v9.0/oauth/access_token" - jwks_uri: "https://www.facebook.com/.well-known/oauth/openid/jwks/" - user_mapping_provider: - config: - display_name_template: "{{ user.name }}" - email_template: "{{ user.email }}" -``` - -Relevant documents: - * [Manually Build a Login Flow](https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow) - * [Using Facebook's Graph API](https://developers.facebook.com/docs/graph-api/using-graph-api/) - * [Reference to the User endpoint](https://developers.facebook.com/docs/graph-api/reference/user) - -Facebook do have an [OIDC discovery endpoint](https://www.facebook.com/.well-known/openid-configuration), -but it has a `response_types_supported` which excludes "code" (which we rely on, and -is even mentioned in their [documentation](https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow#login)), -so we have to disable discovery and configure the URIs manually. - ### Gitea Gitea is, like Github, not an OpenID provider, but just an OAuth2 provider. @@ -485,110 +430,123 @@ oidc_providers: display_name_template: "{{ user.full_name }}" ``` -### XWiki +### Google -Install [OpenID Connect Provider](https://extensions.xwiki.org/xwiki/bin/view/Extension/OpenID%20Connect/OpenID%20Connect%20Provider/) extension in your [XWiki](https://www.xwiki.org) instance. +[Google][google-idp] is an OpenID certified authentication and authorisation provider. -Synapse config: +1. Set up a project in the Google API Console (see + [documentation](https://developers.google.com/identity/protocols/oauth2/openid-connect#appsetup)). +3. Add an "OAuth Client ID" for a Web Application under "Credentials". +4. Copy the Client ID and Client Secret, and add the following to your synapse config: + ```yaml + oidc_providers: + - idp_id: google + idp_name: Google + idp_brand: "google" # optional: styling hint for clients + issuer: "https://accounts.google.com/" + client_id: "your-client-id" # TO BE FILLED + client_secret: "your-client-secret" # TO BE FILLED + scopes: ["openid", "profile", "email"] # email is optional, read below + user_mapping_provider: + config: + localpart_template: "{{ user.given_name|lower }}" + display_name_template: "{{ user.name }}" + email_template: "{{ user.email }}" # needs "email" in scopes above + ``` +4. Back in the Google console, add this Authorized redirect URI: `[synapse + public baseurl]/_synapse/client/oidc/callback`. -```yaml -oidc_providers: - - idp_id: xwiki - idp_name: "XWiki" - issuer: "https://myxwikihost/xwiki/oidc/" - client_id: "your-client-id" # TO BE FILLED - client_auth_method: none - scopes: ["openid", "profile"] - user_profile_method: "userinfo_endpoint" - user_mapping_provider: - config: - localpart_template: "{{ user.preferred_username }}" - display_name_template: "{{ user.name }}" -``` +### Keycloak -### Apple +[Keycloak][keycloak-idp] is an opensource IdP maintained by Red Hat. -Configuring "Sign in with Apple" (SiWA) requires an Apple Developer account. 
+Keycloak supports OIDC Back-Channel Logout, which sends logout notification to Synapse, so that Synapse users get logged out when they log out from Keycloak. +This can be optionally enabled by setting `backchannel_logout_enabled` to `true` in the Synapse configuration, and by setting the "Backchannel Logout URL" in Keycloak. -You will need to create a new "Services ID" for SiWA, and create and download a -private key with "SiWA" enabled. +Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to install Keycloak and set up a realm. -As well as the private key file, you will need: - * Client ID: the "identifier" you gave the "Services ID" - * Team ID: a 10-character ID associated with your developer account. - * Key ID: the 10-character identifier for the key. +1. Click `Clients` in the sidebar and click `Create` -[Apple's developer documentation](https://help.apple.com/developer-account/?lang=en#/dev77c875b7e) -has more information on setting up SiWA. +2. Fill in the fields as below: -The synapse config will look like this: +| Field | Value | +|-----------|-----------| +| Client ID | `synapse` | +| Client Protocol | `openid-connect` | + +3. Click `Save` +4. Fill in the fields as below: + +| Field | Value | +|-----------|-----------| +| Client ID | `synapse` | +| Enabled | `On` | +| Client Protocol | `openid-connect` | +| Access Type | `confidential` | +| Valid Redirect URIs | `[synapse public baseurl]/_synapse/client/oidc/callback` | +| Backchannel Logout URL (optional) | `[synapse public baseurl]/_synapse/client/oidc/backchannel_logout` | +| Backchannel Logout Session Required (optional) | `On` | + +5. Click `Save` +6. On the Credentials tab, update the fields: + +| Field | Value | +|-------|-------| +| Client Authenticator | `Client ID and Secret` | + +7. Click `Regenerate Secret` +8. Copy Secret ```yaml - - idp_id: apple - idp_name: Apple - issuer: "https://appleid.apple.com" - client_id: "your-client-id" # Set to the "identifier" for your "ServicesID" - client_auth_method: "client_secret_post" - client_secret_jwt_key: - key_file: "/path/to/AuthKey_KEYIDCODE.p8" # point to your key file - jwt_header: - alg: ES256 - kid: "KEYIDCODE" # Set to the 10-char Key ID - jwt_payload: - iss: TEAMIDCODE # Set to the 10-char Team ID - scopes: ["name", "email", "openid"] - authorization_endpoint: https://appleid.apple.com/auth/authorize?response_mode=form_post +oidc_providers: + - idp_id: keycloak + idp_name: "My KeyCloak server" + issuer: "https://127.0.0.1:8443/realms/{realm_name}" + client_id: "synapse" + client_secret: "copy secret generated from above" + scopes: ["openid", "profile"] user_mapping_provider: config: - email_template: "{{ user.email }}" + localpart_template: "{{ user.preferred_username }}" + display_name_template: "{{ user.name }}" + backchannel_logout_enabled: true # Optional ``` -### Django OAuth Toolkit - -[django-oauth-toolkit](https://github.com/jazzband/django-oauth-toolkit) is a -Django application providing out of the box all the endpoints, data and logic -needed to add OAuth2 capabilities to your Django projects. It supports -[OpenID Connect too](https://django-oauth-toolkit.readthedocs.io/en/latest/oidc.html). - -Configuration on Django's side: - -1. 
Add an application: `https://example.com/admin/oauth2_provider/application/add/` and choose parameters like this: -* `Redirect uris`: `https://synapse.example.com/_synapse/client/oidc/callback` -* `Client type`: `Confidential` -* `Authorization grant type`: `Authorization code` -* `Algorithm`: `HMAC with SHA-2 256` -2. You can [customize the claims](https://django-oauth-toolkit.readthedocs.io/en/latest/oidc.html#customizing-the-oidc-responses) Django gives to synapse (optional): -
-   <details>
-    <summary>Code sample</summary>
-
-    ```python
-    class CustomOAuth2Validator(OAuth2Validator):
-
-        def get_additional_claims(self, request):
-            return {
-                "sub": request.user.email,
-                "email": request.user.email,
-                "first_name": request.user.first_name,
-                "last_name": request.user.last_name,
-            }
-    ```
-   </details>
-Your synapse config is then: +1. Create an OpenID Connect Relying Parties in LemonLDAP::NG +2. The parameters are: +- Client ID under the basic menu of the new Relying Parties (`Options > Basic > + Client ID`) +- Client secret (`Options > Basic > Client secret`) +- JWT Algorithm: RS256 within the security menu of the new Relying Parties + (`Options > Security > ID Token signature algorithm` and `Options > Security > + Access Token signature algorithm`) +- Scopes: OpenID, Email and Profile +- Allowed redirection addresses for login (`Options > Basic > Allowed + redirection addresses for login` ) : + `[synapse public baseurl]/_synapse/client/oidc/callback` +Synapse config: ```yaml oidc_providers: - - idp_id: django_example - idp_name: "Django Example" - issuer: "https://example.com/o/" - client_id: "your-client-id" # CHANGE ME - client_secret: "your-client-secret" # CHANGE ME - scopes: ["openid"] - user_profile_method: "userinfo_endpoint" # needed because oauth-toolkit does not include user information in the authorization response + - idp_id: lemonldap + idp_name: lemonldap + discover: true + issuer: "https://auth.example.org/" # TO BE FILLED: replace with your domain + client_id: "your client id" # TO BE FILLED + client_secret: "your client secret" # TO BE FILLED + scopes: + - "openid" + - "profile" + - "email" user_mapping_provider: config: - localpart_template: "{{ user.email.split('@')[0] }}" - display_name_template: "{{ user.first_name }} {{ user.last_name }}" - email_template: "{{ user.email }}" + localpart_template: "{{ user.preferred_username }}}" + # TO BE FILLED: If your users have names in LemonLDAP::NG and you want those in Synapse, this should be replaced with user.name|capitalize or any valid filter. + display_name_template: "{{ user.preferred_username|capitalize }}" ``` ### Mastodon @@ -631,3 +589,81 @@ oidc_providers: ``` Note that the fields `client_id` and `client_secret` are taken from the CURL response above. + +### Twitch + +1. Setup a developer account on [Twitch](https://dev.twitch.tv/) +2. Obtain the OAuth 2.0 credentials by [creating an app](https://dev.twitch.tv/console/apps/) +3. Add this OAuth Redirect URL: `[synapse public baseurl]/_synapse/client/oidc/callback` + +Synapse config: + +```yaml +oidc_providers: + - idp_id: twitch + idp_name: Twitch + issuer: "https://id.twitch.tv/oauth2/" + client_id: "your-client-id" # TO BE FILLED + client_secret: "your-client-secret" # TO BE FILLED + client_auth_method: "client_secret_post" + user_mapping_provider: + config: + localpart_template: "{{ user.preferred_username }}" + display_name_template: "{{ user.name }}" +``` + +### Twitter + +*Using Twitter as an identity provider requires using Synapse 1.75.0 or later.* + +1. Setup a developer account on [Twitter](https://developer.twitter.com/en/portal/dashboard) +2. Create a project & app. +3. Enable user authentication and under "Type of App" choose "Web App, Automated App or Bot". +4. Under "App info" set the callback URL to `[synapse public baseurl]/_synapse/client/oidc/callback`. +5. Obtain the OAuth 2.0 credentials under the "Keys and tokens" tab, copy the "OAuth 2.0 Client ID and Client Secret" + +Synapse config: + +```yaml +oidc_providers: + - idp_id: twitter + idp_name: Twitter + idp_brand: "twitter" # optional: styling hint for clients + discover: false # Twitter is not OpenID compliant. 
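+    # (Illustrative comments, not in the original diff: with discovery
+    # disabled, the authorization/token/userinfo endpoints below must be
+    # supplied explicitly, and `pkce_method: "always"` below forces the PKCE
+    # code_challenge/code_verifier exchange that Twitter's OAuth 2.0 flow
+    # requires.)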
+ issuer: "https://twitter.com/" + client_id: "your-client-id" # TO BE FILLED + client_secret: "your-client-secret" # TO BE FILLED + pkce_method: "always" + # offline.access providers refresh tokens, tweet.read and users.read needed for userinfo request. + scopes: ["offline.access", "tweet.read", "users.read"] + authorization_endpoint: https://twitter.com/i/oauth2/authorize + token_endpoint: https://api.twitter.com/2/oauth2/token + userinfo_endpoint: https://api.twitter.com/2/users/me?user.fields=profile_image_url + user_mapping_provider: + config: + subject_template: "{{ user.data.id }}" + localpart_template: "{{ user.data.username }}" + display_name_template: "{{ user.data.name }}" + picture_template: "{{ user.data.profile_image_url }}" +``` + +### XWiki + +Install [OpenID Connect Provider](https://extensions.xwiki.org/xwiki/bin/view/Extension/OpenID%20Connect/OpenID%20Connect%20Provider/) extension in your [XWiki](https://www.xwiki.org) instance. + +Synapse config: + +```yaml +oidc_providers: + - idp_id: xwiki + idp_name: "XWiki" + issuer: "https://myxwikihost/xwiki/oidc/" + client_id: "your-client-id" # TO BE FILLED + client_auth_method: none + scopes: ["openid", "profile"] + user_profile_method: "userinfo_endpoint" + user_mapping_provider: + config: + localpart_template: "{{ user.preferred_username }}" + display_name_template: "{{ user.name }}" +``` From 827678196e5808ce121d6a4432bee43f9d149936 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Jan 2023 09:49:35 +0000 Subject: [PATCH 51/82] Bump serde from 1.0.151 to 1.0.152 (#14758) --- Cargo.lock | 8 ++++---- changelog.d/14758.misc | 1 + 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelog.d/14758.misc diff --git a/Cargo.lock b/Cargo.lock index c249ec56f709..ace6a8c50aa0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -323,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "serde" -version = "1.0.151" +version = "1.0.152" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97fed41fc1a24994d044e6db6935e69511a1153b52c15eb42493b26fa87feba0" +checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.151" +version = "1.0.152" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "255abe9a125a985c05190d687b320c12f9b1f0b99445e608c21ba0782c719ad8" +checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" dependencies = [ "proc-macro2", "quote", diff --git a/changelog.d/14758.misc b/changelog.d/14758.misc new file mode 100644 index 000000000000..69c727bab50b --- /dev/null +++ b/changelog.d/14758.misc @@ -0,0 +1 @@ +Bump serde from 1.0.151 to 1.0.152. From f79ef37b8c8e390744b96bd94dd2cdf55b31b069 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Jan 2023 09:49:50 +0000 Subject: [PATCH 52/82] Bump ruff from 0.0.189 to 0.0.206 (#14759) --- changelog.d/14759.misc | 1 + poetry.lock | 36 ++++++++++++++++++------------------ pyproject.toml | 2 +- 3 files changed, 20 insertions(+), 19 deletions(-) create mode 100644 changelog.d/14759.misc diff --git a/changelog.d/14759.misc b/changelog.d/14759.misc new file mode 100644 index 000000000000..30ce74c310db --- /dev/null +++ b/changelog.d/14759.misc @@ -0,0 +1 @@ +Bump ruff from 0.0.189 to 0.0.206. 
diff --git a/poetry.lock b/poetry.lock index b0aef9835dcf..dceca77c092a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -982,7 +982,7 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "ruff" -version = "0.0.189" +version = "0.0.206" description = "An extremely fast Python linter, written in Rust." category = "dev" optional = false @@ -1579,7 +1579,7 @@ user-search = ["pyicu"] [metadata] lock-version = "1.1" python-versions = "^3.7.1" -content-hash = "a8fc81be719e55ce60792ba0393e35592582e748b99ff79024b977ce6357a13e" +content-hash = "0b51556b00496ee5d214d343893c94b3ab48932d154a882f23be4f0d3b2af475" [metadata.files] attrs = [ @@ -2488,22 +2488,22 @@ rich = [ {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, ] ruff = [ - {file = "ruff-0.0.189-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:07c947b42d3c5efc6761214acdb6b71a49b833ad9fb9b320454244a6fe01f212"}, - {file = "ruff-0.0.189-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:76e6161d021bde5738bf9d123ae445cb3a22fa60f14958ce64961d8af16141a0"}, - {file = "ruff-0.0.189-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c27f51e5b48cd483459cdd1c95a6bd989adcf7653ccc440ca437f4993fe4b812"}, - {file = "ruff-0.0.189-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e89f488a16ce2b21d940fc6271ed161affec788955f7b41761a9693a92e994bb"}, - {file = "ruff-0.0.189-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fee593d8d470811c316ff2eb0124ac74668a3d637ab3fb237aa3fa8561fb89aa"}, - {file = "ruff-0.0.189-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bc3a73683a5b3b4b7bf951bbd4aa7d79b993c8c2e608a68de120c342ebe510f2"}, - {file = "ruff-0.0.189-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5d73877558651f48c86d958afe0f662b6c3639990c230a6b9d82ac6093484db"}, - {file = "ruff-0.0.189-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d1e6e9813f59ba54e7cb6f28c1f2a9a756197f6e321bd68519afe57f8522fce"}, - {file = "ruff-0.0.189-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d177090cf03004b14814b0aad530758f5186d391250afb737570edd55beabc6"}, - {file = "ruff-0.0.189-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:48de3253856a0a85f9b53a0ca1982946c7fd343c796cdc76ece0ae359d5b71b5"}, - {file = "ruff-0.0.189-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e935bb5a213030de312ad00df477f38c78ac97af58b0e6a4ae5762705a5113da"}, - {file = "ruff-0.0.189-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bdb8173d6efff96e0cc5fe38f5fc4daa0d28fb11553482b9989d372fdafc7708"}, - {file = "ruff-0.0.189-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:14486fd8632bc4c7f926137a9c6a8c45993ff6667ddb7a88192c369c3afd86e9"}, - {file = "ruff-0.0.189-py3-none-win32.whl", hash = "sha256:e281080e2ed04f01275b3df5baa0afe2802ab145349298e24700cdd09c0afddc"}, - {file = "ruff-0.0.189-py3-none-win_amd64.whl", hash = "sha256:c552ff0b0587a5e13f935131d2a19782c0baf8b59175cf3160a76545fbdbdd76"}, - {file = "ruff-0.0.189.tar.gz", hash = "sha256:90a3031461ed83686ff78f96e58d28cdee835110c51bdfa0968a2d5892610c71"}, + {file = "ruff-0.0.206-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:47fa81b999d960464e37135c5863cb0be97a05ba6ad8c5baa8163e5a0d7d2e20"}, + {file = "ruff-0.0.206-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:457829500cec96d307b6dd537e983e148cf3788454ccda83aeef459dcdeccce3"}, + {file = 
"ruff-0.0.206-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b186fdb99b10a8d5ac112e8c10000eff61c3cc248ce9b87f80abf6e32408746d"}, + {file = "ruff-0.0.206-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b978f206135cf02d89a51d29b2134eecfb8c05e2533dc75c6554b29a5e7e0844"}, + {file = "ruff-0.0.206-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:632233444d597e02982dfdd1d34eab03943e9c6e042f0dfafab40a3ceb18a6fd"}, + {file = "ruff-0.0.206-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b70baa8904ff9e11859082eb691d7e087d8637f1bb569512f76a8b2cfb8b3eb6"}, + {file = "ruff-0.0.206-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d64f8452e71fadf9995dee7517a55f251c5a3c87879e08d231af5ef5b7abf076"}, + {file = "ruff-0.0.206-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:66e198b71bd6f39b8adac5d2dcf47d8a3be8860d71680f36c7b7caba4e823ed0"}, + {file = "ruff-0.0.206-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821b5cee2f5ebf27950580a7a09c1baeedd1659e0c85742ef085356f2ffe6035"}, + {file = "ruff-0.0.206-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85ab420465395f8e6a5057f8acd7990297fa23a7e20f667ff4d73479f8fd5ca5"}, + {file = "ruff-0.0.206-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7c4cd2842cecb52464cb3d8b5386beeca029e6b7940d2720d5adaf9da94323b3"}, + {file = "ruff-0.0.206-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9064d59f83d1ddd4b45f1bc565846cf067bf4d0f3e8db5a73f14cc38a2403c49"}, + {file = "ruff-0.0.206-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cfd8977c264b3975e6cd893b62a20ee2cab6a1893cb0eda8418e0ef062a284c3"}, + {file = "ruff-0.0.206-py3-none-win32.whl", hash = "sha256:c333f4062fd8c86a903f0e11780b529d786981b70de2d65102ee1765949592cd"}, + {file = "ruff-0.0.206-py3-none-win_amd64.whl", hash = "sha256:6e758ff7c9981b91113d6a0f44183ab5dbe33ee5a5ca2ec7db5a22f03f9568eb"}, + {file = "ruff-0.0.206.tar.gz", hash = "sha256:b79b6ffac6ca713c5cad6e661495e77e1821d87c3fedd02139d13a857a6de92a"}, ] secretstorage = [ {file = "SecretStorage-3.3.1-py3-none-any.whl", hash = "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f"}, diff --git a/pyproject.toml b/pyproject.toml index 37b9ab3a7702..f759459b8041 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -317,7 +317,7 @@ all = [ # We pin black so that our tests don't start failing on new releases. isort = ">=5.10.1" black = ">=22.3.0" -ruff = "0.0.189" +ruff = "0.0.206" # Typechecking mypy = "*" From 62aa5c514d5a7610e667bae9e21c398ce2bd4d50 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Jan 2023 09:50:03 +0000 Subject: [PATCH 53/82] Bump pydantic from 1.10.2 to 1.10.4 (#14760) --- changelog.d/14760.misc | 1 + poetry.lock | 76 +++++++++++++++++++++--------------------- 2 files changed, 39 insertions(+), 38 deletions(-) create mode 100644 changelog.d/14760.misc diff --git a/changelog.d/14760.misc b/changelog.d/14760.misc new file mode 100644 index 000000000000..5eaaee4559ac --- /dev/null +++ b/changelog.d/14760.misc @@ -0,0 +1 @@ +Bump pydantic from 1.10.2 to 1.10.4. 
diff --git a/poetry.lock b/poetry.lock index dceca77c092a..b2b12e8249c8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -732,14 +732,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.10.2" +version = "1.10.4" description = "Data validation and settings management using python type hints" category = "main" optional = false python-versions = ">=3.7" [package.dependencies] -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.2.0" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] @@ -2310,42 +2310,42 @@ pycparser = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pydantic = [ - {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"}, - {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"}, - {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"}, - {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"}, - {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"}, - {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"}, - {file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"}, - {file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"}, - {file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"}, - {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"}, - {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"}, - {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"}, - {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"}, - {file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"}, - {file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"}, - {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"}, - {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"}, - {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"}, - {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"}, - {file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"}, - {file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"}, - {file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"}, - {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"}, - {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"}, - {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"}, - {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"}, - {file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"}, - {file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"}, - {file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"}, - {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"}, - {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"}, - {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"}, - {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"}, - {file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"}, - {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"}, - {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, + {file = "pydantic-1.10.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5635de53e6686fe7a44b5cf25fcc419a0d5e5c1a1efe73d49d48fe7586db854"}, + {file = "pydantic-1.10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6dc1cc241440ed7ca9ab59d9929075445da6b7c94ced281b3dd4cfe6c8cff817"}, + {file = "pydantic-1.10.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51bdeb10d2db0f288e71d49c9cefa609bca271720ecd0c58009bd7504a0c464c"}, + {file = "pydantic-1.10.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78cec42b95dbb500a1f7120bdf95c401f6abb616bbe8785ef09887306792e66e"}, + {file = "pydantic-1.10.4-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:8775d4ef5e7299a2f4699501077a0defdaac5b6c4321173bcb0f3c496fbadf85"}, + {file = "pydantic-1.10.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:572066051eeac73d23f95ba9a71349c42a3e05999d0ee1572b7860235b850cc6"}, + {file = "pydantic-1.10.4-cp310-cp310-win_amd64.whl", hash = "sha256:7feb6a2d401f4d6863050f58325b8d99c1e56f4512d98b11ac64ad1751dc647d"}, + {file = "pydantic-1.10.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39f4a73e5342b25c2959529f07f026ef58147249f9b7431e1ba8414a36761f53"}, + {file = "pydantic-1.10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:983e720704431a6573d626b00662eb78a07148c9115129f9b4351091ec95ecc3"}, + {file = "pydantic-1.10.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75d52162fe6b2b55964fbb0af2ee58e99791a3138588c482572bb6087953113a"}, + {file = "pydantic-1.10.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fdf8d759ef326962b4678d89e275ffc55b7ce59d917d9f72233762061fd04a2d"}, + {file = "pydantic-1.10.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05a81b006be15655b2a1bae5faa4280cf7c81d0e09fcb49b342ebf826abe5a72"}, + {file = "pydantic-1.10.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d88c4c0e5c5dfd05092a4b271282ef0588e5f4aaf345778056fc5259ba098857"}, + {file = "pydantic-1.10.4-cp311-cp311-win_amd64.whl", hash = "sha256:6a05a9db1ef5be0fe63e988f9617ca2551013f55000289c671f71ec16f4985e3"}, + {file = "pydantic-1.10.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:887ca463c3bc47103c123bc06919c86720e80e1214aab79e9b779cda0ff92a00"}, + {file = "pydantic-1.10.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdf88ab63c3ee282c76d652fc86518aacb737ff35796023fae56a65ced1a5978"}, + {file = "pydantic-1.10.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a48f1953c4a1d9bd0b5167ac50da9a79f6072c63c4cef4cf2a3736994903583e"}, + {file = "pydantic-1.10.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a9f2de23bec87ff306aef658384b02aa7c32389766af3c5dee9ce33e80222dfa"}, + {file = "pydantic-1.10.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:cd8702c5142afda03dc2b1ee6bc358b62b3735b2cce53fc77b31ca9f728e4bc8"}, + {file = "pydantic-1.10.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6e7124d6855b2780611d9f5e1e145e86667eaa3bd9459192c8dc1a097f5e9903"}, + {file = "pydantic-1.10.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b53e1d41e97063d51a02821b80538053ee4608b9a181c1005441f1673c55423"}, + {file = "pydantic-1.10.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:55b1625899acd33229c4352ce0ae54038529b412bd51c4915349b49ca575258f"}, + {file = "pydantic-1.10.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:301d626a59edbe5dfb48fcae245896379a450d04baeed50ef40d8199f2733b06"}, + {file = "pydantic-1.10.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6f9d649892a6f54a39ed56b8dfd5e08b5f3be5f893da430bed76975f3735d15"}, + {file = "pydantic-1.10.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d7b5a3821225f5c43496c324b0d6875fde910a1c2933d726a743ce328fbb2a8c"}, + {file = "pydantic-1.10.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f2f7eb6273dd12472d7f218e1fef6f7c7c2f00ac2e1ecde4db8824c457300416"}, + {file = "pydantic-1.10.4-cp38-cp38-win_amd64.whl", hash = "sha256:4b05697738e7d2040696b0a66d9f0a10bec0efa1883ca75ee9e55baf511909d6"}, + {file = 
"pydantic-1.10.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a9a6747cac06c2beb466064dda999a13176b23535e4c496c9d48e6406f92d42d"}, + {file = "pydantic-1.10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eb992a1ef739cc7b543576337bebfc62c0e6567434e522e97291b251a41dad7f"}, + {file = "pydantic-1.10.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:990406d226dea0e8f25f643b370224771878142155b879784ce89f633541a024"}, + {file = "pydantic-1.10.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e82a6d37a95e0b1b42b82ab340ada3963aea1317fd7f888bb6b9dfbf4fff57c"}, + {file = "pydantic-1.10.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9193d4f4ee8feca58bc56c8306bcb820f5c7905fd919e0750acdeeeef0615b28"}, + {file = "pydantic-1.10.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2b3ce5f16deb45c472dde1a0ee05619298c864a20cded09c4edd820e1454129f"}, + {file = "pydantic-1.10.4-cp39-cp39-win_amd64.whl", hash = "sha256:9cbdc268a62d9a98c56e2452d6c41c0263d64a2009aac69246486f01b4f594c4"}, + {file = "pydantic-1.10.4-py3-none-any.whl", hash = "sha256:4948f264678c703f3877d1c8877c4e3b2e12e549c57795107f08cf70c6ec7774"}, + {file = "pydantic-1.10.4.tar.gz", hash = "sha256:b9a3859f24eb4e097502a3be1fb4b2abb79b6103dd9e2e0edb70613a4459a648"}, ] pygithub = [ {file = "PyGithub-1.57-py3-none-any.whl", hash = "sha256:5822febeac2391f1306c55a99af2bc8f86c8bf82ded000030cd02c18f31b731f"}, From be26379d00d6f7de3c6aa799f1f602d670d85323 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Jan 2023 09:50:14 +0000 Subject: [PATCH 54/82] Bump gitpython from 3.1.29 to 3.1.30 (#14761) * Bump gitpython from 3.1.29 to 3.1.30 Bumps [gitpython](https://github.com/gitpython-developers/GitPython) from 3.1.29 to 3.1.30. - [Release notes](https://github.com/gitpython-developers/GitPython/releases) - [Changelog](https://github.com/gitpython-developers/GitPython/blob/main/CHANGES) - [Commits](https://github.com/gitpython-developers/GitPython/compare/3.1.29...3.1.30) --- updated-dependencies: - dependency-name: gitpython dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/14761.misc | 1 + poetry.lock | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/14761.misc diff --git a/changelog.d/14761.misc b/changelog.d/14761.misc new file mode 100644 index 000000000000..bd96f75f39dd --- /dev/null +++ b/changelog.d/14761.misc @@ -0,0 +1 @@ +Bump gitpython from 3.1.29 to 3.1.30. 
diff --git a/poetry.lock b/poetry.lock index b2b12e8249c8..f766e4d09035 100644 --- a/poetry.lock +++ b/poetry.lock @@ -266,7 +266,7 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.29" +version = "3.1.30" description = "GitPython is a python library used to interact with Git repositories" category = "dev" optional = false @@ -1800,8 +1800,8 @@ gitdb = [ {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] gitpython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, + {file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"}, + {file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"}, ] hiredis = [ {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"}, From bd9ada3860e1067b76697710accdce7dc15ecb21 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Jan 2023 09:50:41 +0000 Subject: [PATCH 55/82] Bump pillow from 9.3.0 to 9.4.0 (#14762) * Bump pillow from 9.3.0 to 9.4.0 Bumps [pillow](https://github.com/python-pillow/Pillow) from 9.3.0 to 9.4.0. - [Release notes](https://github.com/python-pillow/Pillow/releases) - [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) - [Commits](https://github.com/python-pillow/Pillow/compare/9.3.0...9.4.0) --- updated-dependencies: - dependency-name: pillow dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/14762.misc | 1 + poetry.lock | 128 +++++++++++++++++++++-------------------- 2 files changed, 66 insertions(+), 63 deletions(-) create mode 100644 changelog.d/14762.misc diff --git a/changelog.d/14762.misc b/changelog.d/14762.misc new file mode 100644 index 000000000000..a9c750c2038c --- /dev/null +++ b/changelog.d/14762.misc @@ -0,0 +1 @@ +Bump pillow from 9.3.0 to 9.4.0. 
diff --git a/poetry.lock b/poetry.lock index f766e4d09035..c549ffcfcb5f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -620,14 +620,14 @@ python-versions = "*" [[package]] name = "pillow" -version = "9.3.0" +version = "9.4.0" description = "Python Imaging Library (Fork)" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] [[package]] @@ -2198,67 +2198,69 @@ phonenumbers = [ {file = "phonenumbers-8.13.2.tar.gz", hash = "sha256:0179f688d48c0e7e161eb7b9d86d587940af1f5174f97c1fdfd893c599c0d94a"}, ] pillow = [ - {file = "Pillow-9.3.0-1-cp37-cp37m-win32.whl", hash = "sha256:e6ea6b856a74d560d9326c0f5895ef8050126acfdc7ca08ad703eb0081e82b74"}, - {file = "Pillow-9.3.0-1-cp37-cp37m-win_amd64.whl", hash = "sha256:32a44128c4bdca7f31de5be641187367fe2a450ad83b833ef78910397db491aa"}, - {file = "Pillow-9.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:0b7257127d646ff8676ec8a15520013a698d1fdc48bc2a79ba4e53df792526f2"}, - {file = "Pillow-9.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b90f7616ea170e92820775ed47e136208e04c967271c9ef615b6fbd08d9af0e3"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68943d632f1f9e3dce98908e873b3a090f6cba1cbb1b892a9e8d97c938871fbe"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be55f8457cd1eac957af0c3f5ece7bc3f033f89b114ef30f710882717670b2a8"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d77adcd56a42d00cc1be30843d3426aa4e660cab4a61021dc84467123f7a00c"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:829f97c8e258593b9daa80638aee3789b7df9da5cf1336035016d76f03b8860c"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:801ec82e4188e935c7f5e22e006d01611d6b41661bba9fe45b60e7ac1a8f84de"}, - {file = "Pillow-9.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:871b72c3643e516db4ecf20efe735deb27fe30ca17800e661d769faab45a18d7"}, - {file = "Pillow-9.3.0-cp310-cp310-win32.whl", hash = "sha256:655a83b0058ba47c7c52e4e2df5ecf484c1b0b0349805896dd350cbc416bdd91"}, - {file = "Pillow-9.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:9f47eabcd2ded7698106b05c2c338672d16a6f2a485e74481f524e2a23c2794b"}, - {file = "Pillow-9.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:57751894f6618fd4308ed8e0c36c333e2f5469744c34729a27532b3db106ee20"}, - {file = "Pillow-9.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7db8b751ad307d7cf238f02101e8e36a128a6cb199326e867d1398067381bff4"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3033fbe1feb1b59394615a1cafaee85e49d01b51d54de0cbf6aa8e64182518a1"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22b012ea2d065fd163ca096f4e37e47cd8b59cf4b0fd47bfca6abb93df70b34c"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a65733d103311331875c1dca05cb4606997fd33d6acfed695b1232ba1df193"}, - {file = 
"Pillow-9.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:502526a2cbfa431d9fc2a079bdd9061a2397b842bb6bc4239bb176da00993812"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90fb88843d3902fe7c9586d439d1e8c05258f41da473952aa8b328d8b907498c"}, - {file = "Pillow-9.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89dca0ce00a2b49024df6325925555d406b14aa3efc2f752dbb5940c52c56b11"}, - {file = "Pillow-9.3.0-cp311-cp311-win32.whl", hash = "sha256:3168434d303babf495d4ba58fc22d6604f6e2afb97adc6a423e917dab828939c"}, - {file = "Pillow-9.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:18498994b29e1cf86d505edcb7edbe814d133d2232d256db8c7a8ceb34d18cef"}, - {file = "Pillow-9.3.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:772a91fc0e03eaf922c63badeca75e91baa80fe2f5f87bdaed4280662aad25c9"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa4107d1b306cdf8953edde0534562607fe8811b6c4d9a486298ad31de733b2"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4012d06c846dc2b80651b120e2cdd787b013deb39c09f407727ba90015c684f"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77ec3e7be99629898c9a6d24a09de089fa5356ee408cdffffe62d67bb75fdd72"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:6c738585d7a9961d8c2821a1eb3dcb978d14e238be3d70f0a706f7fa9316946b"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:828989c45c245518065a110434246c44a56a8b2b2f6347d1409c787e6e4651ee"}, - {file = "Pillow-9.3.0-cp37-cp37m-win32.whl", hash = "sha256:82409ffe29d70fd733ff3c1025a602abb3e67405d41b9403b00b01debc4c9a29"}, - {file = "Pillow-9.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:41e0051336807468be450d52b8edd12ac60bebaa97fe10c8b660f116e50b30e4"}, - {file = "Pillow-9.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:b03ae6f1a1878233ac620c98f3459f79fd77c7e3c2b20d460284e1fb370557d4"}, - {file = "Pillow-9.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4390e9ce199fc1951fcfa65795f239a8a4944117b5935a9317fb320e7767b40f"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40e1ce476a7804b0fb74bcfa80b0a2206ea6a882938eaba917f7a0f004b42502"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a06a052c5f37b4ed81c613a455a81f9a3a69429b4fd7bb913c3fa98abefc20"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03150abd92771742d4a8cd6f2fa6246d847dcd2e332a18d0c15cc75bf6703040"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:15c42fb9dea42465dfd902fb0ecf584b8848ceb28b41ee2b58f866411be33f07"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:51e0e543a33ed92db9f5ef69a0356e0b1a7a6b6a71b80df99f1d181ae5875636"}, - {file = "Pillow-9.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3dd6caf940756101205dffc5367babf288a30043d35f80936f9bfb37f8355b32"}, - {file = "Pillow-9.3.0-cp38-cp38-win32.whl", hash = "sha256:f1ff2ee69f10f13a9596480335f406dd1f70c3650349e2be67ca3139280cade0"}, - {file = "Pillow-9.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:276a5ca930c913f714e372b2591a22c4bd3b81a418c0f6635ba832daec1cbcfc"}, - {file = "Pillow-9.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:73bd195e43f3fadecfc50c682f5055ec32ee2c933243cafbfdec69ab1aa87cad"}, - {file = 
"Pillow-9.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c7c8ae3864846fc95f4611c78129301e203aaa2af813b703c55d10cc1628535"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0918e03aa0c72ea56edbb00d4d664294815aa11291a11504a377ea018330d3"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0915e734b33a474d76c28e07292f196cdf2a590a0d25bcc06e64e545f2d146c"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0372acb5d3598f36ec0914deed2a63f6bcdb7b606da04dc19a88d31bf0c05b"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:ad58d27a5b0262c0c19b47d54c5802db9b34d38bbf886665b626aff83c74bacd"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:97aabc5c50312afa5e0a2b07c17d4ac5e865b250986f8afe2b02d772567a380c"}, - {file = "Pillow-9.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9aaa107275d8527e9d6e7670b64aabaaa36e5b6bd71a1015ddd21da0d4e06448"}, - {file = "Pillow-9.3.0-cp39-cp39-win32.whl", hash = "sha256:bac18ab8d2d1e6b4ce25e3424f709aceef668347db8637c2296bcf41acb7cf48"}, - {file = "Pillow-9.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:b472b5ea442148d1c3e2209f20f1e0bb0eb556538690fa70b5e1f79fa0ba8dc2"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ab388aaa3f6ce52ac1cb8e122c4bd46657c15905904b3120a6248b5b8b0bc228"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbb8e7f2abee51cef77673be97760abff1674ed32847ce04b4af90f610144c7b"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca31dd6014cb8b0b2db1e46081b0ca7d936f856da3b39744aef499db5d84d02"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c7025dce65566eb6e89f56c9509d4f628fddcedb131d9465cacd3d8bac337e7e"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ebf2029c1f464c59b8bdbe5143c79fa2045a581ac53679733d3a91d400ff9efb"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b59430236b8e58840a0dfb4099a0e8717ffb779c952426a69ae435ca1f57210c"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12ce4932caf2ddf3e41d17fc9c02d67126935a44b86df6a206cf0d7161548627"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae5331c23ce118c53b172fa64a4c037eb83c9165aba3a7ba9ddd3ec9fa64a699"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0b07fffc13f474264c336298d1b4ce01d9c5a011415b79d4ee5527bb69ae6f65"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:073adb2ae23431d3b9bcbcff3fe698b62ed47211d0716b067385538a1b0f28b8"}, - {file = "Pillow-9.3.0.tar.gz", hash = "sha256:c935a22a557a560108d780f9a0fc426dd7459940dc54faa49d83249c8d3e760f"}, + {file = "Pillow-9.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157"}, + {file = "Pillow-9.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9a3049a10261d7f2b6514d35bbb7a4dfc3ece4c4de14ef5876c4b7a23a0e566d"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16a8df99701f9095bea8a6c4b3197da105df6f74e6176c5b410bc2df2fd29a57"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:94cdff45173b1919350601f82d61365e792895e3c3a3443cf99819e6fbf717a5"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ed3e4b4e1e6de75fdc16d3259098de7c6571b1a6cc863b1a49e7d3d53e036070"}, + {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5b2f8a31bd43e0f18172d8ac82347c8f37ef3e0b414431157718aa234991b28"}, + {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:09b89ddc95c248ee788328528e6a2996e09eaccddeeb82a5356e92645733be35"}, + {file = "Pillow-9.4.0-cp310-cp310-win32.whl", hash = "sha256:f09598b416ba39a8f489c124447b007fe865f786a89dbfa48bb5cf395693132a"}, + {file = "Pillow-9.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6e78171be3fb7941f9910ea15b4b14ec27725865a73c15277bc39f5ca4f8391"}, + {file = "Pillow-9.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:3fa1284762aacca6dc97474ee9c16f83990b8eeb6697f2ba17140d54b453e133"}, + {file = "Pillow-9.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eaef5d2de3c7e9b21f1e762f289d17b726c2239a42b11e25446abf82b26ac132"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4dfdae195335abb4e89cc9762b2edc524f3c6e80d647a9a81bf81e17e3fb6f0"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6abfb51a82e919e3933eb137e17c4ae9c0475a25508ea88993bb59faf82f3b35"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451f10ef963918e65b8869e17d67db5e2f4ab40e716ee6ce7129b0cde2876eab"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6663977496d616b618b6cfa43ec86e479ee62b942e1da76a2c3daa1c75933ef4"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:60e7da3a3ad1812c128750fc1bc14a7ceeb8d29f77e0a2356a8fb2aa8925287d"}, + {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:19005a8e58b7c1796bc0167862b1f54a64d3b44ee5d48152b06bb861458bc0f8"}, + {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f715c32e774a60a337b2bb8ad9839b4abf75b267a0f18806f6f4f5f1688c4b5a"}, + {file = "Pillow-9.4.0-cp311-cp311-win32.whl", hash = "sha256:b222090c455d6d1a64e6b7bb5f4035c4dff479e22455c9eaa1bdd4c75b52c80c"}, + {file = "Pillow-9.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba6612b6548220ff5e9df85261bddc811a057b0b465a1226b39bfb8550616aee"}, + {file = "Pillow-9.4.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5f532a2ad4d174eb73494e7397988e22bf427f91acc8e6ebf5bb10597b49c493"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dd5a9c3091a0f414a963d427f920368e2b6a4c2f7527fdd82cde8ef0bc7a327"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef21af928e807f10bf4141cad4746eee692a0dd3ff56cfb25fce076ec3cc8abe"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:847b114580c5cc9ebaf216dd8c8dbc6b00a3b7ab0131e173d7120e6deade1f57"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:653d7fb2df65efefbcbf81ef5fe5e5be931f1ee4332c2893ca638c9b11a409c4"}, + {file = 
"Pillow-9.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:46f39cab8bbf4a384ba7cb0bc8bae7b7062b6a11cfac1ca4bc144dea90d4a9f5"}, + {file = "Pillow-9.4.0-cp37-cp37m-win32.whl", hash = "sha256:7ac7594397698f77bce84382929747130765f66406dc2cd8b4ab4da68ade4c6e"}, + {file = "Pillow-9.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:46c259e87199041583658457372a183636ae8cd56dbf3f0755e0f376a7f9d0e6"}, + {file = "Pillow-9.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:0e51f608da093e5d9038c592b5b575cadc12fd748af1479b5e858045fff955a9"}, + {file = "Pillow-9.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:765cb54c0b8724a7c12c55146ae4647e0274a839fb6de7bcba841e04298e1011"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:519e14e2c49fcf7616d6d2cfc5c70adae95682ae20f0395e9280db85e8d6c4df"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d197df5489004db87d90b918033edbeee0bd6df3848a204bca3ff0a903bef837"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0845adc64fe9886db00f5ab68c4a8cd933ab749a87747555cec1c95acea64b0b"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:e1339790c083c5a4de48f688b4841f18df839eb3c9584a770cbd818b33e26d5d"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:a96e6e23f2b79433390273eaf8cc94fec9c6370842e577ab10dabdcc7ea0a66b"}, + {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7cfc287da09f9d2a7ec146ee4d72d6ea1342e770d975e49a8621bf54eaa8f30f"}, + {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d7081c084ceb58278dd3cf81f836bc818978c0ccc770cbbb202125ddabec6628"}, + {file = "Pillow-9.4.0-cp38-cp38-win32.whl", hash = "sha256:df41112ccce5d47770a0c13651479fbcd8793f34232a2dd9faeccb75eb5d0d0d"}, + {file = "Pillow-9.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:7a21222644ab69ddd9967cfe6f2bb420b460dae4289c9d40ff9a4896e7c35c9a"}, + {file = "Pillow-9.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0f3269304c1a7ce82f1759c12ce731ef9b6e95b6df829dccd9fe42912cc48569"}, + {file = "Pillow-9.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb362e3b0976dc994857391b776ddaa8c13c28a16f80ac6522c23d5257156bed"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e0f87144fcbbe54297cae708c5e7f9da21a4646523456b00cc956bd4c65815"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28676836c7796805914b76b1837a40f76827ee0d5398f72f7dcc634bae7c6264"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0884ba7b515163a1a05440a138adeb722b8a6ae2c2b33aea93ea3118dd3a899e"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:53dcb50fbdc3fb2c55431a9b30caeb2f7027fcd2aeb501459464f0214200a503"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:e8c5cf126889a4de385c02a2c3d3aba4b00f70234bfddae82a5eaa3ee6d5e3e6"}, + {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c6b1389ed66cdd174d040105123a5a1bc91d0aa7059c7261d20e583b6d8cbd2"}, + {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0dd4c681b82214b36273c18ca7ee87065a50e013112eea7d78c7a1b89a739153"}, + {file = "Pillow-9.4.0-cp39-cp39-win32.whl", hash = "sha256:6d9dfb9959a3b0039ee06c1a1a90dc23bac3b430842dcb97908ddde05870601c"}, + {file = 
"Pillow-9.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:54614444887e0d3043557d9dbc697dbb16cfb5a35d672b7a0fcc1ed0cf1c600b"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b9b752ab91e78234941e44abdecc07f1f0d8f51fb62941d32995b8161f68cfe5"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3b56206244dc8711f7e8b7d6cad4663917cd5b2d950799425076681e8766286"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aabdab8ec1e7ca7f1434d042bf8b1e92056245fb179790dc97ed040361f16bfd"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:db74f5562c09953b2c5f8ec4b7dfd3f5421f31811e97d1dbc0a7c93d6e3a24df"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e9d7747847c53a16a729b6ee5e737cf170f7a16611c143d95aa60a109a59c336"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b52ff4f4e002f828ea6483faf4c4e8deea8d743cf801b74910243c58acc6eda3"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:575d8912dca808edd9acd6f7795199332696d3469665ef26163cd090fa1f8bfa"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c4ed2ff6760e98d262e0cc9c9a7f7b8a9f61aa4d47c58835cdaf7b0b8811bb"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e621b0246192d3b9cb1dc62c78cfa4c6f6d2ddc0ec207d43c0dedecb914f152a"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8f127e7b028900421cad64f51f75c051b628db17fb00e099eb148761eed598c9"}, + {file = "Pillow-9.4.0.tar.gz", hash = "sha256:a1c2d7780448eb93fbcc3789bf3916aa5720d942e37945f4056680317f1cd23e"}, ] pkginfo = [ {file = "pkginfo-1.8.2-py2.py3-none-any.whl", hash = "sha256:c24c487c6a7f72c66e816ab1796b96ac6c3d14d49338293d2141664330b55ffc"}, From da911e9ddf55d8230bd2e42bc966173ef8de84d4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Jan 2023 09:50:52 +0000 Subject: [PATCH 56/82] Bump types-requests from 2.28.11.5 to 2.28.11.7 (#14763) * Bump types-requests from 2.28.11.5 to 2.28.11.7 Bumps [types-requests](https://github.com/python/typeshed) from 2.28.11.5 to 2.28.11.7. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-requests dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/14763.misc | 1 + poetry.lock | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/14763.misc diff --git a/changelog.d/14763.misc b/changelog.d/14763.misc new file mode 100644 index 000000000000..59d616d76dfc --- /dev/null +++ b/changelog.d/14763.misc @@ -0,0 +1 @@ +Bump types-requests from 2.28.11.5 to 2.28.11.7. 
diff --git a/poetry.lock b/poetry.lock index c549ffcfcb5f..0acffe0f527a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1414,7 +1414,7 @@ python-versions = "*" [[package]] name = "types-requests" -version = "2.28.11.5" +version = "2.28.11.7" description = "Typing stubs for requests" category = "dev" optional = false @@ -2772,8 +2772,8 @@ types-pyyaml = [ {file = "types_PyYAML-6.0.12.2-py3-none-any.whl", hash = "sha256:1e94e80aafee07a7e798addb2a320e32956a373f376655128ae20637adb2655b"}, ] types-requests = [ - {file = "types-requests-2.28.11.5.tar.gz", hash = "sha256:a7df37cc6fb6187a84097da951f8e21d335448aa2501a6b0a39cbd1d7ca9ee2a"}, - {file = "types_requests-2.28.11.5-py3-none-any.whl", hash = "sha256:091d4a5a33c1b4f20d8b1b952aa8fa27a6e767c44c3cf65e56580df0b05fd8a9"}, + {file = "types-requests-2.28.11.7.tar.gz", hash = "sha256:0ae38633734990d019b80f5463dfa164ebd3581998ac8435f526da6fe4d598c3"}, + {file = "types_requests-2.28.11.7-py3-none-any.whl", hash = "sha256:b6a2fca8109f4fdba33052f11ed86102bddb2338519e1827387137fefc66a98b"}, ] types-setuptools = [ {file = "types-setuptools-65.6.0.2.tar.gz", hash = "sha256:ad60ccf01d626de9762224448f36c13e0660e863afd6dc11d979b3739a6c7d24"}, From 70961911a889faf97aeb1b685cc7476236f181be Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Jan 2023 09:51:09 +0000 Subject: [PATCH 57/82] Bump dawidd6/action-download-artifact from 2.24.2 to 2.24.3 (#14779) * Bump dawidd6/action-download-artifact from 2.24.2 to 2.24.3 Bumps [dawidd6/action-download-artifact](https://github.com/dawidd6/action-download-artifact) from 2.24.2 to 2.24.3. - [Release notes](https://github.com/dawidd6/action-download-artifact/releases) - [Commits](https://github.com/dawidd6/action-download-artifact/compare/e6e25ac3a2b93187502a8be1ef9e9603afc34925...bd10f381a96414ce2b13a11bfa89902ba7cea07f) --- updated-dependencies: - dependency-name: dawidd6/action-download-artifact dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- .github/workflows/docs-pr-netlify.yaml | 2 +- changelog.d/14779.misc | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/14779.misc diff --git a/.github/workflows/docs-pr-netlify.yaml b/.github/workflows/docs-pr-netlify.yaml index 231982f6816c..ef7a38144e00 100644 --- a/.github/workflows/docs-pr-netlify.yaml +++ b/.github/workflows/docs-pr-netlify.yaml @@ -14,7 +14,7 @@ jobs: # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess: - name: 📥 Download artifact - uses: dawidd6/action-download-artifact@e6e25ac3a2b93187502a8be1ef9e9603afc34925 # v2.24.2 + uses: dawidd6/action-download-artifact@bd10f381a96414ce2b13a11bfa89902ba7cea07f # v2.24.3 with: workflow: docs-pr.yaml run_id: ${{ github.event.workflow_run.id }} diff --git a/changelog.d/14779.misc b/changelog.d/14779.misc new file mode 100644 index 000000000000..2bc760dbc661 --- /dev/null +++ b/changelog.d/14779.misc @@ -0,0 +1 @@ +Bump dawidd6/action-download-artifact from 2.24.2 to 2.24.3. 
From 7b642167e6c5cdebb46dc97915b8937e899c2b53 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Jan 2023 10:10:43 +0000 Subject: [PATCH 58/82] Bump JasonEtco/create-an-issue from 2.8.2 to 2.9.1 (#14731) * Bump JasonEtco/create-an-issue from 2.8.2 to 2.9.1 Bumps [JasonEtco/create-an-issue](https://github.com/JasonEtco/create-an-issue) from 2.8.2 to 2.9.1. - [Release notes](https://github.com/JasonEtco/create-an-issue/releases) - [Commits](https://github.com/JasonEtco/create-an-issue/compare/3a8ba796516b57db8cb2ee6dfc65bc76cd39d56d...e27dddc79c92bc6e4562f268fffa5ed752639abd) --- updated-dependencies: - dependency-name: JasonEtco/create-an-issue dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: reivilibre Co-authored-by: Mathieu Velten --- .github/workflows/latest_deps.yml | 2 +- .github/workflows/twisted_trunk.yml | 2 +- changelog.d/14731.misc | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/14731.misc diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index e5e4e8da77cc..5ab9a8af3411 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -208,7 +208,7 @@ jobs: steps: - uses: actions/checkout@v3 - - uses: JasonEtco/create-an-issue@3a8ba796516b57db8cb2ee6dfc65bc76cd39d56d # v2.8.2 + - uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index b08222f289be..0a88f0cd7b6e 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -174,7 +174,7 @@ jobs: steps: - uses: actions/checkout@v3 - - uses: JasonEtco/create-an-issue@3a8ba796516b57db8cb2ee6dfc65bc76cd39d56d # v2.8.2 + - uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: diff --git a/changelog.d/14731.misc b/changelog.d/14731.misc new file mode 100644 index 000000000000..511466787f36 --- /dev/null +++ b/changelog.d/14731.misc @@ -0,0 +1 @@ +Bump JasonEtco/create-an-issue from 2.8.2 to 2.9.1. From 4eb2f4e02b83c90519a8a55c562d1fafab7caf0b Mon Sep 17 00:00:00 2001 From: reivilibre Date: Thu, 5 Jan 2023 18:18:00 +0000 Subject: [PATCH 59/82] Fix broken links in the Synapse documentation. 
(#14744) * Fix stale external links * Fix some internal links * Fix URLs without trailing / where needed * Fix more links * Newsfile Signed-off-by: Olivier Wilkinson (reivilibre) * Reapply docs/openid.md fix after conflict Signed-off-by: Olivier Wilkinson (reivilibre) --- changelog.d/14744.doc | 1 + docs/admin_api/account_validity.md | 2 +- docs/admin_api/event_reports.md | 2 +- docs/admin_api/media_admin_api.md | 2 +- docs/admin_api/purge_history_api.md | 2 +- docs/admin_api/room_membership.md | 2 +- docs/admin_api/rooms.md | 4 ++-- docs/admin_api/statistics.md | 2 +- docs/admin_api/user_admin_api.md | 2 +- docs/development/contributing_guide.md | 9 ++++----- docs/modules/writing_a_module.md | 6 +++--- docs/openid.md | 2 +- docs/postgres.md | 2 +- docs/setup/installation.md | 2 +- docs/sso_mapping_providers.md | 2 +- docs/upgrade.md | 6 +++--- docs/usage/administration/admin_api/README.md | 6 +++--- docs/usage/administration/admin_api/federation.md | 4 ++-- .../administration/admin_api/registration_tokens.md | 2 +- docs/usage/administration/admin_faq.md | 4 ++-- .../monitoring/reporting_homeserver_usage_statistics.md | 2 +- docs/usage/administration/request_log.md | 2 +- docs/usage/configuration/config_documentation.md | 4 ++-- docs/workers.md | 4 ++-- 24 files changed, 38 insertions(+), 38 deletions(-) create mode 100644 changelog.d/14744.doc diff --git a/changelog.d/14744.doc b/changelog.d/14744.doc new file mode 100644 index 000000000000..738ab1a46db4 --- /dev/null +++ b/changelog.d/14744.doc @@ -0,0 +1 @@ +Fix broken links in the Synapse documentation. \ No newline at end of file diff --git a/docs/admin_api/account_validity.md b/docs/admin_api/account_validity.md index d878bf7451e3..87d8f7150e8c 100644 --- a/docs/admin_api/account_validity.md +++ b/docs/admin_api/account_validity.md @@ -5,7 +5,7 @@ use it, you must enable the account validity feature (under `account_validity`) in Synapse's configuration. To use it, you will need to authenticate by providing an `access_token` -for a server admin: see [Admin API](../usage/administration/admin_api). +for a server admin: see [Admin API](../usage/administration/admin_api/). ## Renew account diff --git a/docs/admin_api/event_reports.md b/docs/admin_api/event_reports.md index be6f0961bfcb..beec8bb7efe9 100644 --- a/docs/admin_api/event_reports.md +++ b/docs/admin_api/event_reports.md @@ -3,7 +3,7 @@ This API returns information about reported events. To use it, you will need to authenticate by providing an `access_token` -for a server admin: see [Admin API](../usage/administration/admin_api). +for a server admin: see [Admin API](../usage/administration/admin_api/). The api is: ``` diff --git a/docs/admin_api/media_admin_api.md b/docs/admin_api/media_admin_api.md index d57c5aedae4c..7f8c8e22c1a0 100644 --- a/docs/admin_api/media_admin_api.md +++ b/docs/admin_api/media_admin_api.md @@ -6,7 +6,7 @@ Details about the format of the `media_id` and storage of the media in the file are documented under [media repository](../media_repository.md). To use it, you will need to authenticate by providing an `access_token` -for a server admin: see [Admin API](../usage/administration/admin_api). +for a server admin: see [Admin API](../usage/administration/admin_api/). 
## List all media in a room diff --git a/docs/admin_api/purge_history_api.md b/docs/admin_api/purge_history_api.md index 2527e2758ba3..ba6d08aa4d4d 100644 --- a/docs/admin_api/purge_history_api.md +++ b/docs/admin_api/purge_history_api.md @@ -11,7 +11,7 @@ Note that Synapse requires at least one message in each room, so it will never delete the last message in a room. To use it, you will need to authenticate by providing an `access_token` -for a server admin: see [Admin API](../usage/administration/admin_api). +for a server admin: see [Admin API](../usage/administration/admin_api/). The API is: diff --git a/docs/admin_api/room_membership.md b/docs/admin_api/room_membership.md index 310d6ae628fa..94bc95a8d5c0 100644 --- a/docs/admin_api/room_membership.md +++ b/docs/admin_api/room_membership.md @@ -6,7 +6,7 @@ local users. The server administrator must be in the room and have permission to invite users. To use it, you will need to authenticate by providing an `access_token` -for a server admin: see [Admin API](../usage/administration/admin_api). +for a server admin: see [Admin API](../usage/administration/admin_api/). ## Parameters diff --git a/docs/admin_api/rooms.md b/docs/admin_api/rooms.md index 8f727b363eb8..66b29e82dcaa 100644 --- a/docs/admin_api/rooms.md +++ b/docs/admin_api/rooms.md @@ -5,7 +5,7 @@ server. There are various parameters available that allow for filtering and sorting the returned list. This API supports pagination. To use it, you will need to authenticate by providing an `access_token` -for a server admin: see [Admin API](../usage/administration/admin_api). +for a server admin: see [Admin API](../usage/administration/admin_api/). **Parameters** @@ -400,7 +400,7 @@ sent to a room in a given timeframe. There are various parameters available that allow for filtering and ordering the returned list. This API supports pagination. To use it, you will need to authenticate by providing an `access_token` -for a server admin: see [Admin API](../usage/administration/admin_api). +for a server admin: see [Admin API](../usage/administration/admin_api/). This endpoint mirrors the [Matrix Spec defined Messages API](https://spec.matrix.org/v1.1/client-server-api/#get_matrixclientv3roomsroomidmessages). diff --git a/docs/admin_api/statistics.md b/docs/admin_api/statistics.md index a26c76f9f317..03b3621e5595 100644 --- a/docs/admin_api/statistics.md +++ b/docs/admin_api/statistics.md @@ -4,7 +4,7 @@ Returns information about all local media usage of users. Gives the possibility to filter them by time and user. To use it, you will need to authenticate by providing an `access_token` -for a server admin: see [Admin API](../usage/administration/admin_api). +for a server admin: see [Admin API](../usage/administration/admin_api/). The API is: diff --git a/docs/admin_api/user_admin_api.md b/docs/admin_api/user_admin_api.md index 880bef41947a..86c29ab3800c 100644 --- a/docs/admin_api/user_admin_api.md +++ b/docs/admin_api/user_admin_api.md @@ -1,7 +1,7 @@ # User Admin API To use it, you will need to authenticate by providing an `access_token` -for a server admin: see [Admin API](../usage/administration/admin_api). +for a server admin: see [Admin API](../usage/administration/admin_api/). 
## Query User Account diff --git a/docs/development/contributing_guide.md b/docs/development/contributing_guide.md index d07790f184d9..43cb3e201d30 100644 --- a/docs/development/contributing_guide.md +++ b/docs/development/contributing_guide.md @@ -106,8 +106,8 @@ regarding Synapse's Admin API, which is used mostly by sysadmins and external service developers. Synapse's code style is documented [here](../code_style.md). Please follow -it, including the conventions for the [sample configuration -file](../code_style.md#configuration-file-format). +it, including the conventions for [configuration +options and documentation](../code_style.md#configuration-code-and-documentation-format). We welcome improvements and additions to our documentation itself! When writing new pages, please @@ -126,7 +126,7 @@ changes to the Rust code. # 8. Test, test, test! - + While you're developing and before submitting a patch, you'll want to test your code. @@ -424,8 +424,7 @@ chicken-and-egg problem. There are two options for solving this: 1. Open the PR without a changelog file, see what number you got, and *then* - add the changelog file to your branch (see [Updating your pull - request](#updating-your-pull-request)), or: + add the changelog file to your branch, or: 1. Look at the [list of all issues/PRs](https://github.com/matrix-org/synapse/issues?q=), add one to the diff --git a/docs/modules/writing_a_module.md b/docs/modules/writing_a_module.md index e6303b739e1a..30de69a53387 100644 --- a/docs/modules/writing_a_module.md +++ b/docs/modules/writing_a_module.md @@ -59,8 +59,8 @@ namespace (such as anything under `/_matrix/client` for example). It is strongly recommended that modules register their web resources under the `/_synapse/client` namespace. -The provided resource is a Python class that implements Twisted's [IResource](https://twistedmatrix.com/documents/current/api/twisted.web.resource.IResource.html) -interface (such as [Resource](https://twistedmatrix.com/documents/current/api/twisted.web.resource.Resource.html)). +The provided resource is a Python class that implements Twisted's [IResource](https://docs.twistedmatrix.com/en/stable/api/twisted.web.resource.IResource.html) +interface (such as [Resource](https://docs.twistedmatrix.com/en/stable/api/twisted.web.resource.Resource.html)). Only one resource can be registered for a given path. If several modules attempt to register a resource for the same path, the module that appears first in Synapse's @@ -82,4 +82,4 @@ the callback name as the argument name and the function as its value. A `register_[...]_callbacks` method exists for each category. Callbacks for each category can be found on their respective page of the -[Synapse documentation website](https://matrix-org.github.io/synapse). \ No newline at end of file +[Synapse documentation website](https://matrix-org.github.io/synapse). diff --git a/docs/openid.md b/docs/openid.md index 45aa24dd24ae..6ee8c83ec0a2 100644 --- a/docs/openid.md +++ b/docs/openid.md @@ -463,7 +463,7 @@ oidc_providers: Keycloak supports OIDC Back-Channel Logout, which sends logout notification to Synapse, so that Synapse users get logged out when they log out from Keycloak. This can be optionally enabled by setting `backchannel_logout_enabled` to `true` in the Synapse configuration, and by setting the "Backchannel Logout URL" in Keycloak. -Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to install Keycloak and set up a realm. 
+Follow the [Getting Started Guide](https://www.keycloak.org/guides) to install Keycloak and set up a realm. 1. Click `Clients` in the sidebar and click `Create` diff --git a/docs/postgres.md b/docs/postgres.md index 46b4603fe573..fba4430f33a2 100644 --- a/docs/postgres.md +++ b/docs/postgres.md @@ -16,7 +16,7 @@ connect to a postgres database. - For other pre-built packages, please consult the documentation from the relevant package. - If you installed synapse [in a - virtualenv](setup/installation.md#installing-from-source), you can install + virtualenv](setup/installation.md#installing-as-a-python-module-from-pypi), you can install the library with: ~/synapse/env/bin/pip install "matrix-synapse[postgres]" diff --git a/docs/setup/installation.md b/docs/setup/installation.md index a762ad55dfad..d123e339edc9 100644 --- a/docs/setup/installation.md +++ b/docs/setup/installation.md @@ -136,7 +136,7 @@ Unofficial package are built for SLES 15 in the openSUSE:Backports:SLE-15 reposi #### ArchLinux The quickest way to get up and running with ArchLinux is probably with the community package -, which should pull in most of +, which should pull in most of the necessary dependencies. pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 ): diff --git a/docs/sso_mapping_providers.md b/docs/sso_mapping_providers.md index 9f5e5fbbe152..a5d46596193c 100644 --- a/docs/sso_mapping_providers.md +++ b/docs/sso_mapping_providers.md @@ -120,7 +120,7 @@ specified in the config. It is located at ## SAML Mapping Providers The SAML mapping provider can be customized by editing the -[`saml2_config.user_mapping_provider.module`](docs/usage/configuration/config_documentation.md#saml2_config) +[`saml2_config.user_mapping_provider.module`](usage/configuration/config_documentation.md#saml2_config) config option. `saml2_config.user_mapping_provider.config` allows you to provide custom diff --git a/docs/upgrade.md b/docs/upgrade.md index dcae12ec9495..f853dfdb8e62 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -889,8 +889,8 @@ Any scripts still using the above APIs should be converted to use the ## User-interactive authentication fallback templates can now display errors This may affect you if you make use of custom HTML templates for the -[reCAPTCHA](../synapse/res/templates/recaptcha.html) or -[terms](../synapse/res/templates/terms.html) fallback pages. +[reCAPTCHA (`synapse/res/templates/recaptcha.html`)](https://github.com/matrix-org/synapse/tree/develop/synapse/res/templates/recaptcha.html) or +[terms (`synapse/res/templates/terms.html`)](https://github.com/matrix-org/synapse/tree/develop/synapse/res/templates/terms.html) fallback pages. The template is now provided an `error` variable if the authentication process failed. See the default templates linked above for an example. @@ -1488,7 +1488,7 @@ New templates (`sso_auth_confirm.html`, `sso_auth_success.html`, and is configured to use SSO and a custom `sso_redirect_confirm_template_dir` configuration then these templates will need to be copied from -[synapse/res/templates](synapse/res/templates) into that directory. +[`synapse/res/templates`](https://github.com/matrix-org/synapse/tree/develop/synapse/res/templates) into that directory. 
## Synapse SSO Plugins Method Deprecation diff --git a/docs/usage/administration/admin_api/README.md b/docs/usage/administration/admin_api/README.md index c00de2dd447d..7c85bf751b04 100644 --- a/docs/usage/administration/admin_api/README.md +++ b/docs/usage/administration/admin_api/README.md @@ -7,7 +7,7 @@ server admin. (Note that a server admin is distinct from a room admin.) An existing user can be marked as a server admin by updating the database directly. -Check your [database settings](config_documentation.md#database) in the configuration file, connect to the correct database using either `psql [database name]` (if using PostgreSQL) or `sqlite3 path/to/your/database.db` (if using SQLite) and elevate the user `@foo:bar.com` to administrator. +Check your [database settings](../../configuration/config_documentation.md#database) in the configuration file, connect to the correct database using either `psql [database name]` (if using PostgreSQL) or `sqlite3 path/to/your/database.db` (if using SQLite) and elevate the user `@foo:bar.com` to administrator. ```sql UPDATE users SET admin = 1 WHERE name = '@foo:bar.com'; ``` @@ -32,10 +32,10 @@ curl --header "Authorization: Bearer " with key ed25519:a_EqML: Unable to verify signature for -This is normally caused by a misconfiguration in your reverse-proxy. See [the reverse proxy docs](docs/reverse_proxy.md) and double-check that your settings are correct. +This is normally caused by a misconfiguration in your reverse-proxy. See [the reverse proxy docs](../../reverse_proxy.md) and double-check that your settings are correct. Help!! Synapse is slow and eats all my RAM/CPU! diff --git a/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md b/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md index 4e53f9883a5d..3a7ed7c80611 100644 --- a/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md +++ b/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md @@ -78,4 +78,4 @@ If you would like to set up your own statistics collection server and send metri consider using one of the following known implementations: * [Matrix.org's Panopticon](https://github.com/matrix-org/panopticon) -* [Famedly's Barad-dûr](https://gitlab.com/famedly/company/devops/services/barad-dur) +* [Famedly's Barad-dûr](https://gitlab.com/famedly/infra/services/barad-dur) diff --git a/docs/usage/administration/request_log.md b/docs/usage/administration/request_log.md index 82f5ac7b96a5..7dd9969d8668 100644 --- a/docs/usage/administration/request_log.md +++ b/docs/usage/administration/request_log.md @@ -1,6 +1,6 @@ # Request log format -HTTP request logs are written by synapse (see [`site.py`](../synapse/http/site.py) for details). +HTTP request logs are written by synapse (see [`synapse/http/site.py`](https://github.com/matrix-org/synapse/tree/develop/synapse/http/site.py) for details). See the following for how to decode the dense data available from the default logging configuration. diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index ec8403c7e99b..6cfcce817fde 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -1321,7 +1321,7 @@ Associated sub-options: connection pool. 
For a reference to valid arguments, see: * for [sqlite](https://docs.python.org/3/library/sqlite3.html#sqlite3.connect) * for [postgres](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS) - * for [the connection pool](https://twistedmatrix.com/documents/current/api/twisted.enterprise.adbapi.ConnectionPool.html#__init__) + * for [the connection pool](https://docs.twistedmatrix.com/en/stable/api/twisted.enterprise.adbapi.ConnectionPool.html#__init__) For more information on using Synapse with Postgres, see [here](../../postgres.md). @@ -3981,7 +3981,7 @@ worker_listeners: ### `worker_daemonize` Specifies whether the worker should be started as a daemon process. -If Synapse is being managed by [systemd](../../systemd-with-workers/README.md), this option +If Synapse is being managed by [systemd](../../systemd-with-workers/), this option must be omitted or set to `false`. Defaults to `false`. diff --git a/docs/workers.md b/docs/workers.md index 59a6487e0d6a..c21638564e5d 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -157,7 +157,7 @@ Finally, you need to start your worker processes. This can be done with either `synctl` or your distribution's preferred service manager such as `systemd`. We recommend the use of `systemd` where available: for information on setting up `systemd` to start synapse workers, see -[Systemd with Workers](systemd-with-workers). To use `synctl`, see +[Systemd with Workers](systemd-with-workers/). To use `synctl`, see [Using synctl with Workers](synctl_workers.md). @@ -386,7 +386,7 @@ so. It will then pass those events over HTTP replication to any configured event persisters (or the main process if none are configured). Note that `event_creator`s and `event_persister`s are implemented using the same -[`synapse.app.generic_worker`](#synapse.app.generic_worker). +[`synapse.app.generic_worker`](#synapseappgeneric_worker). An example [`stream_writers`](usage/configuration/config_documentation.md#stream_writers) configuration with multiple writers: From 1984fc16f129a205902891c6e038693ba71f0a83 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Thu, 5 Jan 2023 18:21:45 +0000 Subject: [PATCH 60/82] Use `htmltest` to check links in the Synapse documentation. 
(#14743) * Add htmltest to check links in the documentation * Newsfile Signed-off-by: Olivier Wilkinson (reivilibre) Signed-off-by: Olivier Wilkinson (reivilibre) --- .github/workflows/docs-pr.yaml | 26 ++++++++++++++++++++++++++ changelog.d/14743.misc | 1 + 2 files changed, 27 insertions(+) create mode 100644 changelog.d/14743.misc diff --git a/.github/workflows/docs-pr.yaml b/.github/workflows/docs-pr.yaml index cde6cf511e75..d41f6c449055 100644 --- a/.github/workflows/docs-pr.yaml +++ b/.github/workflows/docs-pr.yaml @@ -4,6 +4,8 @@ on: pull_request: paths: - docs/** + - book.toml + - .github/workflows/docs-pr.yaml jobs: pages: @@ -32,3 +34,27 @@ jobs: path: book # We'll only use this in a workflow_run, then we're done with it retention-days: 1 + + link-check: + name: Check links in documentation + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Setup mdbook + uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0 + with: + mdbook-version: '0.4.17' + + - name: Setup htmltest + run: | + wget https://github.com/wjdp/htmltest/releases/download/v0.17.0/htmltest_0.17.0_linux_amd64.tar.gz + echo '775c597ee74899d6002cd2d93076f897f4ba68686bceabe2e5d72e84c57bc0fb htmltest_0.17.0_linux_amd64.tar.gz' | sha256sum -c + tar zxf htmltest_0.17.0_linux_amd64.tar.gz + + - name: Test links with htmltest + # Build the book with `./` as the site URL (to make checks on 404.html possible) + # Then run htmltest (without checking external links since that involves the network and is slow). + run: | + MDBOOK_OUTPUT__HTML__SITE_URL="./" mdbook build + ./htmltest book --skip-external diff --git a/changelog.d/14743.misc b/changelog.d/14743.misc new file mode 100644 index 000000000000..fe949c5bdbcd --- /dev/null +++ b/changelog.d/14743.misc @@ -0,0 +1 @@ +Use `htmltest` to check links in the Synapse documentation. \ No newline at end of file From 331797586e53203a63a05cefecbbb1c8deb25d63 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Jan 2023 10:07:24 +0000 Subject: [PATCH 61/82] Bump types-pillow from 9.3.0.4 to 9.4.0.0 (#14792) * Bump types-pillow from 9.3.0.4 to 9.4.0.0 Bumps [types-pillow](https://github.com/python/typeshed) from 9.3.0.4 to 9.4.0.0. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-pillow dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/14792.misc | 1 + poetry.lock | 13 ++++++++++--- 2 files changed, 11 insertions(+), 3 deletions(-) create mode 100644 changelog.d/14792.misc diff --git a/changelog.d/14792.misc b/changelog.d/14792.misc new file mode 100644 index 000000000000..85e9537c64ad --- /dev/null +++ b/changelog.d/14792.misc @@ -0,0 +1 @@ +Bump types-pillow from 9.3.0.4 to 9.4.0.0. 
diff --git a/poetry.lock b/poetry.lock index 0acffe0f527a..7923d9464128 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1379,7 +1379,7 @@ python-versions = "*" [[package]] name = "types-pillow" -version = "9.3.0.4" +version = "9.4.0.0" description = "Typing stubs for Pillow" category = "dev" optional = false @@ -2198,6 +2198,13 @@ phonenumbers = [ {file = "phonenumbers-8.13.2.tar.gz", hash = "sha256:0179f688d48c0e7e161eb7b9d86d587940af1f5174f97c1fdfd893c599c0d94a"}, ] pillow = [ + {file = "Pillow-9.4.0-1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b4b4e9dda4f4e4c4e6896f93e84a8f0bcca3b059de9ddf67dac3c334b1195e1"}, + {file = "Pillow-9.4.0-1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fb5c1ad6bad98c57482236a21bf985ab0ef42bd51f7ad4e4538e89a997624e12"}, + {file = "Pillow-9.4.0-1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:f0caf4a5dcf610d96c3bd32932bfac8aee61c96e60481c2a0ea58da435e25acd"}, + {file = "Pillow-9.4.0-1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:3f4cc516e0b264c8d4ccd6b6cbc69a07c6d582d8337df79be1e15a5056b258c9"}, + {file = "Pillow-9.4.0-1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b8c2f6eb0df979ee99433d8b3f6d193d9590f735cf12274c108bd954e30ca858"}, + {file = "Pillow-9.4.0-1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b70756ec9417c34e097f987b4d8c510975216ad26ba6e57ccb53bc758f490dab"}, + {file = "Pillow-9.4.0-1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:43521ce2c4b865d385e78579a082b6ad1166ebed2b1a2293c3be1d68dd7ca3b9"}, {file = "Pillow-9.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157"}, {file = "Pillow-9.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47"}, {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343"}, @@ -2756,8 +2763,8 @@ types-opentracing = [ {file = "types_opentracing-2.4.10-py3-none-any.whl", hash = "sha256:66d9cfbbdc4a6f8ca8189a15ad26f0fe41cee84c07057759c5d194e2505b84c2"}, ] types-pillow = [ - {file = "types-Pillow-9.3.0.4.tar.gz", hash = "sha256:c18d466dc18550d96b8b4a279ff94f0cbad696825b5ad55466604f1daf5709de"}, - {file = "types_Pillow-9.3.0.4-py3-none-any.whl", hash = "sha256:98b8484ff343676f6f7051682a6cfd26896e993e86b3ce9badfa0ec8750f5405"}, + {file = "types-Pillow-9.4.0.0.tar.gz", hash = "sha256:ef8a823638ceb765a144a98a2f816b8912da0337c5c2556d33774f1434f9918c"}, + {file = "types_Pillow-9.4.0.0-py3-none-any.whl", hash = "sha256:246f0dc52d575ef64e01f06f41be37a492b542ee3180638a7b874a6dd4d48c01"}, ] types-psycopg2 = [ {file = "types-psycopg2-2.9.21.2.tar.gz", hash = "sha256:bff045579642ce00b4a3c8f2e401b7f96dfaa34939f10be64b0dd3b53feca57d"}, From 0ae8feee18e7f0e68f349277ddae0043f719d8ce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Jan 2023 10:07:52 +0000 Subject: [PATCH 62/82] Bump pyopenssl from 22.1.0 to 23.0.0 (#14793) * Bump pyopenssl from 22.1.0 to 23.0.0 Bumps [pyopenssl](https://github.com/pyca/pyopenssl) from 22.1.0 to 23.0.0. - [Release notes](https://github.com/pyca/pyopenssl/releases) - [Changelog](https://github.com/pyca/pyopenssl/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/pyopenssl/compare/22.1.0...23.0.0) --- updated-dependencies: - dependency-name: pyopenssl dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot]

* Changelog

Signed-off-by: dependabot[bot]

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: GitHub Actions
---
 changelog.d/14793.misc | 1 +
 poetry.lock | 8 ++++----
 2 files changed, 5 insertions(+), 4 deletions(-)
 create mode 100644 changelog.d/14793.misc

diff --git a/changelog.d/14793.misc b/changelog.d/14793.misc
new file mode 100644
index 000000000000..86848da6497d
--- /dev/null
+++ b/changelog.d/14793.misc
@@ -0,0 +1 @@
+Bump pyopenssl from 22.1.0 to 23.0.0.
diff --git a/poetry.lock b/poetry.lock
index 7923d9464128..b69051686af5 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -829,14 +829,14 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]

[[package]]
name = "pyopenssl"
-version = "22.1.0"
+version = "23.0.0"
description = "Python wrapper module around the OpenSSL library"
category = "main"
optional = false
python-versions = ">=3.6"

[package.dependencies]
-cryptography = ">=38.0.0,<39"
+cryptography = ">=38.0.0,<40"

[package.extras]
docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"]
@@ -2392,8 +2392,8 @@ pynacl = [
 {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"},
]
pyopenssl = [
- {file = "pyOpenSSL-22.1.0-py3-none-any.whl", hash = "sha256:b28437c9773bb6c6958628cf9c3bebe585de661dba6f63df17111966363dd15e"},
- {file = "pyOpenSSL-22.1.0.tar.gz", hash = "sha256:7a83b7b272dd595222d672f5ce29aa030f1fb837630ef229f62e72e395ce8968"},
+ {file = "pyOpenSSL-23.0.0-py3-none-any.whl", hash = "sha256:df5fc28af899e74e19fccb5510df423581047e10ab6f1f4ba1763ff5fde844c0"},
+ {file = "pyOpenSSL-23.0.0.tar.gz", hash = "sha256:c1cc5f86bcacefc84dada7d31175cae1b1518d5f60d3d0bb595a67822a868a6f"},
]
pyrsistent = [
 {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"},

From 51c8ebec33501449fee1ffb6fb2b1f3ad2d0612e Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 9 Jan 2023 10:08:03 +0000
Subject: [PATCH 63/82] Bump types-setuptools from 65.6.0.2 to 65.6.0.3 (#14794)

* Bump types-setuptools from 65.6.0.2 to 65.6.0.3

Bumps [types-setuptools](https://github.com/python/typeshed) from 65.6.0.2 to 65.6.0.3.
- [Release notes](https://github.com/python/typeshed/releases)
- [Commits](https://github.com/python/typeshed/commits)

---
updated-dependencies:
- dependency-name: types-setuptools
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot]

* Changelog

Signed-off-by: dependabot[bot]

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: GitHub Actions
---
 changelog.d/14794.misc | 1 +
 poetry.lock | 21 ++++++++++++++++++---
 2 files changed, 19 insertions(+), 3 deletions(-)
 create mode 100644 changelog.d/14794.misc

diff --git a/changelog.d/14794.misc b/changelog.d/14794.misc
new file mode 100644
index 000000000000..8e0887ec76aa
--- /dev/null
+++ b/changelog.d/14794.misc
@@ -0,0 +1 @@
+Bump types-setuptools from 65.6.0.2 to 65.6.0.3.
diff --git a/poetry.lock b/poetry.lock index b69051686af5..03dbf11d26ad 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1345,6 +1345,14 @@ python-versions = "*" types-enum34 = "*" types-ipaddress = "*" +[[package]] +name = "types-docutils" +version = "0.19.1.1" +description = "Typing stubs for docutils" +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "types-enum34" version = "1.1.8" @@ -1425,12 +1433,15 @@ types-urllib3 = "<1.27" [[package]] name = "types-setuptools" -version = "65.6.0.2" +version = "65.6.0.3" description = "Typing stubs for setuptools" category = "dev" optional = false python-versions = "*" +[package.dependencies] +types-docutils = "*" + [[package]] name = "types-urllib3" version = "1.26.10" @@ -2746,6 +2757,10 @@ types-cryptography = [ {file = "types-cryptography-3.3.15.tar.gz", hash = "sha256:a7983a75a7b88a18f88832008f0ef140b8d1097888ec1a0824ec8fb7e105273b"}, {file = "types_cryptography-3.3.15-py3-none-any.whl", hash = "sha256:d9b0dd5465d7898d400850e7f35e5518aa93a7e23d3e11757cd81b4777089046"}, ] +types-docutils = [ + {file = "types-docutils-0.19.1.1.tar.gz", hash = "sha256:be0a51ba1c7dd215d9d2df66d6845e63c1009b4bbf4c5beb87a0d9745cdba962"}, + {file = "types_docutils-0.19.1.1-py3-none-any.whl", hash = "sha256:a024cada35f0c13cc45eb0b68a102719018a634013690b7fef723bcbfadbd1f1"}, +] types-enum34 = [ {file = "types-enum34-1.1.8.tar.gz", hash = "sha256:6f9c769641d06d73a55e11c14d38ac76fcd37eb545ce79cebb6eec9d50a64110"}, {file = "types_enum34-1.1.8-py3-none-any.whl", hash = "sha256:05058c7a495f6bfaaca0be4aeac3cce5cdd80a2bad2aab01fd49a20bf4a0209d"}, @@ -2783,8 +2798,8 @@ types-requests = [ {file = "types_requests-2.28.11.7-py3-none-any.whl", hash = "sha256:b6a2fca8109f4fdba33052f11ed86102bddb2338519e1827387137fefc66a98b"}, ] types-setuptools = [ - {file = "types-setuptools-65.6.0.2.tar.gz", hash = "sha256:ad60ccf01d626de9762224448f36c13e0660e863afd6dc11d979b3739a6c7d24"}, - {file = "types_setuptools-65.6.0.2-py3-none-any.whl", hash = "sha256:2c2b4f756f79778074ce2d21f745aa737b12160d9f8dfa274f47a7287c7a2fee"}, + {file = "types-setuptools-65.6.0.3.tar.gz", hash = "sha256:7ddd7415282fa97ab18e490206067c0cdb126b103743e72ee86783d7af6481c5"}, + {file = "types_setuptools-65.6.0.3-py3-none-any.whl", hash = "sha256:ad729fc3a9a3946f73915eaab16ce56b30ed5ae998479253d809d76b3889ee09"}, ] types-urllib3 = [ {file = "types-urllib3-1.26.10.tar.gz", hash = "sha256:a26898f530e6c3f43f25b907f2b884486868ffd56a9faa94cbf9b3eb6e165d6a"}, From 32c2ff8eabf7f83acedfa1a4516d6d2610bcdeac Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Jan 2023 10:08:43 +0000 Subject: [PATCH 64/82] Bump ruff from 0.0.206 to 0.0.215 (#14796) * Bump ruff from 0.0.206 to 0.0.215 Bumps [ruff](https://github.com/charliermarsh/ruff) from 0.0.206 to 0.0.215. - [Release notes](https://github.com/charliermarsh/ruff/releases) - [Changelog](https://github.com/charliermarsh/ruff/blob/main/BREAKING_CHANGES.md) - [Commits](https://github.com/charliermarsh/ruff/compare/v0.0.206...v0.0.215) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot]

* Changelog

Signed-off-by: dependabot[bot]

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: GitHub Actions
---
 changelog.d/14796.misc | 1 +
 poetry.lock | 36 ++++++++++++++++++------------------
 pyproject.toml | 2 +-
 3 files changed, 20 insertions(+), 19 deletions(-)
 create mode 100644 changelog.d/14796.misc

diff --git a/changelog.d/14796.misc b/changelog.d/14796.misc
new file mode 100644
index 000000000000..5dc3865fdff9
--- /dev/null
+++ b/changelog.d/14796.misc
@@ -0,0 +1 @@
+Bump ruff from 0.0.206 to 0.0.215.
diff --git a/poetry.lock b/poetry.lock
index 03dbf11d26ad..4f39c62a4ab8 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -982,7 +982,7 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]

[[package]]
name = "ruff"
-version = "0.0.206"
+version = "0.0.215"
description = "An extremely fast Python linter, written in Rust."
category = "dev"
optional = false
@@ -1590,7 +1590,7 @@ user-search = ["pyicu"]
[metadata]
lock-version = "1.1"
python-versions = "^3.7.1"
-content-hash = "0b51556b00496ee5d214d343893c94b3ab48932d154a882f23be4f0d3b2af475"
+content-hash = "53867af07a507c3addd614c828dfb26175f6604398848e84c0ea65980f8a59a2"

[metadata.files]
attrs = [
@@ -2508,22 +2508,22 @@ rich = [
 {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"},
]
ruff = [
- {file = "ruff-0.0.206-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:47fa81b999d960464e37135c5863cb0be97a05ba6ad8c5baa8163e5a0d7d2e20"},
- {file = "ruff-0.0.206-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:457829500cec96d307b6dd537e983e148cf3788454ccda83aeef459dcdeccce3"},
- {file = "ruff-0.0.206-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b186fdb99b10a8d5ac112e8c10000eff61c3cc248ce9b87f80abf6e32408746d"},
- {file = "ruff-0.0.206-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b978f206135cf02d89a51d29b2134eecfb8c05e2533dc75c6554b29a5e7e0844"},
- {file = "ruff-0.0.206-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:632233444d597e02982dfdd1d34eab03943e9c6e042f0dfafab40a3ceb18a6fd"},
- {file = "ruff-0.0.206-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b70baa8904ff9e11859082eb691d7e087d8637f1bb569512f76a8b2cfb8b3eb6"},
- {file = "ruff-0.0.206-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d64f8452e71fadf9995dee7517a55f251c5a3c87879e08d231af5ef5b7abf076"},
- {file = "ruff-0.0.206-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:66e198b71bd6f39b8adac5d2dcf47d8a3be8860d71680f36c7b7caba4e823ed0"},
- {file = "ruff-0.0.206-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821b5cee2f5ebf27950580a7a09c1baeedd1659e0c85742ef085356f2ffe6035"},
- {file = "ruff-0.0.206-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85ab420465395f8e6a5057f8acd7990297fa23a7e20f667ff4d73479f8fd5ca5"},
- {file = "ruff-0.0.206-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7c4cd2842cecb52464cb3d8b5386beeca029e6b7940d2720d5adaf9da94323b3"},
- {file = "ruff-0.0.206-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9064d59f83d1ddd4b45f1bc565846cf067bf4d0f3e8db5a73f14cc38a2403c49"},
- {file = "ruff-0.0.206-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cfd8977c264b3975e6cd893b62a20ee2cab6a1893cb0eda8418e0ef062a284c3"},
- {file = "ruff-0.0.206-py3-none-win32.whl", hash = 
"sha256:c333f4062fd8c86a903f0e11780b529d786981b70de2d65102ee1765949592cd"}, - {file = "ruff-0.0.206-py3-none-win_amd64.whl", hash = "sha256:6e758ff7c9981b91113d6a0f44183ab5dbe33ee5a5ca2ec7db5a22f03f9568eb"}, - {file = "ruff-0.0.206.tar.gz", hash = "sha256:b79b6ffac6ca713c5cad6e661495e77e1821d87c3fedd02139d13a857a6de92a"}, + {file = "ruff-0.0.215-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:a4963613bca6ffc448deca1ce3a3fc69af216d6234e5d7f256935d7407088724"}, + {file = "ruff-0.0.215-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:7bcd7b07a88c6530bb4e80850d6cf261081b9d4147eb0ea91fbb85a332ba4fe6"}, + {file = "ruff-0.0.215-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf3fcbf717a1e0c480b3d1fe9fd823043af463f067ec896746dab2123c4dcf10"}, + {file = "ruff-0.0.215-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8199acc4a20d2b3761c4489171f45f37654f2d5ce096361221ea392f078b4be0"}, + {file = "ruff-0.0.215-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f68624344209d07000aba115eeac551f362e278970112f0b69838c70f77f7df"}, + {file = "ruff-0.0.215-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f3c846df8a83445c394e6be58b8e784ec8fc82d67de94f137026c43e6037958b"}, + {file = "ruff-0.0.215-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65b13019821af35a3225a64f2c93877c1e8059b92bb13fce32281ceefeecd199"}, + {file = "ruff-0.0.215-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a02cb67a7171418c5a90ad0d8f983b5fd29b321c9861e0164d126cda4869c61"}, + {file = "ruff-0.0.215-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07d69e654d977842c327f26487ef9b7dba39204b113619d33b4139bd3fdd101c"}, + {file = "ruff-0.0.215-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ed2a0e13c822f8f0c40e6fe6172ff9c88add55a1dac9e0c05315618f82375648"}, + {file = "ruff-0.0.215-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1fd4b80bf34e20d18b01bf6d981973975184a85ed39f64934e11d00e2aba882f"}, + {file = "ruff-0.0.215-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5360b476b8720fa76d9dd6ee980c563b930a08524c91c99edddb25364ef656d7"}, + {file = "ruff-0.0.215-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:352534c0c2ffd491b331fe5982b1d3e88a6c2083a3c127466d4eed63918f6ea8"}, + {file = "ruff-0.0.215-py3-none-win32.whl", hash = "sha256:af3cd199a0c6f5b90b9c84a2b9b74b202901194b8b00d5d3e28a0a814037b73f"}, + {file = "ruff-0.0.215-py3-none-win_amd64.whl", hash = "sha256:aa6fe5b56b17a04c8db7f60fef21a9ff96109d10d9232b436ae2dfdc6cc70b7c"}, + {file = "ruff-0.0.215.tar.gz", hash = "sha256:a82ab1452396d5ca389bdcb182e8f273c5f7db854022d7a303764b6218e9e77e"}, ] secretstorage = [ {file = "SecretStorage-3.3.1-py3-none-any.whl", hash = "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f"}, diff --git a/pyproject.toml b/pyproject.toml index f759459b8041..58063b15e95e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -317,7 +317,7 @@ all = [ # We pin black so that our tests don't start failing on new releases. 
isort = ">=5.10.1" black = ">=22.3.0" -ruff = "0.0.206" +ruff = "0.0.215" # Typechecking mypy = "*" From 1438f939489c879242d739261a32ca75169e7acb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Jan 2023 10:08:50 +0000 Subject: [PATCH 65/82] Bump importlib-metadata from 4.2.0 to 6.0.0 (#14795) * Bump importlib-metadata from 4.2.0 to 6.0.0 Bumps [importlib-metadata](https://github.com/python/importlib_metadata) from 4.2.0 to 6.0.0. - [Release notes](https://github.com/python/importlib_metadata/releases) - [Changelog](https://github.com/python/importlib_metadata/blob/main/CHANGES.rst) - [Commits](https://github.com/python/importlib_metadata/compare/v4.2.0...v6.0.0) --- updated-dependencies: - dependency-name: importlib-metadata dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/14795.misc | 1 + poetry.lock | 13 +++++++------ 2 files changed, 8 insertions(+), 6 deletions(-) create mode 100644 changelog.d/14795.misc diff --git a/changelog.d/14795.misc b/changelog.d/14795.misc new file mode 100644 index 000000000000..5c4bcc891c8e --- /dev/null +++ b/changelog.d/14795.misc @@ -0,0 +1 @@ +Bump importlib-metadata from 4.2.0 to 6.0.0. diff --git a/poetry.lock b/poetry.lock index 4f39c62a4ab8..f148eaf8cf12 100644 --- a/poetry.lock +++ b/poetry.lock @@ -313,19 +313,20 @@ python-versions = "*" [[package]] name = "importlib-metadata" -version = "4.2.0" +version = "6.0.0" description = "Read metadata from Python packages" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "importlib-resources" @@ -1930,8 +1931,8 @@ ijson = [ {file = "ijson-3.1.4.tar.gz", hash = "sha256:1d1003ae3c6115ec9b587d29dd136860a81a23c7626b682e2b5b12c9fd30e4ea"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, - {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, + {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, + {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, ] importlib-resources = [ {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = 
"sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, From b4de0c63dfaa0e41a79f45f24519b59e7f81d9c0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Jan 2023 10:09:13 +0000 Subject: [PATCH 66/82] Bump peaceiris/actions-gh-pages from 3.9.0 to 3.9.1 (#14791) * Bump peaceiris/actions-gh-pages from 3.9.0 to 3.9.1 Bumps [peaceiris/actions-gh-pages](https://github.com/peaceiris/actions-gh-pages) from 3.9.0 to 3.9.1. - [Release notes](https://github.com/peaceiris/actions-gh-pages/releases) - [Changelog](https://github.com/peaceiris/actions-gh-pages/blob/main/CHANGELOG.md) - [Commits](https://github.com/peaceiris/actions-gh-pages/compare/de7ea6f8efb354206b205ef54722213d99067935...64b46b4226a4a12da2239ba3ea5aa73e3163c75b) --- updated-dependencies: - dependency-name: peaceiris/actions-gh-pages dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- .github/workflows/docs.yaml | 2 +- changelog.d/14791.misc | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/14791.misc diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 575412d96543..0b33058337b9 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -58,7 +58,7 @@ jobs: # Deploy to the target directory. - name: Deploy to gh pages - uses: peaceiris/actions-gh-pages@de7ea6f8efb354206b205ef54722213d99067935 # v3.9.0 + uses: peaceiris/actions-gh-pages@64b46b4226a4a12da2239ba3ea5aa73e3163c75b # v3.9.1 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./book diff --git a/changelog.d/14791.misc b/changelog.d/14791.misc new file mode 100644 index 000000000000..6474859f3cdf --- /dev/null +++ b/changelog.d/14791.misc @@ -0,0 +1 @@ +Bump peaceiris/actions-gh-pages from 3.9.0 to 3.9.1. From 5e0888076fea8c70ab84114e1c261dd46330c1d6 Mon Sep 17 00:00:00 2001 From: Jeyachandran Rathnam Date: Mon, 9 Jan 2023 06:12:03 -0500 Subject: [PATCH 67/82] Disable sending confirmation email when 3pid is disabled #14682 (#14725) * Fixes #12277 :Disable sending confirmation email when 3pid is disabled * Fix test_add_email_if_disabled test case to reflect changes to enable_3pid_changes flag * Add changelog file * Rename newsfragment. Co-authored-by: Patrick Cloke --- changelog.d/14725.misc | 1 + synapse/rest/client/account.py | 5 +++++ tests/rest/client/test_account.py | 30 +++++------------------------- 3 files changed, 11 insertions(+), 25 deletions(-) create mode 100644 changelog.d/14725.misc diff --git a/changelog.d/14725.misc b/changelog.d/14725.misc new file mode 100644 index 000000000000..a86c4f8c0527 --- /dev/null +++ b/changelog.d/14725.misc @@ -0,0 +1 @@ +Disable sending confirmation email when 3pid is disabled. 
diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py
index b4b92f0c9920..4373c7366262 100644
--- a/synapse/rest/client/account.py
+++ b/synapse/rest/client/account.py
@@ -338,6 +338,11 @@ def __init__(self, hs: "HomeServer"):
         )
 
     async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
+        if not self.hs.config.registration.enable_3pid_changes:
+            raise SynapseError(
+                400, "3PID changes are disabled on this server", Codes.FORBIDDEN
+            )
+
         if not self.config.email.can_verify_email:
             logger.warning(
                 "Adding emails have been disabled due to lack of an email config"
diff --git a/tests/rest/client/test_account.py b/tests/rest/client/test_account.py
index c1a7fb2f8a10..88f255c9eea9 100644
--- a/tests/rest/client/test_account.py
+++ b/tests/rest/client/test_account.py
@@ -690,41 +690,21 @@ def test_add_email_if_disabled(self) -> None:
         self.hs.config.registration.enable_3pid_changes = False
 
         client_secret = "foobar"
-        session_id = self._request_token(self.email, client_secret)
-
-        self.assertEqual(len(self.email_attempts), 1)
-        link = self._get_link_from_email()
-
-        self._validate_token(link)
-
         channel = self.make_request(
             "POST",
-            b"/_matrix/client/unstable/account/3pid/add",
+            b"/_matrix/client/unstable/account/3pid/email/requestToken",
             {
                 "client_secret": client_secret,
-                "sid": session_id,
-                "auth": {
-                    "type": "m.login.password",
-                    "user": self.user_id,
-                    "password": "test",
-                },
+                "email": "test@example.com",
+                "send_attempt": 1,
             },
-            access_token=self.user_id_tok,
         )
+
         self.assertEqual(
            HTTPStatus.BAD_REQUEST, channel.code, msg=channel.result["body"]
        )
-        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
 
-        # Get user
-        channel = self.make_request(
-            "GET",
-            self.url_3pid,
-            access_token=self.user_id_tok,
-        )
-
-        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.result["body"])
-        self.assertFalse(channel.json_body["threepids"])
+        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
 
     def test_delete_email(self) -> None:
         """Test deleting an email from profile"""

From 7e582a25f8f350df29d7d83ca902bdb522d1bbaf Mon Sep 17 00:00:00 2001
From: Patrick Cloke
Date: Mon, 9 Jan 2023 08:43:50 -0500
Subject: [PATCH 68/82] Improve /sync performance when passing filters with
 empty arrays. (#14786)

This has two related changes:

* It enables fast-path processing for an empty filter (`[]`) which was
  previously only used for wildcard not-filters (`["*"]`).
* It special cases a `/sync` filter with no rooms to skip all room
  processing; previously we would partially skip processing, but would
  generally still calculate intermediate values for each room which were
  then unused.

Future changes might consider further optimizations:

* Skip calculating per-room account data when all rooms are filtered
  (currently this is thrown away).
* Make similar improvements to other endpoints which support filters.
---
 changelog.d/14786.feature  |  1 +
 synapse/api/filtering.py   | 13 ++++++++-----
 synapse/handlers/search.py |  2 +-
 synapse/handlers/sync.py   | 14 +++++++++++---
 4 files changed, 21 insertions(+), 9 deletions(-)
 create mode 100644 changelog.d/14786.feature

diff --git a/changelog.d/14786.feature b/changelog.d/14786.feature
new file mode 100644
index 000000000000..008d61ab039c
--- /dev/null
+++ b/changelog.d/14786.feature
@@ -0,0 +1 @@
+Improve performance of `/sync` when filtering all rooms, message types, or senders.
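To make the new fast path concrete: below is a standalone sketch (an editorial illustration, not part of the patch) mirroring the updated predicate in `synapse/api/filtering.py` and the filter shapes it distinguishes:

```python
from typing import List, Optional


def filters_all_rooms(rooms: Optional[List[str]], not_rooms: List[str]) -> bool:
    # Mirror of the updated check: an explicit empty allow-list ([]) now counts
    # as "filters all rooms", like the pre-existing wildcard not-filter (["*"]).
    return rooms == [] or "*" in not_rooms


# New fast path: a /sync filter such as {"room": {"rooms": []}} matches no
# rooms at all, so the sync handler can skip per-room processing entirely.
assert filters_all_rooms([], [])
# Pre-existing fast path: the wildcard not-filter.
assert filters_all_rooms(["!a:example.org"], ["*"])
# Ordinary filters (or a missing "rooms" key, i.e. None) still need room work.
assert not filters_all_rooms(["!a:example.org"], [])
assert not filters_all_rooms(None, [])
```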
diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index a9888381b451..2b5af264b43d 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -283,6 +283,9 @@ async def filter_room_account_data( await self._room_filter.filter(events) ) + def blocks_all_rooms(self) -> bool: + return self._room_filter.filters_all_rooms() + def blocks_all_presence(self) -> bool: return ( self._presence_filter.filters_all_types() @@ -351,13 +354,13 @@ def __init__(self, hs: "HomeServer", filter_json: JsonDict): self.not_rel_types = filter_json.get("org.matrix.msc3874.not_rel_types", []) def filters_all_types(self) -> bool: - return "*" in self.not_types + return self.types == [] or "*" in self.not_types def filters_all_senders(self) -> bool: - return "*" in self.not_senders + return self.senders == [] or "*" in self.not_senders def filters_all_rooms(self) -> bool: - return "*" in self.not_rooms + return self.rooms == [] or "*" in self.not_rooms def _check(self, event: FilterEvent) -> bool: """Checks whether the filter matches the given event. @@ -450,8 +453,8 @@ def _check_fields(self, field_matchers: Dict[str, Callable[[str], bool]]) -> boo if any(map(match_func, disallowed_values)): return False - # Other the event does not match at least one of the allowed values, - # reject it. + # Otherwise if the event does not match at least one of the allowed + # values, reject it. allowed_values = getattr(self, name) if allowed_values is not None: if not any(map(match_func, allowed_values)): diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 33115ce488ca..40f4635c4e29 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -275,7 +275,7 @@ async def _search( ) room_ids = {r.room_id for r in rooms} - # If doing a subset of all rooms seearch, check if any of the rooms + # If doing a subset of all rooms search, check if any of the rooms # are from an upgraded room, and search their contents as well if search_filter.rooms: historical_room_ids: List[str] = [] diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 4fa480262b03..6942e06c770e 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1403,11 +1403,14 @@ async def generate_sync_result( logger.debug("Fetching room data") - res = await self._generate_sync_entry_for_rooms( + ( + newly_joined_rooms, + newly_joined_or_invited_or_knocked_users, + newly_left_rooms, + newly_left_users, + ) = await self._generate_sync_entry_for_rooms( sync_result_builder, account_data_by_room ) - newly_joined_rooms, newly_joined_or_invited_or_knocked_users, _, _ = res - _, _, newly_left_rooms, newly_left_users = res block_all_presence_data = ( since_token is None and sync_config.filter_collection.blocks_all_presence() @@ -1789,6 +1792,11 @@ async def _generate_sync_entry_for_rooms( - newly_left_rooms - newly_left_users """ + + # If the request doesn't care about rooms then nothing to do! + if sync_result_builder.sync_config.filter_collection.blocks_all_rooms(): + return set(), set(), set(), set() + since_token = sync_result_builder.since_token # 1. Start by fetching all ephemeral events in rooms we've joined (if required). From babeeb4e7a6f5b5c643b837bf724d674805546f6 Mon Sep 17 00:00:00 2001 From: Jeyachandran Rathnam Date: Mon, 9 Jan 2023 09:22:02 -0500 Subject: [PATCH 69/82] Unescape HTML entities in oEmbed titles. (#14781) It doesn't seem valid that HTML entities should appear in the title field of oEmbed responses, but a popular WordPress plug-in seems to do it. 
There should not be harm in unescaping these. --- changelog.d/14781.misc | 1 + synapse/rest/media/v1/oembed.py | 15 +++++++++------ tests/rest/media/v1/test_oembed.py | 10 ++++++++++ 3 files changed, 20 insertions(+), 6 deletions(-) create mode 100644 changelog.d/14781.misc diff --git a/changelog.d/14781.misc b/changelog.d/14781.misc new file mode 100644 index 000000000000..04f565b41020 --- /dev/null +++ b/changelog.d/14781.misc @@ -0,0 +1 @@ +Unescape HTML entities in URL preview titles making use of oEmbed responses. diff --git a/synapse/rest/media/v1/oembed.py b/synapse/rest/media/v1/oembed.py index 827afd868d65..a3738a62507d 100644 --- a/synapse/rest/media/v1/oembed.py +++ b/synapse/rest/media/v1/oembed.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import html import logging import urllib.parse from typing import TYPE_CHECKING, List, Optional @@ -161,7 +162,9 @@ def parse_oembed_response(self, url: str, raw_body: bytes) -> OEmbedResult: title = oembed.get("title") if title and isinstance(title, str): - open_graph_response["og:title"] = title + # A common WordPress plug-in seems to incorrectly escape entities + # in the oEmbed response. + open_graph_response["og:title"] = html.unescape(title) author_name = oembed.get("author_name") if not isinstance(author_name, str): @@ -180,9 +183,9 @@ def parse_oembed_response(self, url: str, raw_body: bytes) -> OEmbedResult: # Process each type separately. oembed_type = oembed.get("type") if oembed_type == "rich": - html = oembed.get("html") - if isinstance(html, str): - calc_description_and_urls(open_graph_response, html) + html_str = oembed.get("html") + if isinstance(html_str, str): + calc_description_and_urls(open_graph_response, html_str) elif oembed_type == "photo": # If this is a photo, use the full image, not the thumbnail. 
@@ -192,8 +195,8 @@ def parse_oembed_response(self, url: str, raw_body: bytes) -> OEmbedResult: elif oembed_type == "video": open_graph_response["og:type"] = "video.other" - html = oembed.get("html") - if html and isinstance(html, str): + html_str = oembed.get("html") + if html_str and isinstance(html_str, str): calc_description_and_urls(open_graph_response, oembed["html"]) for size in ("width", "height"): val = oembed.get(size) diff --git a/tests/rest/media/v1/test_oembed.py b/tests/rest/media/v1/test_oembed.py index 319ae8b1cc2a..3f7f1dbab9b7 100644 --- a/tests/rest/media/v1/test_oembed.py +++ b/tests/rest/media/v1/test_oembed.py @@ -150,3 +150,13 @@ def test_link(self) -> None: result = self.parse_response({"type": "link"}) self.assertIn("og:type", result.open_graph_result) self.assertEqual(result.open_graph_result["og:type"], "website") + + def test_title_html_entities(self) -> None: + """Test HTML entities in title""" + result = self.parse_response( + {"title": "Why JSON isn’t a Good Configuration Language"} + ) + self.assertEqual( + result.open_graph_result["og:title"], + "Why JSON isn’t a Good Configuration Language", + ) From c0145b06f5c331362c94f58ebdf892390612bd24 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Mon, 9 Jan 2023 14:43:46 +0000 Subject: [PATCH 70/82] Fix upgrade notes for installing ICU (#14797) * Fix upgrade notes for installing ICU As noticed in https://github.com/matrix-org/synapse/pull/14712/files#r1058433297 * Changelog --- changelog.d/14797.doc | 1 + docs/upgrade.md | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/14797.doc diff --git a/changelog.d/14797.doc b/changelog.d/14797.doc new file mode 100644 index 000000000000..2e25d531d172 --- /dev/null +++ b/changelog.d/14797.doc @@ -0,0 +1 @@ +Fix Synapse 1.74 upgrade notes to correctly explain how to install pyICU when installing Synapse from PyPI. diff --git a/docs/upgrade.md b/docs/upgrade.md index f853dfdb8e62..c4bc5889a95f 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -99,7 +99,7 @@ the ICU native dependency and its development headers so that PyICU can build since no prebuilt wheels are available. You can follow [the PyICU documentation](https://pypi.org/project/PyICU/) to do so, -and then do `pip install matrix-synapse[icu]` for a PyPI install. +and then do `pip install matrix-synapse[user-search]` for a PyPI install. Docker images and Debian packages need nothing specific as they already include or specify ICU as an explicit dependency. From c7b2c31161cddd7f10d96cb5ec7a8a7b42ecdf79 Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Mon, 9 Jan 2023 17:33:49 +0100 Subject: [PATCH 71/82] Update link to towncrier in contribution guide (#14801) * Update link to towncrier in contribution guide * newsfile --- changelog.d/14801.doc | 1 + docs/development/contributing_guide.md | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/14801.doc diff --git a/changelog.d/14801.doc b/changelog.d/14801.doc new file mode 100644 index 000000000000..ce8e6763f13d --- /dev/null +++ b/changelog.d/14801.doc @@ -0,0 +1 @@ +Update link to towncrier in contribution guide. 
\ No newline at end of file diff --git a/docs/development/contributing_guide.md b/docs/development/contributing_guide.md index 43cb3e201d30..4c1067671482 100644 --- a/docs/development/contributing_guide.md +++ b/docs/development/contributing_guide.md @@ -382,7 +382,7 @@ To prepare a Pull Request, please: ## Changelog All changes, even minor ones, need a corresponding changelog / newsfragment -entry. These are managed by [Towncrier](https://github.com/hawkowl/towncrier). +entry. These are managed by [Towncrier](https://github.com/twisted/towncrier). To create a changelog entry, make a new file in the `changelog.d` directory named in the format of `PRnumber.type`. The type can be one of the following: From 58d2adc3da6a988452dbb9c6c4202a5ea19c4ca9 Mon Sep 17 00:00:00 2001 From: Jeyachandran Rathnam Date: Mon, 9 Jan 2023 12:17:24 -0500 Subject: [PATCH 72/82] Remove undocumented device from pushrules (#14727) * Remove undocumented device from pushrules * Add changelog * Update changelog.d/14727.misc * Rename 14727.misc to 14727.bugfix Co-authored-by: David Robertson --- changelog.d/14727.bugfix | 1 + synapse/push/clientformat.py | 5 +---- 2 files changed, 2 insertions(+), 4 deletions(-) create mode 100644 changelog.d/14727.bugfix diff --git a/changelog.d/14727.bugfix b/changelog.d/14727.bugfix new file mode 100644 index 000000000000..25079496e4f7 --- /dev/null +++ b/changelog.d/14727.bugfix @@ -0,0 +1 @@ +Remove the unspecced `device` field from `/pushrules` responses. diff --git a/synapse/push/clientformat.py b/synapse/push/clientformat.py index 622a1e35c5fe..bb76c169c6ce 100644 --- a/synapse/push/clientformat.py +++ b/synapse/push/clientformat.py @@ -26,10 +26,7 @@ def format_push_rules_for_user( """Converts a list of rawrules and a enabled map into nested dictionaries to match the Matrix client-server format for push rules""" - rules: Dict[str, Dict[str, List[Dict[str, Any]]]] = { - "global": {}, - "device": {}, - } + rules: Dict[str, Dict[str, List[Dict[str, Any]]]] = {"global": {}} rules["global"] = _add_empty_priority_class_arrays(rules["global"]) From 54a7228fa692912096f1cdec47be0731c53fea0b Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Mon, 9 Jan 2023 17:51:37 +0000 Subject: [PATCH 73/82] Skip testing pypy-3.7-linux wheels as we don't have openssl 3.x on manylinux2014 (#14802) --- .github/workflows/release-artifacts.yml | 2 +- changelog.d/14802.misc | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/14802.misc diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index 0601a7dbaf15..30ac4c157169 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -148,7 +148,7 @@ jobs: env: # Skip testing for platforms which various libraries don't have wheels # for, and so need extra build deps. - CIBW_TEST_SKIP: pp39-* *i686* *musl* pp37-macosx* + CIBW_TEST_SKIP: pp3{7,9}-* *i686* *musl* # Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583 CARGO_NET_GIT_FETCH_WITH_CLI: true CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI diff --git a/changelog.d/14802.misc b/changelog.d/14802.misc new file mode 100644 index 000000000000..81d5c0c642d1 --- /dev/null +++ b/changelog.d/14802.misc @@ -0,0 +1 @@ +Skip testing built wheels for PyPy 3.7 on Linux x86_64 as we lack new required dependencies in the build environment. 
\ No newline at end of file From 3479599387164aca2613e88d1697191b087e55bc Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Mon, 9 Jan 2023 19:35:19 +0100 Subject: [PATCH 74/82] Add missing worker settings to shared configuration (#14748) * Add missing worker settings to shared configuration * newsfile * update docs after review * more update for doc * This -> These Co-authored-by: David Robertson --- changelog.d/14748.doc | 1 + .../configuration/config_documentation.md | 54 ++++++++++++++++--- docs/workers.md | 41 ++++++++++++-- 3 files changed, 85 insertions(+), 11 deletions(-) create mode 100644 changelog.d/14748.doc diff --git a/changelog.d/14748.doc b/changelog.d/14748.doc new file mode 100644 index 000000000000..6926136f796c --- /dev/null +++ b/changelog.d/14748.doc @@ -0,0 +1 @@ +Add missing worker settings to shared configuration documentation. \ No newline at end of file diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 6cfcce817fde..93d6c7fb02dd 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -2623,18 +2623,18 @@ state events are shared with users: - `m.room.topic` To change the default behavior, use the following sub-options: -* `disable_default_event_types`: boolean. Set to `true` to disable the above +* `disable_default_event_types`: boolean. Set to `true` to disable the above defaults. If this is enabled, only the event types listed in `additional_event_types` are shared. Defaults to `false`. -* `additional_event_types`: A list of additional state events to include in the - events to be shared. By default, this list is empty (so only the default event +* `additional_event_types`: A list of additional state events to include in the + events to be shared. By default, this list is empty (so only the default event types are shared). Each entry in this list should be either a single string or a list of two - strings. + strings. * A standalone string `t` represents all events with type `t` (i.e. with no restrictions on state keys). - * A pair of strings `[t, s]` represents a single event with type `t` and + * A pair of strings `[t, s]` represents a single event with type `t` and state key `s`. The same type can appear in two entries with different state keys: in this situation, both state keys are included in prejoin state. @@ -3126,7 +3126,7 @@ Options for each entry include: * `picture_claim`: name of the claim containing an url for the user's profile picture. Defaults to 'picture', which OpenID Connect compliant providers should provide and has to refer to a direct image file such as PNG, JPEG, or GIF image file. - + Currently only supported in monolithic (single-process) server configurations where the media repository runs within the Synapse process. @@ -3864,6 +3864,48 @@ Example configuration: ```yaml run_background_tasks_on: worker1 ``` +--- +### `update_user_directory_from_worker` + +The [worker](../../workers.md#updating-the-user-directory) that is used to +update the user directory. If not provided this defaults to the main process. + +Example configuration: +```yaml +update_user_directory_from_worker: worker1 +``` + +_Added in Synapse 1.59.0._ + +--- +### `notify_appservices_from_worker` + +The [worker](../../workers.md#notifying-application-services) that is used to +send output traffic to Application Services. If not provided this defaults +to the main process. 
+
+Example configuration:
+```yaml
+notify_appservices_from_worker: worker1
+```
+
+_Added in Synapse 1.59.0._
+
+---
+### `media_instance_running_background_jobs`
+
+The [worker](../../workers.md#synapseappmedia_repository) that is used to run
+background tasks for the media repository. If running multiple media repositories
+you must configure a single instance to run the background tasks. If not provided
+this defaults to the main process or your single `media_repository` worker.
+
+Example configuration:
+```yaml
+media_instance_running_background_jobs: worker1
+```
+
+_Added in Synapse 1.16.0._
+
 ---
 ### `redis`
diff --git a/docs/workers.md b/docs/workers.md
index c21638564e5d..bc66f0e1bce5 100644
--- a/docs/workers.md
+++ b/docs/workers.md
@@ -465,7 +465,8 @@ An example for a dedicated background worker instance:
 
 You can designate one generic worker to update the user directory.
 
-Specify its name in the shared configuration as follows:
+Specify its name in the [shared configuration](usage/configuration/config_documentation.md#update_user_directory_from_worker)
+as follows:
 
 ```yaml
 update_user_directory_from_worker: worker_name
@@ -490,7 +491,8 @@ worker application type.
 You can designate one generic worker to send output traffic to Application Services.
 Doesn't handle any REST endpoints itself, but you should specify its name in the
-shared configuration as follows:
+[shared configuration](usage/configuration/config_documentation.md#notify_appservices_from_worker)
+as follows:
 
 ```yaml
 notify_appservices_from_worker: worker_name
@@ -502,11 +504,38 @@ after setting this option in the shared configuration!
 This style of configuration supersedes the legacy `synapse.app.appservice`
 worker application type.
 
+#### Push Notifications
+
+You can designate a generic worker to send push notifications to
+a [push gateway](https://spec.matrix.org/v1.5/push-gateway-api/) such as
+[sygnal](https://github.com/matrix-org/sygnal), and to send emails.
+
+This will stop the main process from sending push notifications.
+
+The workers responsible for sending push notifications can be defined using the
+[`pusher_instances`](usage/configuration/config_documentation.md#pusher_instances)
+option. For example:
+
+```yaml
+pusher_instances:
+  - pusher_worker1
+  - pusher_worker2
+```
+
+Multiple workers can be added to this list, in which case the work is balanced
+across them. Ensure the main process and all pusher workers are restarted after changing
+this option.
+
+These workers don't need to accept incoming HTTP requests to send push notifications,
+so no additional reverse proxy configuration is required for pusher workers.
+
+This style of configuration supersedes the legacy `synapse.app.pusher`
+worker application type.
 
 ### `synapse.app.pusher`
 
 It is likely this option will be deprecated in the future and is not recommended for new
-installations. Instead, [use `synapse.app.generic_worker` with the `pusher_instances`](usage/configuration/config_documentation.md#pusher_instances).
+installations. Instead, [use `synapse.app.generic_worker` with the `pusher_instances`](#push-notifications).
 
 Handles sending push notifications to sygnal and email. Doesn't handle any
 REST endpoints itself, but you should set
@@ -547,7 +576,7 @@ Note this worker cannot be load-balanced: only one instance should be active.
 
 ### `synapse.app.federation_sender`
 
 It is likely this option will be deprecated in the future and not recommended for
-new installations. Instead, [use `synapse.app.generic_worker` with the `federation_sender_instances`](usage/configuration/config_documentation.md#federation_sender_instances).
+new installations. Instead, [use `synapse.app.generic_worker` with the `federation_sender_instances`](usage/configuration/config_documentation.md#federation_sender_instances).
 
 Handles sending federation traffic to other servers. Doesn't handle any
 REST endpoints itself, but you should set
@@ -606,7 +635,9 @@ expose the `media` resource. For example:
 ```
 
 Note that if running multiple media repositories they must be on the same server
-and you must configure a single instance to run the background tasks, e.g.:
+and you must specify a single instance to run the background tasks in the
+[shared configuration](usage/configuration/config_documentation.md#media_instance_running_background_jobs),
+e.g.:
 
 ```yaml
 media_instance_running_background_jobs: "media-repository-1"

From ba4ea7d13ffae53644b206222af95a5171faa27c Mon Sep 17 00:00:00 2001
From: reivilibre
Date: Tue, 10 Jan 2023 11:17:59 +0000
Subject: [PATCH 75/82] Batch up replication requests to request the resyncing
 of remote users' devices. (#14716)

---
 changelog.d/14716.misc                    |   1 +
 synapse/handlers/device.py                | 124 +++++++++++++++++-----
 synapse/handlers/devicemessage.py         |   2 +-
 synapse/handlers/e2e_keys.py              |  93 +++++++++-------
 synapse/handlers/federation_event.py      |   2 +-
 synapse/replication/http/devices.py       |  74 ++++++++++++-
 synapse/storage/databases/main/devices.py |  30 ++++--
 synapse/types/__init__.py                 |   4 +
 synapse/util/async_helpers.py             |  55 +++++++++-
 9 files changed, 306 insertions(+), 79 deletions(-)
 create mode 100644 changelog.d/14716.misc

diff --git a/changelog.d/14716.misc b/changelog.d/14716.misc
new file mode 100644
index 000000000000..ef9522e01dbd
--- /dev/null
+++ b/changelog.d/14716.misc
@@ -0,0 +1 @@
+Batch up replication requests to request the resyncing of remote users' devices.
\ No newline at end of file
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index d4750a32e644..89864e111941 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -14,6 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
+from http import HTTPStatus
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -33,6 +34,7 @@
     Codes,
     FederationDeniedError,
     HttpResponseException,
+    InvalidAPICallError,
     RequestSendFailed,
     SynapseError,
 )
@@ -45,6 +47,7 @@
     JsonDict,
     StreamKeyType,
     StreamToken,
+    UserID,
     get_domain_from_id,
     get_verify_key_from_cross_signing_key,
 )
@@ -893,12 +896,47 @@ class DeviceListWorkerUpdater:
 
     def __init__(self, hs: "HomeServer"):
         from synapse.replication.http.devices import (
+            ReplicationMultiUserDevicesResyncRestServlet,
             ReplicationUserDevicesResyncRestServlet,
         )
 
         self._user_device_resync_client = (
             ReplicationUserDevicesResyncRestServlet.make_client(hs)
         )
+        self._multi_user_device_resync_client = (
+            ReplicationMultiUserDevicesResyncRestServlet.make_client(hs)
+        )
+
+    async def multi_user_device_resync(
+        self, user_ids: List[str], mark_failed_as_stale: bool = True
+    ) -> Dict[str, Optional[JsonDict]]:
+        """
+        Like `user_device_resync` but operates on multiple users **from the same origin**
+        at once.
+
+        Returns:
+            Dict from User ID to the same Dict as `user_device_resync`.
+        """
+        # mark_failed_as_stale is not sent. Ensure this doesn't break expectations.
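+        # (This flag is not forwarded over replication: the main process
+        # handler always runs with its default of True, so a False value could
+        # not be honoured; the assertion below catches such misuse early.)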
+ assert mark_failed_as_stale + + if not user_ids: + # Shortcut empty requests + return {} + + try: + return await self._multi_user_device_resync_client(user_ids=user_ids) + except SynapseError as err: + if not ( + err.code == HTTPStatus.NOT_FOUND and err.errcode == Codes.UNRECOGNIZED + ): + raise + + # Fall back to single requests + result: Dict[str, Optional[JsonDict]] = {} + for user_id in user_ids: + result[user_id] = await self._user_device_resync_client(user_id=user_id) + return result async def user_device_resync( self, user_id: str, mark_failed_as_stale: bool = True @@ -913,8 +951,10 @@ async def user_device_resync( A dict with device info as under the "devices" in the result of this request: https://matrix.org/docs/spec/server_server/r0.1.2#get-matrix-federation-v1-user-devices-userid + None when we weren't able to fetch the device info for some reason, + e.g. due to a connection problem. """ - return await self._user_device_resync_client(user_id=user_id) + return (await self.multi_user_device_resync([user_id]))[user_id] class DeviceListUpdater(DeviceListWorkerUpdater): @@ -1160,19 +1200,66 @@ async def _maybe_retry_device_resync(self) -> None: # Allow future calls to retry resyncinc out of sync device lists. self._resync_retry_in_progress = False + async def multi_user_device_resync( + self, user_ids: List[str], mark_failed_as_stale: bool = True + ) -> Dict[str, Optional[JsonDict]]: + """ + Like `user_device_resync` but operates on multiple users **from the same origin** + at once. + + Returns: + Dict from User ID to the same Dict as `user_device_resync`. + """ + if not user_ids: + return {} + + origins = {UserID.from_string(user_id).domain for user_id in user_ids} + + if len(origins) != 1: + raise InvalidAPICallError(f"Only one origin permitted, got {origins!r}") + + result = {} + failed = set() + # TODO(Perf): Actually batch these up + for user_id in user_ids: + user_result, user_failed = await self._user_device_resync_returning_failed( + user_id + ) + result[user_id] = user_result + if user_failed: + failed.add(user_id) + + if mark_failed_as_stale: + await self.store.mark_remote_users_device_caches_as_stale(failed) + + return result + async def user_device_resync( self, user_id: str, mark_failed_as_stale: bool = True ) -> Optional[JsonDict]: + result, failed = await self._user_device_resync_returning_failed(user_id) + + if failed and mark_failed_as_stale: + # Mark the remote user's device list as stale so we know we need to retry + # it later. + await self.store.mark_remote_users_device_caches_as_stale((user_id,)) + + return result + + async def _user_device_resync_returning_failed( + self, user_id: str + ) -> Tuple[Optional[JsonDict], bool]: """Fetches all devices for a user and updates the device cache with them. Args: user_id: The user's id whose device_list will be updated. - mark_failed_as_stale: Whether to mark the user's device list as stale - if the attempt to resync failed. Returns: - A dict with device info as under the "devices" in the result of this - request: - https://matrix.org/docs/spec/server_server/r0.1.2#get-matrix-federation-v1-user-devices-userid + - A dict with device info as under the "devices" in the result of this + request: + https://matrix.org/docs/spec/server_server/r0.1.2#get-matrix-federation-v1-user-devices-userid + None when we weren't able to fetch the device info for some reason, + e.g. due to a connection problem. + - True iff the resync failed and the device list should be marked as stale. 
""" logger.debug("Attempting to resync the device list for %s", user_id) log_kv({"message": "Doing resync to update device list."}) @@ -1181,12 +1268,7 @@ async def user_device_resync( try: result = await self.federation.query_user_devices(origin, user_id) except NotRetryingDestination: - if mark_failed_as_stale: - # Mark the remote user's device list as stale so we know we need to retry - # it later. - await self.store.mark_remote_user_device_cache_as_stale(user_id) - - return None + return None, True except (RequestSendFailed, HttpResponseException) as e: logger.warning( "Failed to handle device list update for %s: %s", @@ -1194,23 +1276,18 @@ async def user_device_resync( e, ) - if mark_failed_as_stale: - # Mark the remote user's device list as stale so we know we need to retry - # it later. - await self.store.mark_remote_user_device_cache_as_stale(user_id) - # We abort on exceptions rather than accepting the update # as otherwise synapse will 'forget' that its device list # is out of date. If we bail then we will retry the resync # next time we get a device list update for this user_id. # This makes it more likely that the device lists will # eventually become consistent. - return None + return None, True except FederationDeniedError as e: set_tag("error", True) log_kv({"reason": "FederationDeniedError"}) logger.info(e) - return None + return None, False except Exception as e: set_tag("error", True) log_kv( @@ -1218,12 +1295,7 @@ async def user_device_resync( ) logger.exception("Failed to handle device list update for %s", user_id) - if mark_failed_as_stale: - # Mark the remote user's device list as stale so we know we need to retry - # it later. - await self.store.mark_remote_user_device_cache_as_stale(user_id) - - return None + return None, True log_kv({"result": result}) stream_id = result["stream_id"] devices = result["devices"] @@ -1305,7 +1377,7 @@ async def user_device_resync( # point. self._seen_updates[user_id] = {stream_id} - return result + return result, False async def process_cross_signing_key_update( self, diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py index 75e89850f5bc..00c403db4925 100644 --- a/synapse/handlers/devicemessage.py +++ b/synapse/handlers/devicemessage.py @@ -195,7 +195,7 @@ async def _check_for_unknown_devices( sender_user_id, unknown_devices, ) - await self.store.mark_remote_user_device_cache_as_stale(sender_user_id) + await self.store.mark_remote_users_device_caches_as_stale((sender_user_id,)) # Immediately attempt a resync in the background run_in_background(self._user_device_resync, user_id=sender_user_id) diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 5fe102e2f2f3..d2188ca08f82 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -36,8 +36,8 @@ get_domain_from_id, get_verify_key_from_cross_signing_key, ) -from synapse.util import json_decoder, unwrapFirstError -from synapse.util.async_helpers import Linearizer, delay_cancellation +from synapse.util import json_decoder +from synapse.util.async_helpers import Linearizer, concurrently_execute from synapse.util.cancellation import cancellable from synapse.util.retryutils import NotRetryingDestination @@ -238,24 +238,28 @@ async def query_devices( # Now fetch any devices that we don't have in our cache # TODO It might make sense to propagate cancellations into the # deferreds which are querying remote homeservers. 
- await make_deferred_yieldable( - delay_cancellation( - defer.gatherResults( - [ - run_in_background( - self._query_devices_for_destination, - results, - cross_signing_keys, - failures, - destination, - queries, - timeout, - ) - for destination, queries in remote_queries_not_in_cache.items() - ], - consumeErrors=True, - ).addErrback(unwrapFirstError) + logger.debug( + "%d destinations to query devices for", len(remote_queries_not_in_cache) + ) + + async def _query( + destination_queries: Tuple[str, Dict[str, Iterable[str]]] + ) -> None: + destination, queries = destination_queries + return await self._query_devices_for_destination( + results, + cross_signing_keys, + failures, + destination, + queries, + timeout, ) + + await concurrently_execute( + _query, + remote_queries_not_in_cache.items(), + 10, + delay_cancellation=True, ) ret = {"device_keys": results, "failures": failures} @@ -300,28 +304,41 @@ async def _query_devices_for_destination( # queries. We use the more efficient batched query_client_keys for all # remaining users user_ids_updated = [] - for (user_id, device_list) in destination_query.items(): - if user_id in user_ids_updated: - continue - if device_list: - continue + # Perform a user device resync for each user only once and only as long as: + # - they have an empty device_list + # - they are in some rooms that this server can see + users_to_resync_devices = { + user_id + for (user_id, device_list) in destination_query.items() + if (not device_list) and (await self.store.get_rooms_for_user(user_id)) + } - room_ids = await self.store.get_rooms_for_user(user_id) - if not room_ids: - continue + logger.debug( + "%d users to resync devices for from destination %s", + len(users_to_resync_devices), + destination, + ) - # We've decided we're sharing a room with this user and should - # probably be tracking their device lists. However, we haven't - # done an initial sync on the device list so we do it now. - try: - resync_results = ( - await self.device_handler.device_list_updater.user_device_resync( - user_id - ) + try: + user_resync_results = ( + await self.device_handler.device_list_updater.multi_user_device_resync( + list(users_to_resync_devices) ) + ) + for user_id in users_to_resync_devices: + resync_results = user_resync_results[user_id] + if resync_results is None: - raise ValueError("Device resync failed") + # TODO: It's weird that we'll store a failure against a + # destination, yet continue processing users from that + # destination. + # We might want to consider changing this, but for now + # I'm leaving it as I found it. + failures[destination] = _exception_to_failure( + ValueError(f"Device resync failed for {user_id!r}") + ) + continue # Add the device keys to the results. 
user_devices = resync_results["devices"] @@ -339,8 +356,8 @@ async def _query_devices_for_destination( if self_signing_key: cross_signing_keys["self_signing_keys"][user_id] = self_signing_key - except Exception as e: - failures[destination] = _exception_to_failure(e) + except Exception as e: + failures[destination] = _exception_to_failure(e) if len(destination_query) == len(user_ids_updated): # We've updated all the users in the query and we do not need to diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py index 31df7f55cc97..6df000faafed 100644 --- a/synapse/handlers/federation_event.py +++ b/synapse/handlers/federation_event.py @@ -1423,7 +1423,7 @@ async def _resync_device(self, sender: str) -> None: """ try: - await self._store.mark_remote_user_device_cache_as_stale(sender) + await self._store.mark_remote_users_device_caches_as_stale((sender,)) # Immediately attempt a resync in the background if self._config.worker.worker_app: diff --git a/synapse/replication/http/devices.py b/synapse/replication/http/devices.py index 7c4941c3d3f5..ea5c08e6cfdf 100644 --- a/synapse/replication/http/devices.py +++ b/synapse/replication/http/devices.py @@ -13,12 +13,13 @@ # limitations under the License. import logging -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Dict, List, Optional, Tuple from twisted.web.server import Request from synapse.http.server import HttpServer from synapse.http.servlet import parse_json_object_from_request +from synapse.logging.opentracing import active_span from synapse.replication.http._base import ReplicationEndpoint from synapse.types import JsonDict @@ -84,6 +85,76 @@ async def _handle_request( # type: ignore[override] return 200, user_devices +class ReplicationMultiUserDevicesResyncRestServlet(ReplicationEndpoint): + """Ask master to resync the device list for multiple users from the same + remote server by contacting their server. + + This must happen on master so that the results can be correctly cached in + the database and streamed to workers. + + Request format: + + POST /_synapse/replication/multi_user_device_resync + + { + "user_ids": ["@alice:example.org", "@bob:example.org", ...] + } + + Response is roughly equivalent to ` /_matrix/federation/v1/user/devices/:user_id` + response, but there is a map from user ID to response, e.g.: + + { + "@alice:example.org": { + "devices": [ + { + "device_id": "JLAFKJWSCS", + "keys": { ... }, + "device_display_name": "Alice's Mobile Phone" + } + ] + }, + ... 
+ } + """ + + NAME = "multi_user_device_resync" + PATH_ARGS = () + CACHE = False + + def __init__(self, hs: "HomeServer"): + super().__init__(hs) + + from synapse.handlers.device import DeviceHandler + + handler = hs.get_device_handler() + assert isinstance(handler, DeviceHandler) + self.device_list_updater = handler.device_list_updater + + self.store = hs.get_datastores().main + self.clock = hs.get_clock() + + @staticmethod + async def _serialize_payload(user_ids: List[str]) -> JsonDict: # type: ignore[override] + return {"user_ids": user_ids} + + async def _handle_request( # type: ignore[override] + self, request: Request + ) -> Tuple[int, Dict[str, Optional[JsonDict]]]: + content = parse_json_object_from_request(request) + user_ids: List[str] = content["user_ids"] + + logger.info("Resync for %r", user_ids) + span = active_span() + if span: + span.set_tag("user_ids", f"{user_ids!r}") + + multi_user_devices = await self.device_list_updater.multi_user_device_resync( + user_ids + ) + + return 200, multi_user_devices + + class ReplicationUploadKeysForUserRestServlet(ReplicationEndpoint): """Ask master to upload keys for the user and send them out over federation to update other servers. @@ -151,4 +222,5 @@ async def _handle_request( # type: ignore[override] def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: ReplicationUserDevicesResyncRestServlet(hs).register(http_server) + ReplicationMultiUserDevicesResyncRestServlet(hs).register(http_server) ReplicationUploadKeysForUserRestServlet(hs).register(http_server) diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index db877e3f1374..b06766447338 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -54,7 +54,7 @@ AbstractStreamIdTracker, StreamIdGenerator, ) -from synapse.types import JsonDict, get_verify_key_from_cross_signing_key +from synapse.types import JsonDict, StrCollection, get_verify_key_from_cross_signing_key from synapse.util import json_decoder, json_encoder from synapse.util.caches.descriptors import cached, cachedList from synapse.util.caches.lrucache import LruCache @@ -1069,16 +1069,30 @@ async def get_user_ids_requiring_device_list_resync( return {row["user_id"] for row in rows} - async def mark_remote_user_device_cache_as_stale(self, user_id: str) -> None: + async def mark_remote_users_device_caches_as_stale( + self, user_ids: StrCollection + ) -> None: """Records that the server has reason to believe the cache of the devices for the remote users is out of date. """ - await self.db_pool.simple_upsert( - table="device_lists_remote_resync", - keyvalues={"user_id": user_id}, - values={}, - insertion_values={"added_ts": self._clock.time_msec()}, - desc="mark_remote_user_device_cache_as_stale", + + def _mark_remote_users_device_caches_as_stale_txn( + txn: LoggingTransaction, + ) -> None: + # TODO add insertion_values support to simple_upsert_many and use + # that! 
+ for user_id in user_ids: + self.db_pool.simple_upsert_txn( + txn, + table="device_lists_remote_resync", + keyvalues={"user_id": user_id}, + values={}, + insertion_values={"added_ts": self._clock.time_msec()}, + ) + + await self.db_pool.runInteraction( + "mark_remote_users_device_caches_as_stale", + _mark_remote_users_device_caches_as_stale_txn, ) async def mark_remote_user_device_cache_as_valid(self, user_id: str) -> None: diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index f2d436ddc38c..0c725eb9677d 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -77,6 +77,10 @@ # A JSON-serialisable object. JsonSerializable = object +# Collection[str] that does not include str itself; str being a Sequence[str] +# is very misleading and results in bugs. +StrCollection = Union[Tuple[str, ...], List[str], Set[str]] + # Note that this seems to require inheriting *directly* from Interface in order # for mypy-zope to realize it is an interface. diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index d24c4f68c4da..01e3cd46f650 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -205,7 +205,10 @@ def __repr__(self) -> str: async def concurrently_execute( - func: Callable[[T], Any], args: Iterable[T], limit: int + func: Callable[[T], Any], + args: Iterable[T], + limit: int, + delay_cancellation: bool = False, ) -> None: """Executes the function with each argument concurrently while limiting the number of concurrent executions. @@ -215,6 +218,8 @@ async def concurrently_execute( args: List of arguments to pass to func, each invocation of func gets a single argument. limit: Maximum number of conccurent executions. + delay_cancellation: Whether to delay cancellation until after the invocations + have finished. Returns: None, when all function invocations have finished. The return values @@ -233,9 +238,16 @@ async def _concurrently_execute_inner(value: T) -> None: # We use `itertools.islice` to handle the case where the number of args is # less than the limit, avoiding needlessly spawning unnecessary background # tasks. - await yieldable_gather_results( - _concurrently_execute_inner, (value for value in itertools.islice(it, limit)) - ) + if delay_cancellation: + await yieldable_gather_results_delaying_cancellation( + _concurrently_execute_inner, + (value for value in itertools.islice(it, limit)), + ) + else: + await yieldable_gather_results( + _concurrently_execute_inner, + (value for value in itertools.islice(it, limit)), + ) P = ParamSpec("P") @@ -292,6 +304,41 @@ async def yieldable_gather_results( raise dfe.subFailure.value from None +async def yieldable_gather_results_delaying_cancellation( + func: Callable[Concatenate[T, P], Awaitable[R]], + iter: Iterable[T], + *args: P.args, + **kwargs: P.kwargs, +) -> List[R]: + """Executes the function with each argument concurrently. + Cancellation is delayed until after all the results have been gathered. + + See `yieldable_gather_results`. 
+ + Args: + func: Function to execute that returns a Deferred + iter: An iterable that yields items that get passed as the first + argument to the function + *args: Arguments to be passed to each call to func + **kwargs: Keyword arguments to be passed to each call to func + + Returns + A list containing the results of the function + """ + try: + return await make_deferred_yieldable( + delay_cancellation( + defer.gatherResults( + [run_in_background(func, item, *args, **kwargs) for item in iter], # type: ignore[arg-type] + consumeErrors=True, + ) + ) + ) + except defer.FirstError as dfe: + assert isinstance(dfe.subFailure.value, BaseException) + raise dfe.subFailure.value from None + + T1 = TypeVar("T1") T2 = TypeVar("T2") T3 = TypeVar("T3") From 9a4c69f59f1ac54838d3b0f47280d858b7e41073 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Tue, 10 Jan 2023 12:18:50 +0000 Subject: [PATCH 76/82] 1.75.0rc1 --- CHANGES.md | 89 +++++++++++++++++++++++++++++++++++++++ changelog.d/14263.misc | 1 - changelog.d/14545.misc | 1 - changelog.d/14546.misc | 1 - changelog.d/14633.misc | 1 - changelog.d/14644.bugfix | 1 - changelog.d/14663.feature | 1 - changelog.d/14665.misc | 1 - changelog.d/14669.bugfix | 1 - changelog.d/14672.misc | 1 - changelog.d/14673.doc | 1 - changelog.d/14674.doc | 1 - changelog.d/14676.misc | 1 - changelog.d/14680.misc | 1 - changelog.d/14681.misc | 1 - changelog.d/14685.misc | 1 - changelog.d/14687.misc | 1 - changelog.d/14692.misc | 1 - changelog.d/14693.misc | 1 - changelog.d/14694.misc | 1 - changelog.d/14695.misc | 1 - changelog.d/14696.misc | 1 - changelog.d/14697.misc | 1 - changelog.d/14698.misc | 1 - changelog.d/14700.misc | 1 - changelog.d/14701.misc | 1 - changelog.d/14702.misc | 1 - changelog.d/14707.misc | 1 - changelog.d/14714.feature | 1 - changelog.d/14716.misc | 1 - changelog.d/14720.misc | 1 - changelog.d/14723.bugfix | 1 - changelog.d/14724.misc | 1 - changelog.d/14725.misc | 1 - changelog.d/14727.bugfix | 1 - changelog.d/14730.doc | 1 - changelog.d/14731.misc | 1 - changelog.d/14732.misc | 1 - changelog.d/14733.misc | 1 - changelog.d/14734.misc | 1 - changelog.d/14735.misc | 1 - changelog.d/14736.misc | 1 - changelog.d/14738.misc | 1 - changelog.d/14741.misc | 1 - changelog.d/14743.misc | 1 - changelog.d/14744.doc | 1 - changelog.d/14748.doc | 1 - changelog.d/14750.feature | 1 - changelog.d/14751.bugfix | 1 - changelog.d/14753.feature | 1 - changelog.d/14758.misc | 1 - changelog.d/14759.misc | 1 - changelog.d/14760.misc | 1 - changelog.d/14761.misc | 1 - changelog.d/14762.misc | 1 - changelog.d/14763.misc | 1 - changelog.d/14772.misc | 1 - changelog.d/14774.misc | 1 - changelog.d/14778.doc | 1 - changelog.d/14779.misc | 1 - changelog.d/14781.misc | 1 - changelog.d/14786.feature | 1 - changelog.d/14791.misc | 1 - changelog.d/14792.misc | 1 - changelog.d/14793.misc | 1 - changelog.d/14794.misc | 1 - changelog.d/14795.misc | 1 - changelog.d/14796.misc | 1 - changelog.d/14797.doc | 1 - changelog.d/14801.doc | 1 - changelog.d/14802.misc | 1 - debian/changelog | 6 +++ pyproject.toml | 2 +- 73 files changed, 96 insertions(+), 71 deletions(-) delete mode 100644 changelog.d/14263.misc delete mode 100644 changelog.d/14545.misc delete mode 100644 changelog.d/14546.misc delete mode 100644 changelog.d/14633.misc delete mode 100644 changelog.d/14644.bugfix delete mode 100644 changelog.d/14663.feature delete mode 100644 changelog.d/14665.misc delete mode 100644 changelog.d/14669.bugfix delete mode 100644 changelog.d/14672.misc delete mode 100644 changelog.d/14673.doc delete 
mode 100644 changelog.d/14674.doc delete mode 100644 changelog.d/14676.misc delete mode 100644 changelog.d/14680.misc delete mode 100644 changelog.d/14681.misc delete mode 100644 changelog.d/14685.misc delete mode 100644 changelog.d/14687.misc delete mode 100644 changelog.d/14692.misc delete mode 100644 changelog.d/14693.misc delete mode 100644 changelog.d/14694.misc delete mode 100644 changelog.d/14695.misc delete mode 100644 changelog.d/14696.misc delete mode 100644 changelog.d/14697.misc delete mode 100644 changelog.d/14698.misc delete mode 100644 changelog.d/14700.misc delete mode 100644 changelog.d/14701.misc delete mode 100644 changelog.d/14702.misc delete mode 100644 changelog.d/14707.misc delete mode 100644 changelog.d/14714.feature delete mode 100644 changelog.d/14716.misc delete mode 100644 changelog.d/14720.misc delete mode 100644 changelog.d/14723.bugfix delete mode 100644 changelog.d/14724.misc delete mode 100644 changelog.d/14725.misc delete mode 100644 changelog.d/14727.bugfix delete mode 100644 changelog.d/14730.doc delete mode 100644 changelog.d/14731.misc delete mode 100644 changelog.d/14732.misc delete mode 100644 changelog.d/14733.misc delete mode 100644 changelog.d/14734.misc delete mode 100644 changelog.d/14735.misc delete mode 100644 changelog.d/14736.misc delete mode 100644 changelog.d/14738.misc delete mode 100644 changelog.d/14741.misc delete mode 100644 changelog.d/14743.misc delete mode 100644 changelog.d/14744.doc delete mode 100644 changelog.d/14748.doc delete mode 100644 changelog.d/14750.feature delete mode 100644 changelog.d/14751.bugfix delete mode 100644 changelog.d/14753.feature delete mode 100644 changelog.d/14758.misc delete mode 100644 changelog.d/14759.misc delete mode 100644 changelog.d/14760.misc delete mode 100644 changelog.d/14761.misc delete mode 100644 changelog.d/14762.misc delete mode 100644 changelog.d/14763.misc delete mode 100644 changelog.d/14772.misc delete mode 100644 changelog.d/14774.misc delete mode 100644 changelog.d/14778.doc delete mode 100644 changelog.d/14779.misc delete mode 100644 changelog.d/14781.misc delete mode 100644 changelog.d/14786.feature delete mode 100644 changelog.d/14791.misc delete mode 100644 changelog.d/14792.misc delete mode 100644 changelog.d/14793.misc delete mode 100644 changelog.d/14794.misc delete mode 100644 changelog.d/14795.misc delete mode 100644 changelog.d/14796.misc delete mode 100644 changelog.d/14797.doc delete mode 100644 changelog.d/14801.doc delete mode 100644 changelog.d/14802.misc diff --git a/CHANGES.md b/CHANGES.md index ae55d63bc1ad..ab6a107f9773 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,92 @@ +Synapse 1.75.0rc1 (2023-01-10) +============================== + +Features +-------- + +- Add a `cached` function to `synapse.module_api` that returns a decorator to cache return values of functions. ([\#14663](https://github.com/matrix-org/synapse/issues/14663)) +- Add experimental support for [MSC3391](https://github.com/matrix-org/matrix-spec-proposals/pull/3391) (removing account data). ([\#14714](https://github.com/matrix-org/synapse/issues/14714)) +- Support [RFC7636](https://datatracker.ietf.org/doc/html/rfc7636) Proof Key for Code Exchange for OAuth single sign-on. ([\#14750](https://github.com/matrix-org/synapse/issues/14750)) +- Support non-OpenID compliant userinfo claims for subject and picture. ([\#14753](https://github.com/matrix-org/synapse/issues/14753)) +- Improve performance of `/sync` when filtering all rooms, message types, or senders. 
([\#14786](https://github.com/matrix-org/synapse/issues/14786)) + + +Bugfixes +-------- + +- Fix the *MAU Limits* section of the Grafana dashboard relying on a specific `job` name for the workers of a Synapse deployment. ([\#14644](https://github.com/matrix-org/synapse/issues/14644)) +- Fix a bug introduced in Synapse 1.70.0 which could cause spurious `UNIQUE constraint failed` errors in the `rotate_notifs` background job. ([\#14669](https://github.com/matrix-org/synapse/issues/14669)) +- Ensure stream IDs are always updated after caches get invalidated with workers. Contributed by Nick @ Beeper (@fizzadar). ([\#14723](https://github.com/matrix-org/synapse/issues/14723)) +- Remove the unspecced `device` field from `/pushrules` responses. ([\#14727](https://github.com/matrix-org/synapse/issues/14727)) +- Fix a bug introduced in Synapse 1.73.0 where the `picture_claim` configured under `oidc_providers` was unused (the default value of `"picture"` was used instead). ([\#14751](https://github.com/matrix-org/synapse/issues/14751)) + + +Improved Documentation +---------------------- + +- Declare support for Python 3.11. ([\#14673](https://github.com/matrix-org/synapse/issues/14673)) +- Fix `target_memory_usage` being used in the description for the actual `cache_autotune` sub-option `target_cache_memory_usage`. ([\#14674](https://github.com/matrix-org/synapse/issues/14674)) +- Move `email` to Server section in config file documentation. ([\#14730](https://github.com/matrix-org/synapse/issues/14730)) +- Fix broken links in the Synapse documentation. ([\#14744](https://github.com/matrix-org/synapse/issues/14744)) +- Add missing worker settings to shared configuration documentation. ([\#14748](https://github.com/matrix-org/synapse/issues/14748)) +- Document using Twitter as a OAuth 2.0 authentication provider. ([\#14778](https://github.com/matrix-org/synapse/issues/14778)) +- Fix Synapse 1.74 upgrade notes to correctly explain how to install pyICU when installing Synapse from PyPI. ([\#14797](https://github.com/matrix-org/synapse/issues/14797)) +- Update link to towncrier in contribution guide. ([\#14801](https://github.com/matrix-org/synapse/issues/14801)) + + +Internal Changes +---------------- + +- Improve performance of the `/hierarchy` endpoint. ([\#14263](https://github.com/matrix-org/synapse/issues/14263)) +- Faster remote room joins: stream the un-partial-stating of events over replication. ([\#14545](https://github.com/matrix-org/synapse/issues/14545), [\#14546](https://github.com/matrix-org/synapse/issues/14546)) +- Use [ruff](https://github.com/charliermarsh/ruff/) instead of flake8. ([\#14633](https://github.com/matrix-org/synapse/issues/14633), [\#14741](https://github.com/matrix-org/synapse/issues/14741)) +- Change `handle_new_client_event` signature so that a 429 does not reach clients on `PartialStateConflictError`, and internally retry when needed instead. ([\#14665](https://github.com/matrix-org/synapse/issues/14665)) +- Remove dependency on jQuery on reCAPTCHA page. ([\#14672](https://github.com/matrix-org/synapse/issues/14672)) +- Faster joins: make `computer_state_after_events` consistent with other state-fetching functions that take a `StateFilter`. ([\#14676](https://github.com/matrix-org/synapse/issues/14676)) +- Add missing type hints. 
([\#14680](https://github.com/matrix-org/synapse/issues/14680), [\#14681](https://github.com/matrix-org/synapse/issues/14681), [\#14687](https://github.com/matrix-org/synapse/issues/14687)) +- Improve type annotations for the helper methods on a `CachedFunction`. ([\#14685](https://github.com/matrix-org/synapse/issues/14685)) +- Check that the SQLite database file exists before porting to PostgreSQL. ([\#14692](https://github.com/matrix-org/synapse/issues/14692)) +- Bump JasonEtco/create-an-issue from 2.8.1 to 2.8.2. ([\#14693](https://github.com/matrix-org/synapse/issues/14693)) +- Bump anyhow from 1.0.66 to 1.0.68. ([\#14694](https://github.com/matrix-org/synapse/issues/14694)) +- Bump blake2 from 0.10.5 to 0.10.6. ([\#14695](https://github.com/matrix-org/synapse/issues/14695)) +- Bump serde_json from 1.0.89 to 1.0.91. ([\#14696](https://github.com/matrix-org/synapse/issues/14696)) +- Bump serde from 1.0.150 to 1.0.151. ([\#14697](https://github.com/matrix-org/synapse/issues/14697)) +- Bump lxml from 4.9.1 to 4.9.2. ([\#14698](https://github.com/matrix-org/synapse/issues/14698)) +- Bump types-jsonschema from 4.17.0.1 to 4.17.0.2. ([\#14700](https://github.com/matrix-org/synapse/issues/14700)) +- Bump sentry-sdk from 1.11.1 to 1.12.0. ([\#14701](https://github.com/matrix-org/synapse/issues/14701)) +- Bump types-setuptools from 65.6.0.1 to 65.6.0.2. ([\#14702](https://github.com/matrix-org/synapse/issues/14702)) +- Add `.direnv/` directory to .gitignore to prevent local state generated by the [direnv](https://direnv.net/) development tool from being committed. ([\#14707](https://github.com/matrix-org/synapse/issues/14707)) +- Batch up replication requests to request the resyncing of remote users's devices. ([\#14716](https://github.com/matrix-org/synapse/issues/14716)) +- Bump minimum PyYAML to 3.13. ([\#14720](https://github.com/matrix-org/synapse/issues/14720)) +- If debug logging is enabled, log the `msgid`s of any to-device messages that are returned over `/sync`. ([\#14724](https://github.com/matrix-org/synapse/issues/14724)) +- Disable sending confirmation email when 3pid is disabled. ([\#14725](https://github.com/matrix-org/synapse/issues/14725)) +- Bump JasonEtco/create-an-issue from 2.8.2 to 2.9.1. ([\#14731](https://github.com/matrix-org/synapse/issues/14731)) +- Bump towncrier from 22.8.0 to 22.12.0. ([\#14732](https://github.com/matrix-org/synapse/issues/14732)) +- Bump isort from 5.10.1 to 5.11.4. ([\#14733](https://github.com/matrix-org/synapse/issues/14733)) +- Bump attrs from 22.1.0 to 22.2.0. ([\#14734](https://github.com/matrix-org/synapse/issues/14734)) +- Bump black from 22.10.0 to 22.12.0. ([\#14735](https://github.com/matrix-org/synapse/issues/14735)) +- Bump sentry-sdk from 1.12.0 to 1.12.1. ([\#14736](https://github.com/matrix-org/synapse/issues/14736)) +- Bump setuptools from 65.3.0 to 65.5.1. ([\#14738](https://github.com/matrix-org/synapse/issues/14738)) +- Use `htmltest` to check links in the Synapse documentation. ([\#14743](https://github.com/matrix-org/synapse/issues/14743)) +- Bump serde from 1.0.151 to 1.0.152. ([\#14758](https://github.com/matrix-org/synapse/issues/14758)) +- Bump ruff from 0.0.189 to 0.0.206. ([\#14759](https://github.com/matrix-org/synapse/issues/14759)) +- Bump pydantic from 1.10.2 to 1.10.4. ([\#14760](https://github.com/matrix-org/synapse/issues/14760)) +- Bump gitpython from 3.1.29 to 3.1.30. ([\#14761](https://github.com/matrix-org/synapse/issues/14761)) +- Bump pillow from 9.3.0 to 9.4.0. 
([\#14762](https://github.com/matrix-org/synapse/issues/14762)) +- Bump types-requests from 2.28.11.5 to 2.28.11.7. ([\#14763](https://github.com/matrix-org/synapse/issues/14763)) +- Change GHA CI job to follow best practices. ([\#14772](https://github.com/matrix-org/synapse/issues/14772)) +- Switch to our fork of `dh-virtualenv` to work around an upstream Python 3.11 incompatibility. ([\#14774](https://github.com/matrix-org/synapse/issues/14774)) +- Bump dawidd6/action-download-artifact from 2.24.2 to 2.24.3. ([\#14779](https://github.com/matrix-org/synapse/issues/14779)) +- Unescape HTML entities in URL preview titles making use of oEmbed responses. ([\#14781](https://github.com/matrix-org/synapse/issues/14781)) +- Bump peaceiris/actions-gh-pages from 3.9.0 to 3.9.1. ([\#14791](https://github.com/matrix-org/synapse/issues/14791)) +- Bump types-pillow from 9.3.0.4 to 9.4.0.0. ([\#14792](https://github.com/matrix-org/synapse/issues/14792)) +- Bump pyopenssl from 22.1.0 to 23.0.0. ([\#14793](https://github.com/matrix-org/synapse/issues/14793)) +- Bump types-setuptools from 65.6.0.2 to 65.6.0.3. ([\#14794](https://github.com/matrix-org/synapse/issues/14794)) +- Bump importlib-metadata from 4.2.0 to 6.0.0. ([\#14795](https://github.com/matrix-org/synapse/issues/14795)) +- Bump ruff from 0.0.206 to 0.0.215. ([\#14796](https://github.com/matrix-org/synapse/issues/14796)) +- Skip testing built wheels for PyPy 3.7 on Linux x86_64 as we lack new required dependencies in the build environment. ([\#14802](https://github.com/matrix-org/synapse/issues/14802)) + + Synapse 1.74.0 (2022-12-20) =========================== diff --git a/changelog.d/14263.misc b/changelog.d/14263.misc deleted file mode 100644 index 11d9446a4b1e..000000000000 --- a/changelog.d/14263.misc +++ /dev/null @@ -1 +0,0 @@ -Improve performance of the `/hierarchy` endpoint. diff --git a/changelog.d/14545.misc b/changelog.d/14545.misc deleted file mode 100644 index 60b6761a51b3..000000000000 --- a/changelog.d/14545.misc +++ /dev/null @@ -1 +0,0 @@ -Faster remote room joins: stream the un-partial-stating of events over replication. \ No newline at end of file diff --git a/changelog.d/14546.misc b/changelog.d/14546.misc deleted file mode 100644 index 60b6761a51b3..000000000000 --- a/changelog.d/14546.misc +++ /dev/null @@ -1 +0,0 @@ -Faster remote room joins: stream the un-partial-stating of events over replication. \ No newline at end of file diff --git a/changelog.d/14633.misc b/changelog.d/14633.misc deleted file mode 100644 index def187b12b48..000000000000 --- a/changelog.d/14633.misc +++ /dev/null @@ -1 +0,0 @@ -Use [ruff](https://github.com/charliermarsh/ruff/) instead of flake8. diff --git a/changelog.d/14644.bugfix b/changelog.d/14644.bugfix deleted file mode 100644 index 711088bb7ed2..000000000000 --- a/changelog.d/14644.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix the *MAU Limits* section of the Grafana dashboard relying on a specific `job` name for the workers of a Synapse deployment. \ No newline at end of file diff --git a/changelog.d/14663.feature b/changelog.d/14663.feature deleted file mode 100644 index b03f3ee54e33..000000000000 --- a/changelog.d/14663.feature +++ /dev/null @@ -1 +0,0 @@ -Add a `cached` function to `synapse.module_api` that returns a decorator to cache return values of functions. 
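The `cached` entry just above (#14663) gives pluggable modules access to Synapse's memoisation machinery. A minimal sketch of how a module might use it — the zero-argument decorator call and the `invalidate` helper mirror Synapse's internal cache descriptors and the `CachedFunction` annotations from #14685, but treat the exact keyword surface as an assumption rather than a documented contract:

    from synapse.module_api import ModuleApi, cached

    class ExampleModule:
        def __init__(self, config: dict, api: ModuleApi):
            self._api = api

        @cached()
        async def fetch_display_name(self, user_id: str) -> str:
            # Stand-in for expensive work (e.g. an outbound HTTP request);
            # the result is memoised per user_id until evicted, e.g. via
            # self.fetch_display_name.invalidate((user_id,)).
            return user_id.lstrip("@").split(":")[0]
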
diff --git a/changelog.d/14665.misc b/changelog.d/14665.misc deleted file mode 100644 index 2b7c96143d1f..000000000000 --- a/changelog.d/14665.misc +++ /dev/null @@ -1 +0,0 @@ -Change `handle_new_client_event` signature so that a 429 does not reach clients on `PartialStateConflictError`, and internally retry when needed instead. diff --git a/changelog.d/14669.bugfix b/changelog.d/14669.bugfix deleted file mode 100644 index bea316b06550..000000000000 --- a/changelog.d/14669.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug introduced in Synapse 1.70.0 which could cause spurious `UNIQUE constraint failed` errors in the `rotate_notifs` background job. diff --git a/changelog.d/14672.misc b/changelog.d/14672.misc deleted file mode 100644 index b94ebed97152..000000000000 --- a/changelog.d/14672.misc +++ /dev/null @@ -1 +0,0 @@ -Remove dependency on jQuery on reCAPTCHA page. diff --git a/changelog.d/14673.doc b/changelog.d/14673.doc deleted file mode 100644 index 7baf5f7f38c9..000000000000 --- a/changelog.d/14673.doc +++ /dev/null @@ -1 +0,0 @@ -Declare support for Python 3.11. diff --git a/changelog.d/14674.doc b/changelog.d/14674.doc deleted file mode 100644 index df2141781935..000000000000 --- a/changelog.d/14674.doc +++ /dev/null @@ -1 +0,0 @@ -Fix `target_memory_usage` being used in the description for the actual `cache_autotune` sub-option `target_cache_memory_usage`. diff --git a/changelog.d/14676.misc b/changelog.d/14676.misc deleted file mode 100644 index 8a41df9c64c3..000000000000 --- a/changelog.d/14676.misc +++ /dev/null @@ -1 +0,0 @@ -Faster joins: make `computer_state_after_events` consistent with other state-fetching functions that take a `StateFilter`. diff --git a/changelog.d/14680.misc b/changelog.d/14680.misc deleted file mode 100644 index d44571b73149..000000000000 --- a/changelog.d/14680.misc +++ /dev/null @@ -1 +0,0 @@ -Add missing type hints. diff --git a/changelog.d/14681.misc b/changelog.d/14681.misc deleted file mode 100644 index d44571b73149..000000000000 --- a/changelog.d/14681.misc +++ /dev/null @@ -1 +0,0 @@ -Add missing type hints. diff --git a/changelog.d/14685.misc b/changelog.d/14685.misc deleted file mode 100644 index 3ba22701000b..000000000000 --- a/changelog.d/14685.misc +++ /dev/null @@ -1 +0,0 @@ -Improve type annotations for the helper methods on a `CachedFunction`. \ No newline at end of file diff --git a/changelog.d/14687.misc b/changelog.d/14687.misc deleted file mode 100644 index d44571b73149..000000000000 --- a/changelog.d/14687.misc +++ /dev/null @@ -1 +0,0 @@ -Add missing type hints. diff --git a/changelog.d/14692.misc b/changelog.d/14692.misc deleted file mode 100644 index 0edac253b7ff..000000000000 --- a/changelog.d/14692.misc +++ /dev/null @@ -1 +0,0 @@ -Check that the SQLite database file exists before porting to PostgreSQL. \ No newline at end of file diff --git a/changelog.d/14693.misc b/changelog.d/14693.misc deleted file mode 100644 index 86771f41b2f8..000000000000 --- a/changelog.d/14693.misc +++ /dev/null @@ -1 +0,0 @@ -Bump JasonEtco/create-an-issue from 2.8.1 to 2.8.2. diff --git a/changelog.d/14694.misc b/changelog.d/14694.misc deleted file mode 100644 index 146238d8c50b..000000000000 --- a/changelog.d/14694.misc +++ /dev/null @@ -1 +0,0 @@ -Bump anyhow from 1.0.66 to 1.0.68. diff --git a/changelog.d/14695.misc b/changelog.d/14695.misc deleted file mode 100644 index 57e08498be2d..000000000000 --- a/changelog.d/14695.misc +++ /dev/null @@ -1 +0,0 @@ -Bump blake2 from 0.10.5 to 0.10.6. 
diff --git a/changelog.d/14696.misc b/changelog.d/14696.misc deleted file mode 100644 index 9849366b9f25..000000000000 --- a/changelog.d/14696.misc +++ /dev/null @@ -1 +0,0 @@ -Bump serde_json from 1.0.89 to 1.0.91. diff --git a/changelog.d/14697.misc b/changelog.d/14697.misc deleted file mode 100644 index 514209fcc3c0..000000000000 --- a/changelog.d/14697.misc +++ /dev/null @@ -1 +0,0 @@ -Bump serde from 1.0.150 to 1.0.151. diff --git a/changelog.d/14698.misc b/changelog.d/14698.misc deleted file mode 100644 index 2e2072183ef6..000000000000 --- a/changelog.d/14698.misc +++ /dev/null @@ -1 +0,0 @@ -Bump lxml from 4.9.1 to 4.9.2. diff --git a/changelog.d/14700.misc b/changelog.d/14700.misc deleted file mode 100644 index 253eb1721dcb..000000000000 --- a/changelog.d/14700.misc +++ /dev/null @@ -1 +0,0 @@ -Bump types-jsonschema from 4.17.0.1 to 4.17.0.2. diff --git a/changelog.d/14701.misc b/changelog.d/14701.misc deleted file mode 100644 index 05c89d5948f4..000000000000 --- a/changelog.d/14701.misc +++ /dev/null @@ -1 +0,0 @@ -Bump sentry-sdk from 1.11.1 to 1.12.0. diff --git a/changelog.d/14702.misc b/changelog.d/14702.misc deleted file mode 100644 index 17c0485f1217..000000000000 --- a/changelog.d/14702.misc +++ /dev/null @@ -1 +0,0 @@ -Bump types-setuptools from 65.6.0.1 to 65.6.0.2. diff --git a/changelog.d/14707.misc b/changelog.d/14707.misc deleted file mode 100644 index 38f47a6f307d..000000000000 --- a/changelog.d/14707.misc +++ /dev/null @@ -1 +0,0 @@ -Add `.direnv/` directory to .gitignore to prevent local state generated by the [direnv](https://direnv.net/) development tool from being committed. \ No newline at end of file diff --git a/changelog.d/14714.feature b/changelog.d/14714.feature deleted file mode 100644 index 5f3a20b7a733..000000000000 --- a/changelog.d/14714.feature +++ /dev/null @@ -1 +0,0 @@ -Add experimental support for [MSC3391](https://github.com/matrix-org/matrix-spec-proposals/pull/3391) (removing account data). \ No newline at end of file diff --git a/changelog.d/14716.misc b/changelog.d/14716.misc deleted file mode 100644 index ef9522e01dbd..000000000000 --- a/changelog.d/14716.misc +++ /dev/null @@ -1 +0,0 @@ -Batch up replication requests to request the resyncing of remote users's devices. \ No newline at end of file diff --git a/changelog.d/14720.misc b/changelog.d/14720.misc deleted file mode 100644 index 0defc0155090..000000000000 --- a/changelog.d/14720.misc +++ /dev/null @@ -1 +0,0 @@ -Bump minimum PyYAML to 3.13. diff --git a/changelog.d/14723.bugfix b/changelog.d/14723.bugfix deleted file mode 100644 index e1f89cee35c8..000000000000 --- a/changelog.d/14723.bugfix +++ /dev/null @@ -1 +0,0 @@ -Ensure stream IDs are always updated after caches get invalidated with workers. Contributed by Nick @ Beeper (@fizzadar). diff --git a/changelog.d/14724.misc b/changelog.d/14724.misc deleted file mode 100644 index 270e5ed18802..000000000000 --- a/changelog.d/14724.misc +++ /dev/null @@ -1 +0,0 @@ -If debug logging is enabled, log the `msgid`s of any to-device messages that are returned over `/sync`. diff --git a/changelog.d/14725.misc b/changelog.d/14725.misc deleted file mode 100644 index a86c4f8c0527..000000000000 --- a/changelog.d/14725.misc +++ /dev/null @@ -1 +0,0 @@ -Disable sending confirmation email when 3pid is disabled. 
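The MSC3391 entry deleted a few files above (#14714) covers experimental removal of account data. A hypothetical client-side call, sketched from the MSC text rather than from Synapse's own documentation — the unstable path prefix and the `msc3391_enabled` experimental flag are assumptions:

    import requests

    # DELETE a piece of global account data for a user (MSC3391). The server
    # must opt in, e.g. with `msc3391_enabled: true` under
    # `experimental_features` in homeserver.yaml (assumed flag name).
    resp = requests.delete(
        "https://synapse.example.com/_matrix/client/unstable"
        "/org.matrix.msc3391/user/@alice:example.com/account_data/m.example",
        headers={"Authorization": "Bearer <access_token>"},
    )
    resp.raise_for_status()
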
diff --git a/changelog.d/14727.bugfix b/changelog.d/14727.bugfix deleted file mode 100644 index 25079496e4f7..000000000000 --- a/changelog.d/14727.bugfix +++ /dev/null @@ -1 +0,0 @@ -Remove the unspecced `device` field from `/pushrules` responses. diff --git a/changelog.d/14730.doc b/changelog.d/14730.doc deleted file mode 100644 index 6015c7d2833e..000000000000 --- a/changelog.d/14730.doc +++ /dev/null @@ -1 +0,0 @@ -Move `email` to Server section in config file documentation. \ No newline at end of file diff --git a/changelog.d/14731.misc b/changelog.d/14731.misc deleted file mode 100644 index 511466787f36..000000000000 --- a/changelog.d/14731.misc +++ /dev/null @@ -1 +0,0 @@ -Bump JasonEtco/create-an-issue from 2.8.2 to 2.9.1. diff --git a/changelog.d/14732.misc b/changelog.d/14732.misc deleted file mode 100644 index 308858e841dc..000000000000 --- a/changelog.d/14732.misc +++ /dev/null @@ -1 +0,0 @@ -Bump towncrier from 22.8.0 to 22.12.0. diff --git a/changelog.d/14733.misc b/changelog.d/14733.misc deleted file mode 100644 index 53afc6c00e52..000000000000 --- a/changelog.d/14733.misc +++ /dev/null @@ -1 +0,0 @@ -Bump isort from 5.10.1 to 5.11.4. diff --git a/changelog.d/14734.misc b/changelog.d/14734.misc deleted file mode 100644 index 06b24e7d8cc2..000000000000 --- a/changelog.d/14734.misc +++ /dev/null @@ -1 +0,0 @@ -Bump attrs from 22.1.0 to 22.2.0. diff --git a/changelog.d/14735.misc b/changelog.d/14735.misc deleted file mode 100644 index 76b6c1e29d1c..000000000000 --- a/changelog.d/14735.misc +++ /dev/null @@ -1 +0,0 @@ -Bump black from 22.10.0 to 22.12.0. diff --git a/changelog.d/14736.misc b/changelog.d/14736.misc deleted file mode 100644 index 458d5accdfb4..000000000000 --- a/changelog.d/14736.misc +++ /dev/null @@ -1 +0,0 @@ -Bump sentry-sdk from 1.12.0 to 1.12.1. diff --git a/changelog.d/14738.misc b/changelog.d/14738.misc deleted file mode 100644 index 9530b7075cc2..000000000000 --- a/changelog.d/14738.misc +++ /dev/null @@ -1 +0,0 @@ -Bump setuptools from 65.3.0 to 65.5.1. diff --git a/changelog.d/14741.misc b/changelog.d/14741.misc deleted file mode 100644 index def187b12b48..000000000000 --- a/changelog.d/14741.misc +++ /dev/null @@ -1 +0,0 @@ -Use [ruff](https://github.com/charliermarsh/ruff/) instead of flake8. diff --git a/changelog.d/14743.misc b/changelog.d/14743.misc deleted file mode 100644 index fe949c5bdbcd..000000000000 --- a/changelog.d/14743.misc +++ /dev/null @@ -1 +0,0 @@ -Use `htmltest` to check links in the Synapse documentation. \ No newline at end of file diff --git a/changelog.d/14744.doc b/changelog.d/14744.doc deleted file mode 100644 index 738ab1a46db4..000000000000 --- a/changelog.d/14744.doc +++ /dev/null @@ -1 +0,0 @@ -Fix broken links in the Synapse documentation. \ No newline at end of file diff --git a/changelog.d/14748.doc b/changelog.d/14748.doc deleted file mode 100644 index 6926136f796c..000000000000 --- a/changelog.d/14748.doc +++ /dev/null @@ -1 +0,0 @@ -Add missing worker settings to shared configuration documentation. \ No newline at end of file diff --git a/changelog.d/14750.feature b/changelog.d/14750.feature deleted file mode 100644 index cfed64ee80f0..000000000000 --- a/changelog.d/14750.feature +++ /dev/null @@ -1 +0,0 @@ -Support [RFC7636](https://datatracker.ietf.org/doc/html/rfc7636) Proof Key for Code Exchange for OAuth single sign-on. 
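For context on the PKCE entry just above (#14750): RFC 7636 pairs each authorisation request with a one-off secret so that an intercepted authorisation code is useless on its own. A self-contained sketch of the verifier/challenge generation on the OAuth-client side (the side Synapse plays during SSO) — an illustration of the RFC, not Synapse's implementation:

    import base64
    import hashlib
    import secrets
    from typing import Tuple

    def make_pkce_pair() -> Tuple[str, str]:
        # High-entropy code_verifier of 43-128 characters (RFC 7636 s. 4.1).
        code_verifier = secrets.token_urlsafe(64)
        # S256 code_challenge: unpadded base64url of the SHA-256 digest (s. 4.2).
        digest = hashlib.sha256(code_verifier.encode("ascii")).digest()
        code_challenge = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
        return code_verifier, code_challenge

The challenge travels on the initial authorisation redirect (`code_challenge`, `code_challenge_method=S256`), while the verifier only appears later in the back-channel token request.
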
diff --git a/changelog.d/14751.bugfix b/changelog.d/14751.bugfix deleted file mode 100644 index 56ef8522881a..000000000000 --- a/changelog.d/14751.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug introduced in Synapse 1.73.0 where the `picture_claim` configured under `oidc_providers` was unused (the default value of `"picture"` was used instead). diff --git a/changelog.d/14753.feature b/changelog.d/14753.feature deleted file mode 100644 index 38b4d6af4b38..000000000000 --- a/changelog.d/14753.feature +++ /dev/null @@ -1 +0,0 @@ -Support non-OpenID compliant userinfo claims for subject and picture. diff --git a/changelog.d/14758.misc b/changelog.d/14758.misc deleted file mode 100644 index 69c727bab50b..000000000000 --- a/changelog.d/14758.misc +++ /dev/null @@ -1 +0,0 @@ -Bump serde from 1.0.151 to 1.0.152. diff --git a/changelog.d/14759.misc b/changelog.d/14759.misc deleted file mode 100644 index 30ce74c310db..000000000000 --- a/changelog.d/14759.misc +++ /dev/null @@ -1 +0,0 @@ -Bump ruff from 0.0.189 to 0.0.206. diff --git a/changelog.d/14760.misc b/changelog.d/14760.misc deleted file mode 100644 index 5eaaee4559ac..000000000000 --- a/changelog.d/14760.misc +++ /dev/null @@ -1 +0,0 @@ -Bump pydantic from 1.10.2 to 1.10.4. diff --git a/changelog.d/14761.misc b/changelog.d/14761.misc deleted file mode 100644 index bd96f75f39dd..000000000000 --- a/changelog.d/14761.misc +++ /dev/null @@ -1 +0,0 @@ -Bump gitpython from 3.1.29 to 3.1.30. diff --git a/changelog.d/14762.misc b/changelog.d/14762.misc deleted file mode 100644 index a9c750c2038c..000000000000 --- a/changelog.d/14762.misc +++ /dev/null @@ -1 +0,0 @@ -Bump pillow from 9.3.0 to 9.4.0. diff --git a/changelog.d/14763.misc b/changelog.d/14763.misc deleted file mode 100644 index 59d616d76dfc..000000000000 --- a/changelog.d/14763.misc +++ /dev/null @@ -1 +0,0 @@ -Bump types-requests from 2.28.11.5 to 2.28.11.7. diff --git a/changelog.d/14772.misc b/changelog.d/14772.misc deleted file mode 100644 index 7ead5a920f45..000000000000 --- a/changelog.d/14772.misc +++ /dev/null @@ -1 +0,0 @@ -Change GHA CI job to follow best practices. diff --git a/changelog.d/14774.misc b/changelog.d/14774.misc deleted file mode 100644 index b6c9f8ca52a8..000000000000 --- a/changelog.d/14774.misc +++ /dev/null @@ -1 +0,0 @@ -Switch to our fork of `dh-virtualenv` to work around an upstream Python 3.11 incompatibility. \ No newline at end of file diff --git a/changelog.d/14778.doc b/changelog.d/14778.doc deleted file mode 100644 index 677f999f8da0..000000000000 --- a/changelog.d/14778.doc +++ /dev/null @@ -1 +0,0 @@ -Document using Twitter as a OAuth 2.0 authentication provider. diff --git a/changelog.d/14779.misc b/changelog.d/14779.misc deleted file mode 100644 index 2bc760dbc661..000000000000 --- a/changelog.d/14779.misc +++ /dev/null @@ -1 +0,0 @@ -Bump dawidd6/action-download-artifact from 2.24.2 to 2.24.3. diff --git a/changelog.d/14781.misc b/changelog.d/14781.misc deleted file mode 100644 index 04f565b41020..000000000000 --- a/changelog.d/14781.misc +++ /dev/null @@ -1 +0,0 @@ -Unescape HTML entities in URL preview titles making use of oEmbed responses. diff --git a/changelog.d/14786.feature b/changelog.d/14786.feature deleted file mode 100644 index 008d61ab039c..000000000000 --- a/changelog.d/14786.feature +++ /dev/null @@ -1 +0,0 @@ -Improve performance of `/sync` when filtering all rooms, message types, or senders. 
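The `/sync` entry just above (#14786) hinges on recognising filters that can never match anything, so whole phases of sync can be skipped. A rough sketch of the wildcard test involved — the method name matches the `filters_all_rooms`/`blocks_all_rooms` helpers visible in patch 79 later in this series (which backs part of the change out, because device-list tracking still needs the skipped per-room pass); the class shape here is simplified and the wildcard convention is an assumption borrowed from Synapse's event-type filters:

    from typing import List

    class RoomFilter:
        def __init__(self, not_rooms: List[str]) -> None:
            self.not_rooms = not_rooms

        def filters_all_rooms(self) -> bool:
            # A wildcard exclusion rejects every room, so per-room sync work
            # for this request would all be thrown away.
            return "*" in self.not_rooms
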
diff --git a/changelog.d/14791.misc b/changelog.d/14791.misc deleted file mode 100644 index 6474859f3cdf..000000000000 --- a/changelog.d/14791.misc +++ /dev/null @@ -1 +0,0 @@ -Bump peaceiris/actions-gh-pages from 3.9.0 to 3.9.1. diff --git a/changelog.d/14792.misc b/changelog.d/14792.misc deleted file mode 100644 index 85e9537c64ad..000000000000 --- a/changelog.d/14792.misc +++ /dev/null @@ -1 +0,0 @@ -Bump types-pillow from 9.3.0.4 to 9.4.0.0. diff --git a/changelog.d/14793.misc b/changelog.d/14793.misc deleted file mode 100644 index 86848da6497d..000000000000 --- a/changelog.d/14793.misc +++ /dev/null @@ -1 +0,0 @@ -Bump pyopenssl from 22.1.0 to 23.0.0. diff --git a/changelog.d/14794.misc b/changelog.d/14794.misc deleted file mode 100644 index 8e0887ec76aa..000000000000 --- a/changelog.d/14794.misc +++ /dev/null @@ -1 +0,0 @@ -Bump types-setuptools from 65.6.0.2 to 65.6.0.3. diff --git a/changelog.d/14795.misc b/changelog.d/14795.misc deleted file mode 100644 index 5c4bcc891c8e..000000000000 --- a/changelog.d/14795.misc +++ /dev/null @@ -1 +0,0 @@ -Bump importlib-metadata from 4.2.0 to 6.0.0. diff --git a/changelog.d/14796.misc b/changelog.d/14796.misc deleted file mode 100644 index 5dc3865fdff9..000000000000 --- a/changelog.d/14796.misc +++ /dev/null @@ -1 +0,0 @@ -Bump ruff from 0.0.206 to 0.0.215. diff --git a/changelog.d/14797.doc b/changelog.d/14797.doc deleted file mode 100644 index 2e25d531d172..000000000000 --- a/changelog.d/14797.doc +++ /dev/null @@ -1 +0,0 @@ -Fix Synapse 1.74 upgrade notes to correctly explain how to install pyICU when installing Synapse from PyPI. diff --git a/changelog.d/14801.doc b/changelog.d/14801.doc deleted file mode 100644 index ce8e6763f13d..000000000000 --- a/changelog.d/14801.doc +++ /dev/null @@ -1 +0,0 @@ -Update link to towncrier in contribution guide. \ No newline at end of file diff --git a/changelog.d/14802.misc b/changelog.d/14802.misc deleted file mode 100644 index 81d5c0c642d1..000000000000 --- a/changelog.d/14802.misc +++ /dev/null @@ -1 +0,0 @@ -Skip testing built wheels for PyPy 3.7 on Linux x86_64 as we lack new required dependencies in the build environment. \ No newline at end of file diff --git a/debian/changelog b/debian/changelog index f6edb4d860c2..e02793c9967b 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.75.0~rc1) stable; urgency=medium + + * New Synapse release 1.75.0rc1. + + -- Synapse Packaging team Tue, 10 Jan 2023 12:18:27 +0000 + matrix-synapse-py3 (1.74.0) stable; urgency=medium * New Synapse release 1.74.0. diff --git a/pyproject.toml b/pyproject.toml index 58063b15e95e..740d33066e1c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.74.0" +version = "1.75.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0" From e5c01272a77bba05bdf219a11f1d47ae071e4d1a Mon Sep 17 00:00:00 2001 From: David Robertson Date: Tue, 10 Jan 2023 12:26:19 +0000 Subject: [PATCH 77/82] Update changelog --- CHANGES.md | 74 ++++++++++++++++++++++++++++-------------------------- 1 file changed, 38 insertions(+), 36 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index ab6a107f9773..d8b1bb87a543 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -9,6 +9,7 @@ Features - Support [RFC7636](https://datatracker.ietf.org/doc/html/rfc7636) Proof Key for Code Exchange for OAuth single sign-on. 
([\#14750](https://github.com/matrix-org/synapse/issues/14750)) - Support non-OpenID compliant userinfo claims for subject and picture. ([\#14753](https://github.com/matrix-org/synapse/issues/14753)) - Improve performance of `/sync` when filtering all rooms, message types, or senders. ([\#14786](https://github.com/matrix-org/synapse/issues/14786)) +- Improve performance of the `/hierarchy` endpoint. ([\#14263](https://github.com/matrix-org/synapse/issues/14263)) Bugfixes @@ -19,6 +20,8 @@ Bugfixes - Ensure stream IDs are always updated after caches get invalidated with workers. Contributed by Nick @ Beeper (@fizzadar). ([\#14723](https://github.com/matrix-org/synapse/issues/14723)) - Remove the unspecced `device` field from `/pushrules` responses. ([\#14727](https://github.com/matrix-org/synapse/issues/14727)) - Fix a bug introduced in Synapse 1.73.0 where the `picture_claim` configured under `oidc_providers` was unused (the default value of `"picture"` was used instead). ([\#14751](https://github.com/matrix-org/synapse/issues/14751)) +- Unescape HTML entities in URL preview titles making use of oEmbed responses. ([\#14781](https://github.com/matrix-org/synapse/issues/14781)) +- Disable sending confirmation email when 3pid is disabled. ([\#14725](https://github.com/matrix-org/synapse/issues/14725)) Improved Documentation @@ -32,60 +35,59 @@ Improved Documentation - Document using Twitter as a OAuth 2.0 authentication provider. ([\#14778](https://github.com/matrix-org/synapse/issues/14778)) - Fix Synapse 1.74 upgrade notes to correctly explain how to install pyICU when installing Synapse from PyPI. ([\#14797](https://github.com/matrix-org/synapse/issues/14797)) - Update link to towncrier in contribution guide. ([\#14801](https://github.com/matrix-org/synapse/issues/14801)) +- Use `htmltest` to check links in the Synapse documentation. ([\#14743](https://github.com/matrix-org/synapse/issues/14743)) Internal Changes ---------------- -- Improve performance of the `/hierarchy` endpoint. ([\#14263](https://github.com/matrix-org/synapse/issues/14263)) - Faster remote room joins: stream the un-partial-stating of events over replication. ([\#14545](https://github.com/matrix-org/synapse/issues/14545), [\#14546](https://github.com/matrix-org/synapse/issues/14546)) - Use [ruff](https://github.com/charliermarsh/ruff/) instead of flake8. ([\#14633](https://github.com/matrix-org/synapse/issues/14633), [\#14741](https://github.com/matrix-org/synapse/issues/14741)) - Change `handle_new_client_event` signature so that a 429 does not reach clients on `PartialStateConflictError`, and internally retry when needed instead. ([\#14665](https://github.com/matrix-org/synapse/issues/14665)) - Remove dependency on jQuery on reCAPTCHA page. ([\#14672](https://github.com/matrix-org/synapse/issues/14672)) -- Faster joins: make `computer_state_after_events` consistent with other state-fetching functions that take a `StateFilter`. ([\#14676](https://github.com/matrix-org/synapse/issues/14676)) +- Faster joins: make `compute_state_after_events` consistent with other state-fetching functions that take a `StateFilter`. ([\#14676](https://github.com/matrix-org/synapse/issues/14676)) - Add missing type hints. ([\#14680](https://github.com/matrix-org/synapse/issues/14680), [\#14681](https://github.com/matrix-org/synapse/issues/14681), [\#14687](https://github.com/matrix-org/synapse/issues/14687)) - Improve type annotations for the helper methods on a `CachedFunction`. 
([\#14685](https://github.com/matrix-org/synapse/issues/14685)) - Check that the SQLite database file exists before porting to PostgreSQL. ([\#14692](https://github.com/matrix-org/synapse/issues/14692)) -- Bump JasonEtco/create-an-issue from 2.8.1 to 2.8.2. ([\#14693](https://github.com/matrix-org/synapse/issues/14693)) -- Bump anyhow from 1.0.66 to 1.0.68. ([\#14694](https://github.com/matrix-org/synapse/issues/14694)) -- Bump blake2 from 0.10.5 to 0.10.6. ([\#14695](https://github.com/matrix-org/synapse/issues/14695)) -- Bump serde_json from 1.0.89 to 1.0.91. ([\#14696](https://github.com/matrix-org/synapse/issues/14696)) -- Bump serde from 1.0.150 to 1.0.151. ([\#14697](https://github.com/matrix-org/synapse/issues/14697)) -- Bump lxml from 4.9.1 to 4.9.2. ([\#14698](https://github.com/matrix-org/synapse/issues/14698)) -- Bump types-jsonschema from 4.17.0.1 to 4.17.0.2. ([\#14700](https://github.com/matrix-org/synapse/issues/14700)) -- Bump sentry-sdk from 1.11.1 to 1.12.0. ([\#14701](https://github.com/matrix-org/synapse/issues/14701)) -- Bump types-setuptools from 65.6.0.1 to 65.6.0.2. ([\#14702](https://github.com/matrix-org/synapse/issues/14702)) - Add `.direnv/` directory to .gitignore to prevent local state generated by the [direnv](https://direnv.net/) development tool from being committed. ([\#14707](https://github.com/matrix-org/synapse/issues/14707)) - Batch up replication requests to request the resyncing of remote users's devices. ([\#14716](https://github.com/matrix-org/synapse/issues/14716)) -- Bump minimum PyYAML to 3.13. ([\#14720](https://github.com/matrix-org/synapse/issues/14720)) - If debug logging is enabled, log the `msgid`s of any to-device messages that are returned over `/sync`. ([\#14724](https://github.com/matrix-org/synapse/issues/14724)) -- Disable sending confirmation email when 3pid is disabled. ([\#14725](https://github.com/matrix-org/synapse/issues/14725)) -- Bump JasonEtco/create-an-issue from 2.8.2 to 2.9.1. ([\#14731](https://github.com/matrix-org/synapse/issues/14731)) -- Bump towncrier from 22.8.0 to 22.12.0. ([\#14732](https://github.com/matrix-org/synapse/issues/14732)) -- Bump isort from 5.10.1 to 5.11.4. ([\#14733](https://github.com/matrix-org/synapse/issues/14733)) -- Bump attrs from 22.1.0 to 22.2.0. ([\#14734](https://github.com/matrix-org/synapse/issues/14734)) -- Bump black from 22.10.0 to 22.12.0. ([\#14735](https://github.com/matrix-org/synapse/issues/14735)) -- Bump sentry-sdk from 1.12.0 to 1.12.1. ([\#14736](https://github.com/matrix-org/synapse/issues/14736)) -- Bump setuptools from 65.3.0 to 65.5.1. ([\#14738](https://github.com/matrix-org/synapse/issues/14738)) -- Use `htmltest` to check links in the Synapse documentation. ([\#14743](https://github.com/matrix-org/synapse/issues/14743)) -- Bump serde from 1.0.151 to 1.0.152. ([\#14758](https://github.com/matrix-org/synapse/issues/14758)) -- Bump ruff from 0.0.189 to 0.0.206. ([\#14759](https://github.com/matrix-org/synapse/issues/14759)) -- Bump pydantic from 1.10.2 to 1.10.4. ([\#14760](https://github.com/matrix-org/synapse/issues/14760)) -- Bump gitpython from 3.1.29 to 3.1.30. ([\#14761](https://github.com/matrix-org/synapse/issues/14761)) -- Bump pillow from 9.3.0 to 9.4.0. ([\#14762](https://github.com/matrix-org/synapse/issues/14762)) -- Bump types-requests from 2.28.11.5 to 2.28.11.7. ([\#14763](https://github.com/matrix-org/synapse/issues/14763)) - Change GHA CI job to follow best practices. 
([\#14772](https://github.com/matrix-org/synapse/issues/14772)) - Switch to our fork of `dh-virtualenv` to work around an upstream Python 3.11 incompatibility. ([\#14774](https://github.com/matrix-org/synapse/issues/14774)) -- Bump dawidd6/action-download-artifact from 2.24.2 to 2.24.3. ([\#14779](https://github.com/matrix-org/synapse/issues/14779)) -- Unescape HTML entities in URL preview titles making use of oEmbed responses. ([\#14781](https://github.com/matrix-org/synapse/issues/14781)) -- Bump peaceiris/actions-gh-pages from 3.9.0 to 3.9.1. ([\#14791](https://github.com/matrix-org/synapse/issues/14791)) -- Bump types-pillow from 9.3.0.4 to 9.4.0.0. ([\#14792](https://github.com/matrix-org/synapse/issues/14792)) -- Bump pyopenssl from 22.1.0 to 23.0.0. ([\#14793](https://github.com/matrix-org/synapse/issues/14793)) -- Bump types-setuptools from 65.6.0.2 to 65.6.0.3. ([\#14794](https://github.com/matrix-org/synapse/issues/14794)) -- Bump importlib-metadata from 4.2.0 to 6.0.0. ([\#14795](https://github.com/matrix-org/synapse/issues/14795)) -- Bump ruff from 0.0.206 to 0.0.215. ([\#14796](https://github.com/matrix-org/synapse/issues/14796)) - Skip testing built wheels for PyPy 3.7 on Linux x86_64 as we lack new required dependencies in the build environment. ([\#14802](https://github.com/matrix-org/synapse/issues/14802)) - +-
Dependabot updates + + - Bump JasonEtco/create-an-issue from 2.8.1 to 2.8.2. ([\#14693](https://github.com/matrix-org/synapse/issues/14693)) + - Bump anyhow from 1.0.66 to 1.0.68. ([\#14694](https://github.com/matrix-org/synapse/issues/14694)) + - Bump blake2 from 0.10.5 to 0.10.6. ([\#14695](https://github.com/matrix-org/synapse/issues/14695)) + - Bump serde_json from 1.0.89 to 1.0.91. ([\#14696](https://github.com/matrix-org/synapse/issues/14696)) + - Bump serde from 1.0.150 to 1.0.151. ([\#14697](https://github.com/matrix-org/synapse/issues/14697)) + - Bump lxml from 4.9.1 to 4.9.2. ([\#14698](https://github.com/matrix-org/synapse/issues/14698)) + - Bump types-jsonschema from 4.17.0.1 to 4.17.0.2. ([\#14700](https://github.com/matrix-org/synapse/issues/14700)) + - Bump sentry-sdk from 1.11.1 to 1.12.0. ([\#14701](https://github.com/matrix-org/synapse/issues/14701)) + - Bump types-setuptools from 65.6.0.1 to 65.6.0.2. ([\#14702](https://github.com/matrix-org/synapse/issues/14702)) + - Bump minimum PyYAML to 3.13. ([\#14720](https://github.com/matrix-org/synapse/issues/14720)) + - Bump JasonEtco/create-an-issue from 2.8.2 to 2.9.1. ([\#14731](https://github.com/matrix-org/synapse/issues/14731)) + - Bump towncrier from 22.8.0 to 22.12.0. ([\#14732](https://github.com/matrix-org/synapse/issues/14732)) + - Bump isort from 5.10.1 to 5.11.4. ([\#14733](https://github.com/matrix-org/synapse/issues/14733)) + - Bump attrs from 22.1.0 to 22.2.0. ([\#14734](https://github.com/matrix-org/synapse/issues/14734)) + - Bump black from 22.10.0 to 22.12.0. ([\#14735](https://github.com/matrix-org/synapse/issues/14735)) + - Bump sentry-sdk from 1.12.0 to 1.12.1. ([\#14736](https://github.com/matrix-org/synapse/issues/14736)) + - Bump setuptools from 65.3.0 to 65.5.1. ([\#14738](https://github.com/matrix-org/synapse/issues/14738)) + - Bump serde from 1.0.151 to 1.0.152. ([\#14758](https://github.com/matrix-org/synapse/issues/14758)) + - Bump ruff from 0.0.189 to 0.0.206. ([\#14759](https://github.com/matrix-org/synapse/issues/14759)) + - Bump pydantic from 1.10.2 to 1.10.4. ([\#14760](https://github.com/matrix-org/synapse/issues/14760)) + - Bump gitpython from 3.1.29 to 3.1.30. ([\#14761](https://github.com/matrix-org/synapse/issues/14761)) + - Bump pillow from 9.3.0 to 9.4.0. ([\#14762](https://github.com/matrix-org/synapse/issues/14762)) + - Bump types-requests from 2.28.11.5 to 2.28.11.7. ([\#14763](https://github.com/matrix-org/synapse/issues/14763)) + - Bump dawidd6/action-download-artifact from 2.24.2 to 2.24.3. ([\#14779](https://github.com/matrix-org/synapse/issues/14779)) + - Bump peaceiris/actions-gh-pages from 3.9.0 to 3.9.1. ([\#14791](https://github.com/matrix-org/synapse/issues/14791)) + - Bump types-pillow from 9.3.0.4 to 9.4.0.0. ([\#14792](https://github.com/matrix-org/synapse/issues/14792)) + - Bump pyopenssl from 22.1.0 to 23.0.0. ([\#14793](https://github.com/matrix-org/synapse/issues/14793)) + - Bump types-setuptools from 65.6.0.2 to 65.6.0.3. ([\#14794](https://github.com/matrix-org/synapse/issues/14794)) + - Bump importlib-metadata from 4.2.0 to 6.0.0. ([\#14795](https://github.com/matrix-org/synapse/issues/14795)) + - Bump ruff from 0.0.206 to 0.0.215. ([\#14796](https://github.com/matrix-org/synapse/issues/14796)) +
Synapse 1.74.0 (2022-12-20) =========================== From f417fb84b8c8a228d3093faf4c3f37a7d979901c Mon Sep 17 00:00:00 2001 From: David Robertson Date: Tue, 10 Jan 2023 12:30:01 +0000 Subject: [PATCH 78/82] Update changelog 2 --- CHANGES.md | 67 ++++++++++++++++++++++++++++-------------------------- 1 file changed, 35 insertions(+), 32 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index d8b1bb87a543..261ce6a8642a 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -55,38 +55,41 @@ Internal Changes - Change GHA CI job to follow best practices. ([\#14772](https://github.com/matrix-org/synapse/issues/14772)) - Switch to our fork of `dh-virtualenv` to work around an upstream Python 3.11 incompatibility. ([\#14774](https://github.com/matrix-org/synapse/issues/14774)) - Skip testing built wheels for PyPy 3.7 on Linux x86_64 as we lack new required dependencies in the build environment. ([\#14802](https://github.com/matrix-org/synapse/issues/14802)) --
Dependabot updates - - - Bump JasonEtco/create-an-issue from 2.8.1 to 2.8.2. ([\#14693](https://github.com/matrix-org/synapse/issues/14693)) - - Bump anyhow from 1.0.66 to 1.0.68. ([\#14694](https://github.com/matrix-org/synapse/issues/14694)) - - Bump blake2 from 0.10.5 to 0.10.6. ([\#14695](https://github.com/matrix-org/synapse/issues/14695)) - - Bump serde_json from 1.0.89 to 1.0.91. ([\#14696](https://github.com/matrix-org/synapse/issues/14696)) - - Bump serde from 1.0.150 to 1.0.151. ([\#14697](https://github.com/matrix-org/synapse/issues/14697)) - - Bump lxml from 4.9.1 to 4.9.2. ([\#14698](https://github.com/matrix-org/synapse/issues/14698)) - - Bump types-jsonschema from 4.17.0.1 to 4.17.0.2. ([\#14700](https://github.com/matrix-org/synapse/issues/14700)) - - Bump sentry-sdk from 1.11.1 to 1.12.0. ([\#14701](https://github.com/matrix-org/synapse/issues/14701)) - - Bump types-setuptools from 65.6.0.1 to 65.6.0.2. ([\#14702](https://github.com/matrix-org/synapse/issues/14702)) - - Bump minimum PyYAML to 3.13. ([\#14720](https://github.com/matrix-org/synapse/issues/14720)) - - Bump JasonEtco/create-an-issue from 2.8.2 to 2.9.1. ([\#14731](https://github.com/matrix-org/synapse/issues/14731)) - - Bump towncrier from 22.8.0 to 22.12.0. ([\#14732](https://github.com/matrix-org/synapse/issues/14732)) - - Bump isort from 5.10.1 to 5.11.4. ([\#14733](https://github.com/matrix-org/synapse/issues/14733)) - - Bump attrs from 22.1.0 to 22.2.0. ([\#14734](https://github.com/matrix-org/synapse/issues/14734)) - - Bump black from 22.10.0 to 22.12.0. ([\#14735](https://github.com/matrix-org/synapse/issues/14735)) - - Bump sentry-sdk from 1.12.0 to 1.12.1. ([\#14736](https://github.com/matrix-org/synapse/issues/14736)) - - Bump setuptools from 65.3.0 to 65.5.1. ([\#14738](https://github.com/matrix-org/synapse/issues/14738)) - - Bump serde from 1.0.151 to 1.0.152. ([\#14758](https://github.com/matrix-org/synapse/issues/14758)) - - Bump ruff from 0.0.189 to 0.0.206. ([\#14759](https://github.com/matrix-org/synapse/issues/14759)) - - Bump pydantic from 1.10.2 to 1.10.4. ([\#14760](https://github.com/matrix-org/synapse/issues/14760)) - - Bump gitpython from 3.1.29 to 3.1.30. ([\#14761](https://github.com/matrix-org/synapse/issues/14761)) - - Bump pillow from 9.3.0 to 9.4.0. ([\#14762](https://github.com/matrix-org/synapse/issues/14762)) - - Bump types-requests from 2.28.11.5 to 2.28.11.7. ([\#14763](https://github.com/matrix-org/synapse/issues/14763)) - - Bump dawidd6/action-download-artifact from 2.24.2 to 2.24.3. ([\#14779](https://github.com/matrix-org/synapse/issues/14779)) - - Bump peaceiris/actions-gh-pages from 3.9.0 to 3.9.1. ([\#14791](https://github.com/matrix-org/synapse/issues/14791)) - - Bump types-pillow from 9.3.0.4 to 9.4.0.0. ([\#14792](https://github.com/matrix-org/synapse/issues/14792)) - - Bump pyopenssl from 22.1.0 to 23.0.0. ([\#14793](https://github.com/matrix-org/synapse/issues/14793)) - - Bump types-setuptools from 65.6.0.2 to 65.6.0.3. ([\#14794](https://github.com/matrix-org/synapse/issues/14794)) - - Bump importlib-metadata from 4.2.0 to 6.0.0. ([\#14795](https://github.com/matrix-org/synapse/issues/14795)) - - Bump ruff from 0.0.206 to 0.0.215. ([\#14796](https://github.com/matrix-org/synapse/issues/14796)) + +### Dependabot updates + +
+ +- Bump JasonEtco/create-an-issue from 2.8.1 to 2.8.2. ([\#14693](https://github.com/matrix-org/synapse/issues/14693)) +- Bump anyhow from 1.0.66 to 1.0.68. ([\#14694](https://github.com/matrix-org/synapse/issues/14694)) +- Bump blake2 from 0.10.5 to 0.10.6. ([\#14695](https://github.com/matrix-org/synapse/issues/14695)) +- Bump serde_json from 1.0.89 to 1.0.91. ([\#14696](https://github.com/matrix-org/synapse/issues/14696)) +- Bump serde from 1.0.150 to 1.0.151. ([\#14697](https://github.com/matrix-org/synapse/issues/14697)) +- Bump lxml from 4.9.1 to 4.9.2. ([\#14698](https://github.com/matrix-org/synapse/issues/14698)) +- Bump types-jsonschema from 4.17.0.1 to 4.17.0.2. ([\#14700](https://github.com/matrix-org/synapse/issues/14700)) +- Bump sentry-sdk from 1.11.1 to 1.12.0. ([\#14701](https://github.com/matrix-org/synapse/issues/14701)) +- Bump types-setuptools from 65.6.0.1 to 65.6.0.2. ([\#14702](https://github.com/matrix-org/synapse/issues/14702)) +- Bump minimum PyYAML to 3.13. ([\#14720](https://github.com/matrix-org/synapse/issues/14720)) +- Bump JasonEtco/create-an-issue from 2.8.2 to 2.9.1. ([\#14731](https://github.com/matrix-org/synapse/issues/14731)) +- Bump towncrier from 22.8.0 to 22.12.0. ([\#14732](https://github.com/matrix-org/synapse/issues/14732)) +- Bump isort from 5.10.1 to 5.11.4. ([\#14733](https://github.com/matrix-org/synapse/issues/14733)) +- Bump attrs from 22.1.0 to 22.2.0. ([\#14734](https://github.com/matrix-org/synapse/issues/14734)) +- Bump black from 22.10.0 to 22.12.0. ([\#14735](https://github.com/matrix-org/synapse/issues/14735)) +- Bump sentry-sdk from 1.12.0 to 1.12.1. ([\#14736](https://github.com/matrix-org/synapse/issues/14736)) +- Bump setuptools from 65.3.0 to 65.5.1. ([\#14738](https://github.com/matrix-org/synapse/issues/14738)) +- Bump serde from 1.0.151 to 1.0.152. ([\#14758](https://github.com/matrix-org/synapse/issues/14758)) +- Bump ruff from 0.0.189 to 0.0.206. ([\#14759](https://github.com/matrix-org/synapse/issues/14759)) +- Bump pydantic from 1.10.2 to 1.10.4. ([\#14760](https://github.com/matrix-org/synapse/issues/14760)) +- Bump gitpython from 3.1.29 to 3.1.30. ([\#14761](https://github.com/matrix-org/synapse/issues/14761)) +- Bump pillow from 9.3.0 to 9.4.0. ([\#14762](https://github.com/matrix-org/synapse/issues/14762)) +- Bump types-requests from 2.28.11.5 to 2.28.11.7. ([\#14763](https://github.com/matrix-org/synapse/issues/14763)) +- Bump dawidd6/action-download-artifact from 2.24.2 to 2.24.3. ([\#14779](https://github.com/matrix-org/synapse/issues/14779)) +- Bump peaceiris/actions-gh-pages from 3.9.0 to 3.9.1. ([\#14791](https://github.com/matrix-org/synapse/issues/14791)) +- Bump types-pillow from 9.3.0.4 to 9.4.0.0. ([\#14792](https://github.com/matrix-org/synapse/issues/14792)) +- Bump pyopenssl from 22.1.0 to 23.0.0. ([\#14793](https://github.com/matrix-org/synapse/issues/14793)) +- Bump types-setuptools from 65.6.0.2 to 65.6.0.3. ([\#14794](https://github.com/matrix-org/synapse/issues/14794)) +- Bump importlib-metadata from 4.2.0 to 6.0.0. ([\#14795](https://github.com/matrix-org/synapse/issues/14795)) +- Bump ruff from 0.0.206 to 0.0.215. ([\#14796](https://github.com/matrix-org/synapse/issues/14796))
Synapse 1.74.0 (2022-12-20) From 3952297f6f39906a65e70bce7becc1acd300a287 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 11 Jan 2023 07:16:41 -0500 Subject: [PATCH 79/82] Calculate rooms changed for device lists to work. (#14810) Back-out some changes from 7e582a25f8f350df29d7d83ca902bdb522d1bbaf (#14786) which skipped necessary logic to calculate device lists properly. --- changelog.d/14810.bugfix | 1 + synapse/api/filtering.py | 3 --- synapse/handlers/sync.py | 4 ---- 3 files changed, 1 insertion(+), 7 deletions(-) create mode 100644 changelog.d/14810.bugfix diff --git a/changelog.d/14810.bugfix b/changelog.d/14810.bugfix new file mode 100644 index 000000000000..379bfccffa4d --- /dev/null +++ b/changelog.d/14810.bugfix @@ -0,0 +1 @@ +Fix a bug introduced in Synapse 1.75.0rc1 where device lists could be miscalculated with some sync filters. diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index 2b5af264b43d..4cf8f0cc8ef9 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -283,9 +283,6 @@ async def filter_room_account_data( await self._room_filter.filter(events) ) - def blocks_all_rooms(self) -> bool: - return self._room_filter.filters_all_rooms() - def blocks_all_presence(self) -> bool: return ( self._presence_filter.filters_all_types() diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 6942e06c770e..20ee2f203a73 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1793,10 +1793,6 @@ async def _generate_sync_entry_for_rooms( - newly_left_users """ - # If the request doesn't care about rooms then nothing to do! - if sync_result_builder.sync_config.filter_collection.blocks_all_rooms(): - return set(), set(), set(), set() - since_token = sync_result_builder.since_token # 1. Start by fetching all ephemeral events in rooms we've joined (if required). From 84ce93c12f921063bb6c59400fcf95649a1b7f45 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 12 Jan 2023 10:29:09 +0000 Subject: [PATCH 80/82] Fix race calling `/members?at=` (#14817) Fixes #14814 --- changelog.d/14817.bugfix | 1 + synapse/storage/databases/main/stream.py | 65 +++++++++++++++++++++--- 2 files changed, 60 insertions(+), 6 deletions(-) create mode 100644 changelog.d/14817.bugfix diff --git a/changelog.d/14817.bugfix b/changelog.d/14817.bugfix new file mode 100644 index 000000000000..bb5da7926854 --- /dev/null +++ b/changelog.d/14817.bugfix @@ -0,0 +1 @@ +Fix race where calling `/members` or `/state` with an `at` parameter could fail for newly created rooms, when using multiple workers. diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py index cc27ec38042b..63d835053019 100644 --- a/synapse/storage/databases/main/stream.py +++ b/synapse/storage/databases/main/stream.py @@ -801,13 +801,66 @@ async def get_last_event_in_room_before_stream_ordering( before this stream ordering. """ - last_row = await self.get_room_event_before_stream_ordering( - room_id=room_id, - stream_ordering=end_token.stream, + def get_last_event_in_room_before_stream_ordering_txn( + txn: LoggingTransaction, + ) -> Optional[str]: + # We need to handle the fact that the stream tokens can be vector + # clocks. We do this by getting all rows between the minimum and + # maximum stream ordering in the token, plus one row less than the + # minimum stream ordering. We then filter the results against the + # token and return the first row that matches. 
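+            #
+            # (For illustration: with multiple event persisters a stream
+            # token can take a "vector clock" form such as
+            # `m3478~1.3488~2.3489`, so whether an event falls before the
+            # token depends on which writer instance persisted it, not just
+            # on a single global stream ordering.)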
+ + sql = """ + SELECT * FROM ( + SELECT instance_name, stream_ordering, topological_ordering, event_id + FROM events + LEFT JOIN rejections USING (event_id) + WHERE room_id = ? + AND ? < stream_ordering AND stream_ordering <= ? + AND NOT outlier + AND rejections.event_id IS NULL + ORDER BY stream_ordering DESC + ) AS a + UNION + SELECT * FROM ( + SELECT instance_name, stream_ordering, topological_ordering, event_id + FROM events + LEFT JOIN rejections USING (event_id) + WHERE room_id = ? + AND stream_ordering <= ? + AND NOT outlier + AND rejections.event_id IS NULL + ORDER BY stream_ordering DESC + LIMIT 1 + ) AS b + """ + txn.execute( + sql, + ( + room_id, + end_token.stream, + end_token.get_max_stream_pos(), + room_id, + end_token.stream, + ), + ) + + for instance_name, stream_ordering, topological_ordering, event_id in txn: + if _filter_results( + lower_token=None, + upper_token=end_token, + instance_name=instance_name, + topological_ordering=topological_ordering, + stream_ordering=stream_ordering, + ): + return event_id + + return None + + return await self.db_pool.runInteraction( + "get_last_event_in_room_before_stream_ordering", + get_last_event_in_room_before_stream_ordering_txn, ) - if last_row: - return last_row[2] - return None async def get_current_room_stream_token_for_room_id( self, room_id: str From ea452571998c64a0d49d11791b58053bfbd9834b Mon Sep 17 00:00:00 2001 From: "H. Shay" Date: Thu, 12 Jan 2023 10:30:54 -0800 Subject: [PATCH 81/82] 1.75.0rc2 --- CHANGES.md | 10 ++++++++++ changelog.d/14810.bugfix | 1 - changelog.d/14817.bugfix | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 5 files changed, 17 insertions(+), 3 deletions(-) delete mode 100644 changelog.d/14810.bugfix delete mode 100644 changelog.d/14817.bugfix diff --git a/CHANGES.md b/CHANGES.md index 261ce6a8642a..951a2273b598 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,13 @@ +Synapse 1.75.0rc2 (2023-01-12) +============================== + +Bugfixes +-------- + +- Fix a bug introduced in Synapse 1.75.0rc1 where device lists could be miscalculated with some sync filters. ([\#14810](https://github.com/matrix-org/synapse/issues/14810)) +- Fix race where calling `/members` or `/state` with an `at` parameter could fail for newly created rooms, when using multiple workers. ([\#14817](https://github.com/matrix-org/synapse/issues/14817)) + + Synapse 1.75.0rc1 (2023-01-10) ============================== diff --git a/changelog.d/14810.bugfix b/changelog.d/14810.bugfix deleted file mode 100644 index 379bfccffa4d..000000000000 --- a/changelog.d/14810.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug introduced in Synapse 1.75.0rc1 where device lists could be miscalculated with some sync filters. diff --git a/changelog.d/14817.bugfix b/changelog.d/14817.bugfix deleted file mode 100644 index bb5da7926854..000000000000 --- a/changelog.d/14817.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix race where calling `/members` or `/state` with an `at` parameter could fail for newly created rooms, when using multiple workers. diff --git a/debian/changelog b/debian/changelog index e02793c9967b..125b678f9322 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.75.0~rc2) stable; urgency=medium + + * New Synapse release 1.75.0rc2. + + -- Synapse Packaging team Thu, 12 Jan 2023 10:30:15 -0800 + matrix-synapse-py3 (1.75.0~rc1) stable; urgency=medium * New Synapse release 1.75.0rc1. 
diff --git a/pyproject.toml b/pyproject.toml index 740d33066e1c..12f1686d21ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.75.0rc1" +version = "1.75.0rc2" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0" From b6955673bfab5c8d553e8b43e9c50dd7b1212e2a Mon Sep 17 00:00:00 2001 From: David Robertson Date: Tue, 17 Jan 2023 11:36:22 +0000 Subject: [PATCH 82/82] 1.75.0 --- CHANGES.md | 6 ++++++ debian/changelog | 6 ++++++ pyproject.toml | 2 +- 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 951a2273b598..30f0a0674761 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,9 @@ +Synapse 1.75.0 (2023-01-17) +=========================== + +No significant changes since 1.75.0rc2. + + Synapse 1.75.0rc2 (2023-01-12) ============================== diff --git a/debian/changelog b/debian/changelog index 125b678f9322..c4be6ac67aa3 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.75.0) stable; urgency=medium + + * New Synapse release 1.75.0. + + -- Synapse Packaging team Tue, 17 Jan 2023 11:36:02 +0000 + matrix-synapse-py3 (1.75.0~rc2) stable; urgency=medium * New Synapse release 1.75.0rc2. diff --git a/pyproject.toml b/pyproject.toml index 12f1686d21ff..d3fa8b6b86af 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.75.0rc2" +version = "1.75.0" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0"