From 689db6af9659da3d9227c7c80cab68d4b07330f1 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 26 Apr 2023 15:05:49 -0400 Subject: [PATCH 01/11] Remove unneeded untyped-defs. --- mypy.ini | 1 - 1 file changed, 1 deletion(-) diff --git a/mypy.ini b/mypy.ini index 8fb87b9b7452..8b2f62bd188d 100644 --- a/mypy.ini +++ b/mypy.ini @@ -34,7 +34,6 @@ exclude = (?x) )$ [mypy-synapse.metrics._reactor_metrics] -disallow_untyped_defs = False # This module imports select.epoll. That exists on Linux, but doesn't on macOS. # See https://github.com/matrix-org/synapse/pull/11771. warn_unused_ignores = False From 8894befda2b14e7c4f70392348f3cd2b0a1c67e0 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 26 Apr 2023 15:10:11 -0400 Subject: [PATCH 02/11] Add some type ignores so we can check main/cache.py. --- mypy.ini | 1 - synapse/storage/databases/main/cache.py | 16 +++++++++------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/mypy.ini b/mypy.ini index 8b2f62bd188d..d10cadc69cb3 100644 --- a/mypy.ini +++ b/mypy.ini @@ -29,7 +29,6 @@ files = exclude = (?x) ^( |synapse/storage/databases/__init__.py - |synapse/storage/databases/main/cache.py |synapse/storage/schema/ )$ diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py index 096dec7f876e..bd07d2017143 100644 --- a/synapse/storage/databases/main/cache.py +++ b/synapse/storage/databases/main/cache.py @@ -205,13 +205,13 @@ def _process_event_stream_row(self, token: int, row: EventsStreamRow) -> None: ) elif row.type == EventsStreamCurrentStateRow.TypeId: assert isinstance(data, EventsStreamCurrentStateRow) - self._curr_state_delta_stream_cache.entity_has_changed(data.room_id, token) + self._curr_state_delta_stream_cache.entity_has_changed(data.room_id, token) # type: ignore[attr-defined] if data.type == EventTypes.Member: - self.get_rooms_for_user_with_stream_ordering.invalidate( + self.get_rooms_for_user_with_stream_ordering.invalidate( # type: ignore[attr-defined] (data.state_key,) ) - self.get_rooms_for_user.invalidate((data.state_key,)) + self.get_rooms_for_user.invalidate((data.state_key,)) # type: ignore[attr-defined] else: raise Exception("Unknown events stream row type %s" % (row.type,)) @@ -229,7 +229,7 @@ def _invalidate_caches_for_event( # This invalidates any local in-memory cached event objects, the original # process triggering the invalidation is responsible for clearing any external # cached objects. - self._invalidate_local_get_event_cache(event_id) + self._invalidate_local_get_event_cache(event_id) # type: ignore[attr-defined] self._attempt_to_invalidate_cache("have_seen_event", (room_id, event_id)) self._attempt_to_invalidate_cache("get_latest_event_ids_in_room", (room_id,)) @@ -242,10 +242,10 @@ def _invalidate_caches_for_event( self._attempt_to_invalidate_cache("_get_membership_from_event_id", (event_id,)) if not backfilled: - self._events_stream_cache.entity_has_changed(room_id, stream_ordering) + self._events_stream_cache.entity_has_changed(room_id, stream_ordering) # type: ignore[attr-defined] if redacts: - self._invalidate_local_get_event_cache(redacts) + self._invalidate_local_get_event_cache(redacts) # type: ignore[attr-defined] # Caches which might leak edits must be invalidated for the event being # redacted. 
self._attempt_to_invalidate_cache("get_relations_for_event", (redacts,)) @@ -254,7 +254,7 @@ def _invalidate_caches_for_event( self._attempt_to_invalidate_cache("get_thread_id_for_receipts", (redacts,)) if etype == EventTypes.Member: - self._membership_stream_cache.entity_has_changed(state_key, stream_ordering) + self._membership_stream_cache.entity_has_changed(state_key, stream_ordering) # type: ignore[attr-defined] self._attempt_to_invalidate_cache( "get_invited_rooms_for_local_user", (state_key,) ) @@ -378,6 +378,8 @@ def _send_invalidation_to_replication( ) if isinstance(self.database_engine, PostgresEngine): + assert self._cache_id_gen is not None + # get_next() returns a context manager which is designed to wrap # the transaction. However, we want to only get an ID when we want # to use it, here, so we need to call __enter__ manually, and have From 3e29d56606025f7b2e07cdf284fbf237a2ecc18b Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 26 Apr 2023 15:20:24 -0400 Subject: [PATCH 03/11] Add some ignores to databases/__init__.py --- mypy.ini | 1 - synapse/storage/databases/__init__.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/mypy.ini b/mypy.ini index d10cadc69cb3..3b17c59dfcd7 100644 --- a/mypy.ini +++ b/mypy.ini @@ -28,7 +28,6 @@ files = # https://docs.python.org/3/library/re.html#re.X exclude = (?x) ^( - |synapse/storage/databases/__init__.py |synapse/storage/schema/ )$ diff --git a/synapse/storage/databases/__init__.py b/synapse/storage/databases/__init__.py index ce3d1d4e942e..7aa24ccf2121 100644 --- a/synapse/storage/databases/__init__.py +++ b/synapse/storage/databases/__init__.py @@ -95,7 +95,7 @@ def __init__(self, main_store_class: Type[DataStoreT], hs: "HomeServer"): # If we're on a process that can persist events also # instantiate a `PersistEventsStore` if hs.get_instance_name() in hs.config.worker.writers.events: - persist_events = PersistEventsStore(hs, database, main, db_conn) + persist_events = PersistEventsStore(hs, database, main, db_conn) # type: ignore[arg-type] if "state" in database_config.databases: logger.info( @@ -133,6 +133,6 @@ def __init__(self, main_store_class: Type[DataStoreT], hs: "HomeServer"): # We use local variables here to ensure that the databases do not have # optional types. - self.main = main + self.main = main # type: ignore[assignment] self.state = state self.persist_events = persist_events From 33fb2321b0452af6eb3c2b0383342bf0524de648 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 26 Apr 2023 16:01:20 -0400 Subject: [PATCH 04/11] Newsfragment --- changelog.d/15496.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/15496.misc diff --git a/changelog.d/15496.misc b/changelog.d/15496.misc new file mode 100644 index 000000000000..93ceaeafc9b9 --- /dev/null +++ b/changelog.d/15496.misc @@ -0,0 +1 @@ +Improve type hints. From 556855c0fa9e041ddb2ac4025a314ba16c79feb4 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 26 Apr 2023 15:46:01 -0400 Subject: [PATCH 05/11] Rename a file to avoid a conflict. 
--- synapse/storage/schema/main/delta/31/{pushers.py => pushers_0.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename synapse/storage/schema/main/delta/31/{pushers.py => pushers_0.py} (100%) diff --git a/synapse/storage/schema/main/delta/31/pushers.py b/synapse/storage/schema/main/delta/31/pushers_0.py similarity index 100% rename from synapse/storage/schema/main/delta/31/pushers.py rename to synapse/storage/schema/main/delta/31/pushers_0.py From 9c6e91eb86df25b58e0994aff3ddfbd5e829bd0c Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 26 Apr 2023 15:48:37 -0400 Subject: [PATCH 06/11] Remove no-op functions. --- synapse/storage/schema/main/delta/20/pushers.py | 4 ---- synapse/storage/schema/main/delta/25/fts.py | 4 ---- synapse/storage/schema/main/delta/27/ts.py | 4 ---- synapse/storage/schema/main/delta/31/pushers_0.py | 4 ---- synapse/storage/schema/main/delta/31/search_update.py | 4 ---- synapse/storage/schema/main/delta/33/event_fields.py | 4 ---- synapse/storage/schema/main/delta/34/cache_stream.py | 4 ---- synapse/storage/schema/main/delta/34/received_txn_purge.py | 4 ---- synapse/storage/schema/main/delta/37/remove_auth_idx.py | 4 ---- synapse/storage/schema/main/delta/42/user_dir.py | 4 ---- synapse/storage/schema/main/delta/48/group_unique_indexes.py | 4 ---- .../storage/schema/main/delta/56/unique_user_filter_index.py | 4 ---- synapse/storage/schema/main/delta/58/06dlols_unique_idx.py | 4 ---- synapse/storage/schema/main/delta/58/11user_id_seq.py | 4 ---- synapse/storage/schema/main/delta/59/01ignored_user.py | 4 ---- synapse/storage/schema/main/delta/61/03recreate_min_depth.py | 4 ---- synapse/storage/schema/state/delta/47/state_group_seq.py | 4 ---- 17 files changed, 68 deletions(-) diff --git a/synapse/storage/schema/main/delta/20/pushers.py b/synapse/storage/schema/main/delta/20/pushers.py index 45b846e6a7d5..702faf6e669c 100644 --- a/synapse/storage/schema/main/delta/20/pushers.py +++ b/synapse/storage/schema/main/delta/20/pushers.py @@ -81,7 +81,3 @@ def run_create(cur, database_engine, *args, **kwargs): cur.execute("DROP TABLE pushers") cur.execute("ALTER TABLE pushers2 RENAME TO pushers") logger.info("Moved %d pushers to new table", count) - - -def run_upgrade(*args, **kwargs): - pass diff --git a/synapse/storage/schema/main/delta/25/fts.py b/synapse/storage/schema/main/delta/25/fts.py index 21f57825d4ed..8f8130c2ff66 100644 --- a/synapse/storage/schema/main/delta/25/fts.py +++ b/synapse/storage/schema/main/delta/25/fts.py @@ -72,7 +72,3 @@ def run_create(cur, database_engine, *args, **kwargs): ) cur.execute(sql, ("event_search", progress_json)) - - -def run_upgrade(*args, **kwargs): - pass diff --git a/synapse/storage/schema/main/delta/27/ts.py b/synapse/storage/schema/main/delta/27/ts.py index 1c6058063fb6..7bf768ec57a1 100644 --- a/synapse/storage/schema/main/delta/27/ts.py +++ b/synapse/storage/schema/main/delta/27/ts.py @@ -51,7 +51,3 @@ def run_create(cur, database_engine, *args, **kwargs): ) cur.execute(sql, ("event_origin_server_ts", progress_json)) - - -def run_upgrade(*args, **kwargs): - pass diff --git a/synapse/storage/schema/main/delta/31/pushers_0.py b/synapse/storage/schema/main/delta/31/pushers_0.py index 5be81c806a28..22a459ee2157 100644 --- a/synapse/storage/schema/main/delta/31/pushers_0.py +++ b/synapse/storage/schema/main/delta/31/pushers_0.py @@ -80,7 +80,3 @@ def run_create(cur, database_engine, *args, **kwargs): cur.execute("DROP TABLE pushers") cur.execute("ALTER TABLE pushers2 RENAME TO pushers") logger.info("Moved %d pushers to 
new table", count) - - -def run_upgrade(cur, database_engine, *args, **kwargs): - pass diff --git a/synapse/storage/schema/main/delta/31/search_update.py b/synapse/storage/schema/main/delta/31/search_update.py index b84c844e3af4..50eb35593a4f 100644 --- a/synapse/storage/schema/main/delta/31/search_update.py +++ b/synapse/storage/schema/main/delta/31/search_update.py @@ -56,7 +56,3 @@ def run_create(cur, database_engine, *args, **kwargs): ) cur.execute(sql, ("event_search_order", progress_json)) - - -def run_upgrade(cur, database_engine, *args, **kwargs): - pass diff --git a/synapse/storage/schema/main/delta/33/event_fields.py b/synapse/storage/schema/main/delta/33/event_fields.py index e928c66a8f2d..918006415167 100644 --- a/synapse/storage/schema/main/delta/33/event_fields.py +++ b/synapse/storage/schema/main/delta/33/event_fields.py @@ -51,7 +51,3 @@ def run_create(cur, database_engine, *args, **kwargs): ) cur.execute(sql, ("event_fields_sender_url", progress_json)) - - -def run_upgrade(cur, database_engine, *args, **kwargs): - pass diff --git a/synapse/storage/schema/main/delta/34/cache_stream.py b/synapse/storage/schema/main/delta/34/cache_stream.py index cf09e43e2bf2..9156731bacb3 100644 --- a/synapse/storage/schema/main/delta/34/cache_stream.py +++ b/synapse/storage/schema/main/delta/34/cache_stream.py @@ -40,7 +40,3 @@ def run_create(cur, database_engine, *args, **kwargs): for statement in get_statements(CREATE_TABLE.splitlines()): cur.execute(statement) - - -def run_upgrade(cur, database_engine, *args, **kwargs): - pass diff --git a/synapse/storage/schema/main/delta/34/received_txn_purge.py b/synapse/storage/schema/main/delta/34/received_txn_purge.py index 67d505e68bf4..536021b97b42 100644 --- a/synapse/storage/schema/main/delta/34/received_txn_purge.py +++ b/synapse/storage/schema/main/delta/34/received_txn_purge.py @@ -26,7 +26,3 @@ def run_create(cur, database_engine, *args, **kwargs): cur.execute("DELETE FROM received_transactions") cur.execute("CREATE INDEX received_transactions_ts ON received_transactions(ts)") - - -def run_upgrade(cur, database_engine, *args, **kwargs): - pass diff --git a/synapse/storage/schema/main/delta/37/remove_auth_idx.py b/synapse/storage/schema/main/delta/37/remove_auth_idx.py index a3778841699c..29599bcd1c5e 100644 --- a/synapse/storage/schema/main/delta/37/remove_auth_idx.py +++ b/synapse/storage/schema/main/delta/37/remove_auth_idx.py @@ -79,7 +79,3 @@ def run_create(cur, database_engine, *args, **kwargs): for statement in get_statements(drop_constraint.splitlines()): cur.execute(statement) - - -def run_upgrade(cur, database_engine, *args, **kwargs): - pass diff --git a/synapse/storage/schema/main/delta/42/user_dir.py b/synapse/storage/schema/main/delta/42/user_dir.py index 506f326f4db4..8edfd76289b1 100644 --- a/synapse/storage/schema/main/delta/42/user_dir.py +++ b/synapse/storage/schema/main/delta/42/user_dir.py @@ -78,7 +78,3 @@ def run_create(cur, database_engine, *args, **kwargs): cur.execute(statement) else: raise Exception("Unrecognized database engine") - - -def run_upgrade(*args, **kwargs): - pass diff --git a/synapse/storage/schema/main/delta/48/group_unique_indexes.py b/synapse/storage/schema/main/delta/48/group_unique_indexes.py index 49f5f2c00324..81ebe853c8e2 100644 --- a/synapse/storage/schema/main/delta/48/group_unique_indexes.py +++ b/synapse/storage/schema/main/delta/48/group_unique_indexes.py @@ -57,7 +57,3 @@ def run_create(cur, database_engine, *args, **kwargs): for statement in get_statements(FIX_INDEXES.splitlines()): 
cur.execute(statement) - - -def run_upgrade(*args, **kwargs): - pass diff --git a/synapse/storage/schema/main/delta/56/unique_user_filter_index.py b/synapse/storage/schema/main/delta/56/unique_user_filter_index.py index bb7296852a61..125b52acd3c4 100644 --- a/synapse/storage/schema/main/delta/56/unique_user_filter_index.py +++ b/synapse/storage/schema/main/delta/56/unique_user_filter_index.py @@ -16,10 +16,6 @@ """ -def run_upgrade(cur, database_engine, *args, **kwargs): - pass - - def run_create(cur, database_engine, *args, **kwargs): if isinstance(database_engine, PostgresEngine): select_clause = """ diff --git a/synapse/storage/schema/main/delta/58/06dlols_unique_idx.py b/synapse/storage/schema/main/delta/58/06dlols_unique_idx.py index d353f2bcb361..175bc550e2cb 100644 --- a/synapse/storage/schema/main/delta/58/06dlols_unique_idx.py +++ b/synapse/storage/schema/main/delta/58/06dlols_unique_idx.py @@ -27,10 +27,6 @@ logger = logging.getLogger(__name__) -def run_upgrade(*args, **kwargs): - pass - - def run_create(cur: Cursor, database_engine: BaseDatabaseEngine, *args, **kwargs): # some instances might already have this index, in which case we can skip this if isinstance(database_engine, PostgresEngine): diff --git a/synapse/storage/schema/main/delta/58/11user_id_seq.py b/synapse/storage/schema/main/delta/58/11user_id_seq.py index 4310ec12ce1a..fbceaa43d08c 100644 --- a/synapse/storage/schema/main/delta/58/11user_id_seq.py +++ b/synapse/storage/schema/main/delta/58/11user_id_seq.py @@ -28,7 +28,3 @@ def run_create(cur, database_engine, *args, **kwargs): next_id = find_max_generated_user_id_localpart(cur) + 1 cur.execute("CREATE SEQUENCE user_id_seq START WITH %s", (next_id,)) - - -def run_upgrade(*args, **kwargs): - pass diff --git a/synapse/storage/schema/main/delta/59/01ignored_user.py b/synapse/storage/schema/main/delta/59/01ignored_user.py index 9e8f35c1d24b..f598b76fd2c9 100644 --- a/synapse/storage/schema/main/delta/59/01ignored_user.py +++ b/synapse/storage/schema/main/delta/59/01ignored_user.py @@ -27,10 +27,6 @@ logger = logging.getLogger(__name__) -def run_upgrade(cur: Cursor, database_engine: BaseDatabaseEngine, *args, **kwargs): - pass - - def run_create(cur: Cursor, database_engine: BaseDatabaseEngine, *args, **kwargs): logger.info("Creating ignored_users table") execute_statements_from_stream(cur, StringIO(_create_commands)) diff --git a/synapse/storage/schema/main/delta/61/03recreate_min_depth.py b/synapse/storage/schema/main/delta/61/03recreate_min_depth.py index f8d7db9f2ef3..d4a51bcaf044 100644 --- a/synapse/storage/schema/main/delta/61/03recreate_min_depth.py +++ b/synapse/storage/schema/main/delta/61/03recreate_min_depth.py @@ -64,7 +64,3 @@ def run_create(cur: Cursor, database_engine: BaseDatabaseEngine, *args, **kwargs (6103, 'replace_room_depth_min_depth', '{}', 'populate_room_depth2') """ ) - - -def run_upgrade(cur: Cursor, database_engine: BaseDatabaseEngine, *args, **kwargs): - pass diff --git a/synapse/storage/schema/state/delta/47/state_group_seq.py b/synapse/storage/schema/state/delta/47/state_group_seq.py index 9fd1ccf6f792..08e7e6ff5217 100644 --- a/synapse/storage/schema/state/delta/47/state_group_seq.py +++ b/synapse/storage/schema/state/delta/47/state_group_seq.py @@ -28,7 +28,3 @@ def run_create(cur, database_engine, *args, **kwargs): start_val = row[0] + 1 cur.execute("CREATE SEQUENCE state_group_id_seq START WITH %s", (start_val,)) - - -def run_upgrade(*args, **kwargs): - pass From 85e4bb30661c00ebccf9ec2faf7808ccf625f179 Mon Sep 17 00:00:00 2001 From: 
Patrick Cloke Date: Wed, 26 Apr 2023 15:49:33 -0400 Subject: [PATCH 07/11] Add missing function parameters. --- synapse/storage/schema/main/delta/20/pushers.py | 5 ++++- synapse/storage/schema/main/delta/25/fts.py | 5 +++-- synapse/storage/schema/main/delta/27/ts.py | 4 +++- synapse/storage/schema/main/delta/30/as_users.py | 9 +++++++-- synapse/storage/schema/main/delta/31/pushers_0.py | 7 +++++-- synapse/storage/schema/main/delta/31/search_update.py | 5 +++-- synapse/storage/schema/main/delta/33/event_fields.py | 4 +++- .../storage/schema/main/delta/33/remote_media_ts.py | 10 ++++++++-- synapse/storage/schema/main/delta/34/cache_stream.py | 5 +++-- .../schema/main/delta/34/received_txn_purge.py | 5 +++-- .../storage/schema/main/delta/37/remove_auth_idx.py | 5 +++-- synapse/storage/schema/main/delta/42/user_dir.py | 5 +++-- .../schema/main/delta/48/group_unique_indexes.py | 6 ++++-- .../main/delta/50/make_event_content_nullable.py | 9 +++------ .../schema/main/delta/56/unique_user_filter_index.py | 5 +++-- .../schema/main/delta/57/local_current_membership.py | 11 +++++++++-- .../schema/main/delta/58/06dlols_unique_idx.py | 2 +- synapse/storage/schema/main/delta/58/11user_id_seq.py | 5 +++-- .../storage/schema/main/delta/59/01ignored_user.py | 2 +- .../schema/main/delta/61/03recreate_min_depth.py | 2 +- .../main/delta/68/05partial_state_rooms_triggers.py | 2 +- synapse/storage/schema/main/delta/69/01as_txn_seq.py | 5 +++-- .../main/delta/72/03bg_populate_events_columns.py | 3 ++- .../07force_update_current_state_events_membership.py | 7 ++++++- ...embership_tables_event_stream_ordering_triggers.py | 2 +- .../storage/schema/state/delta/47/state_group_seq.py | 5 +++-- 26 files changed, 89 insertions(+), 46 deletions(-) diff --git a/synapse/storage/schema/main/delta/20/pushers.py b/synapse/storage/schema/main/delta/20/pushers.py index 702faf6e669c..ac73d0a5fca0 100644 --- a/synapse/storage/schema/main/delta/20/pushers.py +++ b/synapse/storage/schema/main/delta/20/pushers.py @@ -24,10 +24,13 @@ import logging +from synapse.storage.engines import BaseDatabaseEngine +from synapse.storage.types import Cursor + logger = logging.getLogger(__name__) -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: logger.info("Porting pushers table...") cur.execute( """ diff --git a/synapse/storage/schema/main/delta/25/fts.py b/synapse/storage/schema/main/delta/25/fts.py index 8f8130c2ff66..abbff139a23c 100644 --- a/synapse/storage/schema/main/delta/25/fts.py +++ b/synapse/storage/schema/main/delta/25/fts.py @@ -14,8 +14,9 @@ import json import logging -from synapse.storage.engines import PostgresEngine, Sqlite3Engine +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine from synapse.storage.prepare_database import get_statements +from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -41,7 +42,7 @@ ) -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: if isinstance(database_engine, PostgresEngine): for statement in get_statements(POSTGRES_TABLE.splitlines()): cur.execute(statement) diff --git a/synapse/storage/schema/main/delta/27/ts.py b/synapse/storage/schema/main/delta/27/ts.py index 7bf768ec57a1..1bddb373b2c8 100644 --- a/synapse/storage/schema/main/delta/27/ts.py +++ b/synapse/storage/schema/main/delta/27/ts.py @@ -14,7 +14,9 @@ import json import logging +from synapse.storage.engines import 
BaseDatabaseEngine from synapse.storage.prepare_database import get_statements +from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -25,7 +27,7 @@ ) -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: for statement in get_statements(ALTER_TABLE.splitlines()): cur.execute(statement) diff --git a/synapse/storage/schema/main/delta/30/as_users.py b/synapse/storage/schema/main/delta/30/as_users.py index 4b4b166e37a6..f16f00320f16 100644 --- a/synapse/storage/schema/main/delta/30/as_users.py +++ b/synapse/storage/schema/main/delta/30/as_users.py @@ -14,11 +14,14 @@ import logging from synapse.config.appservice import load_appservices +from synapse.config.homeserver import HomeServerConfig +from synapse.storage.engines import BaseDatabaseEngine +from synapse.storage.types import Cursor logger = logging.getLogger(__name__) -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: # NULL indicates user was not registered by an appservice. try: cur.execute("ALTER TABLE users ADD COLUMN appservice_id TEXT") @@ -27,7 +30,9 @@ def run_create(cur, database_engine, *args, **kwargs): pass -def run_upgrade(cur, database_engine, config, *args, **kwargs): +def run_upgrade( + cur: Cursor, database_engine: BaseDatabaseEngine, config: HomeServerConfig +) -> None: cur.execute("SELECT name FROM users") rows = cur.fetchall() diff --git a/synapse/storage/schema/main/delta/31/pushers_0.py b/synapse/storage/schema/main/delta/31/pushers_0.py index 22a459ee2157..def890952e8a 100644 --- a/synapse/storage/schema/main/delta/31/pushers_0.py +++ b/synapse/storage/schema/main/delta/31/pushers_0.py @@ -20,14 +20,17 @@ import logging +from synapse.storage.engines import BaseDatabaseEngine +from synapse.storage.types import Cursor + logger = logging.getLogger(__name__) -def token_to_stream_ordering(token): +def token_to_stream_ordering(token: str) -> int: return int(token[1:].split("_")[0]) -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: logger.info("Porting pushers table, delta 31...") cur.execute( """ diff --git a/synapse/storage/schema/main/delta/31/search_update.py b/synapse/storage/schema/main/delta/31/search_update.py index 50eb35593a4f..58b12468663b 100644 --- a/synapse/storage/schema/main/delta/31/search_update.py +++ b/synapse/storage/schema/main/delta/31/search_update.py @@ -14,8 +14,9 @@ import json import logging -from synapse.storage.engines import PostgresEngine +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine from synapse.storage.prepare_database import get_statements +from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -26,7 +27,7 @@ """ -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: if not isinstance(database_engine, PostgresEngine): return diff --git a/synapse/storage/schema/main/delta/33/event_fields.py b/synapse/storage/schema/main/delta/33/event_fields.py index 918006415167..c0989b18dd11 100644 --- a/synapse/storage/schema/main/delta/33/event_fields.py +++ b/synapse/storage/schema/main/delta/33/event_fields.py @@ -14,7 +14,9 @@ import json import logging +from synapse.storage.engines import BaseDatabaseEngine from synapse.storage.prepare_database import get_statements +from synapse.storage.types import Cursor 
logger = logging.getLogger(__name__) @@ -25,7 +27,7 @@ """ -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: for statement in get_statements(ALTER_TABLE.splitlines()): cur.execute(statement) diff --git a/synapse/storage/schema/main/delta/33/remote_media_ts.py b/synapse/storage/schema/main/delta/33/remote_media_ts.py index 3907189e29fc..baf4af1206f3 100644 --- a/synapse/storage/schema/main/delta/33/remote_media_ts.py +++ b/synapse/storage/schema/main/delta/33/remote_media_ts.py @@ -14,14 +14,20 @@ import time +from synapse.config.homeserver import HomeServerConfig +from synapse.storage.engines import BaseDatabaseEngine +from synapse.storage.types import Cursor + ALTER_TABLE = "ALTER TABLE remote_media_cache ADD COLUMN last_access_ts BIGINT" -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: cur.execute(ALTER_TABLE) -def run_upgrade(cur, database_engine, *args, **kwargs): +def run_upgrade( + cur: Cursor, database_engine: BaseDatabaseEngine, config: HomeServerConfig +) -> None: cur.execute( "UPDATE remote_media_cache SET last_access_ts = ?", (int(time.time() * 1000),), diff --git a/synapse/storage/schema/main/delta/34/cache_stream.py b/synapse/storage/schema/main/delta/34/cache_stream.py index 9156731bacb3..a2581910e3af 100644 --- a/synapse/storage/schema/main/delta/34/cache_stream.py +++ b/synapse/storage/schema/main/delta/34/cache_stream.py @@ -14,8 +14,9 @@ import logging -from synapse.storage.engines import PostgresEngine +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine from synapse.storage.prepare_database import get_statements +from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -34,7 +35,7 @@ """ -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: if not isinstance(database_engine, PostgresEngine): return diff --git a/synapse/storage/schema/main/delta/34/received_txn_purge.py b/synapse/storage/schema/main/delta/34/received_txn_purge.py index 536021b97b42..f65456340c58 100644 --- a/synapse/storage/schema/main/delta/34/received_txn_purge.py +++ b/synapse/storage/schema/main/delta/34/received_txn_purge.py @@ -14,12 +14,13 @@ import logging -from synapse.storage.engines import PostgresEngine +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine +from synapse.storage.types import Cursor logger = logging.getLogger(__name__) -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: if isinstance(database_engine, PostgresEngine): cur.execute("TRUNCATE received_transactions") else: diff --git a/synapse/storage/schema/main/delta/37/remove_auth_idx.py b/synapse/storage/schema/main/delta/37/remove_auth_idx.py index 29599bcd1c5e..67e76ad8d7d5 100644 --- a/synapse/storage/schema/main/delta/37/remove_auth_idx.py +++ b/synapse/storage/schema/main/delta/37/remove_auth_idx.py @@ -14,8 +14,9 @@ import logging -from synapse.storage.engines import PostgresEngine +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine from synapse.storage.prepare_database import get_statements +from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -68,7 +69,7 @@ """ -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: for 
statement in get_statements(DROP_INDICES.splitlines()): cur.execute(statement) diff --git a/synapse/storage/schema/main/delta/42/user_dir.py b/synapse/storage/schema/main/delta/42/user_dir.py index 8edfd76289b1..3f9b8684cf03 100644 --- a/synapse/storage/schema/main/delta/42/user_dir.py +++ b/synapse/storage/schema/main/delta/42/user_dir.py @@ -14,8 +14,9 @@ import logging -from synapse.storage.engines import PostgresEngine, Sqlite3Engine +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine from synapse.storage.prepare_database import get_statements +from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -66,7 +67,7 @@ """ -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: for statement in get_statements(BOTH_TABLES.splitlines()): cur.execute(statement) diff --git a/synapse/storage/schema/main/delta/48/group_unique_indexes.py b/synapse/storage/schema/main/delta/48/group_unique_indexes.py index 81ebe853c8e2..fdc891f1984a 100644 --- a/synapse/storage/schema/main/delta/48/group_unique_indexes.py +++ b/synapse/storage/schema/main/delta/48/group_unique_indexes.py @@ -12,8 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.storage.engines import PostgresEngine + +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine from synapse.storage.prepare_database import get_statements +from synapse.storage.types import Cursor FIX_INDEXES = """ -- rebuild indexes as uniques @@ -34,7 +36,7 @@ """ -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: rowid = "ctid" if isinstance(database_engine, PostgresEngine) else "rowid" # remove duplicates from group_users & group_invites tables diff --git a/synapse/storage/schema/main/delta/50/make_event_content_nullable.py b/synapse/storage/schema/main/delta/50/make_event_content_nullable.py index acd6ad1e1fca..c3955d13baa9 100644 --- a/synapse/storage/schema/main/delta/50/make_event_content_nullable.py +++ b/synapse/storage/schema/main/delta/50/make_event_content_nullable.py @@ -53,16 +53,13 @@ import logging -from synapse.storage.engines import PostgresEngine +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine +from synapse.storage.types import Cursor logger = logging.getLogger(__name__) -def run_create(cur, database_engine, *args, **kwargs): - pass - - -def run_upgrade(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: if isinstance(database_engine, PostgresEngine): cur.execute( """ diff --git a/synapse/storage/schema/main/delta/56/unique_user_filter_index.py b/synapse/storage/schema/main/delta/56/unique_user_filter_index.py index 125b52acd3c4..6f54468f78b1 100644 --- a/synapse/storage/schema/main/delta/56/unique_user_filter_index.py +++ b/synapse/storage/schema/main/delta/56/unique_user_filter_index.py @@ -1,8 +1,9 @@ import logging from io import StringIO -from synapse.storage.engines import PostgresEngine +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine from synapse.storage.prepare_database import execute_statements_from_stream +from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -16,7 +17,7 @@ """ -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: 
if isinstance(database_engine, PostgresEngine): select_clause = """ SELECT DISTINCT ON (user_id, filter_id) user_id, filter_id, filter_json diff --git a/synapse/storage/schema/main/delta/57/local_current_membership.py b/synapse/storage/schema/main/delta/57/local_current_membership.py index d25093c19fde..9acfac7e0104 100644 --- a/synapse/storage/schema/main/delta/57/local_current_membership.py +++ b/synapse/storage/schema/main/delta/57/local_current_membership.py @@ -27,7 +27,14 @@ # equivalent behaviour as if the server had remained in the room). -def run_upgrade(cur, database_engine, config, *args, **kwargs): +from synapse.config.homeserver import HomeServerConfig +from synapse.storage.engines import BaseDatabaseEngine +from synapse.storage.types import Cursor + + +def run_upgrade( + cur: Cursor, database_engine: BaseDatabaseEngine, config: HomeServerConfig +) -> None: # We need to do the insert in `run_upgrade` section as we don't have access # to `config` in `run_create`. @@ -77,7 +84,7 @@ def run_upgrade(cur, database_engine, config, *args, **kwargs): ) -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: cur.execute( """ CREATE TABLE local_current_membership ( diff --git a/synapse/storage/schema/main/delta/58/06dlols_unique_idx.py b/synapse/storage/schema/main/delta/58/06dlols_unique_idx.py index 175bc550e2cb..c4b3f2286986 100644 --- a/synapse/storage/schema/main/delta/58/06dlols_unique_idx.py +++ b/synapse/storage/schema/main/delta/58/06dlols_unique_idx.py @@ -27,7 +27,7 @@ logger = logging.getLogger(__name__) -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: # some instances might already have this index, in which case we can skip this if isinstance(database_engine, PostgresEngine): cur.execute( diff --git a/synapse/storage/schema/main/delta/58/11user_id_seq.py b/synapse/storage/schema/main/delta/58/11user_id_seq.py index fbceaa43d08c..567d44ed6a8b 100644 --- a/synapse/storage/schema/main/delta/58/11user_id_seq.py +++ b/synapse/storage/schema/main/delta/58/11user_id_seq.py @@ -19,10 +19,11 @@ from synapse.storage.databases.main.registration import ( find_max_generated_user_id_localpart, ) -from synapse.storage.engines import PostgresEngine +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine +from synapse.storage.types import Cursor -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: if not isinstance(database_engine, PostgresEngine): return diff --git a/synapse/storage/schema/main/delta/59/01ignored_user.py b/synapse/storage/schema/main/delta/59/01ignored_user.py index f598b76fd2c9..ff2b531ea23a 100644 --- a/synapse/storage/schema/main/delta/59/01ignored_user.py +++ b/synapse/storage/schema/main/delta/59/01ignored_user.py @@ -27,7 +27,7 @@ logger = logging.getLogger(__name__) -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: logger.info("Creating ignored_users table") execute_statements_from_stream(cur, StringIO(_create_commands)) diff --git a/synapse/storage/schema/main/delta/61/03recreate_min_depth.py b/synapse/storage/schema/main/delta/61/03recreate_min_depth.py index d4a51bcaf044..b26a51659d4e 100644 --- a/synapse/storage/schema/main/delta/61/03recreate_min_depth.py +++ 
b/synapse/storage/schema/main/delta/61/03recreate_min_depth.py @@ -20,7 +20,7 @@ from synapse.storage.types import Cursor -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: if not isinstance(database_engine, PostgresEngine): # this only applies to postgres - sqlite does not distinguish between big and # little ints. diff --git a/synapse/storage/schema/main/delta/68/05partial_state_rooms_triggers.py b/synapse/storage/schema/main/delta/68/05partial_state_rooms_triggers.py index a2ec4fc26edb..5f58f84e57e2 100644 --- a/synapse/storage/schema/main/delta/68/05partial_state_rooms_triggers.py +++ b/synapse/storage/schema/main/delta/68/05partial_state_rooms_triggers.py @@ -22,7 +22,7 @@ from synapse.storage.types import Cursor -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: # complain if the room_id in partial_state_events doesn't match # that in `events`. We already have a fk constraint which ensures that the event # exists in `events`, so all we have to do is raise if there is a row with a diff --git a/synapse/storage/schema/main/delta/69/01as_txn_seq.py b/synapse/storage/schema/main/delta/69/01as_txn_seq.py index 24bd4b391eee..c48dfa1c3038 100644 --- a/synapse/storage/schema/main/delta/69/01as_txn_seq.py +++ b/synapse/storage/schema/main/delta/69/01as_txn_seq.py @@ -17,10 +17,11 @@ Adds a postgres SEQUENCE for generating application service transaction IDs. """ -from synapse.storage.engines import PostgresEngine +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine +from synapse.storage.types import Cursor -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: if isinstance(database_engine, PostgresEngine): # If we already have some AS TXNs we want to start from the current # maximum value. There are two potential places this is stored - the diff --git a/synapse/storage/schema/main/delta/72/03bg_populate_events_columns.py b/synapse/storage/schema/main/delta/72/03bg_populate_events_columns.py index 55a5d092cc67..492b59e9b3ac 100644 --- a/synapse/storage/schema/main/delta/72/03bg_populate_events_columns.py +++ b/synapse/storage/schema/main/delta/72/03bg_populate_events_columns.py @@ -14,10 +14,11 @@ import json +from synapse.storage.engines import BaseDatabaseEngine from synapse.storage.types import Cursor -def run_create(cur: Cursor, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: """Add a bg update to populate the `state_key` and `rejection_reason` columns of `events`""" # we know that any new events will have the columns populated (and that has been diff --git a/synapse/storage/schema/main/delta/72/07force_update_current_state_events_membership.py b/synapse/storage/schema/main/delta/72/07force_update_current_state_events_membership.py index b5853d125c6a..003e9f39c60b 100644 --- a/synapse/storage/schema/main/delta/72/07force_update_current_state_events_membership.py +++ b/synapse/storage/schema/main/delta/72/07force_update_current_state_events_membership.py @@ -19,9 +19,14 @@ Note the background job must still remain defined in the database class. 
""" +from synapse.config.homeserver import HomeServerConfig +from synapse.storage.engines import BaseDatabaseEngine +from synapse.storage.types import Cursor -def run_upgrade(cur, database_engine, *args, **kwargs): +def run_upgrade( + cur: Cursor, database_engine: BaseDatabaseEngine, config: HomeServerConfig +) -> None: cur.execute("SELECT update_name FROM background_updates") rows = cur.fetchall() for row in rows: diff --git a/synapse/storage/schema/main/delta/74/04_membership_tables_event_stream_ordering_triggers.py b/synapse/storage/schema/main/delta/74/04_membership_tables_event_stream_ordering_triggers.py index e32e9083b359..763ea7460701 100644 --- a/synapse/storage/schema/main/delta/74/04_membership_tables_event_stream_ordering_triggers.py +++ b/synapse/storage/schema/main/delta/74/04_membership_tables_event_stream_ordering_triggers.py @@ -21,7 +21,7 @@ from synapse.storage.types import Cursor -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: # Complain if the `event_stream_ordering` in membership tables doesn't match # the `stream_ordering` row with the same `event_id` in `events`. if isinstance(database_engine, Sqlite3Engine): diff --git a/synapse/storage/schema/state/delta/47/state_group_seq.py b/synapse/storage/schema/state/delta/47/state_group_seq.py index 08e7e6ff5217..f7416c09a2bf 100644 --- a/synapse/storage/schema/state/delta/47/state_group_seq.py +++ b/synapse/storage/schema/state/delta/47/state_group_seq.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.storage.engines import PostgresEngine +from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine +from synapse.storage.types import Cursor -def run_create(cur, database_engine, *args, **kwargs): +def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: if isinstance(database_engine, PostgresEngine): # if we already have some state groups, we want to start making new # ones with a higher id. From 91f74c205a37437767e34f3e83379907d9e9179c Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 26 Apr 2023 15:55:43 -0400 Subject: [PATCH 08/11] Fix-up type hints. 
--- synapse/storage/schema/main/delta/20/pushers.py | 4 ++-- synapse/storage/schema/main/delta/30/as_users.py | 5 +++-- synapse/storage/schema/main/delta/31/pushers_0.py | 4 ++-- .../schema/main/delta/50/make_event_content_nullable.py | 8 ++++++-- synapse/storage/schema/main/delta/69/01as_txn_seq.py | 2 ++ .../schema/main/delta/72/03bg_populate_events_columns.py | 4 +++- .../main/delta/73/10_update_sqlite_fts4_tokenizer.py | 2 ++ synapse/storage/schema/state/delta/47/state_group_seq.py | 1 + 8 files changed, 21 insertions(+), 9 deletions(-) diff --git a/synapse/storage/schema/main/delta/20/pushers.py b/synapse/storage/schema/main/delta/20/pushers.py index ac73d0a5fca0..9180af71ee9c 100644 --- a/synapse/storage/schema/main/delta/20/pushers.py +++ b/synapse/storage/schema/main/delta/20/pushers.py @@ -64,8 +64,8 @@ def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: """ ) count = 0 - for row in cur.fetchall(): - row = list(row) + for tuple_row in cur.fetchall(): + row = list(tuple_row) row[8] = bytes(row[8]).decode("utf-8") row[11] = bytes(row[11]).decode("utf-8") cur.execute( diff --git a/synapse/storage/schema/main/delta/30/as_users.py b/synapse/storage/schema/main/delta/30/as_users.py index f16f00320f16..c8d2ff9e1df9 100644 --- a/synapse/storage/schema/main/delta/30/as_users.py +++ b/synapse/storage/schema/main/delta/30/as_users.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging +from typing import Dict, Iterable, List, Tuple, cast from synapse.config.appservice import load_appservices from synapse.config.homeserver import HomeServerConfig @@ -34,7 +35,7 @@ def run_upgrade( cur: Cursor, database_engine: BaseDatabaseEngine, config: HomeServerConfig ) -> None: cur.execute("SELECT name FROM users") - rows = cur.fetchall() + rows = cast(Iterable[Tuple[str]], cur.fetchall()) config_files = [] try: @@ -44,7 +45,7 @@ def run_upgrade( appservices = load_appservices(config.server.server_name, config_files) - owned = {} + owned: Dict[str, List[str]] = {} for row in rows: user_id = row[0] diff --git a/synapse/storage/schema/main/delta/31/pushers_0.py b/synapse/storage/schema/main/delta/31/pushers_0.py index def890952e8a..6ebc3d805211 100644 --- a/synapse/storage/schema/main/delta/31/pushers_0.py +++ b/synapse/storage/schema/main/delta/31/pushers_0.py @@ -64,8 +64,8 @@ def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: """ ) count = 0 - for row in cur.fetchall(): - row = list(row) + for tuple_row in cur.fetchall(): + row = list(tuple_row) row[12] = token_to_stream_ordering(row[12]) cur.execute( """ diff --git a/synapse/storage/schema/main/delta/50/make_event_content_nullable.py b/synapse/storage/schema/main/delta/50/make_event_content_nullable.py index c3955d13baa9..6148725dc8f4 100644 --- a/synapse/storage/schema/main/delta/50/make_event_content_nullable.py +++ b/synapse/storage/schema/main/delta/50/make_event_content_nullable.py @@ -73,7 +73,9 @@ def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: cur.execute( "SELECT sql FROM sqlite_master WHERE tbl_name='events' AND type='table'" ) - (oldsql,) = cur.fetchone() + row = cur.fetchone() + assert row is not None + (oldsql,) = row sql = oldsql.replace("content TEXT NOT NULL", "content TEXT") if sql == oldsql: @@ -82,7 +84,9 @@ def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: logger.info("Replacing definition of 'events' with: %s", sql) cur.execute("PRAGMA schema_version") - 
(oldver,) = cur.fetchone() + row = cur.fetchone() + assert row is not None + (oldver,) = row cur.execute("PRAGMA writable_schema=ON") cur.execute( "UPDATE sqlite_master SET sql=? WHERE tbl_name='events' AND type='table'", diff --git a/synapse/storage/schema/main/delta/69/01as_txn_seq.py b/synapse/storage/schema/main/delta/69/01as_txn_seq.py index c48dfa1c3038..94b577954ec9 100644 --- a/synapse/storage/schema/main/delta/69/01as_txn_seq.py +++ b/synapse/storage/schema/main/delta/69/01as_txn_seq.py @@ -31,10 +31,12 @@ def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: cur.execute("SELECT COALESCE(max(txn_id), 0) FROM application_services_txns") row = cur.fetchone() + assert row is not None txn_max = row[0] cur.execute("SELECT COALESCE(max(last_txn), 0) FROM application_services_state") row = cur.fetchone() + assert row is not None last_txn_max = row[0] start_val = max(last_txn_max, txn_max) + 1 diff --git a/synapse/storage/schema/main/delta/72/03bg_populate_events_columns.py b/synapse/storage/schema/main/delta/72/03bg_populate_events_columns.py index 492b59e9b3ac..16e8b8f65682 100644 --- a/synapse/storage/schema/main/delta/72/03bg_populate_events_columns.py +++ b/synapse/storage/schema/main/delta/72/03bg_populate_events_columns.py @@ -28,7 +28,9 @@ def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: # current min and max stream orderings, since that is guaranteed to include all # the events that were stored before the new columns were added. cur.execute("SELECT MIN(stream_ordering), MAX(stream_ordering) FROM events") - (min_stream_ordering, max_stream_ordering) = cur.fetchone() + row = cur.fetchone() + assert row is not None + (min_stream_ordering, max_stream_ordering) = row if min_stream_ordering is None: # no rows, nothing to do. diff --git a/synapse/storage/schema/main/delta/73/10_update_sqlite_fts4_tokenizer.py b/synapse/storage/schema/main/delta/73/10_update_sqlite_fts4_tokenizer.py index 3de0a709eba7..0d914bc4358d 100644 --- a/synapse/storage/schema/main/delta/73/10_update_sqlite_fts4_tokenizer.py +++ b/synapse/storage/schema/main/delta/73/10_update_sqlite_fts4_tokenizer.py @@ -38,6 +38,7 @@ def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: # Re-run the background job to re-populate the event_search table. cur.execute("SELECT MIN(stream_ordering) FROM events") row = cur.fetchone() + assert row is not None min_stream_id = row[0] # If there are not any events, nothing to do. @@ -46,6 +47,7 @@ def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: cur.execute("SELECT MAX(stream_ordering) FROM events") row = cur.fetchone() + assert row is not None max_stream_id = row[0] progress = { diff --git a/synapse/storage/schema/state/delta/47/state_group_seq.py b/synapse/storage/schema/state/delta/47/state_group_seq.py index f7416c09a2bf..df93bb94099d 100644 --- a/synapse/storage/schema/state/delta/47/state_group_seq.py +++ b/synapse/storage/schema/state/delta/47/state_group_seq.py @@ -22,6 +22,7 @@ def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: # ones with a higher id. cur.execute("SELECT max(id) FROM state_groups") row = cur.fetchone() + assert row is not None if row[0] is None: start_val = 1 From 7978dc9c493424e2d3d60e7c13c1481103b3063b Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 26 Apr 2023 15:59:53 -0400 Subject: [PATCH 09/11] Use LoggingTransaction instead of Cursor since we use execute_batch. 
--- synapse/storage/prepare_database.py | 8 +++++--- synapse/storage/schema/main/delta/20/pushers.py | 4 ++-- synapse/storage/schema/main/delta/25/fts.py | 4 ++-- synapse/storage/schema/main/delta/27/ts.py | 4 ++-- synapse/storage/schema/main/delta/30/as_users.py | 8 +++++--- synapse/storage/schema/main/delta/31/pushers_0.py | 4 ++-- synapse/storage/schema/main/delta/31/search_update.py | 4 ++-- synapse/storage/schema/main/delta/33/event_fields.py | 4 ++-- synapse/storage/schema/main/delta/33/remote_media_ts.py | 8 +++++--- synapse/storage/schema/main/delta/34/cache_stream.py | 4 ++-- .../storage/schema/main/delta/34/received_txn_purge.py | 4 ++-- synapse/storage/schema/main/delta/37/remove_auth_idx.py | 4 ++-- synapse/storage/schema/main/delta/42/user_dir.py | 4 ++-- .../storage/schema/main/delta/48/group_unique_indexes.py | 4 ++-- .../schema/main/delta/50/make_event_content_nullable.py | 4 ++-- .../schema/main/delta/56/unique_user_filter_index.py | 4 ++-- .../schema/main/delta/57/local_current_membership.py | 8 +++++--- .../storage/schema/main/delta/58/06dlols_unique_idx.py | 4 ++-- synapse/storage/schema/main/delta/58/11user_id_seq.py | 4 ++-- synapse/storage/schema/main/delta/59/01ignored_user.py | 4 ++-- .../storage/schema/main/delta/61/03recreate_min_depth.py | 4 ++-- .../main/delta/68/05partial_state_rooms_triggers.py | 4 ++-- synapse/storage/schema/main/delta/69/01as_txn_seq.py | 4 ++-- .../schema/main/delta/72/03bg_populate_events_columns.py | 4 ++-- .../72/07force_update_current_state_events_membership.py | 6 ++++-- .../main/delta/73/10_update_sqlite_fts4_tokenizer.py | 4 ++-- ...04_membership_tables_event_stream_ordering_triggers.py | 4 ++-- synapse/storage/schema/state/delta/47/state_group_seq.py | 4 ++-- 28 files changed, 70 insertions(+), 60 deletions(-) diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index 2a1c6fa31bc2..38b7abd8010e 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -22,7 +22,7 @@ from typing_extensions import Counter as CounterType from synapse.config.homeserver import HomeServerConfig -from synapse.storage.database import LoggingDatabaseConnection +from synapse.storage.database import LoggingDatabaseConnection, LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine from synapse.storage.schema import SCHEMA_COMPAT_VERSION, SCHEMA_VERSION from synapse.storage.types import Cursor @@ -168,7 +168,9 @@ def prepare_database( def _setup_new_database( - cur: Cursor, database_engine: BaseDatabaseEngine, databases: Collection[str] + cur: LoggingTransaction, + database_engine: BaseDatabaseEngine, + databases: Collection[str], ) -> None: """Sets up the physical database by finding a base set of "full schemas" and then applying any necessary deltas, including schemas from the given data @@ -289,7 +291,7 @@ def _setup_new_database( def _upgrade_existing_database( - cur: Cursor, + cur: LoggingTransaction, current_schema_state: _SchemaState, database_engine: BaseDatabaseEngine, config: Optional[HomeServerConfig], diff --git a/synapse/storage/schema/main/delta/20/pushers.py b/synapse/storage/schema/main/delta/20/pushers.py index 9180af71ee9c..08ae0efc2112 100644 --- a/synapse/storage/schema/main/delta/20/pushers.py +++ b/synapse/storage/schema/main/delta/20/pushers.py @@ -24,13 +24,13 @@ import logging +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine -from synapse.storage.types 
import Cursor logger = logging.getLogger(__name__) -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: logger.info("Porting pushers table...") cur.execute( """ diff --git a/synapse/storage/schema/main/delta/25/fts.py b/synapse/storage/schema/main/delta/25/fts.py index abbff139a23c..831f8e914d76 100644 --- a/synapse/storage/schema/main/delta/25/fts.py +++ b/synapse/storage/schema/main/delta/25/fts.py @@ -14,9 +14,9 @@ import json import logging +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine from synapse.storage.prepare_database import get_statements -from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -42,7 +42,7 @@ ) -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: if isinstance(database_engine, PostgresEngine): for statement in get_statements(POSTGRES_TABLE.splitlines()): cur.execute(statement) diff --git a/synapse/storage/schema/main/delta/27/ts.py b/synapse/storage/schema/main/delta/27/ts.py index 1bddb373b2c8..8962afdedae0 100644 --- a/synapse/storage/schema/main/delta/27/ts.py +++ b/synapse/storage/schema/main/delta/27/ts.py @@ -14,9 +14,9 @@ import json import logging +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine from synapse.storage.prepare_database import get_statements -from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -27,7 +27,7 @@ ) -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: for statement in get_statements(ALTER_TABLE.splitlines()): cur.execute(statement) diff --git a/synapse/storage/schema/main/delta/30/as_users.py b/synapse/storage/schema/main/delta/30/as_users.py index c8d2ff9e1df9..b9d8df12313c 100644 --- a/synapse/storage/schema/main/delta/30/as_users.py +++ b/synapse/storage/schema/main/delta/30/as_users.py @@ -16,13 +16,13 @@ from synapse.config.appservice import load_appservices from synapse.config.homeserver import HomeServerConfig +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine -from synapse.storage.types import Cursor logger = logging.getLogger(__name__) -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: # NULL indicates user was not registered by an appservice. 
try: cur.execute("ALTER TABLE users ADD COLUMN appservice_id TEXT") @@ -32,7 +32,9 @@ def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: def run_upgrade( - cur: Cursor, database_engine: BaseDatabaseEngine, config: HomeServerConfig + cur: LoggingTransaction, + database_engine: BaseDatabaseEngine, + config: HomeServerConfig, ) -> None: cur.execute("SELECT name FROM users") rows = cast(Iterable[Tuple[str]], cur.fetchall()) diff --git a/synapse/storage/schema/main/delta/31/pushers_0.py b/synapse/storage/schema/main/delta/31/pushers_0.py index 6ebc3d805211..e772e2dc65a0 100644 --- a/synapse/storage/schema/main/delta/31/pushers_0.py +++ b/synapse/storage/schema/main/delta/31/pushers_0.py @@ -20,8 +20,8 @@ import logging +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine -from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -30,7 +30,7 @@ def token_to_stream_ordering(token: str) -> int: return int(token[1:].split("_")[0]) -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: logger.info("Porting pushers table, delta 31...") cur.execute( """ diff --git a/synapse/storage/schema/main/delta/31/search_update.py b/synapse/storage/schema/main/delta/31/search_update.py index 58b12468663b..e20e92e454c6 100644 --- a/synapse/storage/schema/main/delta/31/search_update.py +++ b/synapse/storage/schema/main/delta/31/search_update.py @@ -14,9 +14,9 @@ import json import logging +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine from synapse.storage.prepare_database import get_statements -from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -27,7 +27,7 @@ """ -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: if not isinstance(database_engine, PostgresEngine): return diff --git a/synapse/storage/schema/main/delta/33/event_fields.py b/synapse/storage/schema/main/delta/33/event_fields.py index c0989b18dd11..8d806f5b525c 100644 --- a/synapse/storage/schema/main/delta/33/event_fields.py +++ b/synapse/storage/schema/main/delta/33/event_fields.py @@ -14,9 +14,9 @@ import json import logging +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine from synapse.storage.prepare_database import get_statements -from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -27,7 +27,7 @@ """ -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: for statement in get_statements(ALTER_TABLE.splitlines()): cur.execute(statement) diff --git a/synapse/storage/schema/main/delta/33/remote_media_ts.py b/synapse/storage/schema/main/delta/33/remote_media_ts.py index baf4af1206f3..35499e43b526 100644 --- a/synapse/storage/schema/main/delta/33/remote_media_ts.py +++ b/synapse/storage/schema/main/delta/33/remote_media_ts.py @@ -15,18 +15,20 @@ import time from synapse.config.homeserver import HomeServerConfig +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine -from synapse.storage.types import Cursor ALTER_TABLE = "ALTER TABLE remote_media_cache ADD COLUMN last_access_ts 
BIGINT" -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: cur.execute(ALTER_TABLE) def run_upgrade( - cur: Cursor, database_engine: BaseDatabaseEngine, config: HomeServerConfig + cur: LoggingTransaction, + database_engine: BaseDatabaseEngine, + config: HomeServerConfig, ) -> None: cur.execute( "UPDATE remote_media_cache SET last_access_ts = ?", diff --git a/synapse/storage/schema/main/delta/34/cache_stream.py b/synapse/storage/schema/main/delta/34/cache_stream.py index a2581910e3af..682c86da1abd 100644 --- a/synapse/storage/schema/main/delta/34/cache_stream.py +++ b/synapse/storage/schema/main/delta/34/cache_stream.py @@ -14,9 +14,9 @@ import logging +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine from synapse.storage.prepare_database import get_statements -from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -35,7 +35,7 @@ """ -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: if not isinstance(database_engine, PostgresEngine): return diff --git a/synapse/storage/schema/main/delta/34/received_txn_purge.py b/synapse/storage/schema/main/delta/34/received_txn_purge.py index f65456340c58..dcfe3bc45a97 100644 --- a/synapse/storage/schema/main/delta/34/received_txn_purge.py +++ b/synapse/storage/schema/main/delta/34/received_txn_purge.py @@ -14,13 +14,13 @@ import logging +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine -from synapse.storage.types import Cursor logger = logging.getLogger(__name__) -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: if isinstance(database_engine, PostgresEngine): cur.execute("TRUNCATE received_transactions") else: diff --git a/synapse/storage/schema/main/delta/37/remove_auth_idx.py b/synapse/storage/schema/main/delta/37/remove_auth_idx.py index 67e76ad8d7d5..d672f9b43cdf 100644 --- a/synapse/storage/schema/main/delta/37/remove_auth_idx.py +++ b/synapse/storage/schema/main/delta/37/remove_auth_idx.py @@ -14,9 +14,9 @@ import logging +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine from synapse.storage.prepare_database import get_statements -from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -69,7 +69,7 @@ """ -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: for statement in get_statements(DROP_INDICES.splitlines()): cur.execute(statement) diff --git a/synapse/storage/schema/main/delta/42/user_dir.py b/synapse/storage/schema/main/delta/42/user_dir.py index 3f9b8684cf03..7e5c307c628f 100644 --- a/synapse/storage/schema/main/delta/42/user_dir.py +++ b/synapse/storage/schema/main/delta/42/user_dir.py @@ -14,9 +14,9 @@ import logging +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine from synapse.storage.prepare_database import get_statements -from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -67,7 +67,7 @@ """ -def 
run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: for statement in get_statements(BOTH_TABLES.splitlines()): cur.execute(statement) diff --git a/synapse/storage/schema/main/delta/48/group_unique_indexes.py b/synapse/storage/schema/main/delta/48/group_unique_indexes.py index fdc891f1984a..ad2da4c8af84 100644 --- a/synapse/storage/schema/main/delta/48/group_unique_indexes.py +++ b/synapse/storage/schema/main/delta/48/group_unique_indexes.py @@ -13,9 +13,9 @@ # limitations under the License. +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine from synapse.storage.prepare_database import get_statements -from synapse.storage.types import Cursor FIX_INDEXES = """ -- rebuild indexes as uniques @@ -36,7 +36,7 @@ """ -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: rowid = "ctid" if isinstance(database_engine, PostgresEngine) else "rowid" # remove duplicates from group_users & group_invites tables diff --git a/synapse/storage/schema/main/delta/50/make_event_content_nullable.py b/synapse/storage/schema/main/delta/50/make_event_content_nullable.py index 6148725dc8f4..3e8a348b8aad 100644 --- a/synapse/storage/schema/main/delta/50/make_event_content_nullable.py +++ b/synapse/storage/schema/main/delta/50/make_event_content_nullable.py @@ -53,13 +53,13 @@ import logging +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine -from synapse.storage.types import Cursor logger = logging.getLogger(__name__) -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: if isinstance(database_engine, PostgresEngine): cur.execute( """ diff --git a/synapse/storage/schema/main/delta/56/unique_user_filter_index.py b/synapse/storage/schema/main/delta/56/unique_user_filter_index.py index 6f54468f78b1..2461f87d7727 100644 --- a/synapse/storage/schema/main/delta/56/unique_user_filter_index.py +++ b/synapse/storage/schema/main/delta/56/unique_user_filter_index.py @@ -1,9 +1,9 @@ import logging from io import StringIO +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine from synapse.storage.prepare_database import execute_statements_from_stream -from synapse.storage.types import Cursor logger = logging.getLogger(__name__) @@ -17,7 +17,7 @@ """ -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: if isinstance(database_engine, PostgresEngine): select_clause = """ SELECT DISTINCT ON (user_id, filter_id) user_id, filter_id, filter_json diff --git a/synapse/storage/schema/main/delta/57/local_current_membership.py b/synapse/storage/schema/main/delta/57/local_current_membership.py index 9acfac7e0104..cc0f2109bb23 100644 --- a/synapse/storage/schema/main/delta/57/local_current_membership.py +++ b/synapse/storage/schema/main/delta/57/local_current_membership.py @@ -28,12 +28,14 @@ from synapse.config.homeserver import HomeServerConfig +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine -from synapse.storage.types import Cursor def 
run_upgrade( - cur: Cursor, database_engine: BaseDatabaseEngine, config: HomeServerConfig + cur: LoggingTransaction, + database_engine: BaseDatabaseEngine, + config: HomeServerConfig, ) -> None: # We need to do the insert in `run_upgrade` section as we don't have access # to `config` in `run_create`. @@ -84,7 +86,7 @@ def run_upgrade( ) -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: cur.execute( """ CREATE TABLE local_current_membership ( diff --git a/synapse/storage/schema/main/delta/58/06dlols_unique_idx.py b/synapse/storage/schema/main/delta/58/06dlols_unique_idx.py index c4b3f2286986..4eaab9e08600 100644 --- a/synapse/storage/schema/main/delta/58/06dlols_unique_idx.py +++ b/synapse/storage/schema/main/delta/58/06dlols_unique_idx.py @@ -20,14 +20,14 @@ import logging from io import StringIO +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine from synapse.storage.prepare_database import execute_statements_from_stream -from synapse.storage.types import Cursor logger = logging.getLogger(__name__) -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: # some instances might already have this index, in which case we can skip this if isinstance(database_engine, PostgresEngine): cur.execute( diff --git a/synapse/storage/schema/main/delta/58/11user_id_seq.py b/synapse/storage/schema/main/delta/58/11user_id_seq.py index 567d44ed6a8b..32f7e0a252c7 100644 --- a/synapse/storage/schema/main/delta/58/11user_id_seq.py +++ b/synapse/storage/schema/main/delta/58/11user_id_seq.py @@ -16,14 +16,14 @@ Adds a postgres SEQUENCE for generating guest user IDs. 
""" +from synapse.storage.database import LoggingTransaction from synapse.storage.databases.main.registration import ( find_max_generated_user_id_localpart, ) from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine -from synapse.storage.types import Cursor -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: if not isinstance(database_engine, PostgresEngine): return diff --git a/synapse/storage/schema/main/delta/59/01ignored_user.py b/synapse/storage/schema/main/delta/59/01ignored_user.py index ff2b531ea23a..c53e2bade25c 100644 --- a/synapse/storage/schema/main/delta/59/01ignored_user.py +++ b/synapse/storage/schema/main/delta/59/01ignored_user.py @@ -20,14 +20,14 @@ from io import StringIO from synapse.storage._base import db_to_json +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine from synapse.storage.prepare_database import execute_statements_from_stream -from synapse.storage.types import Cursor logger = logging.getLogger(__name__) -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: logger.info("Creating ignored_users table") execute_statements_from_stream(cur, StringIO(_create_commands)) diff --git a/synapse/storage/schema/main/delta/61/03recreate_min_depth.py b/synapse/storage/schema/main/delta/61/03recreate_min_depth.py index b26a51659d4e..4a06b65888df 100644 --- a/synapse/storage/schema/main/delta/61/03recreate_min_depth.py +++ b/synapse/storage/schema/main/delta/61/03recreate_min_depth.py @@ -16,11 +16,11 @@ This migration handles the process of changing the type of `room_depth.min_depth` to a BIGINT. """ +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine -from synapse.storage.types import Cursor -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: if not isinstance(database_engine, PostgresEngine): # this only applies to postgres - sqlite does not distinguish between big and # little ints. diff --git a/synapse/storage/schema/main/delta/68/05partial_state_rooms_triggers.py b/synapse/storage/schema/main/delta/68/05partial_state_rooms_triggers.py index 5f58f84e57e2..9210026ddee9 100644 --- a/synapse/storage/schema/main/delta/68/05partial_state_rooms_triggers.py +++ b/synapse/storage/schema/main/delta/68/05partial_state_rooms_triggers.py @@ -18,11 +18,11 @@ Triggers cannot be expressed in .sql files, so we have to use a separate file. """ +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine -from synapse.storage.types import Cursor -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: # complain if the room_id in partial_state_events doesn't match # that in `events`. 
We already have a fk constraint which ensures that the event # exists in `events`, so all we have to do is raise if there is a row with a diff --git a/synapse/storage/schema/main/delta/69/01as_txn_seq.py b/synapse/storage/schema/main/delta/69/01as_txn_seq.py index 94b577954ec9..6c112425f2f0 100644 --- a/synapse/storage/schema/main/delta/69/01as_txn_seq.py +++ b/synapse/storage/schema/main/delta/69/01as_txn_seq.py @@ -17,11 +17,11 @@ Adds a postgres SEQUENCE for generating application service transaction IDs. """ +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine -from synapse.storage.types import Cursor -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: if isinstance(database_engine, PostgresEngine): # If we already have some AS TXNs we want to start from the current # maximum value. There are two potential places this is stored - the diff --git a/synapse/storage/schema/main/delta/72/03bg_populate_events_columns.py b/synapse/storage/schema/main/delta/72/03bg_populate_events_columns.py index 16e8b8f65682..2ec1830c6ffb 100644 --- a/synapse/storage/schema/main/delta/72/03bg_populate_events_columns.py +++ b/synapse/storage/schema/main/delta/72/03bg_populate_events_columns.py @@ -14,11 +14,11 @@ import json +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine -from synapse.storage.types import Cursor -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: """Add a bg update to populate the `state_key` and `rejection_reason` columns of `events`""" # we know that any new events will have the columns populated (and that has been diff --git a/synapse/storage/schema/main/delta/72/07force_update_current_state_events_membership.py b/synapse/storage/schema/main/delta/72/07force_update_current_state_events_membership.py index 003e9f39c60b..5c3e3584a21b 100644 --- a/synapse/storage/schema/main/delta/72/07force_update_current_state_events_membership.py +++ b/synapse/storage/schema/main/delta/72/07force_update_current_state_events_membership.py @@ -20,12 +20,14 @@ Note the background job must still remain defined in the database class. """ from synapse.config.homeserver import HomeServerConfig +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine -from synapse.storage.types import Cursor def run_upgrade( - cur: Cursor, database_engine: BaseDatabaseEngine, config: HomeServerConfig + cur: LoggingTransaction, + database_engine: BaseDatabaseEngine, + config: HomeServerConfig, ) -> None: cur.execute("SELECT update_name FROM background_updates") rows = cur.fetchall() diff --git a/synapse/storage/schema/main/delta/73/10_update_sqlite_fts4_tokenizer.py b/synapse/storage/schema/main/delta/73/10_update_sqlite_fts4_tokenizer.py index 0d914bc4358d..c7ed258e9df2 100644 --- a/synapse/storage/schema/main/delta/73/10_update_sqlite_fts4_tokenizer.py +++ b/synapse/storage/schema/main/delta/73/10_update_sqlite_fts4_tokenizer.py @@ -13,11 +13,11 @@ # limitations under the License. 
import json +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, Sqlite3Engine -from synapse.storage.types import Cursor -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: """ Upgrade the event_search table to use the porter tokenizer if it isn't already diff --git a/synapse/storage/schema/main/delta/74/04_membership_tables_event_stream_ordering_triggers.py b/synapse/storage/schema/main/delta/74/04_membership_tables_event_stream_ordering_triggers.py index 763ea7460701..2ee2bc9422a6 100644 --- a/synapse/storage/schema/main/delta/74/04_membership_tables_event_stream_ordering_triggers.py +++ b/synapse/storage/schema/main/delta/74/04_membership_tables_event_stream_ordering_triggers.py @@ -17,11 +17,11 @@ This migration adds triggers to the room membership tables to enforce consistency. Triggers cannot be expressed in .sql files, so we have to use a separate file. """ +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine -from synapse.storage.types import Cursor -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: # Complain if the `event_stream_ordering` in membership tables doesn't match # the `stream_ordering` row with the same `event_id` in `events`. if isinstance(database_engine, Sqlite3Engine): diff --git a/synapse/storage/schema/state/delta/47/state_group_seq.py b/synapse/storage/schema/state/delta/47/state_group_seq.py index df93bb94099d..42aff502273b 100644 --- a/synapse/storage/schema/state/delta/47/state_group_seq.py +++ b/synapse/storage/schema/state/delta/47/state_group_seq.py @@ -12,11 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +from synapse.storage.database import LoggingTransaction from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine -from synapse.storage.types import Cursor -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None: +def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None: if isinstance(database_engine, PostgresEngine): # if we already have some state groups, we want to start making new # ones with a higher id. From e76a7366e76a172b14761c9d8d015a82c6e7b519 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 26 Apr 2023 16:00:06 -0400 Subject: [PATCH 10/11] No more excluded files from mypy. --- mypy.ini | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/mypy.ini b/mypy.ini index 3b17c59dfcd7..5e7057cfb7b1 100644 --- a/mypy.ini +++ b/mypy.ini @@ -21,16 +21,6 @@ files = tests/, build_rust.py -# Note: Better exclusion syntax coming in mypy > 0.910 -# https://github.com/python/mypy/pull/11329 -# -# For now, set the (?x) flag enable "verbose" regexes -# https://docs.python.org/3/library/re.html#re.X -exclude = (?x) - ^( - |synapse/storage/schema/ - )$ - [mypy-synapse.metrics._reactor_metrics] # This module imports select.epoll. That exists on Linux, but doesn't on macOS. # See https://github.com/matrix-org/synapse/pull/11771. 
From fbacc65e8a2f097825bb585c0b8cddaa821ccdd5 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 26 Apr 2023 16:03:26 -0400 Subject: [PATCH 11/11] Newsfragment --- changelog.d/15497.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/15497.misc diff --git a/changelog.d/15497.misc b/changelog.d/15497.misc new file mode 100644 index 000000000000..93ceaeafc9b9 --- /dev/null +++ b/changelog.d/15497.misc @@ -0,0 +1 @@ +Improve type hints.
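
For illustration only (not part of the patch series above), a minimal sketch of the delta-script shape these diffs converge on, assuming a Synapse checkout where the imports added above resolve; the SQL statements and table/sequence names are hypothetical:

from synapse.config.homeserver import HomeServerConfig
from synapse.storage.database import LoggingTransaction
from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine


def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None:
    # Hypothetical DDL; real delta files issue their own statements here.
    # Sequences only exist on Postgres, hence the engine check.
    if isinstance(database_engine, PostgresEngine):
        cur.execute("CREATE SEQUENCE example_id_seq")
    else:
        cur.execute("CREATE TABLE example_ids (id INTEGER PRIMARY KEY)")


def run_upgrade(
    cur: LoggingTransaction,
    database_engine: BaseDatabaseEngine,
    config: HomeServerConfig,
) -> None:
    # Hypothetical follow-up step; `config` is only available here,
    # not in run_create (as noted in the delta 57 comment above).
    cur.execute("SELECT 1")

Only the annotation of `cur` and the import line change in each file above; the SQL bodies of the real migrations are untouched by the series.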