This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Commit

Remove remaining usage of cursor_to_dict. (#16564)
clokep authored Oct 31, 2023
1 parent c0ba319 commit cfb6d38
Showing 18 changed files with 300 additions and 157 deletions.
1 change: 1 addition & 0 deletions changelog.d/16564.misc
@@ -0,0 +1 @@
+Improve type hints.
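
For context, the pattern this commit applies throughout the storage and REST layers: cursor_to_dict turned every database row into an untyped Dict[str, Any], whereas the replacement fetches plain tuples (or attrs row classes) and only converts to JSON dicts at the serialization boundary. A minimal sketch of the two styles, with illustrative names rather than Synapse's actual helpers:

from typing import List, Optional, Tuple, cast

import attr


@attr.s(slots=True, frozen=True, auto_attribs=True)
class MediaRow:
    # Illustrative row class; the real classes live in Synapse's storage layer.
    media_id: str
    media_length: Optional[int]


# Before: every row becomes an untyped dict, so mypy cannot check field access.
def rows_as_dicts(txn) -> List[dict]:
    assert txn.description is not None
    cols = [column[0] for column in txn.description]
    return [dict(zip(cols, row)) for row in txn]


# After: fetch plain tuples and cast to the row shape the query guarantees...
def rows_as_tuples(txn) -> List[Tuple[str, Optional[int]]]:
    return cast(List[Tuple[str, Optional[int]]], txn.fetchall())


# ...or build typed attrs objects that callers access by attribute.
def rows_as_attrs(txn) -> List[MediaRow]:
    return [MediaRow(media_id, length) for media_id, length in txn.fetchall()]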
2 changes: 1 addition & 1 deletion synapse/handlers/admin.py
@@ -283,7 +283,7 @@ async def export_user_data(self, user_id: str, writer: "ExfiltrationWriter") ->
                 start, limit, user_id
             )
             for media in media_ids:
-                writer.write_media_id(media["media_id"], media)
+                writer.write_media_id(media.media_id, attr.asdict(media))
 
             logger.info(
                 "[%s] Written %d media_ids of %s",
43 changes: 21 additions & 22 deletions synapse/handlers/room_list.py
@@ -33,6 +33,7 @@
     RequestSendFailed,
     SynapseError,
 )
+from synapse.storage.databases.main.room import LargestRoomStats
 from synapse.types import JsonDict, JsonMapping, ThirdPartyInstanceID
 from synapse.util.caches.descriptors import _CacheContext, cached
 from synapse.util.caches.response_cache import ResponseCache
@@ -170,26 +171,24 @@ async def _get_public_room_list(
             ignore_non_federatable=from_federation,
         )
 
-        def build_room_entry(room: JsonDict) -> JsonDict:
+        def build_room_entry(room: LargestRoomStats) -> JsonDict:
             entry = {
-                "room_id": room["room_id"],
-                "name": room["name"],
-                "topic": room["topic"],
-                "canonical_alias": room["canonical_alias"],
-                "num_joined_members": room["joined_members"],
-                "avatar_url": room["avatar"],
-                "world_readable": room["history_visibility"]
+                "room_id": room.room_id,
+                "name": room.name,
+                "topic": room.topic,
+                "canonical_alias": room.canonical_alias,
+                "num_joined_members": room.joined_members,
+                "avatar_url": room.avatar,
+                "world_readable": room.history_visibility
                 == HistoryVisibility.WORLD_READABLE,
-                "guest_can_join": room["guest_access"] == "can_join",
-                "join_rule": room["join_rules"],
-                "room_type": room["room_type"],
+                "guest_can_join": room.guest_access == "can_join",
+                "join_rule": room.join_rules,
+                "room_type": room.room_type,
             }
 
             # Filter out Nones – rather omit the field altogether
             return {k: v for k, v in entry.items() if v is not None}
 
-        results = [build_room_entry(r) for r in results]
-
         response: JsonDict = {}
         num_results = len(results)
         if limit is not None:
@@ -212,33 +211,33 @@ def build_room_entry(room: JsonDict) -> JsonDict:
                     # If there was a token given then we assume that there
                     # must be previous results.
                     response["prev_batch"] = RoomListNextBatch(
-                        last_joined_members=initial_entry["num_joined_members"],
-                        last_room_id=initial_entry["room_id"],
+                        last_joined_members=initial_entry.joined_members,
+                        last_room_id=initial_entry.room_id,
                         direction_is_forward=False,
                     ).to_token()
 
                 if more_to_come:
                     response["next_batch"] = RoomListNextBatch(
-                        last_joined_members=final_entry["num_joined_members"],
-                        last_room_id=final_entry["room_id"],
+                        last_joined_members=final_entry.joined_members,
+                        last_room_id=final_entry.room_id,
                         direction_is_forward=True,
                     ).to_token()
             else:
                 if has_batch_token:
                     response["next_batch"] = RoomListNextBatch(
-                        last_joined_members=final_entry["num_joined_members"],
-                        last_room_id=final_entry["room_id"],
+                        last_joined_members=final_entry.joined_members,
+                        last_room_id=final_entry.room_id,
                         direction_is_forward=True,
                     ).to_token()
 
                 if more_to_come:
                     response["prev_batch"] = RoomListNextBatch(
-                        last_joined_members=initial_entry["num_joined_members"],
-                        last_room_id=initial_entry["room_id"],
+                        last_joined_members=initial_entry.joined_members,
+                        last_room_id=initial_entry.room_id,
                         direction_is_forward=False,
                     ).to_token()
 
-        response["chunk"] = results
+        response["chunk"] = [build_room_entry(r) for r in results]
 
         response["total_room_count_estimate"] = await self.store.count_public_rooms(
             network_tuple,
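
LargestRoomStats is the attrs row class the room storage layer now returns; its exact definition is not part of this diff, but a simplified sketch consistent with the attribute accesses in build_room_entry above would look like this (field names and types are inferred, not copied from the real class):

from typing import Optional

import attr


@attr.s(slots=True, frozen=True, auto_attribs=True)
class LargestRoomStats:
    # Field names inferred from build_room_entry above; the real definition
    # lives in synapse/storage/databases/main/room.py.
    room_id: str
    name: Optional[str]
    canonical_alias: Optional[str]
    joined_members: int
    topic: Optional[str]
    avatar: Optional[str]
    history_visibility: Optional[str]
    guest_access: Optional[str]
    join_rules: Optional[str]
    room_type: Optional[str]

Attribute access lets mypy verify every field that build_room_entry reads, which a Dict[str, Any] row never could.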
26 changes: 13 additions & 13 deletions synapse/handlers/room_summary.py
@@ -703,24 +703,24 @@ async def _build_room_entry(self, room_id: str, for_federation: bool) -> JsonDict:
         # there should always be an entry
         assert stats is not None, "unable to retrieve stats for %s" % (room_id,)
 
-        entry = {
-            "room_id": stats["room_id"],
-            "name": stats["name"],
-            "topic": stats["topic"],
-            "canonical_alias": stats["canonical_alias"],
-            "num_joined_members": stats["joined_members"],
-            "avatar_url": stats["avatar"],
-            "join_rule": stats["join_rules"],
+        entry: JsonDict = {
+            "room_id": stats.room_id,
+            "name": stats.name,
+            "topic": stats.topic,
+            "canonical_alias": stats.canonical_alias,
+            "num_joined_members": stats.joined_members,
+            "avatar_url": stats.avatar,
+            "join_rule": stats.join_rules,
             "world_readable": (
-                stats["history_visibility"] == HistoryVisibility.WORLD_READABLE
+                stats.history_visibility == HistoryVisibility.WORLD_READABLE
             ),
-            "guest_can_join": stats["guest_access"] == "can_join",
-            "room_type": stats["room_type"],
+            "guest_can_join": stats.guest_access == "can_join",
+            "room_type": stats.room_type,
         }
 
         if self._msc3266_enabled:
-            entry["im.nheko.summary.version"] = stats["version"]
-            entry["im.nheko.summary.encryption"] = stats["encryption"]
+            entry["im.nheko.summary.version"] = stats.version
+            entry["im.nheko.summary.encryption"] = stats.encryption
 
         # Federation requests need to provide additional information so the
         # requested server is able to filter the response appropriately.
6 changes: 4 additions & 2 deletions synapse/rest/admin/media.py
@@ -17,6 +17,8 @@
 from http import HTTPStatus
 from typing import TYPE_CHECKING, Optional, Tuple
 
+import attr
+
 from synapse.api.constants import Direction
 from synapse.api.errors import Codes, NotFoundError, SynapseError
 from synapse.http.server import HttpServer
@@ -418,7 +420,7 @@ async def on_GET(
             start, limit, user_id, order_by, direction
         )
 
-        ret = {"media": media, "total": total}
+        ret = {"media": [attr.asdict(m) for m in media], "total": total}
         if (start + limit) < total:
             ret["next_token"] = start + len(media)
 
@@ -477,7 +479,7 @@ async def on_DELETE(
         )
 
         deleted_media, total = await self.media_repository.delete_local_media_ids(
-            [row["media_id"] for row in media]
+            [m.media_id for m in media]
        )
 
         return HTTPStatus.OK, {"deleted_media": deleted_media, "total": total}
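
attr.asdict recursively converts an attrs instance into a plain dict, which is how the typed rows above still end up as JSON at the REST boundary. A small standalone illustration; the LocalMedia shape here is assumed for the example, not Synapse's exact class:

import attr


@attr.s(slots=True, frozen=True, auto_attribs=True)
class LocalMedia:
    # Example fields only; not Synapse's exact class.
    media_id: str
    media_type: str
    media_length: int


media = [LocalMedia(media_id="abc123", media_type="image/png", media_length=1024)]
ret = {"media": [attr.asdict(m) for m in media], "total": len(media)}
# ret == {"media": [{"media_id": "abc123", "media_type": "image/png",
#                    "media_length": 1024}], "total": 1}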
13 changes: 12 additions & 1 deletion synapse/rest/admin/registration_tokens.py
@@ -77,7 +77,18 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         await assert_requester_is_admin(self.auth, request)
         valid = parse_boolean(request, "valid")
         token_list = await self.store.get_registration_tokens(valid)
-        return HTTPStatus.OK, {"registration_tokens": token_list}
+        return HTTPStatus.OK, {
+            "registration_tokens": [
+                {
+                    "token": t[0],
+                    "uses_allowed": t[1],
+                    "pending": t[2],
+                    "completed": t[3],
+                    "expiry_time": t[4],
+                }
+                for t in token_list
+            ]
+        }
 
 
 class NewRegistrationTokenRestServlet(RestServlet):
11 changes: 8 additions & 3 deletions synapse/rest/admin/rooms.py
@@ -16,6 +16,8 @@
 from typing import TYPE_CHECKING, List, Optional, Tuple, cast
 from urllib import parse as urlparse
 
+import attr
+
 from synapse.api.constants import Direction, EventTypes, JoinRules, Membership
 from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError
 from synapse.api.filtering import Filter
@@ -306,10 +308,13 @@ async def on_GET(
             raise NotFoundError("Room not found")
 
         members = await self.store.get_users_in_room(room_id)
-        ret["joined_local_devices"] = await self.store.count_devices_by_users(members)
-        ret["forgotten"] = await self.store.is_locally_forgotten_room(room_id)
+        result = attr.asdict(ret)
+        result["joined_local_devices"] = await self.store.count_devices_by_users(
+            members
+        )
+        result["forgotten"] = await self.store.is_locally_forgotten_room(room_id)
 
-        return HTTPStatus.OK, ret
+        return HTTPStatus.OK, result
 
     async def on_DELETE(
         self, request: SynapseRequest, room_id: str
10 changes: 7 additions & 3 deletions synapse/rest/admin/users.py
@@ -18,6 +18,8 @@
 from http import HTTPStatus
 from typing import TYPE_CHECKING, Dict, List, Optional, Tuple
 
+import attr
+
 from synapse.api.constants import Direction, UserTypes
 from synapse.api.errors import Codes, NotFoundError, SynapseError
 from synapse.http.servlet import (
@@ -161,11 +163,13 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         )
 
         # If support for MSC3866 is not enabled, don't show the approval flag.
+        filter = None
         if not self._msc3866_enabled:
-            for user in users:
-                del user["approved"]
+
+            def _filter(a: attr.Attribute) -> bool:
+                return a.name != "approved"
 
-        ret = {"users": users, "total": total}
+        ret = {"users": [attr.asdict(u, filter=filter) for u in users], "total": total}
         if (start + limit) < total:
             ret["next_token"] = str(start + len(users))
 
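
attr.asdict also accepts a filter callable that decides per attribute whether it is included in the output; attrs passes both the Attribute and its value to that callable. A standalone sketch of dropping a field this way, with a made-up class and field names:

import attr


@attr.s(slots=True, frozen=True, auto_attribs=True)
class UserRow:
    # Assumed example fields for illustration.
    name: str
    admin: bool
    approved: bool


user = UserRow(name="@alice:example.com", admin=False, approved=True)

# attrs calls the filter with (attribute, value); return False to drop the field.
without_approved = attr.asdict(user, filter=lambda a, v: a.name != "approved")
assert without_approved == {"name": "@alice:example.com", "admin": False}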
14 changes: 7 additions & 7 deletions synapse/storage/background_updates.py
@@ -28,6 +28,7 @@
     Sequence,
     Tuple,
     Type,
+    cast,
 )
 
 import attr
@@ -488,14 +489,14 @@ async def do_next_background_update(self, sleep: bool = True) -> bool:
             True if we have finished running all the background updates, otherwise False
         """
 
-        def get_background_updates_txn(txn: Cursor) -> List[Dict[str, Any]]:
+        def get_background_updates_txn(txn: Cursor) -> List[Tuple[str, Optional[str]]]:
             txn.execute(
                 """
                 SELECT update_name, depends_on FROM background_updates
                 ORDER BY ordering, update_name
                 """
             )
-            return self.db_pool.cursor_to_dict(txn)
+            return cast(List[Tuple[str, Optional[str]]], txn.fetchall())
 
         if not self._current_background_update:
             all_pending_updates = await self.db_pool.runInteraction(
@@ -507,14 +508,13 @@ def get_background_updates_txn(txn: Cursor) -> List[Dict[str, Any]]:
                 return True
 
             # find the first update which isn't dependent on another one in the queue.
-            pending = {update["update_name"] for update in all_pending_updates}
-            for upd in all_pending_updates:
-                depends_on = upd["depends_on"]
+            pending = {update_name for update_name, depends_on in all_pending_updates}
+            for update_name, depends_on in all_pending_updates:
                 if not depends_on or depends_on not in pending:
                     break
                 logger.info(
                     "Not starting on bg update %s until %s is done",
-                    upd["update_name"],
+                    update_name,
                     depends_on,
                 )
             else:
@@ -524,7 +524,7 @@ def get_background_updates_txn(txn: Cursor) -> List[Dict[str, Any]]:
                     "another: dependency cycle?"
                 )
 
-            self._current_background_update = upd["update_name"]
+            self._current_background_update = update_name
 
             # We have a background update to run, otherwise we would have returned
             # early.
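
Two details in the hunk above are worth spelling out: txn.fetchall() is effectively untyped, so the cast documents the (update_name, depends_on) row shape the SELECT guarantees, and the for/else loop picks the first update whose dependency is no longer pending. A self-contained sketch of that selection logic, using made-up data:

from typing import List, Optional, Tuple

# Rows shaped as (update_name, depends_on), mirroring the SELECT above.
pending_updates: List[Tuple[str, Optional[str]]] = [
    ("populate_stats", "populate_rooms"),
    ("populate_rooms", None),
]

pending = {update_name for update_name, depends_on in pending_updates}
for update_name, depends_on in pending_updates:
    if not depends_on or depends_on not in pending:
        break  # found an update that is safe to start now
else:
    # The loop never hit `break`: every update waits on another pending one.
    raise RuntimeError("dependency cycle in background updates")

assert update_name == "populate_rooms"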
15 changes: 0 additions & 15 deletions synapse/storage/database.py
@@ -18,7 +18,6 @@
 import time
 import types
 from collections import defaultdict
-from sys import intern
 from time import monotonic as monotonic_time
 from typing import (
     TYPE_CHECKING,
@@ -1042,20 +1041,6 @@ def inner_func(conn: _PoolConnection, *args: P.args, **kwargs: P.kwargs) -> R:
             self._db_pool.runWithConnection(inner_func, *args, **kwargs)
         )
 
-    @staticmethod
-    def cursor_to_dict(cursor: Cursor) -> List[Dict[str, Any]]:
-        """Converts a SQL cursor into an list of dicts.
-
-        Args:
-            cursor: The DBAPI cursor which has executed a query.
-        Returns:
-            A list of dicts where the key is the column header.
-        """
-        assert cursor.description is not None, "cursor.description was None"
-        col_headers = [intern(str(column[0])) for column in cursor.description]
-        results = [dict(zip(col_headers, row)) for row in cursor]
-        return results
-
     async def execute(self, desc: str, query: str, *args: Any) -> List[Tuple[Any, ...]]:
         """Runs a single query for a result set.