This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Backfill remote event fetched by MSC3030 so we can paginate from it later #13205

Merged
Changes from 12 commits
Commits (33)
93bbac7
Backfill remote event fetched by MSC3030 so we can paginate from it…
MadLittleMods Jul 6, 2022
f645864
Working logs
MadLittleMods Jul 6, 2022
6c4b618
Working Complement test by getting new raw event which isn't an outlier
MadLittleMods Jul 6, 2022
2e01535
Add changelog
MadLittleMods Jul 6, 2022
b335d44
Debug logs
MadLittleMods Jul 8, 2022
dab7ad9
More debugging
MadLittleMods Jul 9, 2022
f3072c9
Working with get_pdu
MadLittleMods Jul 11, 2022
3db79a1
Merge branch 'develop' into madlittlemods/msc3030-backfill-at-remote-…
MadLittleMods Jul 11, 2022
3f0da1b
Install tabulate back and working
MadLittleMods Jul 11, 2022
b9c936c
Clean up debug logs
MadLittleMods Jul 11, 2022
e76fbc0
More cleanup
MadLittleMods Jul 11, 2022
7d9c20a
Better order and fix lints
MadLittleMods Jul 11, 2022
48ca870
Revert fast complement changes
MadLittleMods Jul 11, 2022
6543bd5
Remove unused persist_events_store
MadLittleMods Jul 11, 2022
8b0dd8c
Better logging
MadLittleMods Jul 11, 2022
ba344b4
Add comment why _process_pulled_events
MadLittleMods Jul 11, 2022
caa0fce
Add docstring
MadLittleMods Jul 11, 2022
05dc230
Fix logic error
MadLittleMods Jul 11, 2022
7a316a5
Fix wrong scope in log
MadLittleMods Jul 15, 2022
682399f
Remove whitespace changes
MadLittleMods Jul 15, 2022
4df2f0c
Use shorthand
MadLittleMods Jul 15, 2022
ce447f0
Merge branch 'develop' into madlittlemods/msc3030-backfill-at-remote-…
MadLittleMods Jul 15, 2022
2beeccd
Remove duplicate information from error
MadLittleMods Jul 15, 2022
7f866f4
Log what the remote event is closer than
MadLittleMods Jul 15, 2022
337d8be
Explain why no persisting outliers in _process_pulled_event
MadLittleMods Jul 15, 2022
efaf434
get_pdu returns pristine EventBase
MadLittleMods Jul 15, 2022
344e63e
Fix lints
MadLittleMods Jul 15, 2022
cf5a324
Fix unused ex lint
MadLittleMods Jul 15, 2022
b3743c2
Fix lint
MadLittleMods Jul 15, 2022
b2be2bc
Use timestamp from the event we backfilled instead of trusting the re…
MadLittleMods Jul 16, 2022
7dbc4f7
Merge branch 'develop' into madlittlemods/msc3030-backfill-at-remote-…
MadLittleMods Jul 20, 2022
2d1a84b
Restore whitespace
MadLittleMods Jul 20, 2022
bf4e5d6
Add ideas the comment
MadLittleMods Jul 22, 2022
1 change: 1 addition & 0 deletions changelog.d/13205.feature
@@ -0,0 +1 @@
Allow pagination from remote event after discovering it from MSC3030 `/timestamp_to_event`.
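
For context (not part of the diff), the end-to-end flow this feature enables might look roughly like the client-side sketch below. The unstable MSC3030 endpoint path is assumed from the MSC, and the homeserver URL, room ID, and access token are placeholders:

```python
import requests

# Placeholders -- a hypothetical homeserver, room, and access token.
HS = "https://example.org"
ROOM = "!room:example.org"
HEADERS = {"Authorization": "Bearer <access_token>"}

# 1. Ask for the event closest to a timestamp (MSC3030 unstable endpoint;
#    path assumed from the MSC).
ts = requests.get(
    f"{HS}/_matrix/client/unstable/org.matrix.msc3030/rooms/{ROOM}/timestamp_to_event",
    params={"ts": 1640995200000, "dir": "f"},
    headers=HEADERS,
).json()
event_id = ts["event_id"]

# 2. Fetch /context for that event to get pagination tokens. Before this
#    change, an event discovered on a remote server had no local pagination
#    token, so this step could fail.
ctx = requests.get(
    f"{HS}/_matrix/client/v3/rooms/{ROOM}/context/{event_id}",
    headers=HEADERS,
).json()

# 3. Paginate /messages forwards from that point.
msgs = requests.get(
    f"{HS}/_matrix/client/v3/rooms/{ROOM}/messages",
    params={"from": ctx["end"], "dir": "f", "limit": 20},
    headers=HEADERS,
).json()
```

Step 2 is the part this PR unblocks: the remote event is backfilled locally so a pagination token exists for it.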
20 changes: 10 additions & 10 deletions docker/complement/Dockerfile
@@ -8,17 +8,17 @@ ARG SYNAPSE_VERSION=latest
FROM matrixdotorg/synapse-workers:$SYNAPSE_VERSION

# Install postgresql
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -yqq postgresql-13
# RUN apt-get update && \
# DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -yqq postgresql-13

# Configure a user and create a database for Synapse
RUN pg_ctlcluster 13 main start && su postgres -c "echo \
\"ALTER USER postgres PASSWORD 'somesecret'; \
CREATE DATABASE synapse \
ENCODING 'UTF8' \
LC_COLLATE='C' \
LC_CTYPE='C' \
template=template0;\" | psql" && pg_ctlcluster 13 main stop
# RUN pg_ctlcluster 13 main start && su postgres -c "echo \
# \"ALTER USER postgres PASSWORD 'somesecret'; \
# CREATE DATABASE synapse \
# ENCODING 'UTF8' \
# LC_COLLATE='C' \
# LC_CTYPE='C' \
# template=template0;\" | psql" && pg_ctlcluster 13 main stop

# Extend the shared homeserver config to disable rate-limiting,
# set Complement's static shared secret, enable registration, amongst other
@@ -42,4 +42,4 @@ ENTRYPOINT ["/start_for_complement.sh"]

# Update the healthcheck to have a shorter check interval
HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
CMD /bin/sh /healthcheck.sh
CMD /bin/sh /healthcheck.sh
2 changes: 1 addition & 1 deletion synapse/federation/federation_client.py
@@ -309,7 +309,7 @@ async def get_pdu_from_destination_raw(
)

logger.debug(
"retrieved event id %s from %s: %r",
"get_pdu_raw: retrieved event id %s from %s: %r",
event_id,
destination,
transaction_data,
54 changes: 49 additions & 5 deletions synapse/handlers/federation_event.py
@@ -54,7 +54,7 @@
check_state_independent_auth_rules,
validate_event_for_room_version,
)
from synapse.events import EventBase
from synapse.events import EventBase, make_event_from_dict
from synapse.events.snapshot import EventContext
from synapse.federation.federation_client import InvalidResponseError
from synapse.logging.context import nested_logging_context
@@ -100,6 +100,7 @@ class FederationEventHandler:

def __init__(self, hs: "HomeServer"):
self._store = hs.get_datastores().main
self.persist_events_store = hs.get_datastores().persist_events
self._storage_controllers = hs.get_storage_controllers()
self._state_storage_controller = self._storage_controllers.state

@@ -765,6 +766,11 @@ async def _process_pulled_event(
"""
logger.info("Processing pulled event %s", event)

# TODO: Why does this matter? The whole point of this function is to
# persist random PDUs from backfill. It shouldn't matter whether we saw
# them somewhere else first as an outlier, then during backfill. This
# function handles de-outliering anyway.
#
# these should not be outliers.
assert (
not event.internal_metadata.is_outlier()
@@ -778,7 +784,7 @@ if existing:
if existing:
if not existing.internal_metadata.is_outlier():
logger.info(
"Ignoring received event %s which we have already seen",
"_process_pulled_event: Ignoring received event %s which we have already seen",
event_id,
)
return
@@ -918,15 +924,12 @@ async def _get_state_ids_after_missing_prev_event(
event_id: str,
) -> StateMap[str]:
"""Requests all of the room state at a given event from a remote homeserver.

Args:
destination: The remote homeserver to query for the state.
room_id: The id of the room we're interested in.
event_id: The id of the event we want the state at.

Returns:
The event ids of the state *after* the given event.

Raises:
InvalidResponseError: if the remote homeserver's response contains fields
of the wrong type.
@@ -1315,6 +1318,47 @@ async def _handle_marker_event(self, origin: str, marker_event: EventBase) -> No
marker_event,
)

async def backfill_event(
self, destination: str, room_id: str, event_id: str
) -> None:
logger.info("backfill_event event_id=%s", event_id)

room_version = await self._store.get_room_version(room_id)

event_from_response = await self._federation_client.get_pdu(
[destination],
event_id,
room_version,
)

if not event_from_response:
raise FederationError(
"ERROR",
404,
"Unable to find event_id=%s from destination=%s to backfill."
% (event_id, destination),
affected=event_id,
)

# We want to make a non-outlier event so that it plays well with
# `_process_pulled_events()` -> `_update_outliers_txn()`, creates a
# `state_group`, and mimics what would happen in a regular backfill.
# `get_pdu()` can potentially return an `outlier` (depending on its cache),
# which we don't want here.
event_non_outlier = make_event_from_dict(
event_from_response.get_pdu_json(),
event_from_response.room_version,
internal_metadata_dict=None,
)
assert not event_non_outlier.internal_metadata.outlier

await self._process_pulled_events(
destination,
[event_non_outlier],
# Prevent notifications going to clients
backfilled=True,
)

async def _get_events_and_persist(
self, destination: str, room_id: str, event_ids: Collection[str]
) -> None:
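
As a side note (not part of the diff), the outlier-stripping step in `backfill_event` above can be read in isolation. The helper below is hypothetical and simply restates the pattern, assuming `get_pdu()` handed back a cached copy flagged as an outlier:

```python
from synapse.events import EventBase, make_event_from_dict


def as_non_outlier(event: EventBase) -> EventBase:
    """Return a copy of `event` whose internal metadata does not mark it as
    an outlier, so downstream backfill code treats it like a pulled event."""
    copy = make_event_from_dict(
        event.get_pdu_json(),          # the federation-format event dict
        event.room_version,            # same room version as the original
        internal_metadata_dict=None,   # start with fresh internal metadata
    )
    assert not copy.internal_metadata.outlier
    return copy
```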
28 changes: 21 additions & 7 deletions synapse/handlers/room.py
@@ -1355,6 +1355,7 @@ def __init__(self, hs: "HomeServer"):
self.store = hs.get_datastores().main
self.state_handler = hs.get_state_handler()
self.federation_client = hs.get_federation_client()
self.federation_event_handler = hs.get_federation_event_handler()
self._storage_controllers = hs.get_storage_controllers()

async def get_event_for_timestamp(
@@ -1450,25 +1451,37 @@ async def get_event_for_timestamp(
remote_response,
)

# TODO: Do we want to persist this as an extremity?
# TODO: I think ideally, we would try to backfill from
# this event and run this whole
# `get_event_for_timestamp` function again to make sure
# they didn't give us an event from their gappy history.
remote_event_id = remote_response.event_id
origin_server_ts = remote_response.origin_server_ts

# Backfill this event so we can get a pagination token for
# it with `/context` and paginate `/messages` from this
# point.
#
# FIXME: After this backfill, we might want to run this
# whole `get_event_for_timestamp` function again to make
# sure they didn't give us an event from their gappy
# history. Also need a heuristic for when to stop recursing
# if they keep giving us gappy results.
await self.federation_event_handler.backfill_event(
domain, room_id, remote_event_id
)

# Only return the remote event if it's closer than the local event
if not local_event or (
abs(origin_server_ts - timestamp)
< abs(local_event.origin_server_ts - timestamp)
):
logger.info(
"get_event_for_timestamp: returning remote_event_id=%s since it's closer",
remote_event_id,
)
return remote_event_id, origin_server_ts
except (HttpResponseException, InvalidResponseError) as ex:
# Let's not put a high priority on some other homeserver
# failing to respond or giving a random response
logger.debug(
"Failed to fetch /timestamp_to_event from %s because of exception(%s) %s args=%s",
"get_event_for_timestamp: Failed to fetch /timestamp_to_event from %s because of exception(%s) %s args=%s",
domain,
type(ex).__name__,
ex,
Expand All @@ -1477,11 +1490,12 @@ async def get_event_for_timestamp(
except Exception as ex:
# But we do want to see some exceptions in our code
logger.warning(
"Failed to fetch /timestamp_to_event from %s because of exception(%s) %s args=%s",
"get_event_for_timestamp: Failed to fetch /timestamp_to_event from %s because of exception(%s) %s args=%s",
domain,
type(ex).__name__,
ex,
ex.args,
exc_info=(type(ex), ex, ex.__traceback__),
)

# To appease mypy, we have to add both of these conditions to check for
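
As a side note (not part of the diff), the "closer than the local event" check above is just a comparison of absolute timestamp distances; a minimal sketch with made-up values:

```python
timestamp = 1_640_995_200_000   # ts the client asked about (ms)
local_ts = 1_640_995_500_000    # closest event this server already has
remote_ts = 1_640_995_260_000   # event offered by the remote server

# The remote event wins only if it is strictly closer to the target ts.
use_remote = abs(remote_ts - timestamp) < abs(local_ts - timestamp)
print(use_remote)  # True: 60s away beats 300s away
```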
22 changes: 19 additions & 3 deletions synapse/storage/databases/main/events.py
@@ -1346,9 +1346,23 @@ def _update_outliers_txn(
event_id: outlier for event_id, outlier in txn
}

logger.debug(
"_update_outliers_txn: events=%s have_persisted=%s",
[ev.event_id for ev, _ in events_and_contexts],
have_persisted,
)

to_remove = set()
for event, context in events_and_contexts:
if event.event_id not in have_persisted:
outlier_persisted = have_persisted.get(event.event_id, False)
logger.debug(
"_update_outliers_txn: event=%s outlier=%s outlier_persisted=%s",
event.event_id,
event.internal_metadata.is_outlier(),
outlier_persisted,
)

if not outlier_persisted:
continue

to_remove.add(event)
@@ -1358,7 +1372,6 @@
# was an outlier or not - what we have is at least as good.
continue

outlier_persisted = have_persisted[event.event_id]
if not event.internal_metadata.is_outlier() and outlier_persisted:
# We received a copy of an event that we had already stored as
# an outlier in the database. We now have some state at that event
@@ -1369,7 +1382,10 @@
# events down /sync. In general they will be historical events, so that
# doesn't matter too much, but that is not always the case.

logger.info("Updating state for ex-outlier event %s", event.event_id)
logger.info(
"_update_outliers_txn: Updating state for ex-outlier event %s",
event.event_id,
)

# insert into event_to_state_groups.
try: