author | Richard van der Hoff <1389908+richvdh@users.noreply.github.com> | 2019-03-25 16:38:05 +0000 |
---|---|---|
committer | GitHub <noreply@github.com> | 2019-03-25 16:38:05 +0000 |
commit | 9bde730ef821a20f6a785813b19953a9ba187ce7 (patch) | |
tree | 6b86286e597c30f73665326d08c9761add0f5978 /synapse | |
parent | Use an explicit dbname for postgres connections in the tests. (#4928) (diff) | |
download | synapse-9bde730ef821a20f6a785813b19953a9ba187ce7.tar.xz | |
Fix bug where read-receipts lost their timestamps (#4927)
Make sure that they are sent correctly over the replication stream. Fixes: #4898
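The core of the fix is in `synapse/storage/receipts.py`: the rows returned by `get_all_updated_receipts_txn` now have their JSON-encoded `data` column decoded before they are handed to the replication stream, which is presumably where the receipt timestamp was getting lost. The sketch below illustrates that per-row transformation; the column layout and the sample values are assumptions for illustration, not taken from the commit.

```python
# Illustrative sketch of the per-row change in get_all_updated_receipts_txn.
# The column layout (stream_id, room_id, receipt_type, user_id, event_id, data)
# and the sample values are assumptions for this example.
import json

raw_row = (
    42,                        # stream_id
    "!room:example.org",       # room_id
    "m.read",                  # receipt_type
    "@alice:example.org",      # user_id
    "$some_event_id",          # event_id
    '{"ts": 1553529485000}',   # data, stored as a JSON string
)

# Before the fix the row went out with `data` still a string, so the timestamp
# inside it was not usable downstream. The fix decodes it row by row:
decoded_row = raw_row[0:5] + (json.loads(raw_row[5]),)

print(decoded_row[5]["ts"])  # 1553529485000
```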
Diffstat (limited to 'synapse')
-rw-r--r-- | synapse/replication/tcp/protocol.py | 27
-rw-r--r-- | synapse/replication/tcp/streams.py | 11
-rw-r--r-- | synapse/storage/receipts.py | 4
3 files changed, 30 insertions, 12 deletions
```diff
diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py
index 55630ba9a7..e16fad5261 100644
--- a/synapse/replication/tcp/protocol.py
+++ b/synapse/replication/tcp/protocol.py
@@ -223,14 +223,25 @@ class BaseReplicationStreamProtocol(LineOnlyReceiver):
             return
 
         # Now lets try and call on_<CMD_NAME> function
-        try:
-            run_as_background_process(
-                "replication-" + cmd.get_logcontext_id(),
-                getattr(self, "on_%s" % (cmd_name,)),
-                cmd,
-            )
-        except Exception:
-            logger.exception("[%s] Failed to handle line: %r", self.id(), line)
+        run_as_background_process(
+            "replication-" + cmd.get_logcontext_id(),
+            self.handle_command,
+            cmd,
+        )
+
+    def handle_command(self, cmd):
+        """Handle a command we have received over the replication stream.
+
+        By default delegates to on_<COMMAND>
+
+        Args:
+            cmd (synapse.replication.tcp.commands.Command): received command
+
+        Returns:
+            Deferred
+        """
+        handler = getattr(self, "on_%s" % (cmd.NAME,))
+        return handler(cmd)
 
     def close(self):
         logger.warn("[%s] Closing connection", self.id())
diff --git a/synapse/replication/tcp/streams.py b/synapse/replication/tcp/streams.py
index c1e626be3f..e23084baae 100644
--- a/synapse/replication/tcp/streams.py
+++ b/synapse/replication/tcp/streams.py
@@ -23,7 +23,7 @@ Each stream is defined by the following information:
     current_token:      The function that returns the current token for the stream
     update_function:    The function that returns a list of updates between two tokens
 """
-
+import itertools
 import logging
 from collections import namedtuple
 
@@ -195,8 +195,8 @@ class Stream(object):
                 limit=MAX_EVENTS_BEHIND + 1,
             )
 
-            if len(rows) >= MAX_EVENTS_BEHIND:
-                raise Exception("stream %s has fallen behind" % (self.NAME))
+            # never turn more than MAX_EVENTS_BEHIND + 1 into updates.
+            rows = itertools.islice(rows, MAX_EVENTS_BEHIND + 1)
         else:
             rows = yield self.update_function(
                 from_token, current_token,
@@ -204,6 +204,11 @@ class Stream(object):
 
         updates = [(row[0], self.ROW_TYPE(*row[1:])) for row in rows]
 
+        # check we didn't get more rows than the limit.
+        # doing it like this allows the update_function to be a generator.
+        if self._LIMITED and len(updates) >= MAX_EVENTS_BEHIND:
+            raise Exception("stream %s has fallen behind" % (self.NAME))
+
         defer.returnValue((updates, current_token))
 
     def current_token(self):
diff --git a/synapse/storage/receipts.py b/synapse/storage/receipts.py
index 0fd1ccc40a..89a1f7e3d7 100644
--- a/synapse/storage/receipts.py
+++ b/synapse/storage/receipts.py
@@ -301,7 +301,9 @@ class ReceiptsWorkerStore(SQLBaseStore):
                 args.append(limit)
             txn.execute(sql, args)
-            return txn.fetchall()
+            return (
+                r[0:5] + (json.loads(r[5]), ) for r in txn
+            )
 
         return self.runInteraction(
             "get_all_updated_receipts", get_all_updated_receipts_txn
         )
```
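The `streams.py` hunk also changes how the "fallen behind" limit is enforced: instead of calling `len(rows)` on the raw result (which would not work if the rows are a generator), the rows are capped with `itertools.islice` and the check runs on the materialised `updates` list. Below is a standalone sketch of that pattern, using a made-up `MAX_EVENTS_BEHIND` value and a hypothetical generator in place of a real stream's `update_function`:

```python
# Sketch of the capping pattern from the streams.py hunk above.
import itertools

MAX_EVENTS_BEHIND = 10000  # assumed value, for illustration only


def endless_update_function():
    """Hypothetical generator standing in for a stream's update_function."""
    token = 0
    while True:
        token += 1
        yield (token, "row %d" % token)


# Never turn more than MAX_EVENTS_BEHIND + 1 rows into updates.
rows = itertools.islice(endless_update_function(), MAX_EVENTS_BEHIND + 1)
updates = [(row[0], row[1]) for row in rows]

# The check now runs on the materialised list, which is what leaves
# update_function free to be a generator.
print(len(updates))                       # 10001
print(len(updates) >= MAX_EVENTS_BEHIND)  # True -> Synapse would raise here
```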