author     Erik Johnston <erikj@jki.re>    2016-11-23 15:39:12 +0000
committer  GitHub <noreply@github.com>     2016-11-23 15:39:12 +0000
commit     302fbd218ddb00a1d38e778b29cdf043800941e1 (patch)
tree       89e462d2fc8d2470bb66d744e2289050df7edef3 /synapse/replication/slave/storage/events.py
parent     Merge pull request #1641 from matrix-org/erikj/as_pushers (diff)
parent     Shuffle receipt handler around so that worker apps don't need to load it (diff)
download   synapse-302fbd218ddb00a1d38e778b29cdf043800941e1.tar.xz
Merge pull request #1635 from matrix-org/erikj/split_out_fed_txn
Split out federation transaction sending to a worker
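
A sender worker has to remember how far through the event stream it has already sent, which is presumably why get_federation_out_pos and update_federation_out_pos are exposed on the slaved store in the diff below. The following is a minimal sketch of that resume-from-token loop, not code from this commit: InMemoryStore, its event data, and the method signatures are stand-ins, whereas the real methods live on DataStore and return Twisted deferreds.

# Minimal sketch (not from this commit): how a federation-sender worker might
# resume from a persisted "out position".  InMemoryStore and its method
# signatures are stand-ins for the real DataStore API.

class InMemoryStore(object):
    def __init__(self):
        self._federation_pos = 0
        # stream_id -> event payload (toy data)
        self._events = {1: "event-1", 2: "event-2", 3: "event-3"}

    def get_federation_out_pos(self, typ):
        # 'typ' distinguishes streams; only "federation" is used here
        return self._federation_pos

    def update_federation_out_pos(self, typ, stream_id):
        self._federation_pos = stream_id

    def get_events_after(self, from_id):
        return sorted(
            (sid, ev) for sid, ev in self._events.items() if sid > from_id
        )


def send_pending(store):
    """Send everything after the last persisted position, then advance it."""
    last_sent = store.get_federation_out_pos("federation")
    for stream_id, event in store.get_events_after(last_sent):
        print("would send %s to remote servers" % event)
        store.update_federation_out_pos("federation", stream_id)


if __name__ == "__main__":
    send_pending(InMemoryStore())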
Diffstat (limited to 'synapse/replication/slave/storage/events.py')
-rw-r--r--  synapse/replication/slave/storage/events.py  14  ++++++++++++++
1 file changed, 14 insertions, 0 deletions
diff --git a/synapse/replication/slave/storage/events.py b/synapse/replication/slave/storage/events.py

index 0c26e96e98..64f18bbb3e 100644
--- a/synapse/replication/slave/storage/events.py
+++ b/synapse/replication/slave/storage/events.py
@@ -26,6 +26,11 @@ from synapse.storage.stream import StreamStore
 from synapse.util.caches.stream_change_cache import StreamChangeCache
 
 import ujson as json
+import logging
+
+
+logger = logging.getLogger(__name__)
+
 
 # So, um, we want to borrow a load of functions intended for reading from
 # a DataStore, but we don't want to take functions that either write to the
@@ -180,6 +185,11 @@ class SlavedEventStore(BaseSlavedStore):
         EventFederationStore.__dict__["_get_forward_extremeties_for_room"]
     )
 
+    get_all_new_events_stream = DataStore.get_all_new_events_stream.__func__
+
+    get_federation_out_pos = DataStore.get_federation_out_pos.__func__
+    update_federation_out_pos = DataStore.update_federation_out_pos.__func__
+
     def stream_positions(self):
         result = super(SlavedEventStore, self).stream_positions()
         result["events"] = self._stream_id_gen.get_current_token()
@@ -194,6 +204,10 @@ class SlavedEventStore(BaseSlavedStore):
         stream = result.get("events")
         if stream:
             self._stream_id_gen.advance(int(stream["position"]))
+
+            if stream["rows"]:
+                logger.info("Got %d event rows", len(stream["rows"]))
+
             for row in stream["rows"]:
                 self._process_replication_row(
                     row, backfilled=False, state_resets=state_resets
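
The new class attributes follow the existing pattern in this file: read-only DataStore methods are grafted onto SlavedEventStore by pulling the underlying function out of the Python 2 unbound method with .__func__. Below is a toy illustration of that trick using made-up classes (not synapse code), with a guard so it also runs on Python 3, where plain functions have no __func__ wrapper.

# Toy illustration of the ".__func__" borrowing pattern (not synapse code).

class Source(object):
    def read(self):
        return "read() called on a %s" % type(self).__name__


class Borrower(object):
    # On Python 2, Source.read is an unbound method tied to Source; .__func__
    # strips that binding so the bare function can be reattached here.  On
    # Python 3, Source.read is already a plain function, hence the getattr guard.
    read = getattr(Source.read, "__func__", Source.read)


print(Borrower().read())   # -> "read() called on a Borrower"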