summary refs log tree commit diff
path: root/synapse
diff options
context:
space:
mode:
author: Mark Haines <mjark@negativecurvature.net> 2016-05-13 12:05:19 +0100
committer: Mark Haines <mjark@negativecurvature.net> 2016-05-13 12:05:19 +0100
commit: cdda850ce1db08c7ee354655e6ba712eae155612 (patch)
tree: e6c4cb46a4debb6141d7c436de4bd32825e64d6b /synapse
parentMerge pull request #777 from matrix-org/markjh/move_filter_for_client (diff)
parentLog the stream IDs in an order that makes sense (diff)
download: synapse-cdda850ce1db08c7ee354655e6ba712eae155612.tar.xz
Merge pull request #781 from matrix-org/markjh/replication_problems
Fix a bug in replication that was causing the pusher to tight loop
Diffstat (limited to 'synapse')
-rw-r--r--synapse/replication/resource.py2
-rw-r--r--synapse/replication/slave/storage/events.py2
2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/synapse/replication/resource.py b/synapse/replication/resource.py
index 69ad1de863..0e983ae7fa 100644
--- a/synapse/replication/resource.py
+++ b/synapse/replication/resource.py
@@ -164,8 +164,8 @@ class ReplicationResource(Resource):
                 "Replicating %d rows of %s from %s -> %s",
                 len(stream_content["rows"]),
                 stream_name,
-                stream_content["position"],
                 request_streams.get(stream_name),
+                stream_content["position"],
             )
 
         request.write(json.dumps(result, ensure_ascii=False))
diff --git a/synapse/replication/slave/storage/events.py b/synapse/replication/slave/storage/events.py
index 7ba7a6f6e4..635febb174 100644
--- a/synapse/replication/slave/storage/events.py
+++ b/synapse/replication/slave/storage/events.py
@@ -146,12 +146,14 @@ class SlavedEventStore(BaseSlavedStore):
 
         stream = result.get("forward_ex_outliers")
         if stream:
+            self._stream_id_gen.advance(stream["position"])
             for row in stream["rows"]:
                 event_id = row[1]
                 self._invalidate_get_event_cache(event_id)
 
         stream = result.get("backward_ex_outliers")
         if stream:
+            self._backfill_id_gen.advance(-stream["position"])
             for row in stream["rows"]:
                 event_id = row[1]
                 self._invalidate_get_event_cache(event_id)