Diffstat (limited to 'synapse/replication')
-rw-r--r--  synapse/replication/resource.py                    |  2
-rw-r--r--  synapse/replication/slave/storage/account_data.py  | 61
-rw-r--r--  synapse/replication/slave/storage/events.py        |  2
3 files changed, 64 insertions(+), 1 deletion(-)
diff --git a/synapse/replication/resource.py b/synapse/replication/resource.py
index 69ad1de863..0e983ae7fa 100644
--- a/synapse/replication/resource.py
+++ b/synapse/replication/resource.py
@@ -164,8 +164,8 @@ class ReplicationResource(Resource):
                 "Replicating %d rows of %s from %s -> %s",
                 len(stream_content["rows"]),
                 stream_name,
-                stream_content["position"],
                 request_streams.get(stream_name),
+                stream_content["position"],
             )
 
         request.write(json.dumps(result, ensure_ascii=False))
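Editor's note on the resource.py hunk above: the format string reads "from %s -> %s", so the reorder makes the pair (token the requester asked from, position the stream has reached) rather than the reverse. A minimal sketch of the corrected call using the names from the hunk; the surrounding logger.info call is assumed (the hunk only shows the arguments) and the token values in the comments are invented for illustration:

    logger.info(
        "Replicating %d rows of %s from %s -> %s",
        len(stream_content["rows"]),        # e.g. 3
        stream_name,                        # e.g. "user_account_data"
        request_streams.get(stream_name),   # token the worker requested from, e.g. 510
        stream_content["position"],         # token the stream has now reached, e.g. 513
    )
    # -> "Replicating 3 rows of user_account_data from 510 -> 513"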
diff --git a/synapse/replication/slave/storage/account_data.py b/synapse/replication/slave/storage/account_data.py
new file mode 100644
index 0000000000..f59b0eabbc
--- /dev/null
+++ b/synapse/replication/slave/storage/account_data.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2016 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import BaseSlavedStore
+from ._slaved_id_tracker import SlavedIdTracker
+from synapse.storage.account_data import AccountDataStore
+
+
+class SlavedAccountDataStore(BaseSlavedStore):
+
+    def __init__(self, db_conn, hs):
+        super(SlavedAccountDataStore, self).__init__(db_conn, hs)
+        self._account_data_id_gen = SlavedIdTracker(
+            db_conn, "account_data_max_stream_id", "stream_id",
+        )
+
+    get_global_account_data_by_type_for_users = (
+        AccountDataStore.__dict__["get_global_account_data_by_type_for_users"]
+    )
+
+    get_global_account_data_by_type_for_user = (
+        AccountDataStore.__dict__["get_global_account_data_by_type_for_user"]
+    )
+
+    def stream_positions(self):
+        result = super(SlavedAccountDataStore, self).stream_positions()
+        position = self._account_data_id_gen.get_current_token()
+        result["user_account_data"] = position
+        result["room_account_data"] = position
+        result["tag_account_data"] = position
+        return result
+
+    def process_replication(self, result):
+        stream = result.get("user_account_data")
+        if stream:
+            self._account_data_id_gen.advance(int(stream["position"]))
+            for row in stream["rows"]:
+                user_id, data_type = row[1:3]
+                self.get_global_account_data_by_type_for_user.invalidate(
+                    (data_type, user_id,)
+                )
+
+        stream = result.get("room_account_data")
+        if stream:
+            self._account_data_id_gen.advance(int(stream["position"]))
+
+        stream = result.get("tag_account_data")
+        if stream:
+            self._account_data_id_gen.advance(int(stream["position"]))
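Editor's note on the new SlavedAccountDataStore: the class only exposes stream_positions() and process_replication(result); how a worker drives them is outside this diff. A rough sketch of the intended poll loop, where fetch_replication is a hypothetical helper standing in for the HTTP request to the master's ReplicationResource:

    def replicate_once(store, fetch_replication):
        # Ask the slaved store which stream tokens it has caught up to.
        streams = store.stream_positions()
        # Hypothetical helper: send those tokens to the master's replication
        # resource and return the newer rows as decoded JSON.
        result = fetch_replication(streams)
        # Let the store advance its id trackers and invalidate caches.
        store.process_replication(result)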
diff --git a/synapse/replication/slave/storage/events.py b/synapse/replication/slave/storage/events.py
index 0e29bd51d6..99cddf2518 100644
--- a/synapse/replication/slave/storage/events.py
+++ b/synapse/replication/slave/storage/events.py
@@ -165,12 +165,14 @@ class SlavedEventStore(BaseSlavedStore):
 
         stream = result.get("forward_ex_outliers")
         if stream:
+            self._stream_id_gen.advance(stream["position"])
             for row in stream["rows"]:
                 event_id = row[1]
                 self._invalidate_get_event_cache(event_id)
 
         stream = result.get("backward_ex_outliers")
         if stream:
+            self._backfill_id_gen.advance(-stream["position"])
             for row in stream["rows"]:
                 event_id = row[1]
                 self._invalidate_get_event_cache(event_id)
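Editor's note on the process_replication() changes: both stores above treat each stream in the result as a dict with a "position" and a list of "rows". A hedged example of the payload shape they handle; the values are invented, and the row layouts are inferred only from the row[1] / row[1:3] indexing in the code, not from any schema in this diff:

    result = {
        "forward_ex_outliers": {
            "position": 513,
            # row[1] is read as the event_id whose get_event cache is invalidated
            "rows": [[513, "$1461000000123abc:example.com"]],
        },
        "user_account_data": {
            "position": 513,
            # row[1:3] is read as (user_id, data_type) for cache invalidation
            "rows": [[513, "@alice:example.com", "m.push_rules"]],
        },
    }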