author     Erik Johnston <erik@matrix.org>   2017-02-04 08:28:56 +0000
committer  Erik Johnston <erik@matrix.org>   2017-02-04 08:28:56 +0000
commit     fad3a8433535d7de321350bbd17373138c6fd3ec (patch)
tree       70fe66754b86a7a6b1466030dcc8d7e260063f4d /synapse/replication
parent     Bump version and changelog (diff)
parent     Bump changelog and version (diff)
download   synapse-fad3a8433535d7de321350bbd17373138c6fd3ec.tar.xz

Merge branch 'release-v0.19.0' of github.com:matrix-org/synapse (v0.19.0)
Diffstat (limited to 'synapse/replication')
 -rw-r--r--  synapse/replication/resource.py               | 23
 -rw-r--r--  synapse/replication/slave/storage/devices.py  | 72
 -rw-r--r--  synapse/replication/slave/storage/events.py   | 34
3 files changed, 99 insertions, 30 deletions
diff --git a/synapse/replication/resource.py b/synapse/replication/resource.py
index 4616e9b34a..d8eb14592b 100644
--- a/synapse/replication/resource.py
+++ b/synapse/replication/resource.py
@@ -46,6 +46,7 @@ STREAM_NAMES = (
     ("to_device",),
     ("public_rooms",),
     ("federation",),
+    ("device_lists",),
 )
 
 
@@ -140,6 +141,7 @@ class ReplicationResource(Resource):
         caches_token = self.store.get_cache_stream_token()
         public_rooms_token = self.store.get_current_public_room_stream_id()
         federation_token = self.federation_sender.get_current_token()
+        device_list_token = self.store.get_device_stream_token()
 
         defer.returnValue(_ReplicationToken(
             room_stream_token,
@@ -155,6 +157,7 @@ class ReplicationResource(Resource):
             int(stream_token.to_device_key),
             int(public_rooms_token),
             int(federation_token),
+            int(device_list_token),
         ))
 
     @request_handler()
@@ -214,6 +217,7 @@ class ReplicationResource(Resource):
         yield self.caches(writer, current_token, limit, request_streams)
         yield self.to_device(writer, current_token, limit, request_streams)
         yield self.public_rooms(writer, current_token, limit, request_streams)
+        yield self.device_lists(writer, current_token, limit, request_streams)
         self.federation(writer, current_token, limit, request_streams, federation_ack)
         self.streams(writer, current_token, request_streams)
 
@@ -295,9 +299,6 @@ class ReplicationResource(Resource):
                 "backward_ex_outliers", res.backward_ex_outliers,
                 ("position", "event_id", "state_group"),
             )
-            writer.write_header_and_rows(
-                "state_resets", res.state_resets, ("position",),
-            )
 
     @defer.inlineCallbacks
     def presence(self, writer, current_token, request_streams):
@@ -495,6 +496,20 @@ class ReplicationResource(Resource):
                 "position", "type", "content",
             ), position=upto_token)
 
+    @defer.inlineCallbacks
+    def device_lists(self, writer, current_token, limit, request_streams):
+        current_position = current_token.device_lists
+
+        device_lists = request_streams.get("device_lists")
+
+        if device_lists is not None and device_lists != current_position:
+            changes = yield self.store.get_all_device_list_changes_for_remotes(
+                device_lists,
+            )
+            writer.write_header_and_rows("device_lists", changes, (
+                "position", "user_id", "destination",
+            ), position=current_position)
+
 
 class _Writer(object):
     """Writes the streams as a JSON object as the response to the request"""
@@ -527,7 +542,7 @@ class _Writer(object):
 class _ReplicationToken(collections.namedtuple("_ReplicationToken", (
     "events", "presence", "typing", "receipts", "account_data", "backfill",
     "push_rules", "pushers", "state", "caches", "to_device", "public_rooms",
-    "federation",
+    "federation", "device_lists",
 ))):
     __slots__ = []
 
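The device_lists() handler above emits rows with the columns ("position", "user_id", "destination"). A minimal sketch of how a consumer sees that stream, assuming the usual envelope of {"position": ..., "rows": [...]} that the slaved stores below read (the exact top-level JSON keys and the endpoint path are not shown in this diff):

# Hypothetical payload for the new "device_lists" stream; the row layout
# [stream_id, user_id, destination] matches the columns written above,
# while the surrounding envelope is an assumption.
example_response = {
    "device_lists": {
        "position": 105,
        "rows": [
            [104, "@alice:example.com", None],
            [105, "@alice:example.com", "remote.example.org"],
        ],
    },
}

stream = example_response.get("device_lists")
if stream:
    new_position = int(stream["position"])
    for stream_id, user_id, destination in stream["rows"]:
        # A slave would advance its id generator to new_position and mark
        # user_id (and, if set, destination) as changed in its caches.
        print(stream_id, user_id, destination)
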
diff --git a/synapse/replication/slave/storage/devices.py b/synapse/replication/slave/storage/devices.py
new file mode 100644
index 0000000000..ca46aa17b6
--- /dev/null
+++ b/synapse/replication/slave/storage/devices.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+# Copyright 2016 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import BaseSlavedStore
+from ._slaved_id_tracker import SlavedIdTracker
+from synapse.storage import DataStore
+from synapse.util.caches.stream_change_cache import StreamChangeCache
+
+
+class SlavedDeviceStore(BaseSlavedStore):
+    def __init__(self, db_conn, hs):
+        super(SlavedDeviceStore, self).__init__(db_conn, hs)
+
+        self.hs = hs
+
+        self._device_list_id_gen = SlavedIdTracker(
+            db_conn, "device_lists_stream", "stream_id",
+        )
+        device_list_max = self._device_list_id_gen.get_current_token()
+        self._device_list_stream_cache = StreamChangeCache(
+            "DeviceListStreamChangeCache", device_list_max,
+        )
+        self._device_list_federation_stream_cache = StreamChangeCache(
+            "DeviceListFederationStreamChangeCache", device_list_max,
+        )
+
+    get_device_stream_token = DataStore.get_device_stream_token.__func__
+    get_user_whose_devices_changed = DataStore.get_user_whose_devices_changed.__func__
+    get_devices_by_remote = DataStore.get_devices_by_remote.__func__
+    _get_devices_by_remote_txn = DataStore._get_devices_by_remote_txn.__func__
+    _get_e2e_device_keys_txn = DataStore._get_e2e_device_keys_txn.__func__
+    mark_as_sent_devices_by_remote = DataStore.mark_as_sent_devices_by_remote.__func__
+    _mark_as_sent_devices_by_remote_txn = (
+        DataStore._mark_as_sent_devices_by_remote_txn.__func__
+    )
+
+    def stream_positions(self):
+        result = super(SlavedDeviceStore, self).stream_positions()
+        result["device_lists"] = self._device_list_id_gen.get_current_token()
+        return result
+
+    def process_replication(self, result):
+        stream = result.get("device_lists")
+        if stream:
+            self._device_list_id_gen.advance(int(stream["position"]))
+            for row in stream["rows"]:
+                stream_id = row[0]
+                user_id = row[1]
+                destination = row[2]
+
+                self._device_list_stream_cache.entity_has_changed(
+                    user_id, stream_id
+                )
+
+                if destination:
+                    self._device_list_federation_stream_cache.entity_has_changed(
+                        destination, stream_id
+                    )
+
+        return super(SlavedDeviceStore, self).process_replication(result)
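The new SlavedDeviceStore follows the same mixin pattern as the other slaved stores: it contributes its own stream_positions() entry and chains process_replication() via super(). A hedged sketch of how a worker store might compose it (the class name here is hypothetical; the concrete worker store classes are not part of this diff):

from synapse.replication.slave.storage.devices import SlavedDeviceStore
from synapse.replication.slave.storage.events import SlavedEventStore


# Hypothetical worker store: each mixin adds its own stream position and
# replication handling, and the super() chain ensures all of them run.
class ExampleWorkerStore(SlavedDeviceStore, SlavedEventStore):
    pass
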
diff --git a/synapse/replication/slave/storage/events.py b/synapse/replication/slave/storage/events.py
index 64f18bbb3e..d72ff6055c 100644
--- a/synapse/replication/slave/storage/events.py
+++ b/synapse/replication/slave/storage/events.py
@@ -73,12 +73,12 @@ class SlavedEventStore(BaseSlavedStore):
     # to reach inside the __dict__ to extract them.
     get_rooms_for_user = RoomMemberStore.__dict__["get_rooms_for_user"]
     get_users_in_room = RoomMemberStore.__dict__["get_users_in_room"]
+    get_users_who_share_room_with_user = (
+        RoomMemberStore.__dict__["get_users_who_share_room_with_user"]
+    )
     get_latest_event_ids_in_room = EventFederationStore.__dict__[
         "get_latest_event_ids_in_room"
     ]
-    _get_current_state_for_key = StateStore.__dict__[
-        "_get_current_state_for_key"
-    ]
     get_invited_rooms_for_user = RoomMemberStore.__dict__[
         "get_invited_rooms_for_user"
     ]
@@ -115,8 +115,6 @@ class SlavedEventStore(BaseSlavedStore):
     )
     get_event = DataStore.get_event.__func__
     get_events = DataStore.get_events.__func__
-    get_current_state = DataStore.get_current_state.__func__
-    get_current_state_for_key = DataStore.get_current_state_for_key.__func__
     get_rooms_for_user_where_membership_is = (
         DataStore.get_rooms_for_user_where_membership_is.__func__
     )
@@ -197,10 +195,6 @@ class SlavedEventStore(BaseSlavedStore):
         return result
 
     def process_replication(self, result):
-        state_resets = set(
-            r[0] for r in result.get("state_resets", {"rows": []})["rows"]
-        )
-
         stream = result.get("events")
         if stream:
             self._stream_id_gen.advance(int(stream["position"]))
@@ -210,7 +204,7 @@ class SlavedEventStore(BaseSlavedStore):
 
             for row in stream["rows"]:
                 self._process_replication_row(
-                    row, backfilled=False, state_resets=state_resets
+                    row, backfilled=False,
                 )
 
         stream = result.get("backfill")
@@ -218,7 +212,7 @@ class SlavedEventStore(BaseSlavedStore):
             self._backfill_id_gen.advance(-int(stream["position"]))
             for row in stream["rows"]:
                 self._process_replication_row(
-                    row, backfilled=True, state_resets=state_resets
+                    row, backfilled=True,
                 )
 
         stream = result.get("forward_ex_outliers")
@@ -237,21 +231,15 @@ class SlavedEventStore(BaseSlavedStore):
 
         return super(SlavedEventStore, self).process_replication(result)
 
-    def _process_replication_row(self, row, backfilled, state_resets):
-        position = row[0]
+    def _process_replication_row(self, row, backfilled):
         internal = json.loads(row[1])
         event_json = json.loads(row[2])
         event = FrozenEvent(event_json, internal_metadata_dict=internal)
         self.invalidate_caches_for_event(
-            event, backfilled, reset_state=position in state_resets
+            event, backfilled,
         )
 
-    def invalidate_caches_for_event(self, event, backfilled, reset_state):
-        if reset_state:
-            self._get_current_state_for_key.invalidate_all()
-            self.get_rooms_for_user.invalidate_all()
-            self.get_users_in_room.invalidate((event.room_id,))
-
+    def invalidate_caches_for_event(self, event, backfilled):
         self._invalidate_get_event_cache(event.event_id)
 
         self.get_latest_event_ids_in_room.invalidate((event.room_id,))
@@ -273,8 +261,6 @@ class SlavedEventStore(BaseSlavedStore):
             self._invalidate_get_event_cache(event.redacts)
 
         if event.type == EventTypes.Member:
-            self.get_rooms_for_user.invalidate((event.state_key,))
-            self.get_users_in_room.invalidate((event.room_id,))
             self._membership_stream_cache.entity_has_changed(
                 event.state_key, event.internal_metadata.stream_ordering
             )
@@ -289,7 +275,3 @@ class SlavedEventStore(BaseSlavedStore):
         if (not event.internal_metadata.is_invite_from_remote()
                 and event.internal_metadata.is_outlier()):
             return
-
-        self._get_current_state_for_key.invalidate((
-            event.room_id, event.type, event.state_key
-        ))
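With the state_resets stream gone, _process_replication_row only needs the JSON columns of each events-stream row. A minimal sketch of the row shape it now decodes (values are hypothetical; only the column layout is taken from the code above):

import json

# row[0] is the stream position (no longer used here), row[1] the event's
# internal metadata JSON, row[2] the event JSON that FrozenEvent is built from.
row = [
    42,
    json.dumps({"stream_ordering": 42}),
    json.dumps({
        "event_id": "$example:example.com",
        "type": "m.room.message",
        "room_id": "!room:example.com",
        "sender": "@alice:example.com",
        "content": {"body": "hi"},
    }),
]

internal = json.loads(row[1])
event_json = json.loads(row[2])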