diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index e8495f1eb9..b9968debe5 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -116,6 +116,9 @@ class DataStore(RoomMemberStore, RoomStore,
self._public_room_id_gen = StreamIdGenerator(
db_conn, "public_room_list_stream", "stream_id"
)
+ self._device_list_id_gen = StreamIdGenerator(
+ db_conn, "device_lists_stream", "stream_id",
+ )
self._transaction_id_gen = IdGenerator(db_conn, "sent_transactions", "id")
self._state_groups_id_gen = IdGenerator(db_conn, "state_groups", "id")
@@ -210,6 +213,14 @@ class DataStore(RoomMemberStore, RoomStore,
prefilled_cache=device_outbox_prefill,
)
+ device_list_max = self._device_list_id_gen.get_current_token()
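+        # Stream change caches so callers can cheaply answer "has this
+        # entity's device list changed since stream position X?".  The first
+        # is keyed by user_id, the second by remote destination (server name).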
+ self._device_list_stream_cache = StreamChangeCache(
+ "DeviceListStreamChangeCache", device_list_max,
+ )
+ self._device_list_federation_stream_cache = StreamChangeCache(
+ "DeviceListFederationStreamChangeCache", device_list_max,
+ )
+
cur = LoggingTransaction(
db_conn.cursor(),
name="_find_stream_orderings_for_times_txn",
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 963ef999d5..05374682fd 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -387,6 +387,10 @@ class SQLBaseStore(object):
Args:
table : string giving the table name
values : dict of new column names and values for them
+
+ Returns:
+ bool: Whether the row was inserted or not. Only useful when
+            `or_ignore` is True.
"""
try:
yield self.runInteraction(
@@ -398,6 +402,8 @@ class SQLBaseStore(object):
# a cursor after we receive an error from the db.
if not or_ignore:
raise
+            defer.returnValue(False)
+        defer.returnValue(True)
@staticmethod
def _simple_insert_txn(txn, table, values):
diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py
index 17920d4480..b594f501f9 100644
--- a/synapse/storage/devices.py
+++ b/synapse/storage/devices.py
@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
+import ujson as json
from twisted.internet import defer
@@ -33,17 +34,13 @@ class DeviceStore(SQLBaseStore):
user_id (str): id of user associated with the device
device_id (str): id of device
initial_device_display_name (str): initial displayname of the
- device
- ignore_if_known (bool): ignore integrity errors which mean the
- device is already known
+                device. Ignored if the device already exists.
Returns:
- defer.Deferred
- Raises:
- StoreError: if ignore_if_known is False and the device was already
- known
+            defer.Deferred: a boolean which is True if the device was
+                inserted, or False if a device with that ID already existed.
"""
try:
- yield self._simple_insert(
+ inserted = yield self._simple_insert(
"devices",
values={
"user_id": user_id,
@@ -51,8 +48,9 @@ class DeviceStore(SQLBaseStore):
"display_name": initial_device_display_name
},
desc="store_device",
- or_ignore=ignore_if_known,
+ or_ignore=True,
)
+ defer.returnValue(inserted)
except Exception as e:
logger.error("store_device with device_id=%s(%r) user_id=%s(%r)"
" display_name=%s(%r) failed: %s",
@@ -139,3 +137,156 @@ class DeviceStore(SQLBaseStore):
)
defer.returnValue({d["device_id"]: d for d in devices})
+
+ def get_devices_by_remote(self, destination, from_stream_id):
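+        """Get the stream of device list updates to send to a remote server.
+
+        Returns a Deferred resolving to (now_stream_id, updates): the current
+        stream id and a list of update dicts, one per (user_id, device_id)
+        that has changed since `from_stream_id` and has not yet been sent.
+        """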
+ now_stream_id = self._device_list_id_gen.get_current_token()
+
+        has_changed = self._device_list_federation_stream_cache.has_entity_changed(
+            destination, int(from_stream_id)
+        )
+        if not has_changed:
+            return defer.succeed((now_stream_id, []))
+
+ return self.runInteraction(
+ "get_devices_by_remote", self._get_devices_by_remote_txn,
+ destination, from_stream_id, now_stream_id,
+ )
+
+ def _get_devices_by_remote_txn(self, txn, destination, from_stream_id,
+ now_stream_id):
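+        # Fetch the unsent pokes queued for this destination, keeping only the
+        # most recent stream_id for each (user_id, device_id).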
+ sql = """
+ SELECT user_id, device_id, max(stream_id) FROM device_lists_outbound_pokes
+ WHERE destination = ? AND stream_id > ? AND stream_id <= ? AND sent = ?
+ GROUP BY user_id, device_id
+ """
+ txn.execute(
+ sql, (destination, from_stream_id, now_stream_id, False)
+ )
+ rows = txn.fetchall()
+
+ if not rows:
+ return now_stream_id, []
+
+ # maps (user_id, device_id) -> stream_id
+ query_map = {(r[0], r[1]): r[2] for r in rows}
+ devices = self._get_e2e_device_keys_txn(
+ txn, query_map.keys(), include_all_devices=True
+ )
+
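+        # The highest stream_id already sent to this destination for each
+        # user; used as the prev_id of the first update in the chain.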
+ prev_sent_id_sql = """
+ SELECT coalesce(max(stream_id), 0) as stream_id
+ FROM device_lists_outbound_pokes
+ WHERE destination = ? AND user_id = ? AND sent = ?
+ """
+
+ results = []
+ for user_id, user_devices in devices.iteritems():
+ txn.execute(prev_sent_id_sql, (destination, user_id, True))
+ rows = txn.fetchall()
+ prev_id = rows[0][0]
+            for device_id, device in user_devices.iteritems():
+ stream_id = query_map[(user_id, device_id)]
+ result = {
+ "user_id": user_id,
+ "device_id": device_id,
+ "prev_id": prev_id,
+ "stream_id": stream_id,
+ }
+
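+                # Chain the updates: subsequent updates for this user use
+                # this stream_id as their prev_id.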
+ prev_id = stream_id
+
+                key_json = device.get("key_json", None)
+                if key_json:
+                    result["keys"] = json.loads(key_json)
+                device_display_name = device.get("device_display_name", None)
+ if device_display_name:
+ result["device_display_name"] = device_display_name
+
+                results.append(result)
+
+ return now_stream_id, results
+
+ def mark_as_sent_devices_by_remote(self, destination, stream_id):
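+        """Mark that device list updates up to and including `stream_id` have
+        been sent to `destination`, and prune superseded outbound pokes.
+        """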
+ return self.runInteraction(
+ "mark_as_sent_devices_by_remote", self._mark_as_sent_devices_by_remote_txn,
+ destination, stream_id,
+ )
+
+ @defer.inlineCallbacks
+ def get_user_whose_devices_changed(self, from_key):
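+        """Get the set of user_ids whose device lists have changed since
+        `from_key`.
+        """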
+ from_key = int(from_key)
+ changed = self._device_list_stream_cache.get_all_entities_changed(from_key)
+ if changed is not None:
+ defer.returnValue(set(changed))
+
+ sql = """
+ SELECT user_id FROM device_lists_stream WHERE stream_id > ?
+ """
+ rows = yield self._execute("get_user_whose_devices_changed", None, sql, from_key)
+        defer.returnValue(set(row[0] for row in rows))
+
+ def _mark_as_sent_devices_by_remote_txn(self, txn, destination, stream_id):
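+        # Drop pokes superseded by a newer poke at or below the acknowledged
+        # stream_id; the newest such poke is kept as a record of what the
+        # remote has been sent.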
+ sql = """
+ DELETE FROM device_lists_outbound_pokes
+ WHERE destination = ? AND stream_id < (
+ SELECT coalesce(max(stream_id), 0) FROM device_lists_outbound_pokes
+ WHERE destination = ? AND stream_id <= ?
+ )
+ """
+ txn.execute(sql, (destination, destination, stream_id,))
+
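+        # Mark the remaining pokes up to the acknowledged stream_id as sent.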
+ sql = """
+ UPDATE device_lists_outbound_pokes SET sent = ?
+ WHERE destination = ? AND stream_id <= ?
+ """
+        txn.execute(sql, (True, destination, stream_id,))
+
+ @defer.inlineCallbacks
+ def add_device_change_to_streams(self, user_id, device_id, hosts):
+        """Persist that a user's device list has changed, and note which
+        remote hosts need to be poked. Returns a Deferred which resolves to
+        the stream id allocated for this change.
+        """
+ with self._device_list_id_gen.get_next() as stream_id:
+ yield self.runInteraction(
+ "add_device_change_to_streams", self._add_device_change_txn,
+ user_id, device_id, hosts, stream_id,
+ )
+ defer.returnValue(stream_id)
+
+ def _add_device_change_txn(self, txn, user_id, device_id, hosts, stream_id):
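+        # Only update the stream change caches once the transaction commits.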
+ txn.call_after(
+ self._device_list_stream_cache.entity_has_changed,
+ user_id, stream_id,
+ )
+ for host in hosts:
+ txn.call_after(
+ self._device_list_federation_stream_cache.entity_has_changed,
+ host, stream_id,
+ )
+
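+        # Record the change in the device list stream...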
+ self._simple_insert_txn(
+ txn,
+ table="device_lists_stream",
+ values={
+ "stream_id": stream_id,
+ "user_id": user_id,
+ "device_id": device_id,
+ }
+ )
+
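+        # ...and queue an unsent outbound poke for each destination.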
+ self._simple_insert_many_txn(
+ txn,
+ table="device_lists_outbound_pokes",
+ values=[
+ {
+ "destination": destination,
+ "stream_id": stream_id,
+ "user_id": user_id,
+ "device_id": device_id,
+ "sent": False,
+ }
+ for destination in hosts
+ ]
+ )
+
+ def get_device_stream_token(self):
+ return self._device_list_id_gen.get_current_token()
diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py
index 385d607056..f82943a7a8 100644
--- a/synapse/storage/end_to_end_keys.py
+++ b/synapse/storage/end_to_end_keys.py
@@ -12,9 +12,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import collections
-
-import twisted.internet.defer
+from twisted.internet import defer
from ._base import SQLBaseStore
@@ -33,7 +31,7 @@ class EndToEndKeyStore(SQLBaseStore):
}
)
- def get_e2e_device_keys(self, query_list):
+ def get_e2e_device_keys(self, query_list, include_all_devices=False):
"""Fetch a list of device keys.
Args:
query_list(list): List of pairs of user_ids and device_ids.
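+            include_all_devices (bool): whether to include devices that have
+                no uploaded device keys.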
@@ -45,10 +43,11 @@ class EndToEndKeyStore(SQLBaseStore):
return {}
return self.runInteraction(
- "get_e2e_device_keys", self._get_e2e_device_keys_txn, query_list
+ "get_e2e_device_keys", self._get_e2e_device_keys_txn,
+ query_list, include_all_devices,
)
- def _get_e2e_device_keys_txn(self, txn, query_list):
+ def _get_e2e_device_keys_txn(self, txn, query_list, include_all_devices):
query_clauses = []
query_params = []
@@ -63,23 +62,23 @@ class EndToEndKeyStore(SQLBaseStore):
query_clauses.append(query_clause)
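+        # With include_all_devices we also return devices that have no
+        # uploaded keys (their key_json will be NULL).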
sql = (
- "SELECT k.user_id, k.device_id, "
+ "SELECT user_id, device_id, "
" d.display_name AS device_display_name, "
" k.key_json"
" FROM e2e_device_keys_json k"
- " LEFT JOIN devices d ON d.user_id = k.user_id"
- " AND d.device_id = k.device_id"
+ " %s JOIN devices d USING (user_id, device_id)"
" WHERE %s"
) % (
+ "FULL OUTER" if include_all_devices else "LEFT",
" OR ".join("(" + q + ")" for q in query_clauses)
)
txn.execute(sql, query_params)
rows = self.cursor_to_dict(txn)
- result = collections.defaultdict(dict)
+ result = {}
for row in rows:
- result[row["user_id"]][row["device_id"]] = row
+ result.setdefault(row["user_id"], {})[row["device_id"]] = row
return result
@@ -152,7 +151,7 @@ class EndToEndKeyStore(SQLBaseStore):
"claim_e2e_one_time_keys", _claim_e2e_one_time_keys
)
- @twisted.internet.defer.inlineCallbacks
+ @defer.inlineCallbacks
def delete_e2e_keys_by_device(self, user_id, device_id):
yield self._simple_delete(
table="e2e_device_keys_json",
diff --git a/synapse/storage/schema/delta/40/device_list_streams.sql b/synapse/storage/schema/delta/40/device_list_streams.sql
new file mode 100644
index 0000000000..61cac63bbb
--- /dev/null
+++ b/synapse/storage/schema/delta/40/device_list_streams.sql
@@ -0,0 +1,56 @@
+/* Copyright 2017 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+CREATE TABLE device_list_streams_remote (
+ list_id TEXT NOT NULL,
+ origin TEXT NOT NULL,
+ user_id TEXT NOT NULL,
+ is_full BOOLEAN NOT NULL,
+ ts BIGINT NOT NULL
+);
+
+CREATE INDEX device_list_streams_remote_id_origin ON device_list_streams_remote(
+ origin, list_id, user_id
+);
+
+
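+-- Cache of device information received from remote servers, one row of JSON
+-- content per (user_id, device_id).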
+CREATE TABLE device_lists_remote_cache (
+ user_id TEXT NOT NULL,
+ device_id TEXT NOT NULL,
+ content TEXT NOT NULL
+);
+
+CREATE INDEX device_lists_remote_cache_id ON device_lists_remote_cache(user_id, device_id);
+
+
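+-- Stream of device list changes: one row per change, ordered by stream_id.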
+CREATE TABLE device_lists_stream (
+ stream_id BIGINT NOT NULL,
+ user_id TEXT NOT NULL,
+ device_id TEXT NOT NULL
+);
+
+CREATE INDEX device_lists_stream_id ON device_lists_stream(stream_id, user_id);
+
+
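+-- Updates that need to be sent to remote destinations; the sent flag is set
+-- once the update has been sent to that destination.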
+CREATE TABLE device_lists_outbound_pokes (
+ destination TEXT NOT NULL,
+ stream_id BIGINT NOT NULL,
+ user_id TEXT NOT NULL,
+ device_id TEXT NOT NULL,
+ sent BOOLEAN NOT NULL
+);
+
+CREATE INDEX device_lists_outbound_pokes_id ON device_lists_outbound_pokes(destination, stream_id);
+CREATE INDEX device_lists_outbound_pokes_user ON device_lists_outbound_pokes(destination, user_id);
|