# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from six import iteritems

from canonicaljson import encode_canonical_json

from twisted.internet import defer

from synapse.util.caches.descriptors import cached

from ._base import SQLBaseStore, db_to_json


class EndToEndKeyWorkerStore(SQLBaseStore):
    @defer.inlineCallbacks
    def get_e2e_device_keys(
        self, query_list, include_all_devices=False,
        include_deleted_devices=False,
    ):
        """Fetch a list of device keys.
        Args:
            query_list(list): List of pairs of user_ids and device_ids.
            include_all_devices (bool): whether to include entries for devices
                that don't have device keys
            include_deleted_devices (bool): whether to include null entries for
                devices which no longer exist (but were in the query_list).
                This option only takes effect if include_all_devices is true.
        Returns:
            Dict mapping from user_id to dict mapping from device_id to
            dict containing "keys" (the key JSON parsed into a dict) and
            "device_display_name".
        """
        if not query_list:
            defer.returnValue({})

        results = yield self.runInteraction(
            "get_e2e_device_keys", self._get_e2e_device_keys_txn,
            query_list, include_all_devices, include_deleted_devices,
        )

        for user_id, device_keys in iteritems(results):
            for device_id, device_info in iteritems(device_keys):
                device_info["keys"] = db_to_json(device_info.pop("key_json"))

        defer.returnValue(results)

    def _get_e2e_device_keys_txn(
        self, txn, query_list, include_all_devices=False,
        include_deleted_devices=False,
    ):
        query_clauses = []
        query_params = []

        if not include_all_devices:
            # Deleted devices can only be reported when every device is being
            # included, so the include_deleted_devices flag has no effect here.
            include_deleted_devices = False

        if include_deleted_devices:
            deleted_devices = set(query_list)
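            # Anything still left in this set after the result rows have been
            # processed is treated as a deleted device and given a null entry
            # below. (This relies on callers passing explicit device_ids when
            # include_deleted_devices is set, since rows are removed from the
            # set by exact (user_id, device_id) pair.)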

        for (user_id, device_id) in query_list:
            query_clause = "user_id = ?"
            query_params.append(user_id)

            if device_id is not None:
                query_clause += " AND device_id = ?"
                query_params.append(device_id)

            query_clauses.append(query_clause)

        sql = (
            "SELECT user_id, device_id, "
            "    d.display_name AS device_display_name, "
            "    k.key_json"
            " FROM devices d"
            "    %s JOIN e2e_device_keys_json k USING (user_id, device_id)"
            " WHERE %s"
        ) % (
            "LEFT" if include_all_devices else "INNER",
            " OR ".join("(" + q + ")" for q in query_clauses)
        )
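        # With include_all_devices the LEFT JOIN keeps devices that have not
        # uploaded any keys (their key_json comes back as NULL); otherwise the
        # INNER JOIN restricts the results to devices which have keys.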

        txn.execute(sql, query_params)
        rows = self.cursor_to_dict(txn)

        result = {}
        for row in rows:
            if include_deleted_devices:
                deleted_devices.remove((row["user_id"], row["device_id"]))
            result.setdefault(row["user_id"], {})[row["device_id"]] = row

        if include_deleted_devices:
            for user_id, device_id in deleted_devices:
                result.setdefault(user_id, {})[device_id] = None

        return result

    @defer.inlineCallbacks
    def get_e2e_one_time_keys(self, user_id, device_id, key_ids):
        """Retrieve a number of one-time keys for a user

        Args:
            user_id(str): id of user to get keys for
            device_id(str): id of device to get keys for
            key_ids(list[str]): list of key ids (excluding algorithm) to
                retrieve

        Returns:
            deferred resolving to Dict[(str, str), str]: map from (algorithm,
            key_id) to json string for key
        """

        rows = yield self._simple_select_many_batch(
            table="e2e_one_time_keys_json",
            column="key_id",
            iterable=key_ids,
            retcols=("algorithm", "key_id", "key_json",),
            keyvalues={
                "user_id": user_id,
                "device_id": device_id,
            },
            desc="add_e2e_one_time_keys_check",
        )

        defer.returnValue({
            (row["algorithm"], row["key_id"]): row["key_json"] for row in rows
        })

    @defer.inlineCallbacks
    def add_e2e_one_time_keys(self, user_id, device_id, time_now, new_keys):
        """Insert some new one time keys for a device. Errors if any of the
        keys already exist.

        Args:
            user_id(str): id of user to add keys for
            device_id(str): id of device to add keys for
            time_now(long): insertion time to record (ms since epoch)
            new_keys(iterable[(str, str, str)]): keys to add - each a tuple of
                (algorithm, key_id, key json)
        """

        def _add_e2e_one_time_keys(txn):
            # We are protected from race between lookup and insertion due to
            # a unique constraint. If there is a race of two calls to
            # `add_e2e_one_time_keys` then they'll conflict and we will only
            # insert one set.
            self._simple_insert_many_txn(
                txn, table="e2e_one_time_keys_json",
                values=[
                    {
                        "user_id": user_id,
                        "device_id": device_id,
                        "algorithm": algorithm,
                        "key_id": key_id,
                        "ts_added_ms": time_now,
                        "key_json": json_bytes,
                    }
                    for algorithm, key_id, json_bytes in new_keys
                ],
            )
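            # The number of unclaimed keys for this device has changed, so
            # drop the cached per-device counts (and notify any other workers
            # of the invalidation).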
            self._invalidate_cache_and_stream(
                txn, self.count_e2e_one_time_keys, (user_id, device_id,)
            )
        yield self.runInteraction(
            "add_e2e_one_time_keys_insert", _add_e2e_one_time_keys
        )

    @cached(max_entries=10000)
    def count_e2e_one_time_keys(self, user_id, device_id):
        """ Count the number of one time keys the server has for a device
        Returns:
            Dict mapping from algorithm to number of keys for that algorithm.
        """
        def _count_e2e_one_time_keys(txn):
            sql = (
                "SELECT algorithm, COUNT(key_id) FROM e2e_one_time_keys_json"
                " WHERE user_id = ? AND device_id = ?"
                " GROUP BY algorithm"
            )
            txn.execute(sql, (user_id, device_id))
            result = {}
            for algorithm, key_count in txn:
                result[algorithm] = key_count
            return result
        return self.runInteraction(
            "count_e2e_one_time_keys", _count_e2e_one_time_keys
        )


class EndToEndKeyStore(EndToEndKeyWorkerStore, SQLBaseStore):
    def set_e2e_device_keys(self, user_id, device_id, time_now, device_keys):
        """Stores device keys for a device. Returns whether there was a change
        or the keys were already in the database.
        """
        def _set_e2e_device_keys_txn(txn):
            old_key_json = self._simple_select_one_onecol_txn(
                txn,
                table="e2e_device_keys_json",
                keyvalues={
                    "user_id": user_id,
                    "device_id": device_id,
                },
                retcol="key_json",
                allow_none=True,
            )

            # In py3 we need old_key_json to match new_key_json type. The DB
            # returns unicode while encode_canonical_json returns bytes.
            new_key_json = encode_canonical_json(device_keys).decode("utf-8")

            if old_key_json == new_key_json:
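                # The stored keys are identical: leave the row alone and
                # report that nothing changed.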
                return False

            self._simple_upsert_txn(
                txn,
                table="e2e_device_keys_json",
                keyvalues={
                    "user_id": user_id,
                    "device_id": device_id,
                },
                values={
                    "ts_added_ms": time_now,
                    "key_json": new_key_json,
                }
            )

            return True

        return self.runInteraction(
            "set_e2e_device_keys", _set_e2e_device_keys_txn
        )

    def claim_e2e_one_time_keys(self, query_list):
        """Take a list of one time keys out of the database"""
        def _claim_e2e_one_time_keys(txn):
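            # For each (user_id, device_id, algorithm) requested, pick at most
            # one stored key, then delete the picked rows in the same
            # transaction and invalidate the cached per-device key counts.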
            sql = (
                "SELECT key_id, key_json FROM e2e_one_time_keys_json"
                " WHERE user_id = ? AND device_id = ? AND algorithm = ?"
                " LIMIT 1"
            )
            result = {}
            delete = []
            for user_id, device_id, algorithm in query_list:
                user_result = result.setdefault(user_id, {})
                device_result = user_result.setdefault(device_id, {})
                txn.execute(sql, (user_id, device_id, algorithm))
                for key_id, key_json in txn:
                    device_result[algorithm + ":" + key_id] = key_json
                    delete.append((user_id, device_id, algorithm, key_id))
            sql = (
                "DELETE FROM e2e_one_time_keys_json"
                " WHERE user_id = ? AND device_id = ? AND algorithm = ?"
                " AND key_id = ?"
            )
            for user_id, device_id, algorithm, key_id in delete:
                txn.execute(sql, (user_id, device_id, algorithm, key_id))
                self._invalidate_cache_and_stream(
                    txn, self.count_e2e_one_time_keys, (user_id, device_id,)
                )
            return result
        return self.runInteraction(
            "claim_e2e_one_time_keys", _claim_e2e_one_time_keys
        )

    def delete_e2e_keys_by_device(self, user_id, device_id):
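        """Delete the device keys and any remaining one-time keys for a
        device, and invalidate the cached one-time-key counts.
        """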
        def delete_e2e_keys_by_device_txn(txn):
            self._simple_delete_txn(
                txn,
                table="e2e_device_keys_json",
                keyvalues={"user_id": user_id, "device_id": device_id},
            )
            self._simple_delete_txn(
                txn,
                table="e2e_one_time_keys_json",
                keyvalues={"user_id": user_id, "device_id": device_id},
            )
            self._invalidate_cache_and_stream(
                txn, self.count_e2e_one_time_keys, (user_id, device_id,)
            )
        return self.runInteraction(
            "delete_e2e_keys_by_device", delete_e2e_keys_by_device_txn
        )