Diffstat
-rw-r--r-- | changelog.d/16943.bugfix                              |  1 |
-rw-r--r-- | changelog.d/17032.misc                                |  1 |
-rw-r--r-- | changelog.d/17036.misc                                |  1 |
-rw-r--r-- | docker/complement/conf/workers-shared-extra.yaml.j2   |  2 |
-rwxr-xr-x | scripts-dev/complement.sh                             |  2 |
-rw-r--r-- | synapse/handlers/e2e_keys.py                          | 36 |
-rw-r--r-- | synapse/http/proxy.py                                 |  3 |
-rw-r--r-- | synapse/http/server.py                                |  4 |
-rw-r--r-- | synapse/http/site.py                                  |  3 |
-rw-r--r-- | synapse/rest/client/keys.py                           | 14 |
-rw-r--r-- | synapse/storage/databases/main/event_push_actions.py  | 22 |
-rw-r--r-- | synapse/storage/databases/main/receipts.py            |  8 |
-rw-r--r-- | tests/handlers/test_e2e_keys.py                       | 50 |
13 files changed, 121 insertions, 26 deletions
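The `synapse/handlers/e2e_keys.py` and `synapse/rest/client/keys.py` hunks below make `/keys/device_signing/upload` idempotent under MSC3967: if the uploaded cross-signing keys exactly match what is already stored, the servlet returns `200 {}` without a UIA prompt. The following is a minimal standalone sketch of that comparison only; the function signature and the `stored` parameter are assumptions for illustration (Synapse's real `has_different_keys` is an async method that fetches each key from the store), not the actual API.

# Illustrative sketch of the idempotency check added below. The stored keys are
# passed in as a plain dict here instead of being awaited from
# store.get_e2e_cross_signing_key(); names are assumptions for the example.
from typing import Any, Dict, Optional

# Request-body field name -> cross-signing key type as stored in the DB.
REQ_BODY_KEY_TO_DB_KEY = {
    "master_key": "master",
    "self_signing_key": "self_signing",
    "user_signing_key": "user_signing",
}


def has_different_keys(
    body: Dict[str, Any], stored: Dict[str, Optional[Dict[str, Any]]]
) -> bool:
    """Return True on the first key in `body` that differs from (or is absent in) `stored`."""
    for req_body_key, db_key in REQ_BODY_KEY_TO_DB_KEY.items():
        if req_body_key in body and stored.get(db_key) != body[req_body_key]:
            return True
    return False


# The servlet then short-circuits: identical keys -> 200 {} and no UIA prompt;
# different keys -> UIA is required only if cross-signing is already set up.
assert has_different_keys({}, {}) is False  # an empty body never differs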
diff --git a/changelog.d/16943.bugfix b/changelog.d/16943.bugfix
new file mode 100644
index 0000000000..4360741132
--- /dev/null
+++ b/changelog.d/16943.bugfix
@@ -0,0 +1 @@
+Make the CSAPI endpoint `/keys/device_signing/upload` idempotent.
\ No newline at end of file
diff --git a/changelog.d/17032.misc b/changelog.d/17032.misc
new file mode 100644
index 0000000000..b03f6f42e5
--- /dev/null
+++ b/changelog.d/17032.misc
@@ -0,0 +1 @@
+Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar).
diff --git a/changelog.d/17036.misc b/changelog.d/17036.misc
new file mode 100644
index 0000000000..3296668059
--- /dev/null
+++ b/changelog.d/17036.misc
@@ -0,0 +1 @@
+Fix mypy with latest Twisted release.
diff --git a/docker/complement/conf/workers-shared-extra.yaml.j2 b/docker/complement/conf/workers-shared-extra.yaml.j2
index 2b11b487f6..32eada4419 100644
--- a/docker/complement/conf/workers-shared-extra.yaml.j2
+++ b/docker/complement/conf/workers-shared-extra.yaml.j2
@@ -102,6 +102,8 @@ experimental_features:
   msc3391_enabled: true
   # Filtering /messages by relation type.
   msc3874_enabled: true
+  # No UIA for cross-signing key upload the first time
+  msc3967_enabled: true
 
 server_notices:
   system_mxid_localpart: _server
diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh
index b1a8724b7e..2a779f8255 100755
--- a/scripts-dev/complement.sh
+++ b/scripts-dev/complement.sh
@@ -214,7 +214,7 @@ fi
 
 extra_test_args=()
 
-test_packages="./tests/csapi ./tests ./tests/msc3874 ./tests/msc3890 ./tests/msc3391 ./tests/msc3930 ./tests/msc3902"
+test_packages="./tests/csapi ./tests ./tests/msc3874 ./tests/msc3890 ./tests/msc3391 ./tests/msc3930 ./tests/msc3902 ./tests/msc3967"
 
 # Enable dirty runs, so tests will reuse the same container where possible.
 # This significantly speeds up tests, but increases the possibility of test pollution.
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index 63e00f102e..1ece54ccfc 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -1476,6 +1476,42 @@ class E2eKeysHandler:
         else:
             return exists, self.clock.time_msec() < ts_replacable_without_uia_before
 
+    async def has_different_keys(self, user_id: str, body: JsonDict) -> bool:
+        """
+        Check if a key provided in `body` differs from the same key stored in the DB.
+        Returns True on the first difference. If a key exists in `body` but not in the
+        DB, returns True. If `body` has no keys, this always returns False.
+        Note: by 'key' we mean a Matrix cross-signing key rather than a JSON dict key.
+
+        The purpose of this function is to detect whether we need to apply UIA checks.
+        We must apply UIA checks if any key in the database is being overwritten. If a
+        key is being inserted for the first time, or if the key exactly matches what is
+        in the database, then no UIA check needs to be performed.
+
+        Args:
+            user_id: The user who sent the `body`.
+            body: The JSON request body from POST /keys/device_signing/upload
+        Returns:
+            True if any key in `body` has a different value in the database.
+        """
+        # Ensure that each key provided in the request body exactly matches the one we
+        # have stored. The first time we see the DB having a different key to the
+        # matching request key, bail.
+        # Note: we do not care if the DB has a key which the request does not specify,
+        # as we only care about *replacements* or *insertions* (i.e. UPSERT).
+        req_body_key_to_db_key = {
+            "master_key": "master",
+            "self_signing_key": "self_signing",
+            "user_signing_key": "user_signing",
+        }
+        for req_body_key, db_key in req_body_key_to_db_key.items():
+            if req_body_key in body:
+                existing_key = await self.store.get_e2e_cross_signing_key(
+                    user_id, db_key
+                )
+                if existing_key != body[req_body_key]:
+                    return True
+        return False
+
 
 def _check_cross_signing_key(
     key: JsonDict, user_id: str, key_type: str, signing_key: Optional[VerifyKey] = None
diff --git a/synapse/http/proxy.py b/synapse/http/proxy.py
index 6cbbd5741b..5b5ded757b 100644
--- a/synapse/http/proxy.py
+++ b/synapse/http/proxy.py
@@ -262,7 +262,8 @@ class _ProxyResponseBody(protocol.Protocol):
             self._request.finish()
         else:
             # Abort the underlying request since our remote request also failed.
-            self._request.transport.abortConnection()
+            if self._request.channel:
+                self._request.channel.forceAbortClient()
 
 
 class ProxySite(Site):
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 632284712c..c76500e14f 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -153,9 +153,9 @@ def return_json_error(
     # Only respond with an error response if we haven't already started writing,
     # otherwise lets just kill the connection
     if request.startedWriting:
-        if request.transport:
+        if request.channel:
            try:
-                request.transport.abortConnection()
+                request.channel.forceAbortClient()
            except Exception:
                # abortConnection throws if the connection is already closed
                pass
diff --git a/synapse/http/site.py b/synapse/http/site.py
index 682b28e4c6..a5b5780679 100644
--- a/synapse/http/site.py
+++ b/synapse/http/site.py
@@ -150,7 +150,8 @@ class SynapseRequest(Request):
                 self.get_method(),
                 self.get_redacted_uri(),
             )
-            self.transport.abortConnection()
+            if self.channel:
+                self.channel.forceAbortClient()
             return
 
         super().handleContentChunk(data)
diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py
index b6d9ee074a..86c9515854 100644
--- a/synapse/rest/client/keys.py
+++ b/synapse/rest/client/keys.py
@@ -409,7 +409,18 @@ class SigningKeyUploadServlet(RestServlet):
             # But first-time setup is fine
 
         elif self.hs.config.experimental.msc3967_enabled:
-            # If we already have a master key then cross signing is set up and we require UIA to reset
+            # MSC3967 allows this endpoint to 200 OK for idempotency. Resending exactly
+            # the same keys should just 200 OK without doing a UIA prompt.
+            keys_are_different = await self.e2e_keys_handler.has_different_keys(
+                user_id, body
+            )
+            if not keys_are_different:
+                # FIXME: we do not fall through to upload_signing_keys_for_user because,
+                # confusingly, if we do we 500 as it tries to INSERT the same key twice,
+                # causing a unique key constraint violation. This sounds like a bug?
+                return 200, {}
+            # The keys are different. Is cross-signing set up? If not, the keys don't
+            # exist yet, which is why they differ. If so, we require UIA to change them.
             if is_cross_signing_setup:
                 await self.auth_handler.validate_user_via_ui_auth(
                     requester,
@@ -420,7 +431,6 @@
                     can_skip_ui_auth=False,
                 )
             # Otherwise we don't require UIA since we are setting up cross signing for first time
-
         else:
             # Previous behaviour is to always require UIA but allow it to be skipped
             await self.auth_handler.validate_user_via_ui_auth(
diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py
index 40bf000e9c..bdd0781c48 100644
--- a/synapse/storage/databases/main/event_push_actions.py
+++ b/synapse/storage/databases/main/event_push_actions.py
@@ -385,7 +385,6 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
             WITH all_receipts AS (
                 SELECT room_id, thread_id, MAX(event_stream_ordering) AS max_receipt_stream_ordering
                 FROM receipts_linearized
-                LEFT JOIN events USING (room_id, event_id)
                 WHERE
                     {receipt_types_clause}
                     AND user_id = ?
@@ -621,13 +620,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
                 SELECT notif_count, COALESCE(unread_count, 0), thread_id
                 FROM event_push_summary
                 LEFT JOIN (
-                    SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
+                    SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
                     FROM receipts_linearized
-                    LEFT JOIN events USING (room_id, event_id)
                     WHERE
                         user_id = ?
                         AND room_id = ?
-                        AND stream_ordering > ?
+                        AND event_stream_ordering > ?
                         AND {receipt_types_clause}
                     GROUP BY thread_id
                 ) AS receipts USING (thread_id)
@@ -659,13 +657,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
         sql = f"""
             SELECT COUNT(*), thread_id FROM event_push_actions
             LEFT JOIN (
-                SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
+                SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
                 FROM receipts_linearized
-                LEFT JOIN events USING (room_id, event_id)
                 WHERE
                     user_id = ?
                     AND room_id = ?
-                    AND stream_ordering > ?
+                    AND event_stream_ordering > ?
                    AND {receipt_types_clause}
                GROUP BY thread_id
            ) AS receipts USING (thread_id)
@@ -738,13 +735,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
                 thread_id
             FROM event_push_actions
             LEFT JOIN (
-                SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
+                SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
                 FROM receipts_linearized
-                LEFT JOIN events USING (room_id, event_id)
                 WHERE
                     user_id = ?
                     AND room_id = ?
-                    AND stream_ordering > ?
+                    AND event_stream_ordering > ?
                    AND {receipt_types_clause}
                GROUP BY thread_id
            ) AS receipts USING (thread_id)
@@ -910,9 +906,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
         # given this function generally gets called with only one room and
         # thread ID.
         sql = f"""
-            SELECT room_id, thread_id, MAX(stream_ordering)
+            SELECT room_id, thread_id, MAX(event_stream_ordering)
             FROM receipts_linearized
-            INNER JOIN events USING (room_id, event_id)
            WHERE {receipt_types_clause}
            AND {thread_ids_clause}
            AND {room_ids_clause}
@@ -1442,9 +1437,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
         )
         sql = """
-            SELECT r.stream_id, r.room_id, r.user_id, r.thread_id, e.stream_ordering
+            SELECT r.stream_id, r.room_id, r.user_id, r.thread_id, r.event_stream_ordering
            FROM receipts_linearized AS r
-            INNER JOIN events AS e USING (event_id)
            WHERE ? < r.stream_id AND r.stream_id <= ?
            AND user_id LIKE ?
            ORDER BY r.stream_id ASC
            LIMIT ?
diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py
index d513c42530..9660fc4699 100644
--- a/synapse/storage/databases/main/receipts.py
+++ b/synapse/storage/databases/main/receipts.py
@@ -178,14 +178,13 @@ class ReceiptsWorkerStore(SQLBaseStore):
         )
 
         sql = f"""
-            SELECT event_id, stream_ordering
+            SELECT event_id, event_stream_ordering
             FROM receipts_linearized
-            INNER JOIN events USING (room_id, event_id)
             WHERE {clause}
             AND user_id = ?
             AND room_id = ?
             AND thread_id IS NULL
-            ORDER BY stream_ordering DESC
+            ORDER BY event_stream_ordering DESC
             LIMIT 1
         """
@@ -736,8 +735,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
             thread_args = (thread_id,)
 
         sql = f"""
-            SELECT stream_ordering, event_id FROM events
-            INNER JOIN receipts_linearized AS r USING (event_id, room_id)
+            SELECT r.event_stream_ordering, r.event_id FROM receipts_linearized AS r
             WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ? AND {thread_clause}
         """
         txn.execute(
diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py
index 3d931abb06..0e6352ff4b 100644
--- a/tests/handlers/test_e2e_keys.py
+++ b/tests/handlers/test_e2e_keys.py
@@ -1101,6 +1101,56 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase):
             },
         )
 
+    def test_has_different_keys(self) -> None:
+        """check that has_different_keys returns True when the keys provided are
+        different to what is in the database."""
+        local_user = "@boris:" + self.hs.hostname
+        keys1 = {
+            "master_key": {
+                # private key: 2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0
+                "user_id": local_user,
+                "usage": ["master"],
+                "keys": {
+                    "ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk"
+                },
+            }
+        }
+        self.get_success(self.handler.upload_signing_keys_for_user(local_user, keys1))
+        is_different = self.get_success(
+            self.handler.has_different_keys(
+                local_user,
+                {
+                    "master_key": keys1["master_key"],
+                },
+            )
+        )
+        self.assertEqual(is_different, False)
+        # change the usage => different keys
+        keys1["master_key"]["usage"] = ["develop"]
+        is_different = self.get_success(
+            self.handler.has_different_keys(
+                local_user,
+                {
+                    "master_key": keys1["master_key"],
+                },
+            )
+        )
+        self.assertEqual(is_different, True)
+        keys1["master_key"]["usage"] = ["master"]  # reset
+        # change the key => different keys
+        keys1["master_key"]["keys"] = {
+            "ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unIc0rncs": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unIc0rncs"
+        }
+        is_different = self.get_success(
+            self.handler.has_different_keys(
+                local_user,
+                {
+                    "master_key": keys1["master_key"],
+                },
+            )
+        )
+        self.assertEqual(is_different, True)
+
     def test_query_devices_remote_sync(self) -> None:
         """Tests that querying keys for a remote user that we share a room with,
         but haven't yet fetched the keys for, returns the cross signing keys
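The receipts and push-action hunks above all make the same substitution: rather than joining `events` just to read `stream_ordering`, the queries read the denormalised `event_stream_ordering` column carried on each `receipts_linearized` row. Below is a hedged before/after sketch of the simplest such query; the helper name and the simplified WHERE clause are assumptions for illustration (Synapse's real method also filters on specific event IDs via a generated clause).

# Illustrative only: contrasts the old and new query shapes from the
# receipts.py hunk above. `txn` is assumed to be a DB-API style cursor.
from typing import Any, Optional, Tuple

OLD_SQL = """
    SELECT event_id, stream_ordering
    FROM receipts_linearized
    INNER JOIN events USING (room_id, event_id)  -- join only needed to read stream_ordering
    WHERE user_id = ? AND room_id = ? AND thread_id IS NULL
    ORDER BY stream_ordering DESC
    LIMIT 1
"""

NEW_SQL = """
    SELECT event_id, event_stream_ordering  -- denormalised copy stored on the receipt row
    FROM receipts_linearized
    WHERE user_id = ? AND room_id = ? AND thread_id IS NULL
    ORDER BY event_stream_ordering DESC
    LIMIT 1
"""


def latest_unthreaded_receipt(
    txn: Any, user_id: str, room_id: str
) -> Optional[Tuple[str, int]]:
    # Same result as before, but with one join fewer per lookup: the ordering
    # value now comes straight from receipts_linearized.event_stream_ordering.
    txn.execute(NEW_SQL, (user_id, room_id))
    row = txn.fetchone()
    return None if row is None else (row[0], row[1])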