From be36600327b47b93f8462bdf343c4c12f6c966b9 Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Thu, 13 Apr 2023 13:28:55 +0200 Subject: Disable loading `RefreshTokenServlet` on workers (#15428) --- synapse/rest/client/login.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) (limited to 'synapse/rest') diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index b7e9c8f6b5..32c2f5ce0c 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -670,7 +670,10 @@ class CasTicketServlet(RestServlet): def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: LoginRestServlet(hs).register(http_server) - if hs.config.registration.refreshable_access_token_lifetime is not None: + if ( + hs.config.worker.worker_app is None + and hs.config.registration.refreshable_access_token_lifetime is not None + ): RefreshTokenServlet(hs).register(http_server) SsoRedirectServlet(hs).register(http_server) if hs.config.cas.cas_enabled: -- cgit 1.5.1 From c9723a1c1fbae1cc172fc9257fd1f1f259d2a23f Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Thu, 13 Apr 2023 15:08:00 +0200 Subject: Only load the SSO redirect servlet if SSO is enabled. (#15421) --- changelog.d/15421.misc | 1 + synapse/rest/client/login.py | 7 ++++++- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15421.misc (limited to 'synapse/rest') diff --git a/changelog.d/15421.misc b/changelog.d/15421.misc new file mode 100644 index 0000000000..5deea3ac5b --- /dev/null +++ b/changelog.d/15421.misc @@ -0,0 +1 @@ +Only load the SSO redirect servlet if SSO is enabled. \ No newline at end of file diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index 32c2f5ce0c..a348720131 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -675,7 +675,12 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: and hs.config.registration.refreshable_access_token_lifetime is not None ): RefreshTokenServlet(hs).register(http_server) - SsoRedirectServlet(hs).register(http_server) + if ( + hs.config.cas.cas_enabled + or hs.config.saml2.saml2_enabled + or hs.config.oidc.oidc_enabled + ): + SsoRedirectServlet(hs).register(http_server) if hs.config.cas.cas_enabled: CasTicketServlet(hs).register(http_server) -- cgit 1.5.1 From 2503126d5245586b89c76e5f15f27c0a07774a45 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Thu, 13 Apr 2023 09:47:07 -0400 Subject: Implement MSC2174: move redacts to a content property. (#15395) This moves `redacts` from being a top-level property to a `content` property in a new room version. MSC2176 (which was previously implemented) states to not `redact` this property. 
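For illustration, the change to the redaction event shape looks like the following minimal sketch (the `msc2176_redaction_rules` flag name is taken from the diff below; the event dicts and the helper function are illustrative, not Synapse's actual code):

    from typing import Optional

    # Pre-MSC2174: the redaction target is a top-level event property.
    old_redaction = {
        "type": "m.room.redaction",
        "redacts": "$event_to_redact:example.org",
        "content": {},
    }

    # With MSC2174 (room versions using msc2176_redaction_rules), the
    # target moves into `content`, which the updated MSC2176 redaction
    # rules preserve.
    new_redaction = {
        "type": "m.room.redaction",
        "content": {"redacts": "$event_to_redact:example.org"},
    }

    def get_redacts(event: dict, msc2176_redaction_rules: bool) -> Optional[str]:
        # Mirrors the `EventBase.redacts` property added in this commit:
        # prefer the content property in new room versions, otherwise
        # fall back to the legacy top-level key.
        if msc2176_redaction_rules:
            return event["content"].get("redacts")
        return event.get("redacts")

For backwards compatibility the /redact endpoint still takes the event ID from the URL and, in new room versions, copies it into `content`, rejecting requests whose body carries a conflicting `redacts` value (see the room.py hunk below).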
--- changelog.d/15395.misc | 1 + synapse/api/room_versions.py | 3 ++- synapse/event_auth.py | 2 +- synapse/events/__init__.py | 8 +++++++- synapse/events/builder.py | 4 +++- synapse/rest/client/room.py | 35 +++++++++++++++++++++++--------- tests/events/test_utils.py | 12 +++++++++-- tests/rest/client/test_redactions.py | 39 ++++++++++++++++++++++++++++++++++-- 8 files changed, 87 insertions(+), 17 deletions(-) create mode 100644 changelog.d/15395.misc (limited to 'synapse/rest') diff --git a/changelog.d/15395.misc b/changelog.d/15395.misc new file mode 100644 index 0000000000..ee93845241 --- /dev/null +++ b/changelog.d/15395.misc @@ -0,0 +1 @@ +Implement [MSC2174](https://github.com/matrix-org/matrix-spec-proposals/pull/2174) to move the `redacts` key to a `content` property. diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index 3dcae12161..5d9c13e3c3 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -80,7 +80,8 @@ class RoomVersion: limit_notifications_power_levels: bool # MSC2175: No longer include the creator in m.room.create events. msc2175_implicit_room_creator: bool - # MSC2174/MSC2176: Apply updated redaction rules algorithm. + # MSC2174/MSC2176: Apply updated redaction rules algorithm, move redacts to + # content property. msc2176_redaction_rules: bool # MSC3083: Support the 'restricted' join_rule. msc3083_join_rules: bool diff --git a/synapse/event_auth.py b/synapse/event_auth.py index f95d00d472..25898b95a5 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -793,7 +793,7 @@ def check_redaction( """Check whether the event sender is allowed to redact the target event. Returns: - True if the the sender is allowed to redact the target event if the + True if the sender is allowed to redact the target event if the target event was created by them. False if the sender is allowed to redact the target event with no further checks. diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index d475fe7ae5..4501518cf0 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -326,7 +326,6 @@ class EventBase(metaclass=abc.ABCMeta): hashes: DictProperty[Dict[str, str]] = DictProperty("hashes") origin: DictProperty[str] = DictProperty("origin") origin_server_ts: DictProperty[int] = DictProperty("origin_server_ts") - redacts: DefaultDictProperty[Optional[str]] = DefaultDictProperty("redacts", None) room_id: DictProperty[str] = DictProperty("room_id") sender: DictProperty[str] = DictProperty("sender") # TODO state_key should be Optional[str]. This is generally asserted in Synapse @@ -346,6 +345,13 @@ class EventBase(metaclass=abc.ABCMeta): def membership(self) -> str: return self.content["membership"] + @property + def redacts(self) -> Optional[str]: + """MSC2176 moved the redacts field into the content.""" + if self.room_version.msc2176_redaction_rules: + return self.content.get("redacts") + return self.get("redacts") + def is_state(self) -> bool: return self.get_state_key() is not None diff --git a/synapse/events/builder.py b/synapse/events/builder.py index c82745275f..a254548c6c 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -173,7 +173,9 @@ class EventBuilder: if self.is_state(): event_dict["state_key"] = self._state_key - if self._redacts is not None: + # MSC2174 moves the redacts property to the content, it is invalid to + # provide it as a top-level property. 
+ if self._redacts is not None and not self.room_version.msc2176_redaction_rules: event_dict["redacts"] = self._redacts if self._origin_server_ts is not None: diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index c0705d4291..7699cc8d1b 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -1096,6 +1096,7 @@ class RoomRedactEventRestServlet(TransactionRestServlet): super().__init__(hs) self.event_creation_handler = hs.get_event_creation_handler() self.auth = hs.get_auth() + self._store = hs.get_datastores().main self._relation_handler = hs.get_relations_handler() self._msc3912_enabled = hs.config.experimental.msc3912_enabled @@ -1113,6 +1114,19 @@ class RoomRedactEventRestServlet(TransactionRestServlet): ) -> Tuple[int, JsonDict]: content = parse_json_object_from_request(request) + # Ensure the redacts property in the content matches the one provided in + # the URL. + room_version = await self._store.get_room_version(room_id) + if room_version.msc2176_redaction_rules: + if "redacts" in content and content["redacts"] != event_id: + raise SynapseError( + 400, + "Cannot provide a redacts value incoherent with the event_id of the URL parameter", + Codes.INVALID_PARAM, + ) + else: + content["redacts"] = event_id + try: with_relations = None if self._msc3912_enabled and "org.matrix.msc3912.with_relations" in content: @@ -1128,20 +1142,23 @@ class RoomRedactEventRestServlet(TransactionRestServlet): requester, txn_id, room_id ) + # Event is not yet redacted, create a new event to redact it. if event is None: + event_dict = { + "type": EventTypes.Redaction, + "content": content, + "room_id": room_id, + "sender": requester.user.to_string(), + } + # Earlier room versions had a top-level redacts property. + if not room_version.msc2176_redaction_rules: + event_dict["redacts"] = event_id + ( event, _, ) = await self.event_creation_handler.create_and_send_nonmember_event( - requester, - { - "type": EventTypes.Redaction, - "content": content, - "room_id": room_id, - "sender": requester.user.to_string(), - "redacts": event_id, - }, - txn_id=txn_id, + requester, event_dict, txn_id=txn_id ) if with_relations: diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py index c35f58f462..1b179acb20 100644 --- a/tests/events/test_utils.py +++ b/tests/events/test_utils.py @@ -318,7 +318,11 @@ class PruneEventTestCase(stdlib_unittest.TestCase): """Redaction events have no special behaviour until MSC2174/MSC2176.""" self.run_test( - {"type": "m.room.redaction", "content": {"redacts": "$test2:domain"}}, + { + "type": "m.room.redaction", + "content": {"redacts": "$test2:domain"}, + "redacts": "$test2:domain", + }, { "type": "m.room.redaction", "content": {}, @@ -330,7 +334,11 @@ class PruneEventTestCase(stdlib_unittest.TestCase): # After MSC2174, redaction events keep the redacts content key. 
self.run_test( - {"type": "m.room.redaction", "content": {"redacts": "$test2:domain"}}, + { + "type": "m.room.redaction", + "content": {"redacts": "$test2:domain"}, + "redacts": "$test2:domain", + }, { "type": "m.room.redaction", "content": {"redacts": "$test2:domain"}, diff --git a/tests/rest/client/test_redactions.py b/tests/rest/client/test_redactions.py index 5dfe44defb..84a60c0b07 100644 --- a/tests/rest/client/test_redactions.py +++ b/tests/rest/client/test_redactions.py @@ -16,6 +16,7 @@ from typing import List, Optional from twisted.test.proto_helpers import MemoryReactor from synapse.api.constants import EventTypes, RelationTypes +from synapse.api.room_versions import RoomVersions from synapse.rest import admin from synapse.rest.client import login, room, sync from synapse.server import HomeServer @@ -74,6 +75,7 @@ class RedactionsTestCase(HomeserverTestCase): event_id: str, expect_code: int = 200, with_relations: Optional[List[str]] = None, + content: Optional[JsonDict] = None, ) -> JsonDict: """Helper function to send a redaction event. @@ -81,7 +83,7 @@ class RedactionsTestCase(HomeserverTestCase): """ path = "/_matrix/client/r0/rooms/%s/redact/%s" % (room_id, event_id) - request_content = {} + request_content = content or {} if with_relations: request_content["org.matrix.msc3912.with_relations"] = with_relations @@ -92,7 +94,7 @@ class RedactionsTestCase(HomeserverTestCase): return channel.json_body def _sync_room_timeline(self, access_token: str, room_id: str) -> List[JsonDict]: - channel = self.make_request("GET", "sync", access_token=self.mod_access_token) + channel = self.make_request("GET", "sync", access_token=access_token) self.assertEqual(channel.code, 200) room_sync = channel.json_body["rooms"]["join"][room_id] return room_sync["timeline"]["events"] @@ -466,3 +468,36 @@ class RedactionsTestCase(HomeserverTestCase): ) self.assertIn("body", event_dict["content"], event_dict) self.assertEqual("I'm in a thread!", event_dict["content"]["body"]) + + def test_content_redaction(self) -> None: + """MSC2174 moved the redacts property to the content.""" + # Create a room with the newer room version. + room_id = self.helper.create_room_as( + self.mod_user_id, + tok=self.mod_access_token, + room_version=RoomVersions.MSC2176.identifier, + ) + + # Create an event. + b = self.helper.send(room_id=room_id, tok=self.mod_access_token) + event_id = b["event_id"] + + # Attempt to redact it with a bogus event ID. + self._redact_event( + self.mod_access_token, + room_id, + event_id, + expect_code=400, + content={"redacts": "foo"}, + ) + + # Redact it for real. + self._redact_event(self.mod_access_token, room_id, event_id) + + # Sync the room, to get the id of the create event + timeline = self._sync_room_timeline(self.mod_access_token, room_id) + redact_event = timeline[-1] + self.assertEqual(redact_event["type"], EventTypes.Redaction) + # The redacts key should be in the content. 
+ self.assertNotIn("redacts", redact_event) + self.assertEquals(redact_event["content"]["redacts"], event_id) -- cgit 1.5.1 From 4af0aec54dad261bcad240d8a878a1c16934e77c Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Fri, 14 Apr 2023 11:24:06 +0200 Subject: Load `/directory/room/{roomAlias}` endpoint on workers (#15333) * Enable `directory` * move to worker store * newsfile * disable `ClientDirectoryListServer` and `ClientAppserviceDirectoryListServer` for workers --- changelog.d/15333.feature | 1 + docker/configure_workers_and_start.py | 1 + docs/workers.md | 1 + synapse/rest/__init__.py | 3 +-- synapse/rest/client/directory.py | 6 ++++-- synapse/storage/databases/main/directory.py | 6 ++++-- 6 files changed, 12 insertions(+), 6 deletions(-) create mode 100644 changelog.d/15333.feature (limited to 'synapse/rest') diff --git a/changelog.d/15333.feature b/changelog.d/15333.feature new file mode 100644 index 0000000000..35ea89ad89 --- /dev/null +++ b/changelog.d/15333.feature @@ -0,0 +1 @@ +Allow loading `/directory/room/{roomAlias}` endpoint on workers. \ No newline at end of file diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py index 2a50ee1e4b..26f92b3f1a 100755 --- a/docker/configure_workers_and_start.py +++ b/docker/configure_workers_and_start.py @@ -173,6 +173,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = { "^/_matrix/client/(api/v1|r0|v3|unstable)/search", "^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)", "^/_matrix/client/(r0|v3|unstable)/password_policy$", + "^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$", ], "shared_extra_conf": {}, "worker_extra_conf": "", diff --git a/docs/workers.md b/docs/workers.md index e9a477d32c..cb2333e4a5 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -234,6 +234,7 @@ information. 
^/_matrix/client/(api/v1|r0|v3|unstable/.*)/rooms/.*/aliases ^/_matrix/client/(api/v1|r0|v3|unstable)/search$ ^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$) + ^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$ # Encryption requests ^/_matrix/client/(r0|v3|unstable)/keys/query$ diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index 1d7c11b42d..19603ed137 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -100,8 +100,7 @@ class ClientRestResource(JsonResource): login.register_servlets(hs, client_resource) profile.register_servlets(hs, client_resource) presence.register_servlets(hs, client_resource) - if is_main_process: - directory.register_servlets(hs, client_resource) + directory.register_servlets(hs, client_resource) voip.register_servlets(hs, client_resource) if is_main_process: pusher.register_servlets(hs, client_resource) diff --git a/synapse/rest/client/directory.py b/synapse/rest/client/directory.py index f17b4c8d22..570bb52747 100644 --- a/synapse/rest/client/directory.py +++ b/synapse/rest/client/directory.py @@ -39,12 +39,14 @@ logger = logging.getLogger(__name__) def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: ClientDirectoryServer(hs).register(http_server) - ClientDirectoryListServer(hs).register(http_server) - ClientAppserviceDirectoryListServer(hs).register(http_server) + if hs.config.worker.worker_app is None: + ClientDirectoryListServer(hs).register(http_server) + ClientAppserviceDirectoryListServer(hs).register(http_server) class ClientDirectoryServer(RestServlet): PATTERNS = client_patterns("/directory/room/(?P[^/]*)$", v1=True) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/storage/databases/main/directory.py b/synapse/storage/databases/main/directory.py index 44aa181174..3cb4c90729 100644 --- a/synapse/storage/databases/main/directory.py +++ b/synapse/storage/databases/main/directory.py @@ -129,8 +129,6 @@ class DirectoryWorkerStore(CacheInvalidationWorkerStore): 409, "Room alias %s already exists" % room_alias.to_string() ) - -class DirectoryStore(DirectoryWorkerStore): async def delete_room_alias(self, room_alias: RoomAlias) -> Optional[str]: room_id = await self.db_pool.runInteraction( "delete_room_alias", self._delete_room_alias_txn, room_alias @@ -201,3 +199,7 @@ class DirectoryStore(DirectoryWorkerStore): await self.db_pool.runInteraction( "_update_aliases_for_room_txn", _update_aliases_for_room_txn ) + + +class DirectoryStore(DirectoryWorkerStore): + pass -- cgit 1.5.1 From e4a25d022c1e4b71e043b07324d95362f7fb4067 Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Fri, 14 Apr 2023 18:26:07 +0200 Subject: Load `/capabilities` endpoint on workers (#15436) --- changelog.d/15436.feature | 1 + docker/configure_workers_and_start.py | 1 + docs/workers.md | 1 + synapse/rest/__init__.py | 2 +- synapse/rest/client/capabilities.py | 1 + 5 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15436.feature (limited to 'synapse/rest') diff --git a/changelog.d/15436.feature b/changelog.d/15436.feature new file mode 100644 index 0000000000..d83f8c3e4a --- /dev/null +++ b/changelog.d/15436.feature @@ -0,0 +1 @@ +Allow loading `/capabilities` endpoint on workers. 
\ No newline at end of file diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py index 26f92b3f1a..4beec3daaf 100755 --- a/docker/configure_workers_and_start.py +++ b/docker/configure_workers_and_start.py @@ -174,6 +174,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = { "^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)", "^/_matrix/client/(r0|v3|unstable)/password_policy$", "^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$", + "^/_matrix/client/(r0|v3|unstable)/capabilities$", ], "shared_extra_conf": {}, "worker_extra_conf": "", diff --git a/docs/workers.md b/docs/workers.md index cb2333e4a5..6192a46e09 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -235,6 +235,7 @@ information. ^/_matrix/client/(api/v1|r0|v3|unstable)/search$ ^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$) ^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$ + ^/_matrix/client/(r0|v3|unstable)/capabilities$ # Encryption requests ^/_matrix/client/(r0|v3|unstable)/keys/query$ diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index 19603ed137..1af8d99d20 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -133,8 +133,8 @@ class ClientRestResource(JsonResource): if is_main_process: room_upgrade_rest_servlet.register_servlets(hs, client_resource) room_batch.register_servlets(hs, client_resource) + capabilities.register_servlets(hs, client_resource) if is_main_process: - capabilities.register_servlets(hs, client_resource) account_validity.register_servlets(hs, client_resource) relations.register_servlets(hs, client_resource) password_policy.register_servlets(hs, client_resource) diff --git a/synapse/rest/client/capabilities.py b/synapse/rest/client/capabilities.py index e84dde31b1..0dbf8f6818 100644 --- a/synapse/rest/client/capabilities.py +++ b/synapse/rest/client/capabilities.py @@ -33,6 +33,7 @@ class CapabilitiesRestServlet(RestServlet): """End point to expose the capabilities of the server.""" PATTERNS = client_patterns("/capabilities$") + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() -- cgit 1.5.1 From 5e024a0645733a816dc0da21a034dd70b053f2be Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Thu, 20 Apr 2023 12:30:32 -0400 Subject: Modify StoreKeyFetcher to read from server_keys_json. (#15417) Before this change: * `PerspectivesKeyFetcher` and `ServerKeyFetcher` write to `server_keys_json`. * `PerspectivesKeyFetcher` also writes to `server_signature_keys`. * `StoreKeyFetcher` reads from `server_signature_keys`. After this change: * `PerspectivesKeyFetcher` and `ServerKeyFetcher` write to `server_keys_json`. * `PerspectivesKeyFetcher` also writes to `server_signature_keys`. * `StoreKeyFetcher` reads from `server_keys_json`. This results in `StoreKeyFetcher` now using the results from `ServerKeyFetcher` in addition to those from `PerspectivesKeyFetcher`, i.e. keys which are directly fetched from a server will now be pulled from the database instead of refetched. An additional minor change is included to avoid creating a `PerspectivesKeyFetcher` (and checking it) if no `trusted_key_servers` are configured. The overall impact of this should be better usage of cached results: * If a server has no trusted key servers configured then it should reduce how often keys are fetched. * if a server's trusted key server does not have a requested server's keys cached then it should reduce how often keys are directly fetched. 
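In effect the keyring's fetcher pipeline becomes the following, condensed from the synapse/crypto/keyring.py hunk below (the optional `key_fetchers` constructor argument, typing, and surrounding code are elided; `hs` is assumed to be a HomeServer in scope):

    from synapse.crypto.keyring import (
        PerspectivesKeyFetcher,
        ServerKeyFetcher,
        StoreKeyFetcher,
    )

    # Always read previously-stored keys first; after this change they are
    # read from `server_keys_json`, which both remote fetchers write to.
    key_fetchers = [StoreKeyFetcher(hs)]
    # Only build a PerspectivesKeyFetcher if trusted key servers exist...
    if hs.config.key.key_servers:
        key_fetchers.append(PerspectivesKeyFetcher(hs))
    # ...and finally fall back to fetching from the origin server directly.
    key_fetchers.append(ServerKeyFetcher(hs))

Because StoreKeyFetcher runs first, keys persisted by either remote fetcher are now served from the database on subsequent requests instead of being refetched.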
--- changelog.d/15417.bugfix | 1 + synapse/crypto/keyring.py | 30 ++++----- synapse/rest/key/v2/remote_key_resource.py | 2 +- synapse/storage/databases/main/keys.py | 99 ++++++++++++++++++++++++++---- tests/crypto/test_keyring.py | 62 +++++++++---------- tests/storage/test_keys.py | 16 ++--- tests/unittest.py | 25 +++++--- tests/utils.py | 3 + 8 files changed, 162 insertions(+), 76 deletions(-) create mode 100644 changelog.d/15417.bugfix (limited to 'synapse/rest') diff --git a/changelog.d/15417.bugfix b/changelog.d/15417.bugfix new file mode 100644 index 0000000000..300635cbdc --- /dev/null +++ b/changelog.d/15417.bugfix @@ -0,0 +1 @@ +Fix a long-standing bug where cached key results which were directly fetched would not be properly re-used. diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index d2f99dc2ac..afdf6863d6 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -150,18 +150,19 @@ class Keyring: def __init__( self, hs: "HomeServer", key_fetchers: "Optional[Iterable[KeyFetcher]]" = None ): - self.clock = hs.get_clock() - if key_fetchers is None: - key_fetchers = ( - # Fetch keys from the database. - StoreKeyFetcher(hs), - # Fetch keys from a configured Perspectives server. - PerspectivesKeyFetcher(hs), - # Fetch keys from the origin server directly. - ServerKeyFetcher(hs), - ) - self._key_fetchers = key_fetchers + # Always fetch keys from the database. + mutable_key_fetchers: List[KeyFetcher] = [StoreKeyFetcher(hs)] + # Fetch keys from configured trusted key servers, if any exist. + key_servers = hs.config.key.key_servers + if key_servers: + mutable_key_fetchers.append(PerspectivesKeyFetcher(hs)) + # Finally, fetch keys from the origin server directly. + mutable_key_fetchers.append(ServerKeyFetcher(hs)) + + self._key_fetchers: Iterable[KeyFetcher] = tuple(mutable_key_fetchers) + else: + self._key_fetchers = key_fetchers self._fetch_keys_queue: BatchingQueue[ _FetchKeyRequest, Dict[str, Dict[str, FetchKeyResult]] @@ -510,7 +511,7 @@ class StoreKeyFetcher(KeyFetcher): for key_id in queue_value.key_ids ) - res = await self.store.get_server_verify_keys(key_ids_to_fetch) + res = await self.store.get_server_keys_json(key_ids_to_fetch) keys: Dict[str, Dict[str, FetchKeyResult]] = {} for (server_name, key_id), key in res.items(): keys.setdefault(server_name, {})[key_id] = key @@ -522,7 +523,6 @@ class BaseV2KeyFetcher(KeyFetcher): super().__init__(hs) self.store = hs.get_datastores().main - self.config = hs.config async def process_v2_response( self, from_server: str, response_json: JsonDict, time_added_ms: int @@ -626,7 +626,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): super().__init__(hs) self.clock = hs.get_clock() self.client = hs.get_federation_http_client() - self.key_servers = self.config.key.key_servers + self.key_servers = hs.config.key.key_servers async def _fetch_keys( self, keys_to_fetch: List[_FetchKeyRequest] @@ -775,7 +775,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): keys.setdefault(server_name, {}).update(processed_response) - await self.store.store_server_verify_keys( + await self.store.store_server_signature_keys( perspective_name, time_now_ms, added_keys ) diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py index 3bdb6ec909..ff0454ca57 100644 --- a/synapse/rest/key/v2/remote_key_resource.py +++ b/synapse/rest/key/v2/remote_key_resource.py @@ -155,7 +155,7 @@ class RemoteKey(RestServlet): for key_id in key_ids: store_queries.append((server_name, key_id, None)) - cached 
= await self.store.get_server_keys_json(store_queries) + cached = await self.store.get_server_keys_json_for_remote(store_queries) json_results: Set[bytes] = set() diff --git a/synapse/storage/databases/main/keys.py b/synapse/storage/databases/main/keys.py index 89c37a4eb5..1666e3c43b 100644 --- a/synapse/storage/databases/main/keys.py +++ b/synapse/storage/databases/main/keys.py @@ -14,10 +14,12 @@ # limitations under the License. import itertools +import json import logging from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple from signedjson.key import decode_verify_key_bytes +from unpaddedbase64 import decode_base64 from synapse.storage._base import SQLBaseStore from synapse.storage.database import LoggingTransaction @@ -36,15 +38,16 @@ class KeyStore(SQLBaseStore): """Persistence for signature verification keys""" @cached() - def _get_server_verify_key( + def _get_server_signature_key( self, server_name_and_key_id: Tuple[str, str] ) -> FetchKeyResult: raise NotImplementedError() @cachedList( - cached_method_name="_get_server_verify_key", list_name="server_name_and_key_ids" + cached_method_name="_get_server_signature_key", + list_name="server_name_and_key_ids", ) - async def get_server_verify_keys( + async def get_server_signature_keys( self, server_name_and_key_ids: Iterable[Tuple[str, str]] ) -> Dict[Tuple[str, str], FetchKeyResult]: """ @@ -62,10 +65,12 @@ class KeyStore(SQLBaseStore): """Processes a batch of keys to fetch, and adds the result to `keys`.""" # batch_iter always returns tuples so it's safe to do len(batch) - sql = ( - "SELECT server_name, key_id, verify_key, ts_valid_until_ms " - "FROM server_signature_keys WHERE 1=0" - ) + " OR (server_name=? AND key_id=?)" * len(batch) + sql = """ + SELECT server_name, key_id, verify_key, ts_valid_until_ms + FROM server_signature_keys WHERE 1=0 + """ + " OR (server_name=? AND key_id=?)" * len( + batch + ) txn.execute(sql, tuple(itertools.chain.from_iterable(batch))) @@ -89,9 +94,9 @@ class KeyStore(SQLBaseStore): _get_keys(txn, batch) return keys - return await self.db_pool.runInteraction("get_server_verify_keys", _txn) + return await self.db_pool.runInteraction("get_server_signature_keys", _txn) - async def store_server_verify_keys( + async def store_server_signature_keys( self, from_server: str, ts_added_ms: int, @@ -119,7 +124,7 @@ class KeyStore(SQLBaseStore): ) ) # invalidate takes a tuple corresponding to the params of - # _get_server_verify_key. _get_server_verify_key only takes one + # _get_server_signature_key. _get_server_signature_key only takes one # param, which is itself the 2-tuple (server_name, key_id). invalidations.append((server_name, key_id)) @@ -134,10 +139,10 @@ class KeyStore(SQLBaseStore): "verify_key", ), value_values=value_values, - desc="store_server_verify_keys", + desc="store_server_signature_keys", ) - invalidate = self._get_server_verify_key.invalidate + invalidate = self._get_server_signature_key.invalidate for i in invalidations: invalidate((i,)) @@ -180,7 +185,75 @@ class KeyStore(SQLBaseStore): desc="store_server_keys_json", ) + # invalidate takes a tuple corresponding to the params of + # _get_server_keys_json. _get_server_keys_json only takes one + # param, which is itself the 2-tuple (server_name, key_id). 
+ self._get_server_keys_json.invalidate((((server_name, key_id),))) + + @cached() + def _get_server_keys_json( + self, server_name_and_key_id: Tuple[str, str] + ) -> FetchKeyResult: + raise NotImplementedError() + + @cachedList( + cached_method_name="_get_server_keys_json", list_name="server_name_and_key_ids" + ) async def get_server_keys_json( + self, server_name_and_key_ids: Iterable[Tuple[str, str]] + ) -> Dict[Tuple[str, str], FetchKeyResult]: + """ + Args: + server_name_and_key_ids: + iterable of (server_name, key-id) tuples to fetch keys for + + Returns: + A map from (server_name, key_id) -> FetchKeyResult, or None if the + key is unknown + """ + keys = {} + + def _get_keys(txn: Cursor, batch: Tuple[Tuple[str, str], ...]) -> None: + """Processes a batch of keys to fetch, and adds the result to `keys`.""" + + # batch_iter always returns tuples so it's safe to do len(batch) + sql = """ + SELECT server_name, key_id, key_json, ts_valid_until_ms + FROM server_keys_json WHERE 1=0 + """ + " OR (server_name=? AND key_id=?)" * len( + batch + ) + + txn.execute(sql, tuple(itertools.chain.from_iterable(batch))) + + for server_name, key_id, key_json_bytes, ts_valid_until_ms in txn: + if ts_valid_until_ms is None: + # Old keys may be stored with a ts_valid_until_ms of null, + # in which case we treat this as if it was set to `0`, i.e. + # it won't match key requests that define a minimum + # `ts_valid_until_ms`. + ts_valid_until_ms = 0 + + # The entire signed JSON response is stored in server_keys_json, + # fetch out the bits needed. + key_json = json.loads(bytes(key_json_bytes)) + key_base64 = key_json["verify_keys"][key_id]["key"] + + keys[(server_name, key_id)] = FetchKeyResult( + verify_key=decode_verify_key_bytes( + key_id, decode_base64(key_base64) + ), + valid_until_ts=ts_valid_until_ms, + ) + + def _txn(txn: Cursor) -> Dict[Tuple[str, str], FetchKeyResult]: + for batch in batch_iter(server_name_and_key_ids, 50): + _get_keys(txn, batch) + return keys + + return await self.db_pool.runInteraction("get_server_keys_json", _txn) + + async def get_server_keys_json_for_remote( self, server_keys: Iterable[Tuple[str, Optional[str], Optional[str]]] ) -> Dict[Tuple[str, Optional[str], Optional[str]], List[Dict[str, Any]]]: """Retrieve the key json for a list of server_keys and key ids. @@ -188,8 +261,10 @@ class KeyStore(SQLBaseStore): that server, key_id, and source triplet entry will be an empty list. The JSON is returned as a byte array so that it can be efficiently used in an HTTP response. + Args: server_keys: List of (server_name, key_id, source) triplets. + Returns: A mapping from (server_name, key_id, source) triplets to a list of dicts """ diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 66102ab934..7c63b2ea4c 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -190,10 +190,23 @@ class KeyringTestCase(unittest.HomeserverTestCase): kr = keyring.Keyring(self.hs) key1 = signedjson.key.generate_signing_key("1") - r = self.hs.get_datastores().main.store_server_verify_keys( + r = self.hs.get_datastores().main.store_server_keys_json( "server9", - int(time.time() * 1000), - {("server9", get_key_id(key1)): FetchKeyResult(get_verify_key(key1), 1000)}, + get_key_id(key1), + from_server="test", + ts_now_ms=int(time.time() * 1000), + ts_expires_ms=1000, + # The entire response gets signed & stored, just include the bits we + # care about. 
+ key_json_bytes=canonicaljson.encode_canonical_json( + { + "verify_keys": { + get_key_id(key1): { + "key": encode_verify_key_base64(get_verify_key(key1)) + } + } + } + ), ) self.get_success(r) @@ -280,17 +293,13 @@ class KeyringTestCase(unittest.HomeserverTestCase): mock_fetcher = Mock() mock_fetcher.get_keys = Mock(return_value=make_awaitable({})) - kr = keyring.Keyring( - self.hs, key_fetchers=(StoreKeyFetcher(self.hs), mock_fetcher) - ) - key1 = signedjson.key.generate_signing_key("1") - r = self.hs.get_datastores().main.store_server_verify_keys( + r = self.hs.get_datastores().main.store_server_signature_keys( "server9", int(time.time() * 1000), # None is not a valid value in FetchKeyResult, but we're abusing this # API to insert null values into the database. The nulls get converted - # to 0 when fetched in KeyStore.get_server_verify_keys. + # to 0 when fetched in KeyStore.get_server_signature_keys. {("server9", get_key_id(key1)): FetchKeyResult(get_verify_key(key1), None)}, # type: ignore[arg-type] ) self.get_success(r) @@ -298,27 +307,12 @@ class KeyringTestCase(unittest.HomeserverTestCase): json1: JsonDict = {} signedjson.sign.sign_json(json1, "server9", key1) - # should fail immediately on an unsigned object - d = kr.verify_json_for_server("server9", {}, 0) - self.get_failure(d, SynapseError) - - # should fail on a signed object with a non-zero minimum_valid_until_ms, - # as it tries to refetch the keys and fails. - d = kr.verify_json_for_server("server9", json1, 500) - self.get_failure(d, SynapseError) - - # We expect the keyring tried to refetch the key once. - mock_fetcher.get_keys.assert_called_once_with( - "server9", [get_key_id(key1)], 500 - ) - # should succeed on a signed object with a 0 minimum_valid_until_ms - d = kr.verify_json_for_server( - "server9", - json1, - 0, + d = self.hs.get_datastores().main.get_server_signature_keys( + [("server9", get_key_id(key1))] ) - self.get_success(d) + result = self.get_success(d) + self.assertEquals(result[("server9", get_key_id(key1))].valid_until_ts, 0) def test_verify_json_dedupes_key_requests(self) -> None: """Two requests for the same key should be deduped.""" @@ -464,7 +458,9 @@ class ServerKeyFetcherTestCase(unittest.HomeserverTestCase): # check that the perspectives store is correctly updated lookup_triplet = (SERVER_NAME, testverifykey_id, None) key_json = self.get_success( - self.hs.get_datastores().main.get_server_keys_json([lookup_triplet]) + self.hs.get_datastores().main.get_server_keys_json_for_remote( + [lookup_triplet] + ) ) res_keys = key_json[lookup_triplet] self.assertEqual(len(res_keys), 1) @@ -582,7 +578,9 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): # check that the perspectives store is correctly updated lookup_triplet = (SERVER_NAME, testverifykey_id, None) key_json = self.get_success( - self.hs.get_datastores().main.get_server_keys_json([lookup_triplet]) + self.hs.get_datastores().main.get_server_keys_json_for_remote( + [lookup_triplet] + ) ) res_keys = key_json[lookup_triplet] self.assertEqual(len(res_keys), 1) @@ -703,7 +701,9 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase): # check that the perspectives store is correctly updated lookup_triplet = (SERVER_NAME, testverifykey_id, None) key_json = self.get_success( - self.hs.get_datastores().main.get_server_keys_json([lookup_triplet]) + self.hs.get_datastores().main.get_server_keys_json_for_remote( + [lookup_triplet] + ) ) res_keys = key_json[lookup_triplet] self.assertEqual(len(res_keys), 1) diff --git 
a/tests/storage/test_keys.py b/tests/storage/test_keys.py index 5901d80f26..5d7c13e6d0 100644 --- a/tests/storage/test_keys.py +++ b/tests/storage/test_keys.py @@ -37,13 +37,13 @@ KEY_2 = decode_verify_key_base64( class KeyStoreTestCase(tests.unittest.HomeserverTestCase): - def test_get_server_verify_keys(self) -> None: + def test_get_server_signature_keys(self) -> None: store = self.hs.get_datastores().main key_id_1 = "ed25519:key1" key_id_2 = "ed25519:KEY_ID_2" self.get_success( - store.store_server_verify_keys( + store.store_server_signature_keys( "from_server", 10, { @@ -54,7 +54,7 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase): ) res = self.get_success( - store.get_server_verify_keys( + store.get_server_signature_keys( [ ("server1", key_id_1), ("server1", key_id_2), @@ -87,7 +87,7 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase): key_id_2 = "ed25519:key2" self.get_success( - store.store_server_verify_keys( + store.store_server_signature_keys( "from_server", 0, { @@ -98,7 +98,7 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase): ) res = self.get_success( - store.get_server_verify_keys([("srv1", key_id_1), ("srv1", key_id_2)]) + store.get_server_signature_keys([("srv1", key_id_1), ("srv1", key_id_2)]) ) self.assertEqual(len(res.keys()), 2) @@ -111,20 +111,20 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase): self.assertEqual(res2.valid_until_ts, 200) # we should be able to look up the same thing again without a db hit - res = self.get_success(store.get_server_verify_keys([("srv1", key_id_1)])) + res = self.get_success(store.get_server_signature_keys([("srv1", key_id_1)])) self.assertEqual(len(res.keys()), 1) self.assertEqual(res[("srv1", key_id_1)].verify_key, KEY_1) new_key_2 = signedjson.key.get_verify_key( signedjson.key.generate_signing_key("key2") ) - d = store.store_server_verify_keys( + d = store.store_server_signature_keys( "from_server", 10, {("srv1", key_id_2): FetchKeyResult(new_key_2, 300)} ) self.get_success(d) res = self.get_success( - store.get_server_verify_keys([("srv1", key_id_1), ("srv1", key_id_2)]) + store.get_server_signature_keys([("srv1", key_id_1), ("srv1", key_id_2)]) ) self.assertEqual(len(res.keys()), 2) diff --git a/tests/unittest.py b/tests/unittest.py index 96ae8fca67..ee2f78ab01 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -69,7 +69,6 @@ from synapse.logging.context import ( ) from synapse.rest import RegisterServletsFunc from synapse.server import HomeServer -from synapse.storage.keys import FetchKeyResult from synapse.types import JsonDict, Requester, UserID, create_requester from synapse.util import Clock from synapse.util.httpresourcetree import create_resource_tree @@ -848,15 +847,23 @@ class FederatingHomeserverTestCase(HomeserverTestCase): verify_key_id = "%s:%s" % (verify_key.alg, verify_key.version) self.get_success( - hs.get_datastores().main.store_server_verify_keys( + hs.get_datastores().main.store_server_keys_json( + self.OTHER_SERVER_NAME, + verify_key_id, from_server=self.OTHER_SERVER_NAME, - ts_added_ms=clock.time_msec(), - verify_keys={ - (self.OTHER_SERVER_NAME, verify_key_id): FetchKeyResult( - verify_key=verify_key, - valid_until_ts=clock.time_msec() + 10000, - ), - }, + ts_now_ms=clock.time_msec(), + ts_expires_ms=clock.time_msec() + 10000, + key_json_bytes=canonicaljson.encode_canonical_json( + { + "verify_keys": { + verify_key_id: { + "key": signedjson.key.encode_verify_key_base64( + verify_key + ) + } + } + } + ), ) ) diff --git a/tests/utils.py b/tests/utils.py index 
a0ac11bc5c..e73b46944b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -131,6 +131,9 @@ def default_config( # the test signing key is just an arbitrary ed25519 key to keep the config # parser happy "signing_key": "ed25519 a_lPym qvioDNmfExFBRPgdTU+wtFYKq4JfwFRv7sYVgWvmgJg", + # Disable trusted key servers, otherwise unit tests might try to actually + # reach out to matrix.org. + "trusted_key_servers": [], "event_cache_size": 1, "enable_registration": True, "enable_registration_captcha": False, -- cgit 1.5.1 From 8b3a50299658a27175f55f1051e9470553c76d8e Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 25 Apr 2023 10:37:09 +0200 Subject: Experimental support for MSC3970: per-device transaction IDs (#15318) --- changelog.d/15318.feature | 1 + synapse/config/experimental.py | 3 + synapse/events/__init__.py | 9 ++- synapse/events/utils.py | 58 +++++++++++++----- synapse/handlers/message.py | 38 ++++++++++-- synapse/handlers/room_member.py | 33 ++++++++--- synapse/rest/client/transactions.py | 13 +++++ synapse/server.py | 4 +- synapse/storage/databases/main/events.py | 68 +++++++++++++++++----- synapse/storage/databases/main/events_worker.py | 33 +++++++++-- .../main/delta/74/05_events_txn_id_device_id.sql | 53 +++++++++++++++++ 11 files changed, 265 insertions(+), 48 deletions(-) create mode 100644 changelog.d/15318.feature create mode 100644 synapse/storage/schema/main/delta/74/05_events_txn_id_device_id.sql (limited to 'synapse/rest') diff --git a/changelog.d/15318.feature b/changelog.d/15318.feature new file mode 100644 index 0000000000..47bb2e17a7 --- /dev/null +++ b/changelog.d/15318.feature @@ -0,0 +1 @@ +Experimental support for MSC3970: Scope transaction IDs to devices. diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index 7687c80ea0..6599679731 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -191,3 +191,6 @@ class ExperimentalConfig(Config): # MSC2659: Application service ping endpoint self.msc2659_enabled = experimental.get("msc2659_enabled", False) + + # MSC3970: Scope transaction IDs to devices + self.msc3970_enabled = experimental.get("msc3970_enabled", False) diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 4501518cf0..de7e5be42b 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -198,9 +198,16 @@ class _EventInternalMetadata: soft_failed: DictProperty[bool] = DictProperty("soft_failed") proactively_send: DictProperty[bool] = DictProperty("proactively_send") redacted: DictProperty[bool] = DictProperty("redacted") + historical: DictProperty[bool] = DictProperty("historical") + txn_id: DictProperty[str] = DictProperty("txn_id") + """The transaction ID, if it was set when the event was created.""" + token_id: DictProperty[int] = DictProperty("token_id") - historical: DictProperty[bool] = DictProperty("historical") + """The access token ID of the user who sent this event, if any.""" + + device_id: DictProperty[str] = DictProperty("device_id") + """The device ID of the user who sent this event, if any.""" # XXX: These are set by StreamWorkerStore._set_before_and_after. 
# I'm pretty sure that these are never persisted to the database, so shouldn't diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 1d5d7491cd..0802eb1963 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -339,6 +339,7 @@ def serialize_event( time_now_ms: int, *, config: SerializeEventConfig = _DEFAULT_SERIALIZE_EVENT_CONFIG, + msc3970_enabled: bool = False, ) -> JsonDict: """Serialize event for clients @@ -346,6 +347,8 @@ def serialize_event( e time_now_ms config: Event serialization config + msc3970_enabled: Whether MSC3970 is enabled. It changes whether we should + include the `transaction_id` in the event's `unsigned` section. Returns: The serialized event dictionary. @@ -368,27 +371,43 @@ def serialize_event( if "redacted_because" in e.unsigned: d["unsigned"]["redacted_because"] = serialize_event( - e.unsigned["redacted_because"], time_now_ms, config=config + e.unsigned["redacted_because"], + time_now_ms, + config=config, + msc3970_enabled=msc3970_enabled, ) # If we have a txn_id saved in the internal_metadata, we should include it in the # unsigned section of the event if it was sent by the same session as the one # requesting the event. - # There is a special case for guests, because they only have one access token - # without associated access_token_id, so we always include the txn_id for events - # they sent. - txn_id = getattr(e.internal_metadata, "txn_id", None) + txn_id: Optional[str] = getattr(e.internal_metadata, "txn_id", None) if txn_id is not None and config.requester is not None: - event_token_id = getattr(e.internal_metadata, "token_id", None) - if config.requester.user.to_string() == e.sender and ( - ( - event_token_id is not None - and config.requester.access_token_id is not None - and event_token_id == config.requester.access_token_id + # For the MSC3970 rules to be applied, we *need* to have the device ID in the + # event internal metadata. Since we were not recording them before, if it hasn't + # been recorded, we fallback to the old behaviour. + event_device_id: Optional[str] = getattr(e.internal_metadata, "device_id", None) + if msc3970_enabled and event_device_id is not None: + if event_device_id == config.requester.device_id: + d["unsigned"]["transaction_id"] = txn_id + + else: + # The pre-MSC3970 behaviour is to only include the transaction ID if the + # event was sent from the same access token. For regular users, we can use + # the access token ID to determine this. For guests, we can't, but since + # each guest only has one access token, we can just check that the event was + # sent by the same user as the one requesting the event. + event_token_id: Optional[int] = getattr( + e.internal_metadata, "token_id", None ) - or config.requester.is_guest - ): - d["unsigned"]["transaction_id"] = txn_id + if config.requester.user.to_string() == e.sender and ( + ( + event_token_id is not None + and config.requester.access_token_id is not None + and event_token_id == config.requester.access_token_id + ) + or config.requester.is_guest + ): + d["unsigned"]["transaction_id"] = txn_id # invite_room_state and knock_room_state are a list of stripped room state events # that are meant to provide metadata about a room to an invitee/knocker. They are @@ -419,6 +438,9 @@ class EventClientSerializer: clients. 
""" + def __init__(self, *, msc3970_enabled: bool = False): + self._msc3970_enabled = msc3970_enabled + def serialize_event( self, event: Union[JsonDict, EventBase], @@ -443,7 +465,9 @@ class EventClientSerializer: if not isinstance(event, EventBase): return event - serialized_event = serialize_event(event, time_now, config=config) + serialized_event = serialize_event( + event, time_now, config=config, msc3970_enabled=self._msc3970_enabled + ) # Check if there are any bundled aggregations to include with the event. if bundle_aggregations: @@ -501,7 +525,9 @@ class EventClientSerializer: # `sender` of the edit; however MSC3925 proposes extending it to the whole # of the edit, which is what we do here. serialized_aggregations[RelationTypes.REPLACE] = self.serialize_event( - event_aggregations.replace, time_now, config=config + event_aggregations.replace, + time_now, + config=config, ) # Include any threaded replies to this event. diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 2e964ed37e..ac1932a7f9 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -561,6 +561,8 @@ class EventCreationHandler: expiry_ms=30 * 60 * 1000, ) + self._msc3970_enabled = hs.config.experimental.msc3970_enabled + async def create_event( self, requester: Requester, @@ -701,9 +703,16 @@ class EventCreationHandler: if require_consent and not is_exempt: await self.assert_accepted_privacy_policy(requester) + # Save the access token ID, the device ID and the transaction ID in the event + # internal metadata. This is useful to determine if we should echo the + # transaction_id in events. + # See `synapse.events.utils.EventClientSerializer.serialize_event` if requester.access_token_id is not None: builder.internal_metadata.token_id = requester.access_token_id + if requester.device_id is not None: + builder.internal_metadata.device_id = requester.device_id + if txn_id is not None: builder.internal_metadata.txn_id = txn_id @@ -897,12 +906,31 @@ class EventCreationHandler: Returns: An event if one could be found, None otherwise. """ + + if self._msc3970_enabled and requester.device_id: + # When MSC3970 is enabled, we lookup for events sent by the same device first, + # and fallback to the old behaviour if none were found. + existing_event_id = ( + await self.store.get_event_id_from_transaction_id_and_device_id( + room_id, + requester.user.to_string(), + requester.device_id, + txn_id, + ) + ) + if existing_event_id: + return await self.store.get_event(existing_event_id) + + # Pre-MSC3970, we looked up for events that were sent by the same session by + # using the access token ID. 
if requester.access_token_id: - existing_event_id = await self.store.get_event_id_from_transaction_id( - room_id, - requester.user.to_string(), - requester.access_token_id, - txn_id, + existing_event_id = ( + await self.store.get_event_id_from_transaction_id_and_token_id( + room_id, + requester.user.to_string(), + requester.access_token_id, + txn_id, + ) ) if existing_event_id: return await self.store.get_event(existing_event_id) diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index ec317e6023..ed805d6ec8 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -169,6 +169,8 @@ class RoomMemberHandler(metaclass=abc.ABCMeta): self.request_ratelimiter = hs.get_request_ratelimiter() hs.get_notifier().add_new_join_in_room_callback(self._on_user_joined_room) + self._msc3970_enabled = hs.config.experimental.msc3970_enabled + def _on_user_joined_room(self, event_id: str, room_id: str) -> None: """Notify the rate limiter that a room join has occurred. @@ -399,13 +401,30 @@ class RoomMemberHandler(metaclass=abc.ABCMeta): # Check if we already have an event with a matching transaction ID. (We # do this check just before we persist an event as well, but may as well # do it up front for efficiency.) - if txn_id and requester.access_token_id: - existing_event_id = await self.store.get_event_id_from_transaction_id( - room_id, - requester.user.to_string(), - requester.access_token_id, - txn_id, - ) + if txn_id: + existing_event_id = None + if self._msc3970_enabled and requester.device_id: + # When MSC3970 is enabled, we lookup for events sent by the same device + # first, and fallback to the old behaviour if none were found. + existing_event_id = ( + await self.store.get_event_id_from_transaction_id_and_device_id( + room_id, + requester.user.to_string(), + requester.device_id, + txn_id, + ) + ) + + if requester.access_token_id and not existing_event_id: + existing_event_id = ( + await self.store.get_event_id_from_transaction_id_and_token_id( + room_id, + requester.user.to_string(), + requester.access_token_id, + txn_id, + ) + ) + if existing_event_id: event_pos = await self.store.get_position_for_event(existing_event_id) return existing_event_id, event_pos.stream diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py index f2aaab6227..0d8a63d8be 100644 --- a/synapse/rest/client/transactions.py +++ b/synapse/rest/client/transactions.py @@ -50,6 +50,8 @@ class HttpTransactionCache: # for at *LEAST* 30 mins, and at *MOST* 60 mins. self.cleaner = self.clock.looping_call(self._cleanup, CLEANUP_PERIOD_MS) + self._msc3970_enabled = hs.config.experimental.msc3970_enabled + def _get_transaction_key(self, request: IRequest, requester: Requester) -> Hashable: """A helper function which returns a transaction key that can be used with TransactionCache for idempotent requests. @@ -58,6 +60,7 @@ class HttpTransactionCache: requests to the same endpoint. The key is formed from the HTTP request path and attributes from the requester: the access_token_id for regular users, the user ID for guest users, and the appservice ID for appservice users. + With MSC3970, for regular users, the key is based on the user ID and device ID. Args: request: The incoming request. 
@@ -67,11 +70,21 @@ class HttpTransactionCache: """ assert request.path is not None path: str = request.path.decode("utf8") + if requester.is_guest: assert requester.user is not None, "Guest requester must have a user ID set" return (path, "guest", requester.user) + elif requester.app_service is not None: return (path, "appservice", requester.app_service.id) + + # With MSC3970, we use the user ID and device ID as the transaction key + elif self._msc3970_enabled: + assert requester.user, "Requester must have a user" + assert requester.device_id, "Requester must have a device_id" + return (path, "user", requester.user, requester.device_id) + + # Otherwise, the pre-MSC3970 behaviour is to use the access token ID else: assert ( requester.access_token_id is not None diff --git a/synapse/server.py b/synapse/server.py index 559724594b..08ad97b952 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -762,7 +762,9 @@ class HomeServer(metaclass=abc.ABCMeta): @cache_in_self def get_event_client_serializer(self) -> EventClientSerializer: - return EventClientSerializer() + return EventClientSerializer( + msc3970_enabled=self.config.experimental.msc3970_enabled + ) @cache_in_self def get_password_policy_handler(self) -> PasswordPolicyHandler: diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index 9c1e506da6..c229de48c8 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -127,6 +127,8 @@ class PersistEventsStore: self._backfill_id_gen: AbstractStreamIdGenerator = self.store._backfill_id_gen self._stream_id_gen: AbstractStreamIdGenerator = self.store._stream_id_gen + self._msc3970_enabled = hs.config.experimental.msc3970_enabled + @trace async def _persist_events_and_state_updates( self, @@ -977,23 +979,43 @@ class PersistEventsStore: ) -> None: """Persist the mapping from transaction IDs to event IDs (if defined).""" - to_insert = [] + inserted_ts = self._clock.time_msec() + to_insert_token_id: List[Tuple[str, str, str, int, str, int]] = [] + to_insert_device_id: List[Tuple[str, str, str, str, str, int]] = [] for event, _ in events_and_contexts: - token_id = getattr(event.internal_metadata, "token_id", None) txn_id = getattr(event.internal_metadata, "txn_id", None) - if token_id and txn_id: - to_insert.append( - ( - event.event_id, - event.room_id, - event.sender, - token_id, - txn_id, - self._clock.time_msec(), + token_id = getattr(event.internal_metadata, "token_id", None) + device_id = getattr(event.internal_metadata, "device_id", None) + + if txn_id is not None: + if token_id is not None: + to_insert_token_id.append( + ( + event.event_id, + event.room_id, + event.sender, + token_id, + txn_id, + inserted_ts, + ) ) - ) - if to_insert: + if device_id is not None: + to_insert_device_id.append( + ( + event.event_id, + event.room_id, + event.sender, + device_id, + txn_id, + inserted_ts, + ) + ) + + # Pre-MSC3970, we rely on the access_token_id to scope the txn_id for events. + # Since this is an experimental flag, we still store the mapping even if the + # flag is disabled. + if to_insert_token_id: self.db_pool.simple_insert_many_txn( txn, table="event_txn_id", @@ -1005,7 +1027,25 @@ class PersistEventsStore: "txn_id", "inserted_ts", ), - values=to_insert, + values=to_insert_token_id, + ) + + # With MSC3970, we rely on the device_id instead to scope the txn_id for events. 
+ # We're only inserting if MSC3970 is *enabled*, because else the pre-MSC3970 + # behaviour would allow for a UNIQUE constraint violation on this table + if to_insert_device_id and self._msc3970_enabled: + self.db_pool.simple_insert_many_txn( + txn, + table="event_txn_id_device_id", + keys=( + "event_id", + "room_id", + "user_id", + "device_id", + "txn_id", + "inserted_ts", + ), + values=to_insert_device_id, ) async def update_current_state( diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index 0cf46626d2..0ff3fc7369 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -2022,7 +2022,7 @@ class EventsWorkerStore(SQLBaseStore): desc="get_next_event_to_expire", func=get_next_event_to_expire_txn ) - async def get_event_id_from_transaction_id( + async def get_event_id_from_transaction_id_and_token_id( self, room_id: str, user_id: str, token_id: int, txn_id: str ) -> Optional[str]: """Look up if we have already persisted an event for the transaction ID, @@ -2038,7 +2038,26 @@ class EventsWorkerStore(SQLBaseStore): }, retcol="event_id", allow_none=True, - desc="get_event_id_from_transaction_id", + desc="get_event_id_from_transaction_id_and_token_id", + ) + + async def get_event_id_from_transaction_id_and_device_id( + self, room_id: str, user_id: str, device_id: str, txn_id: str + ) -> Optional[str]: + """Look up if we have already persisted an event for the transaction ID, + returning the event ID if so. + """ + return await self.db_pool.simple_select_one_onecol( + table="event_txn_id_device_id", + keyvalues={ + "room_id": room_id, + "user_id": user_id, + "device_id": device_id, + "txn_id": txn_id, + }, + retcol="event_id", + allow_none=True, + desc="get_event_id_from_transaction_id_and_device_id", ) async def get_already_persisted_events( @@ -2068,7 +2087,7 @@ class EventsWorkerStore(SQLBaseStore): # Check if this is a duplicate of an event we've already # persisted. - existing = await self.get_event_id_from_transaction_id( + existing = await self.get_event_id_from_transaction_id_and_token_id( event.room_id, event.sender, token_id, txn_id ) if existing: @@ -2084,11 +2103,17 @@ class EventsWorkerStore(SQLBaseStore): """Cleans out transaction id mappings older than 24hrs.""" def _cleanup_old_transaction_ids_txn(txn: LoggingTransaction) -> None: + one_day_ago = self._clock.time_msec() - 24 * 60 * 60 * 1000 sql = """ DELETE FROM event_txn_id WHERE inserted_ts < ? """ - one_day_ago = self._clock.time_msec() - 24 * 60 * 60 * 1000 + txn.execute(sql, (one_day_ago,)) + + sql = """ + DELETE FROM event_txn_id_device_id + WHERE inserted_ts < ? + """ txn.execute(sql, (one_day_ago,)) return await self.db_pool.runInteraction( diff --git a/synapse/storage/schema/main/delta/74/05_events_txn_id_device_id.sql b/synapse/storage/schema/main/delta/74/05_events_txn_id_device_id.sql new file mode 100644 index 0000000000..517a821a56 --- /dev/null +++ b/synapse/storage/schema/main/delta/74/05_events_txn_id_device_id.sql @@ -0,0 +1,53 @@ +/* Copyright 2023 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- For MSC3970, in addition to the (room_id, user_id, token_id, txn_id) -> event_id mapping for each local event, +-- we also store the (room_id, user_id, device_id, txn_id) -> event_id mapping. +-- +-- This adds a new event_txn_id_device_id table. + +-- A map of recent events persisted with transaction IDs. Used to deduplicate +-- send event requests with the same transaction ID. +-- +-- Note: with MSC3970, transaction IDs are scoped to the +-- room ID/user ID/device ID that was used to make the request. +-- +-- Note: The foreign key constraints are ON DELETE CASCADE, as if we delete the +-- event or device we don't want to try and de-duplicate the event. +CREATE TABLE IF NOT EXISTS event_txn_id_device_id ( + event_id TEXT NOT NULL, + room_id TEXT NOT NULL, + user_id TEXT NOT NULL, + device_id TEXT NOT NULL, + txn_id TEXT NOT NULL, + inserted_ts BIGINT NOT NULL, + FOREIGN KEY (event_id) + REFERENCES events (event_id) ON DELETE CASCADE, + FOREIGN KEY (user_id, device_id) + REFERENCES devices (user_id, device_id) ON DELETE CASCADE +); + +-- This ensures that there is only one mapping per event_id. +CREATE UNIQUE INDEX IF NOT EXISTS event_txn_id_device_id_event_id + ON event_txn_id_device_id(event_id); + +-- This ensures that there is only one mapping per (room_id, user_id, device_id, txn_id) tuple. +-- Events are usually looked up using this index. +CREATE UNIQUE INDEX IF NOT EXISTS event_txn_id_device_id_txn_id + ON event_txn_id_device_id(room_id, user_id, device_id, txn_id); + +-- This table is cleaned up regularly, removing the oldest entries, hence this index. +CREATE INDEX IF NOT EXISTS event_txn_id_device_id_ts + ON event_txn_id_device_id(inserted_ts); -- cgit 1.5.1 From 8e9739449dd6d3c133adf9e995d27d06518a0bcf Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 25 Apr 2023 13:30:41 -0400 Subject: Add unstable /keys/claim endpoint which always returns fallback keys. (#15462) It can be useful to always return the fallback key when attempting to claim keys. This adds an unstable endpoint for `/keys/claim` which always returns fallback keys in addition to one-time-keys. The fallback key(s) are not marked as "used" unless there are no corresponding OTKs. This is currently defined in MSC3983 (although likely to be split out to a separate MSC). The endpoint shape may change or be requested differently (i.e. a keyword parameter on the current endpoint), but the core logic should be reasonable. 
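To make the intended semantics concrete, here is a minimal, self-contained sketch of the claim behaviour described above (this is not Synapse's actual implementation; the store shapes are invented for illustration): fallback keys are always returned when requested, but are only marked as used when no one-time key could satisfy the claim.

```python
from typing import Dict, List, Tuple

def claim_keys(
    otk_store: Dict[str, List[Tuple[str, str]]],   # algorithm -> [(key_id, key)]
    fallback_store: Dict[str, List],               # algorithm -> [key_id, key, used]
    algorithm: str,
    always_include_fallback: bool,
) -> Dict[str, str]:
    claimed: Dict[str, str] = {}

    otks = otk_store.get(algorithm) or []
    found_otk = bool(otks)
    if found_otk:
        # One-time keys are single use: remove the key as it is claimed.
        key_id, key = otks.pop(0)
        claimed[f"{algorithm}:{key_id}"] = key

    entry = fallback_store.get(algorithm)
    if entry is not None and (always_include_fallback or not found_otk):
        key_id, key, _used = entry
        claimed[f"{algorithm}:{key_id}"] = key
        if not found_otk:
            # Only mark the fallback key as used when no OTK satisfied the claim.
            entry[2] = True

    return claimed

# With an OTK available, both keys come back but the fallback stays unused.
otks = {"alg1": [("k2", "key2")]}
fallbacks = {"alg1": ["k1", "fallback_key1", False]}
print(claim_keys(otks, fallbacks, "alg1", always_include_fallback=True))
# -> {"alg1:k2": "key2", "alg1:k1": "fallback_key1"}; fallbacks["alg1"][2] stays False
```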
--- changelog.d/15462.misc | 1 + synapse/federation/federation_server.py | 6 +- synapse/federation/transport/server/__init__.py | 6 + synapse/federation/transport/server/federation.py | 23 ++- synapse/handlers/appservice.py | 13 +- synapse/handlers/e2e_keys.py | 70 ++++++- synapse/rest/client/keys.py | 31 ++- synapse/storage/databases/main/end_to_end_keys.py | 9 +- tests/handlers/test_e2e_keys.py | 241 +++++++++++++++++++++- 9 files changed, 371 insertions(+), 29 deletions(-) create mode 100644 changelog.d/15462.misc (limited to 'synapse/rest') diff --git a/changelog.d/15462.misc b/changelog.d/15462.misc new file mode 100644 index 0000000000..36e4bffbc8 --- /dev/null +++ b/changelog.d/15462.misc @@ -0,0 +1 @@ +Update support for [MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983) to allow always returning fallback-keys in a `/keys/claim` request. diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index d7740eb3b4..c618f3d7a6 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -1005,7 +1005,7 @@ class FederationServer(FederationBase): @trace async def on_claim_client_keys( - self, origin: str, content: JsonDict + self, origin: str, content: JsonDict, always_include_fallback_keys: bool ) -> Dict[str, Any]: query = [] for user_id, device_keys in content.get("one_time_keys", {}).items(): @@ -1013,7 +1013,9 @@ class FederationServer(FederationBase): query.append((user_id, device_id, algorithm)) log_kv({"message": "Claiming one time keys.", "user, device pairs": query}) - results = await self._e2e_keys_handler.claim_local_one_time_keys(query) + results = await self._e2e_keys_handler.claim_local_one_time_keys( + query, always_include_fallback_keys=always_include_fallback_keys + ) json_result: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} for result in results: diff --git a/synapse/federation/transport/server/__init__.py b/synapse/federation/transport/server/__init__.py index 753372fc54..55d2cd0a9a 100644 --- a/synapse/federation/transport/server/__init__.py +++ b/synapse/federation/transport/server/__init__.py @@ -25,6 +25,7 @@ from synapse.federation.transport.server._base import ( from synapse.federation.transport.server.federation import ( FEDERATION_SERVLET_CLASSES, FederationAccountStatusServlet, + FederationUnstableClientKeysClaimServlet, ) from synapse.http.server import HttpServer, JsonResource from synapse.http.servlet import ( @@ -298,6 +299,11 @@ def register_servlets( and not hs.config.experimental.msc3720_enabled ): continue + if ( + servletclass == FederationUnstableClientKeysClaimServlet + and not hs.config.experimental.msc3983_appservice_otk_claims + ): + continue servletclass( hs=hs, diff --git a/synapse/federation/transport/server/federation.py b/synapse/federation/transport/server/federation.py index ec5b5eeafa..e2340d70d5 100644 --- a/synapse/federation/transport/server/federation.py +++ b/synapse/federation/transport/server/federation.py @@ -577,7 +577,28 @@ class FederationClientKeysClaimServlet(BaseFederationServerServlet): async def on_POST( self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] ) -> Tuple[int, JsonDict]: - response = await self.handler.on_claim_client_keys(origin, content) + response = await self.handler.on_claim_client_keys( + origin, content, always_include_fallback_keys=False + ) + return 200, response + + +class FederationUnstableClientKeysClaimServlet(BaseFederationServerServlet): + """ + Identical to the stable endpoint 
(FederationClientKeysClaimServlet) except it + always includes fallback keys in the response. + """ + + PREFIX = FEDERATION_UNSTABLE_PREFIX + PATH = "/user/keys/claim" + CATEGORY = "Federation requests" + + async def on_POST( + self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] + ) -> Tuple[int, JsonDict]: + response = await self.handler.on_claim_client_keys( + origin, content, always_include_fallback_keys=True + ) return 200, response diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index da887647d4..4ca2bc0420 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -842,9 +842,7 @@ class ApplicationServicesHandler: async def claim_e2e_one_time_keys( self, query: Iterable[Tuple[str, str, str]] - ) -> Tuple[ - Iterable[Dict[str, Dict[str, Dict[str, JsonDict]]]], List[Tuple[str, str, str]] - ]: + ) -> Tuple[Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str]]]: """Claim one time keys from application services. Users which are exclusively owned by an application service are sent a @@ -856,7 +854,7 @@ class ApplicationServicesHandler: Returns: A tuple of: - An iterable of maps of user ID -> a map device ID -> a map of key ID -> JSON bytes. + A map of user ID -> a map device ID -> a map of key ID -> JSON. A copy of the input which has not been fulfilled (either because they are not appservice users or the appservice does not support @@ -897,12 +895,11 @@ class ApplicationServicesHandler: ) # Patch together the results -- they are all independent (since they - # require exclusive control over the users). They get returned as a list - # and the caller combines them. - claimed_keys: List[Dict[str, Dict[str, Dict[str, JsonDict]]]] = [] + # require exclusive control over the users, which is the outermost key). + claimed_keys: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} for success, result in results: if success: - claimed_keys.append(result[0]) + claimed_keys.update(result[0]) missing.extend(result[1]) return claimed_keys, missing diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 0073667470..d1ab95126c 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -563,7 +563,9 @@ class E2eKeysHandler: return ret async def claim_local_one_time_keys( - self, local_query: List[Tuple[str, str, str]] + self, + local_query: List[Tuple[str, str, str]], + always_include_fallback_keys: bool, ) -> Iterable[Dict[str, Dict[str, Dict[str, JsonDict]]]]: """Claim one time keys for local users. @@ -573,6 +575,7 @@ class E2eKeysHandler: Args: local_query: An iterable of tuples of (user ID, device ID, algorithm). + always_include_fallback_keys: True to always include fallback keys. Returns: An iterable of maps of user ID -> a map device ID -> a map of key ID -> JSON bytes. @@ -583,24 +586,73 @@ class E2eKeysHandler: # If the application services have not provided any keys via the C-S # API, query it directly for one-time keys. if self._query_appservices_for_otks: + # TODO Should this query for fallback keys of uploaded OTKs if + # always_include_fallback_keys is True? The MSC is ambiguous. ( appservice_results, not_found, ) = await self._appservice_handler.claim_e2e_one_time_keys(not_found) else: - appservice_results = [] + appservice_results = {} + + # Calculate which user ID / device ID / algorithm tuples to get fallback + # keys for. This can be either only missing results *or* all results + # (which don't already have a fallback key). 
+ if always_include_fallback_keys: + # Build the fallback query as any part of the original query where + # the appservice didn't respond with a fallback key. + fallback_query = [] + + # Iterate each item in the original query and search the results + # from the appservice for that user ID / device ID. If it is found, + # check if any of the keys match the requested algorithm & are a + # fallback key. + for user_id, device_id, algorithm in local_query: + # Check if the appservice responded for this query. + as_result = appservice_results.get(user_id, {}).get(device_id, {}) + found_otk = False + for key_id, key_json in as_result.items(): + if key_id.startswith(f"{algorithm}:"): + # A OTK or fallback key was found for this query. + found_otk = True + # A fallback key was found for this query, no need to + # query further. + if key_json.get("fallback", False): + break + + else: + # No fallback key was found from appservices, query for it. + # Only mark the fallback key as used if no OTK was found + # (from either the database or appservices). + mark_as_used = not found_otk and not any( + key_id.startswith(f"{algorithm}:") + for key_id in otk_results.get(user_id, {}) + .get(device_id, {}) + .keys() + ) + fallback_query.append((user_id, device_id, algorithm, mark_as_used)) + + else: + # All fallback keys get marked as used. + fallback_query = [ + (user_id, device_id, algorithm, True) + for user_id, device_id, algorithm in not_found + ] # For each user that does not have a one-time keys available, see if # there is a fallback key. - fallback_results = await self.store.claim_e2e_fallback_keys(not_found) + fallback_results = await self.store.claim_e2e_fallback_keys(fallback_query) # Return the results in order, each item from the input query should # only appear once in the combined list. - return (otk_results, *appservice_results, fallback_results) + return (otk_results, appservice_results, fallback_results) @trace async def claim_one_time_keys( - self, query: Dict[str, Dict[str, Dict[str, str]]], timeout: Optional[int] + self, + query: Dict[str, Dict[str, Dict[str, str]]], + timeout: Optional[int], + always_include_fallback_keys: bool, ) -> JsonDict: local_query: List[Tuple[str, str, str]] = [] remote_queries: Dict[str, Dict[str, Dict[str, str]]] = {} @@ -617,7 +669,9 @@ class E2eKeysHandler: set_tag("local_key_query", str(local_query)) set_tag("remote_key_query", str(remote_queries)) - results = await self.claim_local_one_time_keys(local_query) + results = await self.claim_local_one_time_keys( + local_query, always_include_fallback_keys + ) # A map of user ID -> device ID -> key ID -> key. json_result: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} @@ -625,7 +679,9 @@ class E2eKeysHandler: for user_id, device_keys in result.items(): for device_id, keys in device_keys.items(): for key_id, key in keys.items(): - json_result.setdefault(user_id, {})[device_id] = {key_id: key} + json_result.setdefault(user_id, {}).setdefault( + device_id, {} + ).update({key_id: key}) # Remote failures. failures: Dict[str, JsonDict] = {} diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py index 6209b79b01..2a25094109 100644 --- a/synapse/rest/client/keys.py +++ b/synapse/rest/client/keys.py @@ -15,6 +15,7 @@ # limitations under the License. 
import logging +import re from typing import TYPE_CHECKING, Any, Optional, Tuple from synapse.api.errors import InvalidAPICallError, SynapseError @@ -288,7 +289,33 @@ class OneTimeKeyServlet(RestServlet): await self.auth.get_user_by_req(request, allow_guest=True) timeout = parse_integer(request, "timeout", 10 * 1000) body = parse_json_object_from_request(request) - result = await self.e2e_keys_handler.claim_one_time_keys(body, timeout) + result = await self.e2e_keys_handler.claim_one_time_keys( + body, timeout, always_include_fallback_keys=False + ) + return 200, result + + +class UnstableOneTimeKeyServlet(RestServlet): + """ + Identical to the stable endpoint (OneTimeKeyServlet) except it always includes + fallback keys in the response. + """ + + PATTERNS = [re.compile(r"^/_matrix/client/unstable/org.matrix.msc3983/keys/claim$")] + CATEGORY = "Encryption requests" + + def __init__(self, hs: "HomeServer"): + super().__init__() + self.auth = hs.get_auth() + self.e2e_keys_handler = hs.get_e2e_keys_handler() + + async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + await self.auth.get_user_by_req(request, allow_guest=True) + timeout = parse_integer(request, "timeout", 10 * 1000) + body = parse_json_object_from_request(request) + result = await self.e2e_keys_handler.claim_one_time_keys( + body, timeout, always_include_fallback_keys=True + ) return 200, result @@ -394,6 +421,8 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: KeyQueryServlet(hs).register(http_server) KeyChangesServlet(hs).register(http_server) OneTimeKeyServlet(hs).register(http_server) + if hs.config.experimental.msc3983_appservice_otk_claims: + UnstableOneTimeKeyServlet(hs).register(http_server) if hs.config.worker.worker_app is None: SigningKeyUploadServlet(hs).register(http_server) SignaturesUploadServlet(hs).register(http_server) diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py index dc7768c50c..1a4ae55304 100644 --- a/synapse/storage/databases/main/end_to_end_keys.py +++ b/synapse/storage/databases/main/end_to_end_keys.py @@ -1149,18 +1149,19 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker return results, missing async def claim_e2e_fallback_keys( - self, query_list: Iterable[Tuple[str, str, str]] + self, query_list: Iterable[Tuple[str, str, str, bool]] ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]: """Take a list of fallback keys out of the database. Args: - query_list: An iterable of tuples of (user ID, device ID, algorithm). + query_list: An iterable of tuples of + (user ID, device ID, algorithm, whether the key should be marked as used). Returns: A map of user ID -> a map device ID -> a map of key ID -> JSON. """ results: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} - for user_id, device_id, algorithm in query_list: + for user_id, device_id, algorithm, mark_as_used in query_list: row = await self.db_pool.simple_select_one( table="e2e_fallback_keys_json", keyvalues={ @@ -1180,7 +1181,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker used = row["used"] # Mark fallback key as used if not already. 
- if not used: + if not used and mark_as_used: await self.db_pool.simple_update_one( table="e2e_fallback_keys_json", keyvalues={ diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index 013b9ee550..18edebd652 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -160,7 +160,9 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): res2 = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id: "alg1"}}}, timeout=None + {"one_time_keys": {local_user: {device_id: "alg1"}}}, + timeout=None, + always_include_fallback_keys=False, ) ) self.assertEqual( @@ -203,7 +205,9 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): # key claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id: "alg1"}}}, timeout=None + {"one_time_keys": {local_user: {device_id: "alg1"}}}, + timeout=None, + always_include_fallback_keys=False, ) ) self.assertEqual( @@ -220,7 +224,9 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): # claiming an OTK again should return the same fallback key claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id: "alg1"}}}, timeout=None + {"one_time_keys": {local_user: {device_id: "alg1"}}}, + timeout=None, + always_include_fallback_keys=False, ) ) self.assertEqual( @@ -267,7 +273,9 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id: "alg1"}}}, timeout=None + {"one_time_keys": {local_user: {device_id: "alg1"}}}, + timeout=None, + always_include_fallback_keys=False, ) ) self.assertEqual( @@ -277,7 +285,9 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id: "alg1"}}}, timeout=None + {"one_time_keys": {local_user: {device_id: "alg1"}}}, + timeout=None, + always_include_fallback_keys=False, ) ) self.assertEqual( @@ -296,7 +306,9 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id: "alg1"}}}, timeout=None + {"one_time_keys": {local_user: {device_id: "alg1"}}}, + timeout=None, + always_include_fallback_keys=False, ) ) self.assertEqual( @@ -304,6 +316,75 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): {"failures": {}, "one_time_keys": {local_user: {device_id: fallback_key3}}}, ) + def test_fallback_key_always_returned(self) -> None: + local_user = "@boris:" + self.hs.hostname + device_id = "xyz" + fallback_key = {"alg1:k1": "fallback_key1"} + otk = {"alg1:k2": "key2"} + + # we shouldn't have any unused fallback keys yet + res = self.get_success( + self.store.get_e2e_unused_fallback_key_types(local_user, device_id) + ) + self.assertEqual(res, []) + + # Upload a OTK & fallback key. + self.get_success( + self.handler.upload_keys_for_user( + local_user, + device_id, + {"one_time_keys": otk, "fallback_keys": fallback_key}, + ) + ) + + # we should now have an unused alg1 key + fallback_res = self.get_success( + self.store.get_e2e_unused_fallback_key_types(local_user, device_id) + ) + self.assertEqual(fallback_res, ["alg1"]) + + # Claiming an OTK and requesting to always return the fallback key should + # return both. 
+ claim_res = self.get_success( + self.handler.claim_one_time_keys( + {"one_time_keys": {local_user: {device_id: "alg1"}}}, + timeout=None, + always_include_fallback_keys=True, + ) + ) + self.assertEqual( + claim_res, + { + "failures": {}, + "one_time_keys": {local_user: {device_id: {**fallback_key, **otk}}}, + }, + ) + + # This should not mark the key as used. + fallback_res = self.get_success( + self.store.get_e2e_unused_fallback_key_types(local_user, device_id) + ) + self.assertEqual(fallback_res, ["alg1"]) + + # Claiming an OTK again should return only the fallback key. + claim_res = self.get_success( + self.handler.claim_one_time_keys( + {"one_time_keys": {local_user: {device_id: "alg1"}}}, + timeout=None, + always_include_fallback_keys=True, + ) + ) + self.assertEqual( + claim_res, + {"failures": {}, "one_time_keys": {local_user: {device_id: fallback_key}}}, + ) + + # And mark it as used. + fallback_res = self.get_success( + self.store.get_e2e_unused_fallback_key_types(local_user, device_id) + ) + self.assertEqual(fallback_res, []) + def test_replace_master_key(self) -> None: """uploading a new signing key should make the old signing key unavailable""" local_user = "@boris:" + self.hs.hostname @@ -1004,6 +1085,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): } }, timeout=None, + always_include_fallback_keys=False, ) ) self.assertEqual( @@ -1016,6 +1098,153 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): }, ) + @override_config({"experimental_features": {"msc3983_appservice_otk_claims": True}}) + def test_query_appservice_with_fallback(self) -> None: + local_user = "@boris:" + self.hs.hostname + device_id_1 = "xyz" + fallback_key = {"alg1:k1": {"desc": "fallback_key1", "fallback": True}} + otk = {"alg1:k2": {"desc": "key2"}} + as_fallback_key = {"alg1:k3": {"desc": "fallback_key3", "fallback": True}} + as_otk = {"alg1:k4": {"desc": "key4"}} + + # Inject an appservice interested in this user. + appservice = ApplicationService( + token="i_am_an_app_service", + id="1234", + namespaces={"users": [{"regex": r"@boris:.+", "exclusive": True}]}, + # Note: this user does not have to match the regex above + sender="@as_main:test", + ) + self.hs.get_datastores().main.services_cache = [appservice] + self.hs.get_datastores().main.exclusive_user_regex = _make_exclusive_regex( + [appservice] + ) + + # Setup a response. + self.appservice_api.claim_client_keys.return_value = make_awaitable( + ({local_user: {device_id_1: {**as_otk, **as_fallback_key}}}, []) + ) + + # Claim OTKs, which will ask the appservice and do nothing else. + claim_res = self.get_success( + self.handler.claim_one_time_keys( + {"one_time_keys": {local_user: {device_id_1: "alg1"}}}, + timeout=None, + always_include_fallback_keys=True, + ) + ) + self.assertEqual( + claim_res, + { + "failures": {}, + "one_time_keys": { + local_user: {device_id_1: {**as_otk, **as_fallback_key}} + }, + }, + ) + + # Now upload a fallback key. + res = self.get_success( + self.store.get_e2e_unused_fallback_key_types(local_user, device_id_1) + ) + self.assertEqual(res, []) + + self.get_success( + self.handler.upload_keys_for_user( + local_user, + device_id_1, + {"fallback_keys": fallback_key}, + ) + ) + + # we should now have an unused alg1 key + fallback_res = self.get_success( + self.store.get_e2e_unused_fallback_key_types(local_user, device_id_1) + ) + self.assertEqual(fallback_res, ["alg1"]) + + # The appservice will return only the OTK. 
+ self.appservice_api.claim_client_keys.return_value = make_awaitable( + ({local_user: {device_id_1: as_otk}}, []) + ) + + # Claim OTKs, which should return the OTK from the appservice and the + # uploaded fallback key. + claim_res = self.get_success( + self.handler.claim_one_time_keys( + {"one_time_keys": {local_user: {device_id_1: "alg1"}}}, + timeout=None, + always_include_fallback_keys=True, + ) + ) + self.assertEqual( + claim_res, + { + "failures": {}, + "one_time_keys": { + local_user: {device_id_1: {**as_otk, **fallback_key}} + }, + }, + ) + + # But the fallback key should not be marked as used. + fallback_res = self.get_success( + self.store.get_e2e_unused_fallback_key_types(local_user, device_id_1) + ) + self.assertEqual(fallback_res, ["alg1"]) + + # Now upload a OTK. + self.get_success( + self.handler.upload_keys_for_user( + local_user, + device_id_1, + {"one_time_keys": otk}, + ) + ) + + # Claim OTKs, which will return information only from the database. + claim_res = self.get_success( + self.handler.claim_one_time_keys( + {"one_time_keys": {local_user: {device_id_1: "alg1"}}}, + timeout=None, + always_include_fallback_keys=True, + ) + ) + self.assertEqual( + claim_res, + { + "failures": {}, + "one_time_keys": {local_user: {device_id_1: {**otk, **fallback_key}}}, + }, + ) + + # But the fallback key should not be marked as used. + fallback_res = self.get_success( + self.store.get_e2e_unused_fallback_key_types(local_user, device_id_1) + ) + self.assertEqual(fallback_res, ["alg1"]) + + # Finally, return only the fallback key from the appservice. + self.appservice_api.claim_client_keys.return_value = make_awaitable( + ({local_user: {device_id_1: as_fallback_key}}, []) + ) + + # Claim OTKs, which will return only the fallback key from the database. + claim_res = self.get_success( + self.handler.claim_one_time_keys( + {"one_time_keys": {local_user: {device_id_1: "alg1"}}}, + timeout=None, + always_include_fallback_keys=True, + ) + ) + self.assertEqual( + claim_res, + { + "failures": {}, + "one_time_keys": {local_user: {device_id_1: as_fallback_key}}, + }, + ) + @override_config({"experimental_features": {"msc3984_appservice_key_query": True}}) def test_query_local_devices_appservice(self) -> None: """Test that querying of appservices for keys overrides responses from the database.""" -- cgit 1.5.1 From 9900f7c231f8af536fce229117b0a406dc629293 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 26 Apr 2023 17:00:11 +0100 Subject: Add admin endpoint to query room sizes (#15482) --- changelog.d/15482.feature | 1 + docs/admin_api/statistics.md | 49 ++++++++++++++ synapse/rest/admin/__init__.py | 6 +- synapse/rest/admin/statistics.py | 25 +++++++ synapse/storage/controllers/__init__.py | 2 + synapse/storage/controllers/stats.py | 113 ++++++++++++++++++++++++++++++++ 6 files changed, 195 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15482.feature create mode 100644 synapse/storage/controllers/stats.py (limited to 'synapse/rest') diff --git a/changelog.d/15482.feature b/changelog.d/15482.feature new file mode 100644 index 0000000000..f3e9f2a5b2 --- /dev/null +++ b/changelog.d/15482.feature @@ -0,0 +1 @@ +Add admin endpoint to query the largest rooms by disk space used in the database. 
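Once this patch is applied, an admin could query the new endpoint roughly as follows. This is a hedged sketch using the `requests` library; the hostname and `ADMIN_TOKEN` are placeholders, while the path and response shape come from the documentation added below.

```python
import requests

ADMIN_TOKEN = "..."  # placeholder: an access token belonging to a server admin

resp = requests.get(
    "https://matrix.example.com/_synapse/admin/v1/statistics/database/rooms",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
)
resp.raise_for_status()

# `rooms` is sorted largest first; `estimated_size` is an estimate in bytes.
for room in resp.json()["rooms"]:
    print(room["room_id"], room["estimated_size"])
```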
diff --git a/docs/admin_api/statistics.md b/docs/admin_api/statistics.md
index 03b3621e55..2bd417e900 100644
--- a/docs/admin_api/statistics.md
+++ b/docs/admin_api/statistics.md
@@ -81,3 +81,52 @@ The following fields are returned in the JSON response body:
   - `user_id` - string - Fully-qualified user ID (ex. `@user:server.com`).
 * `next_token` - integer - Opaque value used for pagination. See above.
 * `total` - integer - Total number of users after filtering.
+
+
+# Get largest rooms by size in database
+
+Returns the 10 largest rooms and an estimate of how much space in the database
+they are taking.
+
+This does not include the size of any media associated with the room.
+
+Returns an error on SQLite.
+
+*Note:* This uses the planner statistics from PostgreSQL to do the estimates,
+which means that the returned information can vary widely from reality. However,
+it should be enough to get a rough idea of where database disk space is going.
+
+
+The API is:
+
+```
+GET /_synapse/admin/v1/statistics/database/rooms
+```
+
+A response body like the following is returned:
+
+```json
+{
+  "rooms": [
+    {
+      "room_id": "!OGEhHVWSdvArJzumhm:matrix.org",
+      "estimated_size": 47325417353
+    }
+  ]
+}
+```
+
+
+
+**Response**
+
+The following fields are returned in the JSON response body:
+
+* `rooms` - An array of objects, sorted by largest room first. Objects contain
+  the following fields:
+  - `room_id` - string - The room ID.
+  - `estimated_size` - integer - Estimated disk space used in bytes by the room
+    in the database.
+
+
+*Added in Synapse 1.83.0*
diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py
index 79f22a59f1..770df261ce 100644
--- a/synapse/rest/admin/__init__.py
+++ b/synapse/rest/admin/__init__.py
@@ -68,7 +68,10 @@ from synapse.rest.admin.rooms import (
     RoomTimestampToEventRestServlet,
 )
 from synapse.rest.admin.server_notice_servlet import SendServerNoticeServlet
-from synapse.rest.admin.statistics import UserMediaStatisticsRestServlet
+from synapse.rest.admin.statistics import (
+    LargestRoomsStatistics,
+    UserMediaStatisticsRestServlet,
+)
 from synapse.rest.admin.username_available import UsernameAvailableRestServlet
 from synapse.rest.admin.users import (
     AccountDataRestServlet,
@@ -259,6 +262,7 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
     UserRestServletV2(hs).register(http_server)
     UsersRestServletV2(hs).register(http_server)
     UserMediaStatisticsRestServlet(hs).register(http_server)
+    LargestRoomsStatistics(hs).register(http_server)
     EventReportDetailRestServlet(hs).register(http_server)
     EventReportsRestServlet(hs).register(http_server)
     AccountDataRestServlet(hs).register(http_server)
diff --git a/synapse/rest/admin/statistics.py b/synapse/rest/admin/statistics.py
index 9c45f4650d..19780e4b4c 100644
--- a/synapse/rest/admin/statistics.py
+++ b/synapse/rest/admin/statistics.py
@@ -113,3 +113,28 @@ class UserMediaStatisticsRestServlet(RestServlet):
             ret["next_token"] = start + len(users_media)
 
         return HTTPStatus.OK, ret
+
+
+class LargestRoomsStatistics(RestServlet):
+    """Get the largest rooms by database size.
+
+    Only works when using PostgreSQL.
+ """ + + PATTERNS = admin_patterns("/statistics/database/rooms$") + + def __init__(self, hs: "HomeServer"): + self.auth = hs.get_auth() + self.stats_controller = hs.get_storage_controllers().stats + + async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + await assert_requester_is_admin(self.auth, request) + + room_sizes = await self.stats_controller.get_room_db_size_estimate() + + return HTTPStatus.OK, { + "rooms": [ + {"room_id": room_id, "estimated_size": size} + for room_id, size in room_sizes + ] + } diff --git a/synapse/storage/controllers/__init__.py b/synapse/storage/controllers/__init__.py index 45101cda7a..0ef8602631 100644 --- a/synapse/storage/controllers/__init__.py +++ b/synapse/storage/controllers/__init__.py @@ -19,6 +19,7 @@ from synapse.storage.controllers.persist_events import ( ) from synapse.storage.controllers.purge_events import PurgeEventsStorageController from synapse.storage.controllers.state import StateStorageController +from synapse.storage.controllers.stats import StatsController from synapse.storage.databases import Databases from synapse.storage.databases.main import DataStore @@ -40,6 +41,7 @@ class StorageControllers: self.purge_events = PurgeEventsStorageController(hs, stores) self.state = StateStorageController(hs, stores) + self.stats = StatsController(hs, stores) self.persistence = None if stores.persist_events: diff --git a/synapse/storage/controllers/stats.py b/synapse/storage/controllers/stats.py new file mode 100644 index 0000000000..988e44c6af --- /dev/null +++ b/synapse/storage/controllers/stats.py @@ -0,0 +1,113 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +from collections import Counter +from typing import TYPE_CHECKING, Collection, List, Tuple + +from synapse.api.errors import SynapseError +from synapse.storage.database import LoggingTransaction +from synapse.storage.databases import Databases +from synapse.storage.engines import PostgresEngine + +if TYPE_CHECKING: + from synapse.server import HomeServer + +logger = logging.getLogger(__name__) + + +class StatsController: + """High level interface for getting statistics.""" + + def __init__(self, hs: "HomeServer", stores: Databases): + self.stores = stores + + async def get_room_db_size_estimate(self) -> List[Tuple[str, int]]: + """Get an estimate of the largest rooms and how much database space they + use, in bytes. + + Only works against PostgreSQL. + + Note: this uses the postgres statistics so is a very rough estimate. + """ + + # Note: We look at both tables on the main and state databases. + if not isinstance(self.stores.main.database_engine, PostgresEngine): + raise SynapseError(400, "Endpoint requires using PostgreSQL") + + if not isinstance(self.stores.state.database_engine, PostgresEngine): + raise SynapseError(400, "Endpoint requires using PostgreSQL") + + # For each "large" table, we go through and get the largest rooms + # and an estimate of how much space they take. 
We can then sum the
+        # results and return the top 10.
+        #
+        # This isn't the most accurate, but given all of these are estimates
+        # anyway it's good enough.
+        room_estimates: Counter[str] = Counter()
+
+        # Return size of the table on disk, including indexes and TOAST.
+        table_sql = """
+            SELECT pg_total_relation_size(?)
+        """
+
+        # Get an estimate for the largest rooms and their frequency.
+        #
+        # Note: the cast here is a hack to cast from `anyarray` to an actual
+        # type. This ensures that psycopg2 passes us back a Python list.
+        column_sql = """
+            SELECT
+                most_common_vals::TEXT::TEXT[], most_common_freqs::TEXT::NUMERIC[]
+            FROM pg_stats
+            WHERE tablename = ? and attname = 'room_id'
+        """
+
+        def get_room_db_size_estimate_txn(
+            txn: LoggingTransaction,
+            tables: Collection[str],
+        ) -> None:
+            for table in tables:
+                txn.execute(table_sql, (table,))
+                row = txn.fetchone()
+                assert row is not None
+                (table_size,) = row
+
+                txn.execute(column_sql, (table,))
+                row = txn.fetchone()
+                assert row is not None
+                vals, freqs = row
+
+                for room_id, freq in zip(vals, freqs):
+                    room_estimates[room_id] += int(freq * table_size)
+
+        await self.stores.main.db_pool.runInteraction(
+            "get_room_db_size_estimate_main",
+            get_room_db_size_estimate_txn,
+            (
+                "event_json",
+                "events",
+                "event_search",
+                "event_edges",
+                "event_push_actions",
+                "stream_ordering_to_exterm",
+            ),
+        )
+
+        await self.stores.state.db_pool.runInteraction(
+            "get_room_db_size_estimate_state",
+            get_room_db_size_estimate_txn,
+            ("state_groups_state",),
+        )
+
+        return room_estimates.most_common(10)
--
cgit 1.5.1


From 301b4156d5574521e4fa3df8fed2f8a1c8617745 Mon Sep 17 00:00:00 2001
From: Shay
Date: Wed, 26 Apr 2023 16:03:26 -0700
Subject: Add column `full_user_id` to tables `profiles` and `user_filters`. (#15458)

---
 changelog.d/15458.misc                             |  1 +
 synapse/_scripts/synapse_port_db.py                |  5 ++-
 synapse/api/filtering.py                           |  6 +--
 synapse/handlers/profile.py                        |  8 +---
 synapse/rest/client/filter.py                      |  2 +-
 synapse/storage/databases/main/filtering.py        | 47 ++++++++++++++++----
 synapse/storage/databases/main/profile.py          | 42 +++++++++++++++----
 synapse/storage/databases/main/registration.py     |  4 +-
 synapse/storage/schema/__init__.py                 |  5 ++-
 .../76/01_add_profiles_full_user_id_column.sql     | 20 +++++++++
 .../76/02_add_user_filters_full_user_id_column.sql | 20 +++++++++
 tests/api/test_filtering.py                        | 16 ++++----
 tests/handlers/test_profile.py                     | 26 ++++++------
 tests/rest/admin/test_user.py                      | 30 +++++++++++---
 tests/rest/client/test_filter.py                   |  4 +-
 tests/storage/test_main.py                         |  4 +-
 tests/storage/test_profile.py                      | 20 +++------
 17 files changed, 186 insertions(+), 74 deletions(-)
 create mode 100644 changelog.d/15458.misc
 create mode 100644 synapse/storage/schema/main/delta/76/01_add_profiles_full_user_id_column.sql
 create mode 100644 synapse/storage/schema/main/delta/76/02_add_user_filters_full_user_id_column.sql

(limited to 'synapse/rest')

diff --git a/changelog.d/15458.misc b/changelog.d/15458.misc
new file mode 100644
index 0000000000..5183161d25
--- /dev/null
+++ b/changelog.d/15458.misc
@@ -0,0 +1 @@
+Add column `full_user_id` to tables `profiles` and `user_filters`.
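The distinction this schema change tracks is between a localpart and a full Matrix user ID. The following stand-alone sketch (a simplified stand-in for Synapse's `UserID` type, not the real class) shows the two forms: the legacy `user_id` columns hold only the localpart, while the new `full_user_id` column holds the complete ID.

```python
from typing import NamedTuple

class UserID(NamedTuple):
    """A simplified stand-in for Synapse's UserID type."""

    localpart: str
    domain: str

    @classmethod
    def from_string(cls, s: str) -> "UserID":
        # "@frank:test" -> localpart "frank", domain "test"
        localpart, _, domain = s[1:].partition(":")
        return cls(localpart, domain)

    def to_string(self) -> str:
        return f"@{self.localpart}:{self.domain}"

user = UserID.from_string("@frank:test")
assert user.localpart == "frank"          # what the legacy `user_id` column stores
assert user.to_string() == "@frank:test"  # what the new `full_user_id` column stores
```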
diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index a58ae2a308..56d5aeb0dd 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -54,7 +54,7 @@ from synapse.logging.context import ( ) from synapse.notifier import ReplicationNotifier from synapse.storage.database import DatabasePool, LoggingTransaction, make_conn -from synapse.storage.databases.main import PushRuleStore +from synapse.storage.databases.main import FilteringWorkerStore, PushRuleStore from synapse.storage.databases.main.account_data import AccountDataWorkerStore from synapse.storage.databases.main.client_ips import ClientIpBackgroundUpdateStore from synapse.storage.databases.main.deviceinbox import DeviceInboxBackgroundUpdateStore @@ -69,6 +69,7 @@ from synapse.storage.databases.main.media_repository import ( MediaRepositoryBackgroundUpdateStore, ) from synapse.storage.databases.main.presence import PresenceBackgroundUpdateStore +from synapse.storage.databases.main.profile import ProfileWorkerStore from synapse.storage.databases.main.pusher import ( PusherBackgroundUpdatesStore, PusherWorkerStore, @@ -229,6 +230,8 @@ class Store( EndToEndRoomKeyBackgroundStore, StatsStore, AccountDataWorkerStore, + FilteringWorkerStore, + ProfileWorkerStore, PushRuleStore, PusherWorkerStore, PusherBackgroundUpdatesStore, diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index b9f432cc23..de7c56bc0f 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -170,11 +170,9 @@ class Filtering: result = await self.store.get_user_filter(user_localpart, filter_id) return FilterCollection(self._hs, result) - def add_user_filter( - self, user_localpart: str, user_filter: JsonDict - ) -> Awaitable[int]: + def add_user_filter(self, user_id: UserID, user_filter: JsonDict) -> Awaitable[int]: self.check_valid_filter(user_filter) - return self.store.add_user_filter(user_localpart, user_filter) + return self.store.add_user_filter(user_id, user_filter) # TODO(paul): surely we should probably add a delete_user_filter or # replace_user_filter at some point? 
There's no REST API specified for diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 9a81a77cbd..440d3f4acd 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -178,9 +178,7 @@ class ProfileHandler: authenticated_entity=requester.authenticated_entity, ) - await self.store.set_profile_displayname( - target_user.localpart, displayname_to_set - ) + await self.store.set_profile_displayname(target_user, displayname_to_set) profile = await self.store.get_profileinfo(target_user.localpart) await self.user_directory_handler.handle_local_profile_change( @@ -272,9 +270,7 @@ class ProfileHandler: target_user, authenticated_entity=requester.authenticated_entity ) - await self.store.set_profile_avatar_url( - target_user.localpart, avatar_url_to_set - ) + await self.store.set_profile_avatar_url(target_user, avatar_url_to_set) profile = await self.store.get_profileinfo(target_user.localpart) await self.user_directory_handler.handle_local_profile_change( diff --git a/synapse/rest/client/filter.py b/synapse/rest/client/filter.py index ab7d8c9419..04561f36d7 100644 --- a/synapse/rest/client/filter.py +++ b/synapse/rest/client/filter.py @@ -94,7 +94,7 @@ class CreateFilterRestServlet(RestServlet): set_timeline_upper_limit(content, self.hs.config.server.filter_timeline_limit) filter_id = await self.filtering.add_user_filter( - user_localpart=target_user.localpart, user_filter=content + user_id=target_user, user_filter=content ) return 200, {"filter_id": str(filter_id)} diff --git a/synapse/storage/databases/main/filtering.py b/synapse/storage/databases/main/filtering.py index 8e57c8e5a0..50516402f9 100644 --- a/synapse/storage/databases/main/filtering.py +++ b/synapse/storage/databases/main/filtering.py @@ -16,15 +16,38 @@ from typing import Optional, Tuple, Union, cast from canonicaljson import encode_canonical_json +from typing_extensions import TYPE_CHECKING from synapse.api.errors import Codes, StoreError, SynapseError from synapse.storage._base import SQLBaseStore, db_to_json -from synapse.storage.database import LoggingTransaction -from synapse.types import JsonDict +from synapse.storage.database import ( + DatabasePool, + LoggingDatabaseConnection, + LoggingTransaction, +) +from synapse.types import JsonDict, UserID from synapse.util.caches.descriptors import cached +if TYPE_CHECKING: + from synapse.server import HomeServer + class FilteringWorkerStore(SQLBaseStore): + def __init__( + self, + database: DatabasePool, + db_conn: LoggingDatabaseConnection, + hs: "HomeServer", + ): + super().__init__(database, db_conn, hs) + self.db_pool.updates.register_background_index_update( + "full_users_filters_unique_idx", + index_name="full_users_unique_idx", + table="user_filters", + columns=["full_user_id, filter_id"], + unique=True, + ) + @cached(num_args=2) async def get_user_filter( self, user_localpart: str, filter_id: Union[int, str] @@ -46,7 +69,7 @@ class FilteringWorkerStore(SQLBaseStore): return db_to_json(def_json) - async def add_user_filter(self, user_localpart: str, user_filter: JsonDict) -> int: + async def add_user_filter(self, user_id: UserID, user_filter: JsonDict) -> int: def_json = encode_canonical_json(user_filter) # Need an atomic transaction to SELECT the maximal ID so far then @@ -56,13 +79,13 @@ class FilteringWorkerStore(SQLBaseStore): "SELECT filter_id FROM user_filters " "WHERE user_id = ? AND filter_json = ?" 
) - txn.execute(sql, (user_localpart, bytearray(def_json))) + txn.execute(sql, (user_id.localpart, bytearray(def_json))) filter_id_response = txn.fetchone() if filter_id_response is not None: return filter_id_response[0] sql = "SELECT MAX(filter_id) FROM user_filters WHERE user_id = ?" - txn.execute(sql, (user_localpart,)) + txn.execute(sql, (user_id.localpart,)) max_id = cast(Tuple[Optional[int]], txn.fetchone())[0] if max_id is None: filter_id = 0 @@ -70,10 +93,18 @@ class FilteringWorkerStore(SQLBaseStore): filter_id = max_id + 1 sql = ( - "INSERT INTO user_filters (user_id, filter_id, filter_json)" - "VALUES(?, ?, ?)" + "INSERT INTO user_filters (full_user_id, user_id, filter_id, filter_json)" + "VALUES(?, ?, ?, ?)" + ) + txn.execute( + sql, + ( + user_id.to_string(), + user_id.localpart, + filter_id, + bytearray(def_json), + ), ) - txn.execute(sql, (user_localpart, filter_id, bytearray(def_json))) return filter_id diff --git a/synapse/storage/databases/main/profile.py b/synapse/storage/databases/main/profile.py index a1747f04ce..b109f8c07f 100644 --- a/synapse/storage/databases/main/profile.py +++ b/synapse/storage/databases/main/profile.py @@ -11,14 +11,34 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Optional +from typing import TYPE_CHECKING, Optional from synapse.api.errors import StoreError from synapse.storage._base import SQLBaseStore +from synapse.storage.database import DatabasePool, LoggingDatabaseConnection from synapse.storage.databases.main.roommember import ProfileInfo +from synapse.types import UserID + +if TYPE_CHECKING: + from synapse.server import HomeServer class ProfileWorkerStore(SQLBaseStore): + def __init__( + self, + database: DatabasePool, + db_conn: LoggingDatabaseConnection, + hs: "HomeServer", + ): + super().__init__(database, db_conn, hs) + self.db_pool.updates.register_background_index_update( + "profiles_full_user_id_key_idx", + index_name="profiles_full_user_id_key", + table="profiles", + columns=["full_user_id"], + unique=True, + ) + async def get_profileinfo(self, user_localpart: str) -> ProfileInfo: try: profile = await self.db_pool.simple_select_one( @@ -54,28 +74,36 @@ class ProfileWorkerStore(SQLBaseStore): desc="get_profile_avatar_url", ) - async def create_profile(self, user_localpart: str) -> None: + async def create_profile(self, user_id: UserID) -> None: + user_localpart = user_id.localpart await self.db_pool.simple_insert( - table="profiles", values={"user_id": user_localpart}, desc="create_profile" + table="profiles", + values={"user_id": user_localpart, "full_user_id": user_id.to_string()}, + desc="create_profile", ) async def set_profile_displayname( - self, user_localpart: str, new_displayname: Optional[str] + self, user_id: UserID, new_displayname: Optional[str] ) -> None: + user_localpart = user_id.localpart await self.db_pool.simple_upsert( table="profiles", keyvalues={"user_id": user_localpart}, - values={"displayname": new_displayname}, + values={ + "displayname": new_displayname, + "full_user_id": user_id.to_string(), + }, desc="set_profile_displayname", ) async def set_profile_avatar_url( - self, user_localpart: str, new_avatar_url: Optional[str] + self, user_id: UserID, new_avatar_url: Optional[str] ) -> None: + user_localpart = user_id.localpart await self.db_pool.simple_upsert( table="profiles", keyvalues={"user_id": user_localpart}, - values={"avatar_url": new_avatar_url}, + 
values={"avatar_url": new_avatar_url, "full_user_id": user_id.to_string()}, desc="set_profile_avatar_url", ) diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py index 717237e024..676d03bb7e 100644 --- a/synapse/storage/databases/main/registration.py +++ b/synapse/storage/databases/main/registration.py @@ -2414,8 +2414,8 @@ class RegistrationStore(StatsStore, RegistrationBackgroundUpdateStore): # *obviously* the 'profiles' table uses localpart for user_id # while everything else uses the full mxid. txn.execute( - "INSERT INTO profiles(user_id, displayname) VALUES (?,?)", - (user_id_obj.localpart, create_profile_with_displayname), + "INSERT INTO profiles(full_user_id, user_id, displayname) VALUES (?,?,?)", + (user_id, user_id_obj.localpart, create_profile_with_displayname), ) if self.hs.config.stats.stats_enabled: diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index a28f2b997c..1672976209 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -SCHEMA_VERSION = 75 # remember to update the list below when updating +SCHEMA_VERSION = 76 # remember to update the list below when updating """Represents the expectations made by the codebase about the database schema This should be incremented whenever the codebase changes its requirements on the @@ -97,6 +97,9 @@ Changes in SCHEMA_VERSION = 75: `local_current_membership` & `room_memberships`) is now being populated for new rows. When the background job to populate historical rows lands this will become the compat schema version. + +Changes in SCHEMA_VERSION = 76: + - Adds a full_user_id column to tables profiles and user_filters. """ diff --git a/synapse/storage/schema/main/delta/76/01_add_profiles_full_user_id_column.sql b/synapse/storage/schema/main/delta/76/01_add_profiles_full_user_id_column.sql new file mode 100644 index 0000000000..9cd680325a --- /dev/null +++ b/synapse/storage/schema/main/delta/76/01_add_profiles_full_user_id_column.sql @@ -0,0 +1,20 @@ +/* Copyright 2023 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +ALTER TABLE profiles ADD COLUMN full_user_id TEXT; + +-- Make sure the column has a unique constraint, mirroring the `profiles_user_id_key` +-- constraint. 
+INSERT INTO background_updates (ordering, update_name, progress_json) VALUES (7501, 'profiles_full_user_id_key_idx', '{}'); diff --git a/synapse/storage/schema/main/delta/76/02_add_user_filters_full_user_id_column.sql b/synapse/storage/schema/main/delta/76/02_add_user_filters_full_user_id_column.sql new file mode 100644 index 0000000000..fd231adeef --- /dev/null +++ b/synapse/storage/schema/main/delta/76/02_add_user_filters_full_user_id_column.sql @@ -0,0 +1,20 @@ +/* Copyright 2023 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +ALTER TABLE user_filters ADD COLUMN full_user_id TEXT; + +-- Add a unique index on the new column, mirroring the `user_filters_unique` unique +-- index. +INSERT INTO background_updates (ordering, update_name, progress_json) VALUES (7502, 'full_users_filters_unique_idx', '{}'); \ No newline at end of file diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py index 6c6a9ab4b4..222449baac 100644 --- a/tests/api/test_filtering.py +++ b/tests/api/test_filtering.py @@ -26,13 +26,15 @@ from synapse.api.errors import SynapseError from synapse.api.filtering import Filter from synapse.api.presence import UserPresenceState from synapse.server import HomeServer -from synapse.types import JsonDict +from synapse.types import JsonDict, UserID from synapse.util import Clock from synapse.util.frozenutils import freeze from tests import unittest from tests.events.test_utils import MockEvent +user_id = UserID.from_string("@test_user:test") +user2_id = UserID.from_string("@test_user2:test") user_localpart = "test_user" @@ -437,7 +439,7 @@ class FilteringTestCase(unittest.HomeserverTestCase): user_filter_json = {"presence": {"senders": ["@foo:bar"]}} filter_id = self.get_success( self.datastore.add_user_filter( - user_localpart=user_localpart, user_filter=user_filter_json + user_id=user_id, user_filter=user_filter_json ) ) presence_states = [ @@ -467,7 +469,7 @@ class FilteringTestCase(unittest.HomeserverTestCase): filter_id = self.get_success( self.datastore.add_user_filter( - user_localpart=user_localpart + "2", user_filter=user_filter_json + user_id=user2_id, user_filter=user_filter_json ) ) presence_states = [ @@ -495,7 +497,7 @@ class FilteringTestCase(unittest.HomeserverTestCase): user_filter_json = {"room": {"state": {"types": ["m.*"]}}} filter_id = self.get_success( self.datastore.add_user_filter( - user_localpart=user_localpart, user_filter=user_filter_json + user_id=user_id, user_filter=user_filter_json ) ) event = MockEvent(sender="@foo:bar", type="m.room.topic", room_id="!foo:bar") @@ -514,7 +516,7 @@ class FilteringTestCase(unittest.HomeserverTestCase): user_filter_json = {"room": {"state": {"types": ["m.*"]}}} filter_id = self.get_success( self.datastore.add_user_filter( - user_localpart=user_localpart, user_filter=user_filter_json + user_id=user_id, user_filter=user_filter_json ) ) event = MockEvent( @@ -598,7 +600,7 @@ class FilteringTestCase(unittest.HomeserverTestCase): filter_id = self.get_success( 
self.filtering.add_user_filter( - user_localpart=user_localpart, user_filter=user_filter_json + user_id=user_id, user_filter=user_filter_json ) ) @@ -619,7 +621,7 @@ class FilteringTestCase(unittest.HomeserverTestCase): filter_id = self.get_success( self.datastore.add_user_filter( - user_localpart=user_localpart, user_filter=user_filter_json + user_id=user_id, user_filter=user_filter_json ) ) diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index 7c174782da..64a9a22afe 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -66,9 +66,7 @@ class ProfileTestCase(unittest.HomeserverTestCase): self.handler = hs.get_profile_handler() def test_get_my_name(self) -> None: - self.get_success( - self.store.set_profile_displayname(self.frank.localpart, "Frank") - ) + self.get_success(self.store.set_profile_displayname(self.frank, "Frank")) displayname = self.get_success(self.handler.get_displayname(self.frank)) @@ -121,9 +119,7 @@ class ProfileTestCase(unittest.HomeserverTestCase): self.hs.config.registration.enable_set_displayname = False # Setting displayname for the first time is allowed - self.get_success( - self.store.set_profile_displayname(self.frank.localpart, "Frank") - ) + self.get_success(self.store.set_profile_displayname(self.frank, "Frank")) self.assertEqual( ( @@ -166,8 +162,14 @@ class ProfileTestCase(unittest.HomeserverTestCase): ) def test_incoming_fed_query(self) -> None: - self.get_success(self.store.create_profile("caroline")) - self.get_success(self.store.set_profile_displayname("caroline", "Caroline")) + self.get_success( + self.store.create_profile(UserID.from_string("@caroline:test")) + ) + self.get_success( + self.store.set_profile_displayname( + UserID.from_string("@caroline:test"), "Caroline" + ) + ) response = self.get_success( self.query_handlers["profile"]( @@ -183,9 +185,7 @@ class ProfileTestCase(unittest.HomeserverTestCase): def test_get_my_avatar(self) -> None: self.get_success( - self.store.set_profile_avatar_url( - self.frank.localpart, "http://my.server/me.png" - ) + self.store.set_profile_avatar_url(self.frank, "http://my.server/me.png") ) avatar_url = self.get_success(self.handler.get_avatar_url(self.frank)) @@ -237,9 +237,7 @@ class ProfileTestCase(unittest.HomeserverTestCase): # Setting displayname for the first time is allowed self.get_success( - self.store.set_profile_avatar_url( - self.frank.localpart, "http://my.server/me.png" - ) + self.store.set_profile_avatar_url(self.frank, "http://my.server/me.png") ) self.assertEqual( diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py index b4241ceaf0..434bb56d44 100644 --- a/tests/rest/admin/test_user.py +++ b/tests/rest/admin/test_user.py @@ -802,9 +802,21 @@ class UsersListTestCase(unittest.HomeserverTestCase): # Set avatar URL to all users, that no user has a NULL value to avoid # different sort order between SQlite and PostreSQL - self.get_success(self.store.set_profile_avatar_url("user1", "mxc://url3")) - self.get_success(self.store.set_profile_avatar_url("user2", "mxc://url2")) - self.get_success(self.store.set_profile_avatar_url("admin", "mxc://url1")) + self.get_success( + self.store.set_profile_avatar_url( + UserID.from_string("@user1:test"), "mxc://url3" + ) + ) + self.get_success( + self.store.set_profile_avatar_url( + UserID.from_string("@user2:test"), "mxc://url2" + ) + ) + self.get_success( + self.store.set_profile_avatar_url( + UserID.from_string("@admin:test"), "mxc://url1" + ) + ) # order by default (name) 
self._order_test([self.admin_user, user1, user2], None) @@ -1127,7 +1139,9 @@ class DeactivateAccountTestCase(unittest.HomeserverTestCase): # set attributes for user self.get_success( - self.store.set_profile_avatar_url("user", "mxc://servername/mediaid") + self.store.set_profile_avatar_url( + UserID.from_string("@user:test"), "mxc://servername/mediaid" + ) ) self.get_success( self.store.user_add_threepid("@user:test", "email", "foo@bar.com", 0, 0) @@ -1257,7 +1271,9 @@ class DeactivateAccountTestCase(unittest.HomeserverTestCase): Reproduces #12257. """ # Patch `self.other_user` to have an empty string as their avatar. - self.get_success(self.store.set_profile_avatar_url("user", "")) + self.get_success( + self.store.set_profile_avatar_url(UserID.from_string("@user:test"), "") + ) # Check we can still erase them. channel = self.make_request( @@ -2311,7 +2327,9 @@ class UserRestTestCase(unittest.HomeserverTestCase): # set attributes for user self.get_success( - self.store.set_profile_avatar_url("user", "mxc://servername/mediaid") + self.store.set_profile_avatar_url( + UserID.from_string("@user:test"), "mxc://servername/mediaid" + ) ) self.get_success( self.store.user_add_threepid("@user:test", "email", "foo@bar.com", 0, 0) diff --git a/tests/rest/client/test_filter.py b/tests/rest/client/test_filter.py index 91678abf13..9faa9de050 100644 --- a/tests/rest/client/test_filter.py +++ b/tests/rest/client/test_filter.py @@ -17,6 +17,7 @@ from twisted.test.proto_helpers import MemoryReactor from synapse.api.errors import Codes from synapse.rest.client import filter from synapse.server import HomeServer +from synapse.types import UserID from synapse.util import Clock from tests import unittest @@ -76,7 +77,8 @@ class FilterTestCase(unittest.HomeserverTestCase): def test_get_filter(self) -> None: filter_id = self.get_success( self.filtering.add_user_filter( - user_localpart="apple", user_filter=self.EXAMPLE_FILTER + user_id=UserID.from_string("@apple:test"), + user_filter=self.EXAMPLE_FILTER, ) ) self.reactor.advance(1) diff --git a/tests/storage/test_main.py b/tests/storage/test_main.py index 5806cb0e4b..27f450e22d 100644 --- a/tests/storage/test_main.py +++ b/tests/storage/test_main.py @@ -29,9 +29,9 @@ class DataStoreTestCase(unittest.HomeserverTestCase): def test_get_users_paginate(self) -> None: self.get_success(self.store.register_user(self.user.to_string(), "pass")) - self.get_success(self.store.create_profile(self.user.localpart)) + self.get_success(self.store.create_profile(self.user)) self.get_success( - self.store.set_profile_displayname(self.user.localpart, self.displayname) + self.store.set_profile_displayname(self.user, self.displayname) ) users, total = self.get_success( diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py index a019d06e09..6ec34997ea 100644 --- a/tests/storage/test_profile.py +++ b/tests/storage/test_profile.py @@ -27,11 +27,9 @@ class ProfileStoreTestCase(unittest.HomeserverTestCase): self.u_frank = UserID.from_string("@frank:test") def test_displayname(self) -> None: - self.get_success(self.store.create_profile(self.u_frank.localpart)) + self.get_success(self.store.create_profile(self.u_frank)) - self.get_success( - self.store.set_profile_displayname(self.u_frank.localpart, "Frank") - ) + self.get_success(self.store.set_profile_displayname(self.u_frank, "Frank")) self.assertEqual( "Frank", @@ -43,21 +41,17 @@ class ProfileStoreTestCase(unittest.HomeserverTestCase): ) # test set to None - self.get_success( - 
self.store.set_profile_displayname(self.u_frank.localpart, None)
-        )
+        self.get_success(self.store.set_profile_displayname(self.u_frank, None))
 
         self.assertIsNone(
             self.get_success(self.store.get_profile_displayname(self.u_frank.localpart))
         )
 
     def test_avatar_url(self) -> None:
-        self.get_success(self.store.create_profile(self.u_frank.localpart))
+        self.get_success(self.store.create_profile(self.u_frank))
 
         self.get_success(
-            self.store.set_profile_avatar_url(
-                self.u_frank.localpart, "http://my.site/here"
-            )
+            self.store.set_profile_avatar_url(self.u_frank, "http://my.site/here")
         )
 
         self.assertEqual(
@@ -70,9 +64,7 @@ class ProfileStoreTestCase(unittest.HomeserverTestCase):
         )
 
         # test set to None
-        self.get_success(
-            self.store.set_profile_avatar_url(self.u_frank.localpart, None)
-        )
+        self.get_success(self.store.set_profile_avatar_url(self.u_frank, None))
 
         self.assertIsNone(
             self.get_success(self.store.get_profile_avatar_url(self.u_frank.localpart))
-- 
cgit 1.5.1


From 57aeeb308b39c4fd455682966eabc9c0fa17c65d Mon Sep 17 00:00:00 2001
From: Patrick Cloke
Date: Thu, 27 Apr 2023 12:57:46 -0400
Subject: Add support for claiming multiple OTKs at once. (#15468)

MSC3983 provides a way to request multiple OTKs at once from appservices;
this extends that concept to the Client-Server API. Note that this will
likely be split out into a separate MSC, but is currently part of MSC3983.
---
 changelog.d/15468.misc                            |  1 +
 synapse/appservice/api.py                         | 31 ++++++---
 synapse/federation/federation_client.py           | 49 ++++++++++++++-
 synapse/federation/federation_server.py           |  7 +--
 synapse/federation/transport/client.py            | 49 ++++++++++++++-
 synapse/federation/transport/server/federation.py | 25 ++++++--
 synapse/handlers/appservice.py                    | 14 +++-
 synapse/handlers/e2e_keys.py                      | 31 ++++++---
 synapse/rest/client/keys.py                       | 42 +++++++++++--
 synapse/storage/databases/main/end_to_end_keys.py | 77 ++++++++++++++---------
 tests/appservice/test_api.py                      | 11 ++--
 tests/handlers/test_e2e_keys.py                   | 32 +++++-----
 12 files changed, 271 insertions(+), 98 deletions(-)
 create mode 100644 changelog.d/15468.misc

(limited to 'synapse/rest')

diff --git a/changelog.d/15468.misc b/changelog.d/15468.misc
new file mode 100644
index 0000000000..e0a94f36fd
--- /dev/null
+++ b/changelog.d/15468.misc
@@ -0,0 +1 @@
+Support claiming more than one OTK at a time.
diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py
index 86ddb1bb28..024098e9cb 100644
--- a/synapse/appservice/api.py
+++ b/synapse/appservice/api.py
@@ -442,8 +442,10 @@ class ApplicationServiceApi(SimpleHttpClient):
         return False
 
     async def claim_client_keys(
-        self, service: "ApplicationService", query: List[Tuple[str, str, str]]
-    ) -> Tuple[Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str]]]:
+        self, service: "ApplicationService", query: List[Tuple[str, str, str, int]]
+    ) -> Tuple[
+        Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str, int]]
+    ]:
         """Claim one time keys from an application service.
 
         Note that any error (including a timeout) is treated as the application
@@ -469,8 +471,10 @@ class ApplicationServiceApi(SimpleHttpClient):
         # Create the expected payload shape.
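        # The expanded shape maps user ID -> device ID -> a list of algorithm
        # names in which each algorithm is repeated `count` times, e.g.
        # {"@u:test": {"DEV1": ["signed_curve25519", "signed_curve25519"]}}.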
body: Dict[str, Dict[str, List[str]]] = {} - for user_id, device, algorithm in query: - body.setdefault(user_id, {}).setdefault(device, []).append(algorithm) + for user_id, device, algorithm, count in query: + body.setdefault(user_id, {}).setdefault(device, []).extend( + [algorithm] * count + ) uri = f"{service.url}/_matrix/app/unstable/org.matrix.msc3983/keys/claim" try: @@ -493,11 +497,20 @@ class ApplicationServiceApi(SimpleHttpClient): # or if some are still missing. # # TODO This places a lot of faith in the response shape being correct. - missing = [ - (user_id, device, algorithm) - for user_id, device, algorithm in query - if algorithm not in response.get(user_id, {}).get(device, []) - ] + missing = [] + for user_id, device, algorithm, count in query: + # Count the number of keys in the response for this algorithm by + # checking which key IDs start with the algorithm. This uses that + # True == 1 in Python to generate a count. + response_count = sum( + key_id.startswith(f"{algorithm}:") + for key_id in response.get(user_id, {}).get(device, {}) + ) + count -= response_count + # If the appservice responds with fewer keys than requested, then + # consider the request unfulfilled. + if count > 0: + missing.append((user_id, device, algorithm, count)) return response, missing diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index ba34573d46..0b2d1a78f7 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -235,7 +235,10 @@ class FederationClient(FederationBase): ) async def claim_client_keys( - self, destination: str, content: JsonDict, timeout: Optional[int] + self, + destination: str, + query: Dict[str, Dict[str, Dict[str, int]]], + timeout: Optional[int], ) -> JsonDict: """Claims one-time keys for a device hosted on a remote server. @@ -247,6 +250,50 @@ class FederationClient(FederationBase): The JSON object from the response """ sent_queries_counter.labels("client_one_time_keys").inc() + + # Convert the query with counts into a stable and unstable query and check + # if attempting to claim more than 1 OTK. + content: Dict[str, Dict[str, str]] = {} + unstable_content: Dict[str, Dict[str, List[str]]] = {} + use_unstable = False + for user_id, one_time_keys in query.items(): + for device_id, algorithms in one_time_keys.items(): + if any(count > 1 for count in algorithms.values()): + use_unstable = True + if algorithms: + # For the stable query, choose only the first algorithm. + content.setdefault(user_id, {})[device_id] = next(iter(algorithms)) + # For the unstable query, repeat each algorithm by count, then + # splat those into chain to get a flattened list of all algorithms. + # + # Converts from {"algo1": 2, "algo2": 2} to ["algo1", "algo1", "algo2"]. + unstable_content.setdefault(user_id, {})[device_id] = list( + itertools.chain( + *( + itertools.repeat(algorithm, count) + for algorithm, count in algorithms.items() + ) + ) + ) + + if use_unstable: + try: + return await self.transport_layer.claim_client_keys_unstable( + destination, unstable_content, timeout + ) + except HttpResponseException as e: + # If an error is received that is due to an unrecognised endpoint, + # fallback to the v1 endpoint. Otherwise, consider it a legitimate error + # and raise. 
+                if not is_unknown_endpoint(e):
+                    raise
+
+                logger.debug(
+                    "Couldn't claim client keys with the unstable API, falling back to the v1 API"
+                )
+        else:
+            logger.debug("Skipping unstable claim client keys API")
+
         return await self.transport_layer.claim_client_keys(
             destination, content, timeout
         )

diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index c618f3d7a6..ca43c7bfc0 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -1005,13 +1005,8 @@ class FederationServer(FederationBase):
 
     @trace
     async def on_claim_client_keys(
-        self, origin: str, content: JsonDict, always_include_fallback_keys: bool
+        self, query: List[Tuple[str, str, str, int]], always_include_fallback_keys: bool
     ) -> Dict[str, Any]:
-        query = []
-        for user_id, device_keys in content.get("one_time_keys", {}).items():
-            for device_id, algorithm in device_keys.items():
-                query.append((user_id, device_id, algorithm))
-
         log_kv({"message": "Claiming one time keys.", "user, device pairs": query})
         results = await self._e2e_keys_handler.claim_local_one_time_keys(
             query, always_include_fallback_keys=always_include_fallback_keys

diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index bedbd23ded..bc70b94f68 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -650,10 +650,10 @@ class TransportLayerClient:
 
         Response:
             {
-              "device_keys": {
+              "one_time_keys": {
                 "<user_id>": {
                   "<device_id>": {
-                    "<algorithm>:<key_id>": "<key_base64>"
+                    "<algorithm>:<key_id>": <key_json>
                   }
                 }
               }
@@ -669,7 +669,50 @@ class TransportLayerClient:
         path = _create_v1_path("/user/keys/claim")
 
         return await self.client.post_json(
-            destination=destination, path=path, data=query_content, timeout=timeout
+            destination=destination,
+            path=path,
+            data={"one_time_keys": query_content},
+            timeout=timeout,
+        )
+
+    async def claim_client_keys_unstable(
+        self, destination: str, query_content: JsonDict, timeout: Optional[int]
+    ) -> JsonDict:
+        """Claim one-time keys for a list of devices hosted on a remote server.
+
+        Request:
+            {
+                "one_time_keys": {
+                    "<user_id>": {
+                        "<device_id>": {"<algorithm>": <count>}
+                    }
+                }
+            }
+
+        Response:
+            {
+                "one_time_keys": {
+                    "<user_id>": {
+                        "<device_id>": {
+                            "<algorithm>:<key_id>": <key_json>
+                        }
+                    }
+                }
+            }
+
+        Args:
+            destination: The server to query.
+            query_content: The user ids to query.
+        Returns:
+            A dict containing the one-time keys.
+        """
+        path = _create_path(FEDERATION_UNSTABLE_PREFIX, "/user/keys/claim")
+
+        return await self.client.post_json(
+            destination=destination,
+            path=path,
+            data={"one_time_keys": query_content},
+            timeout=timeout,
         )
 
     async def get_missing_events(

diff --git a/synapse/federation/transport/server/federation.py b/synapse/federation/transport/server/federation.py
index e2340d70d5..36b0362504 100644
--- a/synapse/federation/transport/server/federation.py
+++ b/synapse/federation/transport/server/federation.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
+from collections import Counter
 from typing import (
     TYPE_CHECKING,
     Dict,
@@ -577,16 +578,23 @@ class FederationClientKeysClaimServlet(BaseFederationServerServlet):
     async def on_POST(
         self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]]
     ) -> Tuple[int, JsonDict]:
+        # Generate a count for each algorithm, which is hard-coded to 1.
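+        # The stable federation endpoint only carries a single algorithm per
+        # device, so there is nothing to count; the unstable servlet below
+        # derives real counts from the repeated algorithms in the request.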
+ key_query: List[Tuple[str, str, str, int]] = [] + for user_id, device_keys in content.get("one_time_keys", {}).items(): + for device_id, algorithm in device_keys.items(): + key_query.append((user_id, device_id, algorithm, 1)) + response = await self.handler.on_claim_client_keys( - origin, content, always_include_fallback_keys=False + key_query, always_include_fallback_keys=False ) return 200, response class FederationUnstableClientKeysClaimServlet(BaseFederationServerServlet): """ - Identical to the stable endpoint (FederationClientKeysClaimServlet) except it - always includes fallback keys in the response. + Identical to the stable endpoint (FederationClientKeysClaimServlet) except + it allows for querying for multiple OTKs at once and always includes fallback + keys in the response. """ PREFIX = FEDERATION_UNSTABLE_PREFIX @@ -596,8 +604,16 @@ class FederationUnstableClientKeysClaimServlet(BaseFederationServerServlet): async def on_POST( self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] ) -> Tuple[int, JsonDict]: + # Generate a count for each algorithm. + key_query: List[Tuple[str, str, str, int]] = [] + for user_id, device_keys in content.get("one_time_keys", {}).items(): + for device_id, algorithms in device_keys.items(): + counts = Counter(algorithms) + for algorithm, count in counts.items(): + key_query.append((user_id, device_id, algorithm, count)) + response = await self.handler.on_claim_client_keys( - origin, content, always_include_fallback_keys=True + key_query, always_include_fallback_keys=True ) return 200, response @@ -805,6 +821,7 @@ FEDERATION_SERVLET_CLASSES: Tuple[Type[BaseFederationServlet], ...] = ( FederationClientKeysQueryServlet, FederationUserDevicesQueryServlet, FederationClientKeysClaimServlet, + FederationUnstableClientKeysClaimServlet, FederationThirdPartyInviteExchangeServlet, On3pidBindServlet, FederationVersionServlet, diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 4ca2bc0420..6429545c98 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -841,8 +841,10 @@ class ApplicationServicesHandler: return True async def claim_e2e_one_time_keys( - self, query: Iterable[Tuple[str, str, str]] - ) -> Tuple[Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str]]]: + self, query: Iterable[Tuple[str, str, str, int]] + ) -> Tuple[ + Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str, int]] + ]: """Claim one time keys from application services. Users which are exclusively owned by an application service are sent a @@ -863,18 +865,18 @@ class ApplicationServicesHandler: services = self.store.get_app_services() # Partition the users by appservice. - query_by_appservice: Dict[str, List[Tuple[str, str, str]]] = {} + query_by_appservice: Dict[str, List[Tuple[str, str, str, int]]] = {} missing = [] - for user_id, device, algorithm in query: + for user_id, device, algorithm, count in query: if not self.store.get_if_app_services_interested_in_user(user_id): - missing.append((user_id, device, algorithm)) + missing.append((user_id, device, algorithm, count)) continue # Find the associated appservice. 
for service in services: if service.is_exclusive_user(user_id): query_by_appservice.setdefault(service.id, []).append( - (user_id, device, algorithm) + (user_id, device, algorithm, count) ) continue diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index d1ab95126c..24741b667b 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -564,7 +564,7 @@ class E2eKeysHandler: async def claim_local_one_time_keys( self, - local_query: List[Tuple[str, str, str]], + local_query: List[Tuple[str, str, str, int]], always_include_fallback_keys: bool, ) -> Iterable[Dict[str, Dict[str, Dict[str, JsonDict]]]]: """Claim one time keys for local users. @@ -581,6 +581,12 @@ class E2eKeysHandler: An iterable of maps of user ID -> a map device ID -> a map of key ID -> JSON bytes. """ + # Cap the number of OTKs that can be claimed at once to avoid abuse. + local_query = [ + (user_id, device_id, algorithm, min(count, 5)) + for user_id, device_id, algorithm, count in local_query + ] + otk_results, not_found = await self.store.claim_e2e_one_time_keys(local_query) # If the application services have not provided any keys via the C-S @@ -607,7 +613,7 @@ class E2eKeysHandler: # from the appservice for that user ID / device ID. If it is found, # check if any of the keys match the requested algorithm & are a # fallback key. - for user_id, device_id, algorithm in local_query: + for user_id, device_id, algorithm, _count in local_query: # Check if the appservice responded for this query. as_result = appservice_results.get(user_id, {}).get(device_id, {}) found_otk = False @@ -630,13 +636,17 @@ class E2eKeysHandler: .get(device_id, {}) .keys() ) + # Note that it doesn't make sense to request more than 1 fallback key + # per (user_id, device_id, algorithm). fallback_query.append((user_id, device_id, algorithm, mark_as_used)) else: # All fallback keys get marked as used. fallback_query = [ + # Note that it doesn't make sense to request more than 1 fallback key + # per (user_id, device_id, algorithm). 
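+                # (A fallback key is reusable rather than consumed when claimed,
+                # so a higher count would just return the same key repeatedly.)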
(user_id, device_id, algorithm, True)
-                for user_id, device_id, algorithm in not_found
+                for user_id, device_id, algorithm, count in not_found
             ]
 
         # For each user that does not have a one-time keys available, see if
@@ -650,18 +660,19 @@ class E2eKeysHandler:
     @trace
     async def claim_one_time_keys(
         self,
-        query: Dict[str, Dict[str, Dict[str, str]]],
+        query: Dict[str, Dict[str, Dict[str, int]]],
         timeout: Optional[int],
         always_include_fallback_keys: bool,
     ) -> JsonDict:
-        local_query: List[Tuple[str, str, str]] = []
-        remote_queries: Dict[str, Dict[str, Dict[str, str]]] = {}
+        local_query: List[Tuple[str, str, str, int]] = []
+        remote_queries: Dict[str, Dict[str, Dict[str, Dict[str, int]]]] = {}
 
-        for user_id, one_time_keys in query.get("one_time_keys", {}).items():
+        for user_id, one_time_keys in query.items():
             # we use UserID.from_string to catch invalid user ids
             if self.is_mine(UserID.from_string(user_id)):
-                for device_id, algorithm in one_time_keys.items():
-                    local_query.append((user_id, device_id, algorithm))
+                for device_id, algorithms in one_time_keys.items():
+                    for algorithm, count in algorithms.items():
+                        local_query.append((user_id, device_id, algorithm, count))
             else:
                 domain = get_domain_from_id(user_id)
                 remote_queries.setdefault(domain, {})[user_id] = one_time_keys
@@ -692,7 +703,7 @@ class E2eKeysHandler:
             device_keys = remote_queries[destination]
             try:
                 remote_result = await self.federation.claim_client_keys(
-                    destination, {"one_time_keys": device_keys}, timeout=timeout
+                    destination, device_keys, timeout=timeout
                 )
                 for user_id, keys in remote_result["one_time_keys"].items():
                     if user_id in device_keys:

diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py
index 2a25094109..9bbab5e624 100644
--- a/synapse/rest/client/keys.py
+++ b/synapse/rest/client/keys.py
@@ -16,7 +16,8 @@
 
 import logging
 import re
-from typing import TYPE_CHECKING, Any, Optional, Tuple
+from collections import Counter
+from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple
 
 from synapse.api.errors import InvalidAPICallError, SynapseError
 from synapse.http.server import HttpServer
@@ -289,16 +290,40 @@ class OneTimeKeyServlet(RestServlet):
         await self.auth.get_user_by_req(request, allow_guest=True)
         timeout = parse_integer(request, "timeout", 10 * 1000)
         body = parse_json_object_from_request(request)
+
+        # Generate a count for each algorithm, which is hard-coded to 1.
+        query: Dict[str, Dict[str, Dict[str, int]]] = {}
+        for user_id, one_time_keys in body.get("one_time_keys", {}).items():
+            for device_id, algorithm in one_time_keys.items():
+                query.setdefault(user_id, {})[device_id] = {algorithm: 1}
+
         result = await self.e2e_keys_handler.claim_one_time_keys(
-            body, timeout, always_include_fallback_keys=False
+            query, timeout, always_include_fallback_keys=False
         )
         return 200, result
 
 
 class UnstableOneTimeKeyServlet(RestServlet):
     """
-    Identical to the stable endpoint (OneTimeKeyServlet) except it always includes
-    fallback keys in the response.
+    Identical to the stable endpoint (OneTimeKeyServlet) except it allows for
+    querying for multiple OTKs at once and always includes fallback keys in the
+    response.
+
+    POST /keys/claim HTTP/1.1
+    {
+        "one_time_keys": {
+            "<user_id>": {
+                "<device_id>": ["<algorithm>", ...]
+            }
+        }
+    }
+
+    HTTP/1.1 200 OK
+    {
+        "one_time_keys": {
+            "<user_id>": {
+                "<device_id>": {
+                    "<algorithm>:<key_id>": "<key_base64>"
+                }
+            }
+        }
+    }
     """
 
     PATTERNS = [re.compile(r"^/_matrix/client/unstable/org.matrix.msc3983/keys/claim$")]
@@ -313,8 +338,15 @@ class UnstableOneTimeKeyServlet(RestServlet):
         await self.auth.get_user_by_req(request, allow_guest=True)
         timeout = parse_integer(request, "timeout", 10 * 1000)
         body = parse_json_object_from_request(request)
+
+        # Generate a count for each algorithm.
+        query: Dict[str, Dict[str, Dict[str, int]]] = {}
+        for user_id, one_time_keys in body.get("one_time_keys", {}).items():
+            for device_id, algorithms in one_time_keys.items():
+                query.setdefault(user_id, {})[device_id] = Counter(algorithms)
+
         result = await self.e2e_keys_handler.claim_one_time_keys(
-            body, timeout, always_include_fallback_keys=True
+            query, timeout, always_include_fallback_keys=True
         )
         return 200, result

diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py
index 1a4ae55304..4bc391f213 100644
--- a/synapse/storage/databases/main/end_to_end_keys.py
+++ b/synapse/storage/databases/main/end_to_end_keys.py
@@ -1027,8 +1027,10 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
         ...
 
     async def claim_e2e_one_time_keys(
-        self, query_list: Iterable[Tuple[str, str, str]]
-    ) -> Tuple[Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str]]]:
+        self, query_list: Iterable[Tuple[str, str, str, int]]
+    ) -> Tuple[
+        Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str, int]]
+    ]:
         """Take a list of one time keys out of the database.
 
         Args:
@@ -1043,8 +1045,12 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
 
         @trace
        def _claim_e2e_one_time_key_simple(
-            txn: LoggingTransaction, user_id: str, device_id: str, algorithm: str
-        ) -> Optional[Tuple[str, str]]:
+            txn: LoggingTransaction,
+            user_id: str,
+            device_id: str,
+            algorithm: str,
+            count: int,
+        ) -> List[Tuple[str, str]]:
             """Claim OTK for device for DBs that don't support RETURNING.
 
             Returns:
@@ -1055,36 +1061,41 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
             sql = """
                 SELECT key_id, key_json FROM e2e_one_time_keys_json
                 WHERE user_id = ? AND device_id = ? AND algorithm = ?
-                LIMIT 1
+                LIMIT ?
             """
 
-            txn.execute(sql, (user_id, device_id, algorithm))
-            otk_row = txn.fetchone()
-            if otk_row is None:
-                return None
+            txn.execute(sql, (user_id, device_id, algorithm, count))
+            otk_rows = list(txn)
+            if not otk_rows:
+                return []
 
-            key_id, key_json = otk_row
-
-            self.db_pool.simple_delete_one_txn(
+            self.db_pool.simple_delete_many_txn(
                 txn,
                 table="e2e_one_time_keys_json",
+                column="key_id",
+                values=[otk_row[0] for otk_row in otk_rows],
                 keyvalues={
                     "user_id": user_id,
                     "device_id": device_id,
                     "algorithm": algorithm,
-                    "key_id": key_id,
                 },
             )
             self._invalidate_cache_and_stream(
                 txn, self.count_e2e_one_time_keys, (user_id, device_id)
             )
 
-            return f"{algorithm}:{key_id}", key_json
+            return [
+                (f"{algorithm}:{key_id}", key_json) for key_id, key_json in otk_rows
+            ]
 
         @trace
         def _claim_e2e_one_time_key_returning(
-            txn: LoggingTransaction, user_id: str, device_id: str, algorithm: str
-        ) -> Optional[Tuple[str, str]]:
+            txn: LoggingTransaction,
+            user_id: str,
+            device_id: str,
+            algorithm: str,
+            count: int,
+        ) -> List[Tuple[str, str]]:
             """Claim OTK for device for DBs that support RETURNING.
Returns: @@ -1099,28 +1110,30 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker AND key_id IN ( SELECT key_id FROM e2e_one_time_keys_json WHERE user_id = ? AND device_id = ? AND algorithm = ? - LIMIT 1 + LIMIT ? ) RETURNING key_id, key_json """ txn.execute( - sql, (user_id, device_id, algorithm, user_id, device_id, algorithm) + sql, + (user_id, device_id, algorithm, user_id, device_id, algorithm, count), ) - otk_row = txn.fetchone() - if otk_row is None: - return None + otk_rows = list(txn) + if not otk_rows: + return [] self._invalidate_cache_and_stream( txn, self.count_e2e_one_time_keys, (user_id, device_id) ) - key_id, key_json = otk_row - return f"{algorithm}:{key_id}", key_json + return [ + (f"{algorithm}:{key_id}", key_json) for key_id, key_json in otk_rows + ] results: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} - missing: List[Tuple[str, str, str]] = [] - for user_id, device_id, algorithm in query_list: + missing: List[Tuple[str, str, str, int]] = [] + for user_id, device_id, algorithm, count in query_list: if self.database_engine.supports_returning: # If we support RETURNING clause we can use a single query that # allows us to use autocommit mode. @@ -1130,21 +1143,25 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker _claim_e2e_one_time_key = _claim_e2e_one_time_key_simple db_autocommit = False - claim_row = await self.db_pool.runInteraction( + claim_rows = await self.db_pool.runInteraction( "claim_e2e_one_time_keys", _claim_e2e_one_time_key, user_id, device_id, algorithm, + count, db_autocommit=db_autocommit, ) - if claim_row: + if claim_rows: device_results = results.setdefault(user_id, {}).setdefault( device_id, {} ) - device_results[claim_row[0]] = json_decoder.decode(claim_row[1]) - else: - missing.append((user_id, device_id, algorithm)) + for claim_row in claim_rows: + device_results[claim_row[0]] = json_decoder.decode(claim_row[1]) + # Did we get enough OTKs? + count -= len(claim_rows) + if count: + missing.append((user_id, device_id, algorithm, count)) return results, missing diff --git a/tests/appservice/test_api.py b/tests/appservice/test_api.py index 7deb923a28..15fce165b6 100644 --- a/tests/appservice/test_api.py +++ b/tests/appservice/test_api.py @@ -195,11 +195,11 @@ class ApplicationServiceApiTestCase(unittest.HomeserverTestCase): MISSING_KEYS = [ # Known user, known device, missing algorithm. - ("@alice:example.org", "DEVICE_1", "signed_curve25519:DDDDHg"), + ("@alice:example.org", "DEVICE_2", "xyz", 1), # Known user, missing device. - ("@alice:example.org", "DEVICE_3", "signed_curve25519:EEEEHg"), + ("@alice:example.org", "DEVICE_3", "signed_curve25519", 1), # Unknown user. 
- ("@bob:example.org", "DEVICE_4", "signed_curve25519:FFFFHg"), + ("@bob:example.org", "DEVICE_4", "signed_curve25519", 1), ] claimed_keys, missing = self.get_success( @@ -207,9 +207,8 @@ class ApplicationServiceApiTestCase(unittest.HomeserverTestCase): self.service, [ # Found devices - ("@alice:example.org", "DEVICE_1", "signed_curve25519:AAAAHg"), - ("@alice:example.org", "DEVICE_1", "signed_curve25519:BBBBHg"), - ("@alice:example.org", "DEVICE_2", "signed_curve25519:CCCCHg"), + ("@alice:example.org", "DEVICE_1", "signed_curve25519", 1), + ("@alice:example.org", "DEVICE_2", "signed_curve25519", 1), ] + MISSING_KEYS, ) diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index 18edebd652..72d0584061 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -160,7 +160,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): res2 = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id: "alg1"}}}, + {local_user: {device_id: {"alg1": 1}}}, timeout=None, always_include_fallback_keys=False, ) @@ -205,7 +205,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): # key claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id: "alg1"}}}, + {local_user: {device_id: {"alg1": 1}}}, timeout=None, always_include_fallback_keys=False, ) @@ -224,7 +224,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): # claiming an OTK again should return the same fallback key claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id: "alg1"}}}, + {local_user: {device_id: {"alg1": 1}}}, timeout=None, always_include_fallback_keys=False, ) @@ -273,7 +273,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id: "alg1"}}}, + {local_user: {device_id: {"alg1": 1}}}, timeout=None, always_include_fallback_keys=False, ) @@ -285,7 +285,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id: "alg1"}}}, + {local_user: {device_id: {"alg1": 1}}}, timeout=None, always_include_fallback_keys=False, ) @@ -306,7 +306,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id: "alg1"}}}, + {local_user: {device_id: {"alg1": 1}}}, timeout=None, always_include_fallback_keys=False, ) @@ -347,7 +347,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): # return both. claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id: "alg1"}}}, + {local_user: {device_id: {"alg1": 1}}}, timeout=None, always_include_fallback_keys=True, ) @@ -369,7 +369,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): # Claiming an OTK again should return only the fallback key. claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id: "alg1"}}}, + {local_user: {device_id: {"alg1": 1}}}, timeout=None, always_include_fallback_keys=True, ) @@ -1052,7 +1052,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): # Setup a response, but only for device 2. 
self.appservice_api.claim_client_keys.return_value = make_awaitable( - ({local_user: {device_id_2: otk}}, [(local_user, device_id_1, "alg1")]) + ({local_user: {device_id_2: otk}}, [(local_user, device_id_1, "alg1", 1)]) ) # we shouldn't have any unused fallback keys yet @@ -1079,11 +1079,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): # query the fallback keys. claim_res = self.get_success( self.handler.claim_one_time_keys( - { - "one_time_keys": { - local_user: {device_id_1: "alg1", device_id_2: "alg1"} - } - }, + {local_user: {device_id_1: {"alg1": 1}, device_id_2: {"alg1": 1}}}, timeout=None, always_include_fallback_keys=False, ) @@ -1128,7 +1124,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): # Claim OTKs, which will ask the appservice and do nothing else. claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id_1: "alg1"}}}, + {local_user: {device_id_1: {"alg1": 1}}}, timeout=None, always_include_fallback_keys=True, ) @@ -1172,7 +1168,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): # uploaded fallback key. claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id_1: "alg1"}}}, + {local_user: {device_id_1: {"alg1": 1}}}, timeout=None, always_include_fallback_keys=True, ) @@ -1205,7 +1201,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): # Claim OTKs, which will return information only from the database. claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id_1: "alg1"}}}, + {local_user: {device_id_1: {"alg1": 1}}}, timeout=None, always_include_fallback_keys=True, ) @@ -1232,7 +1228,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): # Claim OTKs, which will return only the fallback key from the database. claim_res = self.get_success( self.handler.claim_one_time_keys( - {"one_time_keys": {local_user: {device_id_1: "alg1"}}}, + {local_user: {device_id_1: {"alg1": 1}}}, timeout=None, always_include_fallback_keys=True, ) -- cgit 1.5.1 From 89f6fb0d5a87d7415d1e67c600f47cb2b4370971 Mon Sep 17 00:00:00 2001 From: Shay Date: Fri, 28 Apr 2023 11:33:45 -0700 Subject: Add an admin API endpoint to support per-user feature flags (#15344) --- changelog.d/15344.feature | 1 + docs/admin_api/experimental_features.md | 54 +++++++++ synapse/_scripts/synapse_port_db.py | 1 + synapse/rest/admin/__init__.py | 2 + synapse/rest/admin/experimental_features.py | 119 +++++++++++++++++++ synapse/storage/databases/main/__init__.py | 2 + .../databases/main/experimental_features.py | 75 ++++++++++++ .../delta/76/03_per_user_experimental_features.sql | 27 +++++ tests/rest/admin/test_admin.py | 127 +++++++++++++++++++++ 9 files changed, 408 insertions(+) create mode 100644 changelog.d/15344.feature create mode 100644 docs/admin_api/experimental_features.md create mode 100644 synapse/rest/admin/experimental_features.py create mode 100644 synapse/storage/databases/main/experimental_features.py create mode 100644 synapse/storage/schema/main/delta/76/03_per_user_experimental_features.sql (limited to 'synapse/rest') diff --git a/changelog.d/15344.feature b/changelog.d/15344.feature new file mode 100644 index 0000000000..44262e9bd8 --- /dev/null +++ b/changelog.d/15344.feature @@ -0,0 +1 @@ +Add an admin API endpoint to support per-user feature flags. 
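For illustration, driving the new endpoint from a script could look like the following sketch. It assumes a homeserver reachable at `localhost:8008`, the third-party `requests` library, and placeholder credentials; the paths and body shape follow the documentation added below.

```python
import requests

BASE = "http://localhost:8008/_synapse/admin/v1/experimental_features"
HEADERS = {"Authorization": "Bearer <admin_access_token>"}  # placeholder token
USER_ID = "@user:example.com"  # placeholder user

# Enable busy-presence support (MSC3026) for the user.
resp = requests.put(
    f"{BASE}/{USER_ID}",
    headers=HEADERS,
    json={"features": {"msc3026": True}},
)
resp.raise_for_status()

# List which experimental features are now enabled/disabled for the user.
print(requests.get(f"{BASE}/{USER_ID}", headers=HEADERS).json())
```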
diff --git a/docs/admin_api/experimental_features.md b/docs/admin_api/experimental_features.md
new file mode 100644
index 0000000000..c1aebe4b01
--- /dev/null
+++ b/docs/admin_api/experimental_features.md
@@ -0,0 +1,54 @@
+# Experimental Features API
+
+This API allows a server administrator to enable or disable some experimental features on a per-user
+basis. Currently supported features are [msc3026](https://github.com/matrix-org/matrix-spec-proposals/pull/3026): busy
+presence state enabled, [msc2654](https://github.com/matrix-org/matrix-spec-proposals/pull/2654): enable unread counts,
+[msc3881](https://github.com/matrix-org/matrix-spec-proposals/pull/3881): enable remotely toggling push notifications
+for another client, and [msc3967](https://github.com/matrix-org/matrix-spec-proposals/pull/3967): do not require
+UIA when first uploading cross-signing keys.
+
+
+To use it, you will need to authenticate by providing an `access_token`
+for a server admin: see [Admin API](../usage/administration/admin_api/).
+
+## Enabling/Disabling Features
+
+This API allows a server administrator to enable experimental features for a given user. The request must
+provide a body containing the user id and listing the features to enable/disable in the following format:
+```json
+{
+   "features": {
+      "msc3026":true,
+      "msc2654":true
+   }
+}
+```
+where true is used to enable the feature, and false is used to disable the feature.
+
+
+The API is:
+
+```
+PUT /_synapse/admin/v1/experimental_features/<user_id>
+```
+
+## Listing Enabled Features
+
+To list which features are enabled/disabled for a given user send a request to the following API:
+
+```
+GET /_synapse/admin/v1/experimental_features/<user_id>
+```
+
+It will return a list of possible features and indicate whether they are enabled or disabled for the
+user like so:
+```json
+{
+   "features": {
+      "msc3026": true,
+      "msc2654": true,
+      "msc3881": false,
+      "msc3967": false
+   }
+}
+```
\ No newline at end of file
diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py
index 56d5aeb0dd..27fee3d9a9 100755
--- a/synapse/_scripts/synapse_port_db.py
+++ b/synapse/_scripts/synapse_port_db.py
@@ -125,6 +125,7 @@ BOOLEAN_COLUMNS = {
     "users": ["shadow_banned", "approved"],
     "un_partial_stated_event_stream": ["rejection_status_changed"],
     "users_who_share_rooms": ["share_private"],
+    "per_user_experimental_features": ["enabled"],
 }
 
diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py
index 770df261ce..c729364839 100644
--- a/synapse/rest/admin/__init__.py
+++ b/synapse/rest/admin/__init__.py
@@ -39,6 +39,7 @@ from synapse.rest.admin.event_reports import (
     EventReportDetailRestServlet,
     EventReportsRestServlet,
 )
+from synapse.rest.admin.experimental_features import ExperimentalFeaturesRestServlet
 from synapse.rest.admin.federation import (
     DestinationMembershipRestServlet,
     DestinationResetConnectionRestServlet,
@@ -292,6 +293,7 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
     BackgroundUpdateEnabledRestServlet(hs).register(http_server)
     BackgroundUpdateRestServlet(hs).register(http_server)
     BackgroundUpdateStartJobRestServlet(hs).register(http_server)
+    ExperimentalFeaturesRestServlet(hs).register(http_server)
 
 
 def register_servlets_for_client_rest_resource(
diff --git a/synapse/rest/admin/experimental_features.py b/synapse/rest/admin/experimental_features.py
new file mode 100644
index 0000000000..1d409ac2b7
--- /dev/null
+++ b/synapse/rest/admin/experimental_features.py
@@ -0,0 +1,119 @@
+# Copyright
2023 The Matrix.org Foundation C.I.C
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from enum import Enum
+from http import HTTPStatus
+from typing import TYPE_CHECKING, Dict, Tuple
+
+from synapse.api.errors import SynapseError
+from synapse.http.servlet import RestServlet, parse_json_object_from_request
+from synapse.http.site import SynapseRequest
+from synapse.rest.admin import admin_patterns, assert_requester_is_admin
+from synapse.types import JsonDict, UserID
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
+
+class ExperimentalFeature(str, Enum):
+    """
+    Currently supported per-user features
+    """
+
+    MSC3026 = "msc3026"
+    MSC2654 = "msc2654"
+    MSC3881 = "msc3881"
+    MSC3967 = "msc3967"
+
+
+class ExperimentalFeaturesRestServlet(RestServlet):
+    """
+    Enable or disable experimental features for a user or determine which features are enabled
+    for a given user
+    """
+
+    PATTERNS = admin_patterns("/experimental_features/(?P<user_id>[^/]*)")
+
+    def __init__(self, hs: "HomeServer"):
+        super().__init__()
+        self.auth = hs.get_auth()
+        self.store = hs.get_datastores().main
+        self.is_mine = hs.is_mine
+
+    async def on_GET(
+        self,
+        request: SynapseRequest,
+        user_id: str,
+    ) -> Tuple[int, JsonDict]:
+        """
+        List which features are enabled for a given user
+        """
+        await assert_requester_is_admin(self.auth, request)
+
+        target_user = UserID.from_string(user_id)
+        if not self.is_mine(target_user):
+            raise SynapseError(
+                HTTPStatus.BAD_REQUEST,
+                "User must be local to check what experimental features are enabled.",
+            )
+
+        enabled_features = await self.store.list_enabled_features(user_id)
+
+        user_features = {}
+        for feature in ExperimentalFeature:
+            if feature in enabled_features:
+                user_features[feature] = True
+            else:
+                user_features[feature] = False
+        return HTTPStatus.OK, {"features": user_features}
+
+    async def on_PUT(
+        self, request: SynapseRequest, user_id: str
+    ) -> Tuple[HTTPStatus, Dict]:
+        """
+        Enable or disable the provided features for the requester
+        """
+        await assert_requester_is_admin(self.auth, request)
+
+        body = parse_json_object_from_request(request)
+
+        target_user = UserID.from_string(user_id)
+        if not self.is_mine(target_user):
+            raise SynapseError(
+                HTTPStatus.BAD_REQUEST,
+                "User must be local to enable experimental features.",
+            )
+
+        features = body.get("features")
+        if not features:
+            raise SynapseError(
+                HTTPStatus.BAD_REQUEST, "You must provide features to set."
+ ) + + # validate the provided features + validated_features = {} + for feature, enabled in features.items(): + try: + validated_feature = ExperimentalFeature(feature) + validated_features[validated_feature] = enabled + except ValueError: + raise SynapseError( + HTTPStatus.BAD_REQUEST, + f"{feature!r} is not recognised as a valid experimental feature.", + ) + + await self.store.set_features_for_user(user_id, validated_features) + + return HTTPStatus.OK, {} diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py index dc3948c170..0032a92f49 100644 --- a/synapse/storage/databases/main/__init__.py +++ b/synapse/storage/databases/main/__init__.py @@ -43,6 +43,7 @@ from .event_federation import EventFederationStore from .event_push_actions import EventPushActionsStore from .events_bg_updates import EventsBackgroundUpdatesStore from .events_forward_extremities import EventForwardExtremitiesStore +from .experimental_features import ExperimentalFeaturesStore from .filtering import FilteringWorkerStore from .keys import KeyStore from .lock import LockStore @@ -82,6 +83,7 @@ logger = logging.getLogger(__name__) class DataStore( EventsBackgroundUpdatesStore, + ExperimentalFeaturesStore, DeviceStore, RoomMemberStore, RoomStore, diff --git a/synapse/storage/databases/main/experimental_features.py b/synapse/storage/databases/main/experimental_features.py new file mode 100644 index 0000000000..cf3226ae5a --- /dev/null +++ b/synapse/storage/databases/main/experimental_features.py @@ -0,0 +1,75 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import TYPE_CHECKING, Dict + +from synapse.storage.database import DatabasePool, LoggingDatabaseConnection +from synapse.storage.databases.main import CacheInvalidationWorkerStore +from synapse.types import StrCollection +from synapse.util.caches.descriptors import cached + +if TYPE_CHECKING: + from synapse.rest.admin.experimental_features import ExperimentalFeature + from synapse.server import HomeServer + + +class ExperimentalFeaturesStore(CacheInvalidationWorkerStore): + def __init__( + self, + database: DatabasePool, + db_conn: LoggingDatabaseConnection, + hs: "HomeServer", + ) -> None: + super().__init__(database, db_conn, hs) + + @cached() + async def list_enabled_features(self, user_id: str) -> StrCollection: + """ + Checks to see what features are enabled for a given user + Args: + user: + the user to be queried on + Returns: + the features currently enabled for the user + """ + enabled = await self.db_pool.simple_select_list( + "per_user_experimental_features", + {"user_id": user_id, "enabled": True}, + ["feature"], + ) + + return [feature["feature"] for feature in enabled] + + async def set_features_for_user( + self, + user: str, + features: Dict["ExperimentalFeature", bool], + ) -> None: + """ + Enables or disables features for a given user + Args: + user: + the user for whom to enable/disable the features + features: + pairs of features and True/False for whether the feature should be enabled + """ + for feature, enabled in features.items(): + await self.db_pool.simple_upsert( + table="per_user_experimental_features", + keyvalues={"feature": feature, "user_id": user}, + values={"enabled": enabled}, + insertion_values={"user_id": user, "feature": feature}, + ) + + await self.invalidate_cache_and_stream("list_enabled_features", (user,)) diff --git a/synapse/storage/schema/main/delta/76/03_per_user_experimental_features.sql b/synapse/storage/schema/main/delta/76/03_per_user_experimental_features.sql new file mode 100644 index 0000000000..c4ef81846c --- /dev/null +++ b/synapse/storage/schema/main/delta/76/03_per_user_experimental_features.sql @@ -0,0 +1,27 @@ +/* Copyright 2023 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +-- Table containing experimental features and whether they are enabled for a given user +CREATE TABLE per_user_experimental_features ( + -- The User ID to check/set the feature for + user_id TEXT NOT NULL, + -- Contains features to be enabled/disabled + feature TEXT NOT NULL, + -- whether the feature is enabled/disabled for a given user, defaults to disabled + enabled BOOLEAN DEFAULT FALSE, + FOREIGN KEY (user_id) REFERENCES users(name), + PRIMARY KEY (user_id, feature) +); + diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py index a8f6436836..645a00b4b1 100644 --- a/tests/rest/admin/test_admin.py +++ b/tests/rest/admin/test_admin.py @@ -372,3 +372,130 @@ class PurgeHistoryTestCase(unittest.HomeserverTestCase): self.assertEqual(200, channel.code, msg=channel.json_body) self.assertEqual("complete", channel.json_body["status"]) + + +class ExperimentalFeaturesTestCase(unittest.HomeserverTestCase): + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.admin_user = self.register_user("admin", "pass", admin=True) + self.admin_user_tok = self.login("admin", "pass") + + self.other_user = self.register_user("user", "pass") + self.other_user_tok = self.login("user", "pass") + + self.url = "/_synapse/admin/v1/experimental_features" + + def test_enable_and_disable(self) -> None: + """ + Test basic functionality of ExperimentalFeatures endpoint + """ + # test enabling features works + url = f"{self.url}/{self.other_user}" + channel = self.make_request( + "PUT", + url, + content={ + "features": {"msc3026": True, "msc2654": True}, + }, + access_token=self.admin_user_tok, + ) + self.assertEqual(channel.code, 200) + + # list which features are enabled and ensure the ones we enabled are listed + self.assertEqual(channel.code, 200) + url = f"{self.url}/{self.other_user}" + channel = self.make_request( + "GET", + url, + access_token=self.admin_user_tok, + ) + self.assertEqual(channel.code, 200) + self.assertEqual( + True, + channel.json_body["features"]["msc3026"], + ) + self.assertEqual( + True, + channel.json_body["features"]["msc2654"], + ) + + # test disabling a feature works + url = f"{self.url}/{self.other_user}" + channel = self.make_request( + "PUT", + url, + content={"features": {"msc3026": False}}, + access_token=self.admin_user_tok, + ) + self.assertEqual(channel.code, 200) + + # list the features enabled/disabled and ensure they are still are correct + self.assertEqual(channel.code, 200) + url = f"{self.url}/{self.other_user}" + channel = self.make_request( + "GET", + url, + access_token=self.admin_user_tok, + ) + self.assertEqual(channel.code, 200) + self.assertEqual( + False, + channel.json_body["features"]["msc3026"], + ) + self.assertEqual( + True, + channel.json_body["features"]["msc2654"], + ) + self.assertEqual( + False, + channel.json_body["features"]["msc3881"], + ) + self.assertEqual( + False, + channel.json_body["features"]["msc3967"], + ) + + # test nothing blows up if you try to disable a feature that isn't already enabled + url = f"{self.url}/{self.other_user}" + channel = self.make_request( + "PUT", + url, + content={"features": {"msc3026": False}}, + access_token=self.admin_user_tok, + ) + self.assertEqual(channel.code, 200) + + # test trying to enable a feature without an admin access token is denied + url = f"{self.url}/f{self.other_user}" + channel = self.make_request( + "PUT", + url, + content={"features": {"msc3881": 
True}}, + access_token=self.other_user_tok, + ) + self.assertEqual(channel.code, 403) + self.assertEqual( + channel.json_body, + {"errcode": "M_FORBIDDEN", "error": "You are not a server admin"}, + ) + + # test trying to enable a bogus msc is denied + url = f"{self.url}/{self.other_user}" + channel = self.make_request( + "PUT", + url, + content={"features": {"msc6666": True}}, + access_token=self.admin_user_tok, + ) + self.assertEqual(channel.code, 400) + self.assertEqual( + channel.json_body, + { + "errcode": "M_UNKNOWN", + "error": "'msc6666' is not recognised as a valid experimental feature.", + }, + ) -- cgit 1.5.1 From 07b1c70d6b11d6b8feca23442a09b60ab0c930e3 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 2 May 2023 07:59:55 -0400 Subject: Initial implementation of MSC3981: recursive relations API (#15315) Adds an optional keyword argument to the /relations API which will recurse a limited number of event relationships. This will cause the API to return not just the events related to the parent event, but also events related to those related to the parent event, etc. This is disabled by default behind an experimental configuration flag and is currently implemented using prefixed parameters. --- changelog.d/15315.feature | 1 + synapse/config/experimental.py | 5 ++ synapse/handlers/relations.py | 3 + synapse/rest/client/relations.py | 10 ++- synapse/storage/databases/main/relations.py | 65 +++++++++++---- tests/rest/client/test_relations.py | 120 ++++++++++++++++++++++++++++ 6 files changed, 186 insertions(+), 18 deletions(-) create mode 100644 changelog.d/15315.feature (limited to 'synapse/rest') diff --git a/changelog.d/15315.feature b/changelog.d/15315.feature new file mode 100644 index 0000000000..30b2abdc62 --- /dev/null +++ b/changelog.d/15315.feature @@ -0,0 +1 @@ +Experimental support to recursively provide relations per [MSC3981](https://github.com/matrix-org/matrix-spec-proposals/pull/3981). diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index 6599679731..cab7ccf4b7 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -192,5 +192,10 @@ class ExperimentalConfig(Config): # MSC2659: Application service ping endpoint self.msc2659_enabled = experimental.get("msc2659_enabled", False) + # MSC3981: Recurse relations + self.msc3981_recurse_relations = experimental.get( + "msc3981_recurse_relations", False + ) + # MSC3970: Scope transaction IDs to devices self.msc3970_enabled = experimental.get("msc3970_enabled", False) diff --git a/synapse/handlers/relations.py b/synapse/handlers/relations.py index 1d09fdf135..4824635162 100644 --- a/synapse/handlers/relations.py +++ b/synapse/handlers/relations.py @@ -85,6 +85,7 @@ class RelationsHandler: event_id: str, room_id: str, pagin_config: PaginationConfig, + recurse: bool, include_original_event: bool, relation_type: Optional[str] = None, event_type: Optional[str] = None, @@ -98,6 +99,7 @@ class RelationsHandler: event_id: Fetch events that relate to this event ID. room_id: The room the event belongs to. pagin_config: The pagination config rules to apply, if any. + recurse: Whether to recursively find relations. include_original_event: Whether to include the parent event. relation_type: Only fetch events with this relation type, if given. event_type: Only fetch events with this event type, if given. 
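To make the new parameter concrete, a client request with recursion enabled could look like the following sketch; the homeserver address, room and event IDs, and token are placeholders, and the server must have `msc3981_recurse_relations` enabled:

```python
import requests

resp = requests.get(
    "http://localhost:8008/_matrix/client/v1/rooms/!room:example.com"
    "/relations/$parent_event_id",
    headers={"Authorization": "Bearer <access_token>"},  # placeholder token
    params={"dir": "f", "limit": 20, "org.matrix.msc3981.recurse": "true"},
)
# With recursion, the chunk contains the events related to the parent plus
# events related to those relations, up to three hops deep.
print([ev["event_id"] for ev in resp.json()["chunk"]])
```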
@@ -132,6 +134,7 @@ class RelationsHandler: direction=pagin_config.direction, from_token=pagin_config.from_token, to_token=pagin_config.to_token, + recurse=recurse, ) events = await self._main_store.get_events_as_list( diff --git a/synapse/rest/client/relations.py b/synapse/rest/client/relations.py index b8b296bc0c..785dfa08d8 100644 --- a/synapse/rest/client/relations.py +++ b/synapse/rest/client/relations.py @@ -19,7 +19,7 @@ from typing import TYPE_CHECKING, Optional, Tuple from synapse.api.constants import Direction from synapse.handlers.relations import ThreadsListInclude from synapse.http.server import HttpServer -from synapse.http.servlet import RestServlet, parse_integer, parse_string +from synapse.http.servlet import RestServlet, parse_boolean, parse_integer, parse_string from synapse.http.site import SynapseRequest from synapse.rest.client._base import client_patterns from synapse.storage.databases.main.relations import ThreadsNextBatch @@ -49,6 +49,7 @@ class RelationPaginationServlet(RestServlet): self.auth = hs.get_auth() self._store = hs.get_datastores().main self._relations_handler = hs.get_relations_handler() + self._support_recurse = hs.config.experimental.msc3981_recurse_relations async def on_GET( self, @@ -63,6 +64,12 @@ class RelationPaginationServlet(RestServlet): pagination_config = await PaginationConfig.from_request( self._store, request, default_limit=5, default_dir=Direction.BACKWARDS ) + if self._support_recurse: + recurse = parse_boolean( + request, "org.matrix.msc3981.recurse", default=False + ) + else: + recurse = False # The unstable version of this API returns an extra field for client # compatibility, see https://github.com/matrix-org/synapse/issues/12930. @@ -75,6 +82,7 @@ class RelationPaginationServlet(RestServlet): event_id=parent_id, room_id=room_id, pagin_config=pagination_config, + recurse=recurse, include_original_event=include_original_event, relation_type=relation_type, event_type=event_type, diff --git a/synapse/storage/databases/main/relations.py b/synapse/storage/databases/main/relations.py index 3955a8a9a5..4a6c6c724d 100644 --- a/synapse/storage/databases/main/relations.py +++ b/synapse/storage/databases/main/relations.py @@ -172,6 +172,7 @@ class RelationsWorkerStore(SQLBaseStore): direction: Direction = Direction.BACKWARDS, from_token: Optional[StreamToken] = None, to_token: Optional[StreamToken] = None, + recurse: bool = False, ) -> Tuple[Sequence[_RelatedEvent], Optional[StreamToken]]: """Get a list of relations for an event, ordered by topological ordering. @@ -186,6 +187,7 @@ class RelationsWorkerStore(SQLBaseStore): oldest first (forwards). from_token: Fetch rows from the given token, or from the start if None. to_token: Fetch rows up to the given token, or up to the end if None. + recurse: Whether to recursively find relations. Returns: A tuple of: @@ -200,8 +202,8 @@ class RelationsWorkerStore(SQLBaseStore): # Ensure bad limits aren't being passed in. 
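        # (The recursive branch below is bounded separately: its CTE stops
        # following relations at a fixed depth of 3.)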
assert limit >= 0 - where_clause = ["relates_to_id = ?", "room_id = ?"] - where_args: List[Union[str, int]] = [event.event_id, room_id] + where_clause = ["room_id = ?"] + where_args: List[Union[str, int]] = [room_id] is_redacted = event.internal_metadata.is_redacted() if relation_type is not None: @@ -229,23 +231,52 @@ class RelationsWorkerStore(SQLBaseStore): if pagination_clause: where_clause.append(pagination_clause) - sql = """ - SELECT event_id, relation_type, sender, topological_ordering, stream_ordering - FROM event_relations - INNER JOIN events USING (event_id) - WHERE %s - ORDER BY topological_ordering %s, stream_ordering %s - LIMIT ? - """ % ( - " AND ".join(where_clause), - order, - order, - ) + # If a recursive query is requested then the filters are applied after + # recursively following relationships from the requested event to children + # up to 3-relations deep. + # + # If no recursion is needed then the event_relations table is queried + # for direct children of the requested event. + if recurse: + sql = """ + WITH RECURSIVE related_events AS ( + SELECT event_id, relation_type, relates_to_id, 0 AS depth + FROM event_relations + WHERE relates_to_id = ? + UNION SELECT e.event_id, e.relation_type, e.relates_to_id, depth + 1 + FROM event_relations e + INNER JOIN related_events r ON r.event_id = e.relates_to_id + WHERE depth <= 3 + ) + SELECT event_id, relation_type, sender, topological_ordering, stream_ordering + FROM related_events + INNER JOIN events USING (event_id) + WHERE %s + ORDER BY topological_ordering %s, stream_ordering %s + LIMIT ?; + """ % ( + " AND ".join(where_clause), + order, + order, + ) + else: + sql = """ + SELECT event_id, relation_type, sender, topological_ordering, stream_ordering + FROM event_relations + INNER JOIN events USING (event_id) + WHERE relates_to_id = ? AND %s + ORDER BY topological_ordering %s, stream_ordering %s + LIMIT ? + """ % ( + " AND ".join(where_clause), + order, + order, + ) def _get_recent_references_for_event_txn( txn: LoggingTransaction, ) -> Tuple[List[_RelatedEvent], Optional[StreamToken]]: - txn.execute(sql, where_args + [limit + 1]) + txn.execute(sql, [event.event_id] + where_args + [limit + 1]) events = [] topo_orderings: List[int] = [] @@ -965,7 +996,7 @@ class RelationsWorkerStore(SQLBaseStore): # relation. sql = """ WITH RECURSIVE related_events AS ( - SELECT event_id, relates_to_id, relation_type, 0 depth + SELECT event_id, relates_to_id, relation_type, 0 AS depth FROM event_relations WHERE event_id = ? UNION SELECT e.event_id, e.relates_to_id, e.relation_type, depth + 1 @@ -1025,7 +1056,7 @@ class RelationsWorkerStore(SQLBaseStore): sql = """ SELECT relates_to_id FROM event_relations WHERE relates_to_id = COALESCE(( WITH RECURSIVE related_events AS ( - SELECT event_id, relates_to_id, relation_type, 0 depth + SELECT event_id, relates_to_id, relation_type, 0 AS depth FROM event_relations WHERE event_id = ? 
UNION SELECT e.event_id, e.relates_to_id, e.relation_type, depth + 1 diff --git a/tests/rest/client/test_relations.py b/tests/rest/client/test_relations.py index fbbbcb23f1..75439416c1 100644 --- a/tests/rest/client/test_relations.py +++ b/tests/rest/client/test_relations.py @@ -30,6 +30,7 @@ from tests import unittest from tests.server import FakeChannel from tests.test_utils import make_awaitable from tests.test_utils.event_injection import inject_event +from tests.unittest import override_config class BaseRelationsTestCase(unittest.HomeserverTestCase): @@ -949,6 +950,125 @@ class RelationPaginationTestCase(BaseRelationsTestCase): ) +class RecursiveRelationTestCase(BaseRelationsTestCase): + @override_config({"experimental_features": {"msc3981_recurse_relations": True}}) + def test_recursive_relations(self) -> None: + """Generate a complex, multi-level relationship tree and query it.""" + # Create a thread with a few messages in it. + channel = self._send_relation(RelationTypes.THREAD, "m.room.test") + thread_1 = channel.json_body["event_id"] + + channel = self._send_relation(RelationTypes.THREAD, "m.room.test") + thread_2 = channel.json_body["event_id"] + + # Add annotations. + channel = self._send_relation( + RelationTypes.ANNOTATION, "m.reaction", "a", parent_id=thread_2 + ) + annotation_1 = channel.json_body["event_id"] + + channel = self._send_relation( + RelationTypes.ANNOTATION, "m.reaction", "b", parent_id=thread_1 + ) + annotation_2 = channel.json_body["event_id"] + + # Add a reference to part of the thread, then edit the reference and annotate it. + channel = self._send_relation( + RelationTypes.REFERENCE, "m.room.test", parent_id=thread_2 + ) + reference_1 = channel.json_body["event_id"] + + channel = self._send_relation( + RelationTypes.ANNOTATION, "m.reaction", "c", parent_id=reference_1 + ) + annotation_3 = channel.json_body["event_id"] + + channel = self._send_relation( + RelationTypes.REPLACE, + "m.room.test", + parent_id=reference_1, + ) + edit = channel.json_body["event_id"] + + # Also more events off the root. + channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", "d") + annotation_4 = channel.json_body["event_id"] + + channel = self.make_request( + "GET", + f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}" + "?dir=f&limit=20&org.matrix.msc3981.recurse=true", + access_token=self.user_token, + ) + self.assertEqual(200, channel.code, channel.json_body) + + # The above events should be returned in creation order. + event_ids = [ev["event_id"] for ev in channel.json_body["chunk"]] + self.assertEqual( + event_ids, + [ + thread_1, + thread_2, + annotation_1, + annotation_2, + reference_1, + annotation_3, + edit, + annotation_4, + ], + ) + + @override_config({"experimental_features": {"msc3981_recurse_relations": True}}) + def test_recursive_relations_with_filter(self) -> None: + """The event_type and rel_type still apply.""" + # Create a thread with a few messages in it. + channel = self._send_relation(RelationTypes.THREAD, "m.room.test") + thread_1 = channel.json_body["event_id"] + + # Add annotations. + channel = self._send_relation( + RelationTypes.ANNOTATION, "m.reaction", "b", parent_id=thread_1 + ) + annotation_1 = channel.json_body["event_id"] + + # Add a reference to part of the thread, then edit the reference and annotate it. 
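+        # (The reference hangs off the thread event and the annotation off the
+        # reference, so only a recursive query can reach them from the root.)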
+ channel = self._send_relation( + RelationTypes.REFERENCE, "m.room.test", parent_id=thread_1 + ) + reference_1 = channel.json_body["event_id"] + + channel = self._send_relation( + RelationTypes.ANNOTATION, "org.matrix.reaction", "c", parent_id=reference_1 + ) + annotation_2 = channel.json_body["event_id"] + + # Fetch only annotations, but recursively. + channel = self.make_request( + "GET", + f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}/{RelationTypes.ANNOTATION}" + "?dir=f&limit=20&org.matrix.msc3981.recurse=true", + access_token=self.user_token, + ) + self.assertEqual(200, channel.code, channel.json_body) + + # The above events should be returned in creation order. + event_ids = [ev["event_id"] for ev in channel.json_body["chunk"]] + self.assertEqual(event_ids, [annotation_1, annotation_2]) + + # Fetch only m.reactions, but recursively. + channel = self.make_request( + "GET", + f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}/{RelationTypes.ANNOTATION}/m.reaction" + "?dir=f&limit=20&org.matrix.msc3981.recurse=true", + access_token=self.user_token, + ) + self.assertEqual(200, channel.code, channel.json_body) + + # The above events should be returned in creation order. + event_ids = [ev["event_id"] for ev in channel.json_body["chunk"]] + self.assertEqual(event_ids, [annotation_1]) + + class BundledAggregationsTestCase(BaseRelationsTestCase): """ See RelationsTestCase.test_edit for a similar test for edits. -- cgit 1.5.1 From 0e8aa2a1b28dfce374294450a015d18884c89d36 Mon Sep 17 00:00:00 2001 From: Shay Date: Tue, 2 May 2023 14:21:36 -0700 Subject: Remove references to supporting per-user flag for msc2654 (#15522) --- changelog.d/15522.misc | 1 + docs/admin_api/experimental_features.md | 13 +++++++------ synapse/rest/admin/experimental_features.py | 1 - tests/rest/admin/test_admin.py | 8 ++------ 4 files changed, 10 insertions(+), 13 deletions(-) create mode 100644 changelog.d/15522.misc (limited to 'synapse/rest') diff --git a/changelog.d/15522.misc b/changelog.d/15522.misc new file mode 100644 index 0000000000..a5a229e4a0 --- /dev/null +++ b/changelog.d/15522.misc @@ -0,0 +1 @@ +Remove references to supporting per-user flag for [MSC2654](https://github.com/matrix-org/matrix-spec-proposals/pull/2654) (#15522). diff --git a/docs/admin_api/experimental_features.md b/docs/admin_api/experimental_features.md index c1aebe4b01..07b630915d 100644 --- a/docs/admin_api/experimental_features.md +++ b/docs/admin_api/experimental_features.md @@ -1,10 +1,12 @@ # Experimental Features API This API allows a server administrator to enable or disable some experimental features on a per-user -basis. Currently supported features are [msc3026](https://github.com/matrix-org/matrix-spec-proposals/pull/3026): busy -presence state enabled, [msc2654](https://github.com/matrix-org/matrix-spec-proposals/pull/2654): enable unread counts, -[msc3881](https://github.com/matrix-org/matrix-spec-proposals/pull/3881): enable remotely toggling push notifications -for another client, and [msc3967](https://github.com/matrix-org/matrix-spec-proposals/pull/3967): do not require +basis. 
The currently supported features are: +- [MSC3026](https://github.com/matrix-org/matrix-spec-proposals/pull/3026): busy +presence state enabled +- [MSC3881](https://github.com/matrix-org/matrix-spec-proposals/pull/3881): enable remotely toggling push notifications +for another client +- [MSC3967](https://github.com/matrix-org/matrix-spec-proposals/pull/3967): do not require UIA when first uploading cross-signing keys. @@ -19,7 +21,7 @@ provide a body containing the user id and listing the features to enable/disable { "features": { "msc3026":true, - "msc2654":true + "msc3881":true } } ``` @@ -46,7 +48,6 @@ user like so: { "features": { "msc3026": true, - "msc2654": true, "msc3881": false, "msc3967": false } diff --git a/synapse/rest/admin/experimental_features.py b/synapse/rest/admin/experimental_features.py index 1d409ac2b7..abf273af10 100644 --- a/synapse/rest/admin/experimental_features.py +++ b/synapse/rest/admin/experimental_features.py @@ -33,7 +33,6 @@ class ExperimentalFeature(str, Enum): """ MSC3026 = "msc3026" - MSC2654 = "msc2654" MSC3881 = "msc3881" MSC3967 = "msc3967" diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py index 645a00b4b1..695e84357a 100644 --- a/tests/rest/admin/test_admin.py +++ b/tests/rest/admin/test_admin.py @@ -399,7 +399,7 @@ class ExperimentalFeaturesTestCase(unittest.HomeserverTestCase): "PUT", url, content={ - "features": {"msc3026": True, "msc2654": True}, + "features": {"msc3026": True, "msc3881": True}, }, access_token=self.admin_user_tok, ) @@ -420,7 +420,7 @@ class ExperimentalFeaturesTestCase(unittest.HomeserverTestCase): ) self.assertEqual( True, - channel.json_body["features"]["msc2654"], + channel.json_body["features"]["msc3881"], ) # test disabling a feature works @@ -448,10 +448,6 @@ class ExperimentalFeaturesTestCase(unittest.HomeserverTestCase): ) self.assertEqual( True, - channel.json_body["features"]["msc2654"], - ) - self.assertEqual( - False, channel.json_body["features"]["msc3881"], ) self.assertEqual( -- cgit 1.5.1 From 2e59e97ebd02e93da39e6c90335d3b24ed01217a Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 4 May 2023 15:18:22 +0100 Subject: Move ThirdPartyEventRules into module_api/callbacks (#15535) --- changelog.d/15535.misc | 1 + synapse/app/_base.py | 4 +- synapse/events/third_party_rules.py | 593 --------------------- synapse/handlers/auth.py | 2 +- synapse/handlers/deactivate_account.py | 4 +- synapse/handlers/directory.py | 6 +- synapse/handlers/federation.py | 6 +- synapse/handlers/federation_event.py | 4 +- synapse/handlers/message.py | 7 +- synapse/handlers/profile.py | 2 +- synapse/handlers/room.py | 10 +- synapse/handlers/room_member.py | 6 +- synapse/module_api/__init__.py | 31 +- synapse/module_api/callbacks/__init__.py | 4 + .../callbacks/third_party_event_rules_callbacks.py | 591 ++++++++++++++++++++ synapse/notifier.py | 2 +- synapse/rest/admin/rooms.py | 2 +- synapse/server.py | 5 - tests/rest/client/test_third_party_rules.py | 56 +- tests/server.py | 4 +- 20 files changed, 682 insertions(+), 658 deletions(-) create mode 100644 changelog.d/15535.misc delete mode 100644 synapse/events/third_party_rules.py create mode 100644 synapse/module_api/callbacks/third_party_event_rules_callbacks.py (limited to 'synapse/rest') diff --git a/changelog.d/15535.misc b/changelog.d/15535.misc new file mode 100644 index 0000000000..9981606c32 --- /dev/null +++ b/changelog.d/15535.misc @@ -0,0 +1 @@ +Move various module API callback registration methods to 
a dedicated class. \ No newline at end of file diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 954402e4d2..7f83b34d89 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -64,7 +64,6 @@ from synapse.config.homeserver import HomeServerConfig from synapse.config.server import ListenerConfig, ManholeConfig, TCPListenerConfig from synapse.crypto import context_factory from synapse.events.presence_router import load_legacy_presence_router -from synapse.events.third_party_rules import load_legacy_third_party_event_rules from synapse.handlers.auth import load_legacy_password_auth_providers from synapse.http.site import SynapseSite from synapse.logging.context import PreserveLoggingContext @@ -73,6 +72,9 @@ from synapse.metrics import install_gc_manager, register_threadpool from synapse.metrics.background_process_metrics import wrap_as_background_process from synapse.metrics.jemalloc import setup_jemalloc_stats from synapse.module_api.callbacks.spamchecker_callbacks import load_legacy_spam_checkers +from synapse.module_api.callbacks.third_party_event_rules_callbacks import ( + load_legacy_third_party_event_rules, +) from synapse.types import ISynapseReactor from synapse.util import SYNAPSE_VERSION from synapse.util.caches.lrucache import setup_expire_lru_cache_entries diff --git a/synapse/events/third_party_rules.py b/synapse/events/third_party_rules.py deleted file mode 100644 index 61d4530be7..0000000000 --- a/synapse/events/third_party_rules.py +++ /dev/null @@ -1,593 +0,0 @@ -# Copyright 2019 The Matrix.org Foundation C.I.C. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import logging -from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Optional, Tuple - -from twisted.internet.defer import CancelledError - -from synapse.api.errors import ModuleFailedException, SynapseError -from synapse.events import EventBase -from synapse.events.snapshot import UnpersistedEventContextBase -from synapse.storage.roommember import ProfileInfo -from synapse.types import Requester, StateMap -from synapse.util.async_helpers import delay_cancellation, maybe_awaitable - -if TYPE_CHECKING: - from synapse.server import HomeServer - -logger = logging.getLogger(__name__) - - -CHECK_EVENT_ALLOWED_CALLBACK = Callable[ - [EventBase, StateMap[EventBase]], Awaitable[Tuple[bool, Optional[dict]]] -] -ON_CREATE_ROOM_CALLBACK = Callable[[Requester, dict, bool], Awaitable] -CHECK_THREEPID_CAN_BE_INVITED_CALLBACK = Callable[ - [str, str, StateMap[EventBase]], Awaitable[bool] -] -CHECK_VISIBILITY_CAN_BE_MODIFIED_CALLBACK = Callable[ - [str, StateMap[EventBase], str], Awaitable[bool] -] -ON_NEW_EVENT_CALLBACK = Callable[[EventBase, StateMap[EventBase]], Awaitable] -CHECK_CAN_SHUTDOWN_ROOM_CALLBACK = Callable[[str, str], Awaitable[bool]] -CHECK_CAN_DEACTIVATE_USER_CALLBACK = Callable[[str, bool], Awaitable[bool]] -ON_PROFILE_UPDATE_CALLBACK = Callable[[str, ProfileInfo, bool, bool], Awaitable] -ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK = Callable[[str, bool, bool], Awaitable] -ON_THREEPID_BIND_CALLBACK = Callable[[str, str, str], Awaitable] -ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK = Callable[[str, str, str], Awaitable] -ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK = Callable[[str, str, str], Awaitable] - - -def load_legacy_third_party_event_rules(hs: "HomeServer") -> None: - """Wrapper that loads a third party event rules module configured using the old - configuration, and registers the hooks they implement. - """ - if hs.config.thirdpartyrules.third_party_event_rules is None: - return - - module, config = hs.config.thirdpartyrules.third_party_event_rules - - api = hs.get_module_api() - third_party_rules = module(config=config, module_api=api) - - # The known hooks. If a module implements a method which name appears in this set, - # we'll want to register it. - third_party_event_rules_methods = { - "check_event_allowed", - "on_create_room", - "check_threepid_can_be_invited", - "check_visibility_can_be_modified", - } - - def async_wrapper(f: Optional[Callable]) -> Optional[Callable[..., Awaitable]]: - # f might be None if the callback isn't implemented by the module. In this - # case we don't want to register a callback at all so we return None. - if f is None: - return None - - # We return a separate wrapper for these methods because, in order to wrap them - # correctly, we need to await its result. Therefore it doesn't make a lot of - # sense to make it go through the run() wrapper. - if f.__name__ == "check_event_allowed": - # We need to wrap check_event_allowed because its old form would return either - # a boolean or a dict, but now we want to return the dict separately from the - # boolean. - async def wrap_check_event_allowed( - event: EventBase, - state_events: StateMap[EventBase], - ) -> Tuple[bool, Optional[dict]]: - # Assertion required because mypy can't prove we won't change - # `f` back to `None`. 
See - # https://mypy.readthedocs.io/en/latest/common_issues.html#narrowing-and-inner-functions - assert f is not None - - res = await f(event, state_events) - if isinstance(res, dict): - return True, res - else: - return res, None - - return wrap_check_event_allowed - - if f.__name__ == "on_create_room": - # We need to wrap on_create_room because its old form would return a boolean - # if the room creation is denied, but now we just want it to raise an - # exception. - async def wrap_on_create_room( - requester: Requester, config: dict, is_requester_admin: bool - ) -> None: - # Assertion required because mypy can't prove we won't change - # `f` back to `None`. See - # https://mypy.readthedocs.io/en/latest/common_issues.html#narrowing-and-inner-functions - assert f is not None - - res = await f(requester, config, is_requester_admin) - if res is False: - raise SynapseError( - 403, - "Room creation forbidden with these parameters", - ) - - return wrap_on_create_room - - def run(*args: Any, **kwargs: Any) -> Awaitable: - # Assertion required because mypy can't prove we won't change `f` - # back to `None`. See - # https://mypy.readthedocs.io/en/latest/common_issues.html#narrowing-and-inner-functions - assert f is not None - - return maybe_awaitable(f(*args, **kwargs)) - - return run - - # Register the hooks through the module API. - hooks = { - hook: async_wrapper(getattr(third_party_rules, hook, None)) - for hook in third_party_event_rules_methods - } - - api.register_third_party_rules_callbacks(**hooks) - - -class ThirdPartyEventRules: - """Allows server admins to provide a Python module implementing an extra - set of rules to apply when processing events. - - This is designed to help admins of closed federations with enforcing custom - behaviours. - """ - - def __init__(self, hs: "HomeServer"): - self.third_party_rules = None - - self.store = hs.get_datastores().main - self._storage_controllers = hs.get_storage_controllers() - - self._check_event_allowed_callbacks: List[CHECK_EVENT_ALLOWED_CALLBACK] = [] - self._on_create_room_callbacks: List[ON_CREATE_ROOM_CALLBACK] = [] - self._check_threepid_can_be_invited_callbacks: List[ - CHECK_THREEPID_CAN_BE_INVITED_CALLBACK - ] = [] - self._check_visibility_can_be_modified_callbacks: List[ - CHECK_VISIBILITY_CAN_BE_MODIFIED_CALLBACK - ] = [] - self._on_new_event_callbacks: List[ON_NEW_EVENT_CALLBACK] = [] - self._check_can_shutdown_room_callbacks: List[ - CHECK_CAN_SHUTDOWN_ROOM_CALLBACK - ] = [] - self._check_can_deactivate_user_callbacks: List[ - CHECK_CAN_DEACTIVATE_USER_CALLBACK - ] = [] - self._on_profile_update_callbacks: List[ON_PROFILE_UPDATE_CALLBACK] = [] - self._on_user_deactivation_status_changed_callbacks: List[ - ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK - ] = [] - self._on_threepid_bind_callbacks: List[ON_THREEPID_BIND_CALLBACK] = [] - self._on_add_user_third_party_identifier_callbacks: List[ - ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK - ] = [] - self._on_remove_user_third_party_identifier_callbacks: List[ - ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK - ] = [] - - def register_third_party_rules_callbacks( - self, - check_event_allowed: Optional[CHECK_EVENT_ALLOWED_CALLBACK] = None, - on_create_room: Optional[ON_CREATE_ROOM_CALLBACK] = None, - check_threepid_can_be_invited: Optional[ - CHECK_THREEPID_CAN_BE_INVITED_CALLBACK - ] = None, - check_visibility_can_be_modified: Optional[ - CHECK_VISIBILITY_CAN_BE_MODIFIED_CALLBACK - ] = None, - on_new_event: Optional[ON_NEW_EVENT_CALLBACK] = None, - check_can_shutdown_room: 
Optional[CHECK_CAN_SHUTDOWN_ROOM_CALLBACK] = None, - check_can_deactivate_user: Optional[CHECK_CAN_DEACTIVATE_USER_CALLBACK] = None, - on_profile_update: Optional[ON_PROFILE_UPDATE_CALLBACK] = None, - on_user_deactivation_status_changed: Optional[ - ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK - ] = None, - on_threepid_bind: Optional[ON_THREEPID_BIND_CALLBACK] = None, - on_add_user_third_party_identifier: Optional[ - ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK - ] = None, - on_remove_user_third_party_identifier: Optional[ - ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK - ] = None, - ) -> None: - """Register callbacks from modules for each hook.""" - if check_event_allowed is not None: - self._check_event_allowed_callbacks.append(check_event_allowed) - - if on_create_room is not None: - self._on_create_room_callbacks.append(on_create_room) - - if check_threepid_can_be_invited is not None: - self._check_threepid_can_be_invited_callbacks.append( - check_threepid_can_be_invited, - ) - - if check_visibility_can_be_modified is not None: - self._check_visibility_can_be_modified_callbacks.append( - check_visibility_can_be_modified, - ) - - if on_new_event is not None: - self._on_new_event_callbacks.append(on_new_event) - - if check_can_shutdown_room is not None: - self._check_can_shutdown_room_callbacks.append(check_can_shutdown_room) - - if check_can_deactivate_user is not None: - self._check_can_deactivate_user_callbacks.append(check_can_deactivate_user) - if on_profile_update is not None: - self._on_profile_update_callbacks.append(on_profile_update) - - if on_user_deactivation_status_changed is not None: - self._on_user_deactivation_status_changed_callbacks.append( - on_user_deactivation_status_changed, - ) - - if on_threepid_bind is not None: - self._on_threepid_bind_callbacks.append(on_threepid_bind) - - if on_add_user_third_party_identifier is not None: - self._on_add_user_third_party_identifier_callbacks.append( - on_add_user_third_party_identifier - ) - - if on_remove_user_third_party_identifier is not None: - self._on_remove_user_third_party_identifier_callbacks.append( - on_remove_user_third_party_identifier - ) - - async def check_event_allowed( - self, - event: EventBase, - context: UnpersistedEventContextBase, - ) -> Tuple[bool, Optional[dict]]: - """Check if a provided event should be allowed in the given context. - - The module can return: - * True: the event is allowed. - * False: the event is not allowed, and should be rejected with M_FORBIDDEN. - - If the event is allowed, the module can also return a dictionary to use as a - replacement for the event. - - Args: - event: The event to be checked. - context: The context of the event. - - Returns: - The result from the ThirdPartyRules module, as above. - """ - # Bail out early without hitting the store if we don't have any callbacks to run. - if len(self._check_event_allowed_callbacks) == 0: - return True, None - - prev_state_ids = await context.get_prev_state_ids() - - # Retrieve the state events from the database. - events = await self.store.get_events(prev_state_ids.values()) - state_events = {(ev.type, ev.state_key): ev for ev in events.values()} - - # Ensure that the event is frozen, to make sure that the module is not tempted - # to try to modify it. Any attempt to modify it at this point will invalidate - # the hashes and signatures. 
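As the surrounding code shows, each registered callback receives the frozen event together with the room's current state and must return an (allowed, replacement) pair, per the CHECK_EVENT_ALLOWED_CALLBACK type above. A hypothetical module-side callback honouring that contract might look like this sketch; the filtering policy and the banned word are invented for illustration:

from typing import Optional, Tuple

from synapse.events import EventBase
from synapse.types import StateMap


async def check_event_allowed(
    event: EventBase, state_events: StateMap[EventBase]
) -> Tuple[bool, Optional[dict]]:
    # Reject message events containing a (made-up) banned word; the event is
    # then refused with M_FORBIDDEN.
    if event.type == "m.room.message" and "forbiddenword" in event.content.get("body", ""):
        return False, None
    # Allow everything else unchanged (no replacement dict).
    return True, None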
- event.freeze() - - for callback in self._check_event_allowed_callbacks: - try: - res, replacement_data = await delay_cancellation( - callback(event, state_events) - ) - except CancelledError: - raise - except SynapseError as e: - # FIXME: Being able to throw SynapseErrors is relied upon by - # some modules. PR #10386 accidentally broke this ability. - # That said, we aren't keen on exposing this implementation detail - # to modules and we should one day have a proper way to do what - # is wanted. - # This module callback needs a rework so that hacks such as - # this one are not necessary. - raise e - except Exception: - raise ModuleFailedException( - "Failed to run `check_event_allowed` module API callback" - ) - - # Return if the event shouldn't be allowed or if the module came up with a - # replacement dict for the event. - if res is False: - return res, None - elif isinstance(replacement_data, dict): - return True, replacement_data - - return True, None - - async def on_create_room( - self, requester: Requester, config: dict, is_requester_admin: bool - ) -> None: - """Intercept requests to create room to maybe deny it (via an exception) or - update the request config. - - Args: - requester - config: The creation config from the client. - is_requester_admin: If the requester is an admin - """ - for callback in self._on_create_room_callbacks: - try: - await callback(requester, config, is_requester_admin) - except Exception as e: - # Don't silence the errors raised by this callback since we expect it to - # raise an exception to deny the creation of the room; instead make sure - # it's a SynapseError we can send to clients. - if not isinstance(e, SynapseError): - e = SynapseError( - 403, "Room creation forbidden with these parameters" - ) - - raise e - - async def check_threepid_can_be_invited( - self, medium: str, address: str, room_id: str - ) -> bool: - """Check if a provided 3PID can be invited in the given room. - - Args: - medium: The 3PID's medium. - address: The 3PID's address. - room_id: The room we want to invite the threepid to. - - Returns: - True if the 3PID can be invited, False if not. - """ - # Bail out early without hitting the store if we don't have any callbacks to run. - if len(self._check_threepid_can_be_invited_callbacks) == 0: - return True - - state_events = await self._get_state_map_for_room(room_id) - - for callback in self._check_threepid_can_be_invited_callbacks: - try: - threepid_can_be_invited = await delay_cancellation( - callback(medium, address, state_events) - ) - if threepid_can_be_invited is False: - return False - except CancelledError: - raise - except Exception as e: - logger.warning("Failed to run module API callback %s: %s", callback, e) - - return True - - async def check_visibility_can_be_modified( - self, room_id: str, new_visibility: str - ) -> bool: - """Check if a room is allowed to be published to, or removed from, the public room - list. - - Args: - room_id: The ID of the room. - new_visibility: The new visibility state. Either "public" or "private". - - Returns: - True if the room's visibility can be modified, False if not. 
- """ - # Bail out early without hitting the store if we don't have any callback - if len(self._check_visibility_can_be_modified_callbacks) == 0: - return True - - state_events = await self._get_state_map_for_room(room_id) - - for callback in self._check_visibility_can_be_modified_callbacks: - try: - visibility_can_be_modified = await delay_cancellation( - callback(room_id, state_events, new_visibility) - ) - if visibility_can_be_modified is False: - return False - except CancelledError: - raise - except Exception as e: - logger.warning("Failed to run module API callback %s: %s", callback, e) - - return True - - async def on_new_event(self, event_id: str) -> None: - """Let modules act on events after they've been sent (e.g. auto-accepting - invites, etc.) - - Args: - event_id: The ID of the event. - """ - # Bail out early without hitting the store if we don't have any callbacks - if len(self._on_new_event_callbacks) == 0: - return - - event = await self.store.get_event(event_id) - state_events = await self._get_state_map_for_room(event.room_id) - - for callback in self._on_new_event_callbacks: - try: - await callback(event, state_events) - except Exception as e: - logger.exception( - "Failed to run module API callback %s: %s", callback, e - ) - - async def check_can_shutdown_room(self, user_id: str, room_id: str) -> bool: - """Intercept requests to shutdown a room. If `False` is returned, the - room must not be shut down. - - Args: - requester: The ID of the user requesting the shutdown. - room_id: The ID of the room. - """ - for callback in self._check_can_shutdown_room_callbacks: - try: - can_shutdown_room = await delay_cancellation(callback(user_id, room_id)) - if can_shutdown_room is False: - return False - except CancelledError: - raise - except Exception as e: - logger.exception( - "Failed to run module API callback %s: %s", callback, e - ) - return True - - async def check_can_deactivate_user( - self, - user_id: str, - by_admin: bool, - ) -> bool: - """Intercept requests to deactivate a user. If `False` is returned, the - user should not be deactivated. - - Args: - requester - user_id: The ID of the room. - """ - for callback in self._check_can_deactivate_user_callbacks: - try: - can_deactivate_user = await delay_cancellation( - callback(user_id, by_admin) - ) - if can_deactivate_user is False: - return False - except CancelledError: - raise - except Exception as e: - logger.exception( - "Failed to run module API callback %s: %s", callback, e - ) - return True - - async def _get_state_map_for_room(self, room_id: str) -> StateMap[EventBase]: - """Given a room ID, return the state events of that room. - - Args: - room_id: The ID of the room. - - Returns: - A dict mapping (event type, state key) to state event. - """ - return await self._storage_controllers.state.get_current_state(room_id) - - async def on_profile_update( - self, user_id: str, new_profile: ProfileInfo, by_admin: bool, deactivation: bool - ) -> None: - """Called after the global profile of a user has been updated. Does not include - per-room profile changes. - - Args: - user_id: The user whose profile was changed. - new_profile: The updated profile for the user. - by_admin: Whether the profile update was performed by a server admin. - deactivation: Whether this change was made while deactivating the user. 
- """ - for callback in self._on_profile_update_callbacks: - try: - await callback(user_id, new_profile, by_admin, deactivation) - except Exception as e: - logger.exception( - "Failed to run module API callback %s: %s", callback, e - ) - - async def on_user_deactivation_status_changed( - self, user_id: str, deactivated: bool, by_admin: bool - ) -> None: - """Called after a user has been deactivated or reactivated. - - Args: - user_id: The deactivated user. - deactivated: Whether the user is now deactivated. - by_admin: Whether the deactivation was performed by a server admin. - """ - for callback in self._on_user_deactivation_status_changed_callbacks: - try: - await callback(user_id, deactivated, by_admin) - except Exception as e: - logger.exception( - "Failed to run module API callback %s: %s", callback, e - ) - - async def on_threepid_bind(self, user_id: str, medium: str, address: str) -> None: - """Called after a threepid association has been verified and stored. - - Note that this callback is called when an association is created on the - local homeserver, not when it's created on an identity server (and then kept track - of so that it can be unbound on the same IS later on). - - THIS MODULE CALLBACK METHOD HAS BEEN DEPRECATED. Please use the - `on_add_user_third_party_identifier` callback method instead. - - Args: - user_id: the user being associated with the threepid. - medium: the threepid's medium. - address: the threepid's address. - """ - for callback in self._on_threepid_bind_callbacks: - try: - await callback(user_id, medium, address) - except Exception as e: - logger.exception( - "Failed to run module API callback %s: %s", callback, e - ) - - async def on_add_user_third_party_identifier( - self, user_id: str, medium: str, address: str - ) -> None: - """Called when an association between a user's Matrix ID and a third-party ID - (email, phone number) has successfully been registered on the homeserver. - - Args: - user_id: The User ID included in the association. - medium: The medium of the third-party ID (email, msisdn). - address: The address of the third-party ID (i.e. an email address). - """ - for callback in self._on_add_user_third_party_identifier_callbacks: - try: - await callback(user_id, medium, address) - except Exception as e: - logger.exception( - "Failed to run module API callback %s: %s", callback, e - ) - - async def on_remove_user_third_party_identifier( - self, user_id: str, medium: str, address: str - ) -> None: - """Called when an association between a user's Matrix ID and a third-party ID - (email, phone number) has been successfully removed on the homeserver. - - This is called *after* any known bindings on identity servers for this - association have been removed. - - Args: - user_id: The User ID included in the removed association. - medium: The medium of the third-party ID (email, msisdn). - address: The address of the third-party ID (i.e. an email address). 
- """ - for callback in self._on_remove_user_third_party_identifier_callbacks: - try: - await callback(user_id, medium, address) - except Exception as e: - logger.exception( - "Failed to run module API callback %s: %s", callback, e - ) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 1e89447044..59e340974d 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -212,7 +212,7 @@ class AuthHandler: self._password_enabled_for_login = hs.config.auth.password_enabled_for_login self._password_enabled_for_reauth = hs.config.auth.password_enabled_for_reauth self._password_localdb_enabled = hs.config.auth.password_localdb_enabled - self._third_party_rules = hs.get_third_party_event_rules() + self._third_party_rules = hs.get_module_api_callbacks().third_party_event_rules # Ratelimiter for failed auth during UIA. Uses same ratelimit config # as per `rc_login.failed_attempts`. diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index bd5867491b..f299b89a1b 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -39,11 +39,11 @@ class DeactivateAccountHandler: self._profile_handler = hs.get_profile_handler() self.user_directory_handler = hs.get_user_directory_handler() self._server_name = hs.hostname - self._third_party_rules = hs.get_third_party_event_rules() + self._third_party_rules = hs.get_module_api_callbacks().third_party_event_rules # Flag that indicates whether the process to part users from rooms is running self._user_parter_running = False - self._third_party_rules = hs.get_third_party_event_rules() + self._third_party_rules = hs.get_module_api_callbacks().third_party_event_rules # Start the user parter loop so it can resume parting users from rooms where # it left off (if it has work left to do). diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 5e8316e2e5..1e0623c7f8 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -52,7 +52,9 @@ class DirectoryHandler: self.config = hs.config self.enable_room_list_search = hs.config.roomdirectory.enable_room_list_search self.require_membership = hs.config.server.require_membership_for_aliases - self.third_party_event_rules = hs.get_third_party_event_rules() + self._third_party_event_rules = ( + hs.get_module_api_callbacks().third_party_event_rules + ) self.server_name = hs.hostname self.federation = hs.get_federation_client() @@ -503,7 +505,7 @@ class DirectoryHandler: # Check if publishing is blocked by a third party module allowed_by_third_party_rules = ( await ( - self.third_party_event_rules.check_visibility_can_be_modified( + self._third_party_event_rules.check_visibility_can_be_modified( room_id, visibility ) ) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index d1a88cc604..4ad808a5b4 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -169,7 +169,9 @@ class FederationHandler: self._room_backfill = Linearizer("room_backfill") - self.third_party_event_rules = hs.get_third_party_event_rules() + self._third_party_event_rules = ( + hs.get_module_api_callbacks().third_party_event_rules + ) # Tracks running partial state syncs by room ID. 
# Partial state syncs currently only run on the main process, so it's okay to @@ -1253,7 +1255,7 @@ class FederationHandler: unpersisted_context, ) = await self.event_creation_handler.create_new_client_event(builder=builder) - event_allowed, _ = await self.third_party_event_rules.check_event_allowed( + event_allowed, _ = await self._third_party_event_rules.check_event_allowed( event, unpersisted_context ) if not event_allowed: diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py index 06609fab93..fc15024166 100644 --- a/synapse/handlers/federation_event.py +++ b/synapse/handlers/federation_event.py @@ -157,7 +157,9 @@ class FederationEventHandler: self._get_room_member_handler = hs.get_room_member_handler self._federation_client = hs.get_federation_client() - self._third_party_event_rules = hs.get_third_party_event_rules() + self._third_party_event_rules = ( + hs.get_module_api_callbacks().third_party_event_rules + ) self._notifier = hs.get_notifier() self._is_mine_id = hs.is_mine_id diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index ac1932a7f9..0b61c2272b 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -77,7 +77,6 @@ from synapse.util.metrics import measure_func from synapse.visibility import get_effective_room_visibility_from_state if TYPE_CHECKING: - from synapse.events.third_party_rules import ThirdPartyEventRules from synapse.server import HomeServer logger = logging.getLogger(__name__) @@ -509,8 +508,8 @@ class EventCreationHandler: self._bulk_push_rule_evaluator = hs.get_bulk_push_rule_evaluator() self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker - self.third_party_event_rules: "ThirdPartyEventRules" = ( - self.hs.get_third_party_event_rules() + self._third_party_event_rules = ( + self.hs.get_module_api_callbacks().third_party_event_rules ) self._block_events_without_consent_error = ( @@ -1314,7 +1313,7 @@ class EventCreationHandler: if requester: context.app_service = requester.app_service - res, new_content = await self.third_party_event_rules.check_event_allowed( + res, new_content = await self._third_party_event_rules.check_event_allowed( event, context ) if res is False: diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 440d3f4acd..983b9b66fb 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -61,7 +61,7 @@ class ProfileHandler: self.server_name = hs.config.server.server_name - self._third_party_rules = hs.get_third_party_event_rules() + self._third_party_rules = hs.get_module_api_callbacks().third_party_event_rules async def get_profile(self, user_id: str, ignore_backoff: bool = True) -> JsonDict: target_user = UserID.from_string(user_id) diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index efd9612d90..5e1702d78a 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -160,7 +160,9 @@ class RoomCreationHandler: ) self._server_notices_mxid = hs.config.servernotices.server_notices_mxid - self.third_party_event_rules = hs.get_third_party_event_rules() + self._third_party_event_rules = ( + hs.get_module_api_callbacks().third_party_event_rules + ) async def upgrade_room( self, requester: Requester, old_room_id: str, new_version: RoomVersion @@ -742,7 +744,7 @@ class RoomCreationHandler: # Let the third party rules modify the room creation config if needed, or abort # the room creation entirely with an exception. 
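Per ON_CREATE_ROOM_CALLBACK, a hook registered for on_create_room receives the requester, the client's creation config, and whether the requester is an admin, and it vetoes creation by raising (a plain False from a legacy module is converted into a 403 by the wrapper shown earlier). A sketch of a hypothetical hook; the "admins-only public rooms" policy is invented for illustration:

from synapse.api.errors import SynapseError
from synapse.types import Requester


async def on_create_room(
    requester: Requester, room_config: dict, is_requester_admin: bool
) -> None:
    # Made-up policy: only server admins may create published (public) rooms.
    if not is_requester_admin and room_config.get("visibility") == "public":
        raise SynapseError(403, "Only admins may create public rooms")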
- await self.third_party_event_rules.on_create_room( + await self._third_party_event_rules.on_create_room( requester, config, is_requester_admin=is_requester_admin ) @@ -879,7 +881,7 @@ class RoomCreationHandler: # Check whether this visibility value is blocked by a third party module allowed_by_third_party_rules = ( await ( - self.third_party_event_rules.check_visibility_can_be_modified( + self._third_party_event_rules.check_visibility_can_be_modified( room_id, visibility ) ) @@ -1731,7 +1733,7 @@ class RoomShutdownHandler: self.room_member_handler = hs.get_room_member_handler() self._room_creation_handler = hs.get_room_creation_handler() self._replication = hs.get_replication_data_handler() - self._third_party_rules = hs.get_third_party_event_rules() + self._third_party_rules = hs.get_module_api_callbacks().third_party_event_rules self.event_creation_handler = hs.get_event_creation_handler() self.store = hs.get_datastores().main diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index fbef600acd..af0ca5c26d 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -100,7 +100,9 @@ class RoomMemberHandler(metaclass=abc.ABCMeta): self.clock = hs.get_clock() self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker - self.third_party_event_rules = hs.get_third_party_event_rules() + self._third_party_event_rules = ( + hs.get_module_api_callbacks().third_party_event_rules + ) self._server_notices_mxid = self.config.servernotices.server_notices_mxid self._enable_lookup = hs.config.registration.enable_3pid_lookup self.allow_per_room_profiles = self.config.server.allow_per_room_profiles @@ -1560,7 +1562,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta): # can't just rely on the standard ratelimiting of events. 
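The check_threepid_can_be_invited consultation being rewired just below hands every registered callback the 3PID's medium and address plus the room's current state map, and a single False blocks the invite. A hypothetical callback restricting email invites to one domain; the domain constant is an invented example:

from synapse.events import EventBase
from synapse.types import StateMap

ALLOWED_EMAIL_DOMAIN = "example.com"  # assumption for illustration


async def check_threepid_can_be_invited(
    medium: str, address: str, state_events: StateMap[EventBase]
) -> bool:
    # Only email invites to the allow-listed domain pass; msisdn etc. are blocked.
    return medium == "email" and address.endswith("@" + ALLOWED_EMAIL_DOMAIN)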
await self._third_party_invite_limiter.ratelimit(requester) - can_invite = await self.third_party_event_rules.check_threepid_can_be_invited( + can_invite = await self._third_party_event_rules.check_threepid_can_be_invited( medium, address, room_id ) if not can_invite: diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 90eff030b5..4b59e6825b 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -44,20 +44,6 @@ from synapse.events.presence_router import ( GET_USERS_FOR_STATES_CALLBACK, PresenceRouter, ) -from synapse.events.third_party_rules import ( - CHECK_CAN_DEACTIVATE_USER_CALLBACK, - CHECK_CAN_SHUTDOWN_ROOM_CALLBACK, - CHECK_EVENT_ALLOWED_CALLBACK, - CHECK_THREEPID_CAN_BE_INVITED_CALLBACK, - CHECK_VISIBILITY_CAN_BE_MODIFIED_CALLBACK, - ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK, - ON_CREATE_ROOM_CALLBACK, - ON_NEW_EVENT_CALLBACK, - ON_PROFILE_UPDATE_CALLBACK, - ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK, - ON_THREEPID_BIND_CALLBACK, - ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK, -) from synapse.handlers.account_data import ON_ACCOUNT_DATA_UPDATED_CALLBACK from synapse.handlers.auth import ( CHECK_3PID_AUTH_CALLBACK, @@ -105,6 +91,20 @@ from synapse.module_api.callbacks.spamchecker_callbacks import ( USER_MAY_SEND_3PID_INVITE_CALLBACK, SpamCheckerModuleApiCallbacks, ) +from synapse.module_api.callbacks.third_party_event_rules_callbacks import ( + CHECK_CAN_DEACTIVATE_USER_CALLBACK, + CHECK_CAN_SHUTDOWN_ROOM_CALLBACK, + CHECK_EVENT_ALLOWED_CALLBACK, + CHECK_THREEPID_CAN_BE_INVITED_CALLBACK, + CHECK_VISIBILITY_CAN_BE_MODIFIED_CALLBACK, + ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK, + ON_CREATE_ROOM_CALLBACK, + ON_NEW_EVENT_CALLBACK, + ON_PROFILE_UPDATE_CALLBACK, + ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK, + ON_THREEPID_BIND_CALLBACK, + ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK, +) from synapse.push.httppusher import HttpPusher from synapse.rest.client.login import LoginResponse from synapse.storage import DataStore @@ -273,7 +273,6 @@ class ModuleApi: self._public_room_list_manager = PublicRoomListManager(hs) self._account_data_manager = AccountDataManager(hs) - self._third_party_event_rules = hs.get_third_party_event_rules() self._password_auth_provider = hs.get_password_auth_provider() self._presence_router = hs.get_presence_router() self._account_data_handler = hs.get_account_data_handler() @@ -371,7 +370,7 @@ class ModuleApi: Added in Synapse v1.39.0. 
""" - return self._third_party_event_rules.register_third_party_rules_callbacks( + return self._callbacks.third_party_event_rules.register_third_party_rules_callbacks( check_event_allowed=check_event_allowed, on_create_room=on_create_room, check_threepid_can_be_invited=check_threepid_can_be_invited, diff --git a/synapse/module_api/callbacks/__init__.py b/synapse/module_api/callbacks/__init__.py index 5cdb2c003a..dcb036552b 100644 --- a/synapse/module_api/callbacks/__init__.py +++ b/synapse/module_api/callbacks/__init__.py @@ -23,9 +23,13 @@ from synapse.module_api.callbacks.account_validity_callbacks import ( from synapse.module_api.callbacks.spamchecker_callbacks import ( SpamCheckerModuleApiCallbacks, ) +from synapse.module_api.callbacks.third_party_event_rules_callbacks import ( + ThirdPartyEventRulesModuleApiCallbacks, +) class ModuleApiCallbacks: def __init__(self, hs: "HomeServer") -> None: self.account_validity = AccountValidityModuleApiCallbacks() self.spam_checker = SpamCheckerModuleApiCallbacks(hs) + self.third_party_event_rules = ThirdPartyEventRulesModuleApiCallbacks(hs) diff --git a/synapse/module_api/callbacks/third_party_event_rules_callbacks.py b/synapse/module_api/callbacks/third_party_event_rules_callbacks.py new file mode 100644 index 0000000000..911f37ba42 --- /dev/null +++ b/synapse/module_api/callbacks/third_party_event_rules_callbacks.py @@ -0,0 +1,591 @@ +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import logging
+from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Optional, Tuple
+
+from twisted.internet.defer import CancelledError
+
+from synapse.api.errors import ModuleFailedException, SynapseError
+from synapse.events import EventBase
+from synapse.events.snapshot import UnpersistedEventContextBase
+from synapse.storage.roommember import ProfileInfo
+from synapse.types import Requester, StateMap
+from synapse.util.async_helpers import delay_cancellation, maybe_awaitable
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
+logger = logging.getLogger(__name__)
+
+
+CHECK_EVENT_ALLOWED_CALLBACK = Callable[
+    [EventBase, StateMap[EventBase]], Awaitable[Tuple[bool, Optional[dict]]]
+]
+ON_CREATE_ROOM_CALLBACK = Callable[[Requester, dict, bool], Awaitable]
+CHECK_THREEPID_CAN_BE_INVITED_CALLBACK = Callable[
+    [str, str, StateMap[EventBase]], Awaitable[bool]
+]
+CHECK_VISIBILITY_CAN_BE_MODIFIED_CALLBACK = Callable[
+    [str, StateMap[EventBase], str], Awaitable[bool]
+]
+ON_NEW_EVENT_CALLBACK = Callable[[EventBase, StateMap[EventBase]], Awaitable]
+CHECK_CAN_SHUTDOWN_ROOM_CALLBACK = Callable[[str, str], Awaitable[bool]]
+CHECK_CAN_DEACTIVATE_USER_CALLBACK = Callable[[str, bool], Awaitable[bool]]
+ON_PROFILE_UPDATE_CALLBACK = Callable[[str, ProfileInfo, bool, bool], Awaitable]
+ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK = Callable[[str, bool, bool], Awaitable]
+ON_THREEPID_BIND_CALLBACK = Callable[[str, str, str], Awaitable]
+ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK = Callable[[str, str, str], Awaitable]
+ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK = Callable[[str, str, str], Awaitable]
+
+
+def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
+    """Wrapper that loads a third party event rules module configured using the old
+    configuration, and registers the hooks they implement.
+    """
+    if hs.config.thirdpartyrules.third_party_event_rules is None:
+        return
+
+    module, config = hs.config.thirdpartyrules.third_party_event_rules
+
+    api = hs.get_module_api()
+    third_party_rules = module(config=config, module_api=api)
+
+    # The known hooks. If a module implements a method whose name appears in this set,
+    # we'll want to register it.
+    third_party_event_rules_methods = {
+        "check_event_allowed",
+        "on_create_room",
+        "check_threepid_can_be_invited",
+        "check_visibility_can_be_modified",
+    }
+
+    def async_wrapper(f: Optional[Callable]) -> Optional[Callable[..., Awaitable]]:
+        # f might be None if the callback isn't implemented by the module. In this
+        # case we don't want to register a callback at all so we return None.
+        if f is None:
+            return None
+
+        # We return a separate wrapper for these methods because, in order to wrap them
+        # correctly, we need to await their results. Therefore it doesn't make a lot of
+        # sense to make them go through the run() wrapper.
+        if f.__name__ == "check_event_allowed":
+            # We need to wrap check_event_allowed because its old form would return either
+            # a boolean or a dict, but now we want to return the dict separately from the
+            # boolean.
+            async def wrap_check_event_allowed(
+                event: EventBase,
+                state_events: StateMap[EventBase],
+            ) -> Tuple[bool, Optional[dict]]:
+                # Assertion required because mypy can't prove we won't change
+                # `f` back to `None`.
See + # https://mypy.readthedocs.io/en/latest/common_issues.html#narrowing-and-inner-functions + assert f is not None + + res = await f(event, state_events) + if isinstance(res, dict): + return True, res + else: + return res, None + + return wrap_check_event_allowed + + if f.__name__ == "on_create_room": + # We need to wrap on_create_room because its old form would return a boolean + # if the room creation is denied, but now we just want it to raise an + # exception. + async def wrap_on_create_room( + requester: Requester, config: dict, is_requester_admin: bool + ) -> None: + # Assertion required because mypy can't prove we won't change + # `f` back to `None`. See + # https://mypy.readthedocs.io/en/latest/common_issues.html#narrowing-and-inner-functions + assert f is not None + + res = await f(requester, config, is_requester_admin) + if res is False: + raise SynapseError( + 403, + "Room creation forbidden with these parameters", + ) + + return wrap_on_create_room + + def run(*args: Any, **kwargs: Any) -> Awaitable: + # Assertion required because mypy can't prove we won't change `f` + # back to `None`. See + # https://mypy.readthedocs.io/en/latest/common_issues.html#narrowing-and-inner-functions + assert f is not None + + return maybe_awaitable(f(*args, **kwargs)) + + return run + + # Register the hooks through the module API. + hooks = { + hook: async_wrapper(getattr(third_party_rules, hook, None)) + for hook in third_party_event_rules_methods + } + + api.register_third_party_rules_callbacks(**hooks) + + +class ThirdPartyEventRulesModuleApiCallbacks: + """Allows server admins to provide a Python module implementing an extra + set of rules to apply when processing events. + + This is designed to help admins of closed federations with enforcing custom + behaviours. 
+ """ + + def __init__(self, hs: "HomeServer"): + self.store = hs.get_datastores().main + self._storage_controllers = hs.get_storage_controllers() + + self._check_event_allowed_callbacks: List[CHECK_EVENT_ALLOWED_CALLBACK] = [] + self._on_create_room_callbacks: List[ON_CREATE_ROOM_CALLBACK] = [] + self._check_threepid_can_be_invited_callbacks: List[ + CHECK_THREEPID_CAN_BE_INVITED_CALLBACK + ] = [] + self._check_visibility_can_be_modified_callbacks: List[ + CHECK_VISIBILITY_CAN_BE_MODIFIED_CALLBACK + ] = [] + self._on_new_event_callbacks: List[ON_NEW_EVENT_CALLBACK] = [] + self._check_can_shutdown_room_callbacks: List[ + CHECK_CAN_SHUTDOWN_ROOM_CALLBACK + ] = [] + self._check_can_deactivate_user_callbacks: List[ + CHECK_CAN_DEACTIVATE_USER_CALLBACK + ] = [] + self._on_profile_update_callbacks: List[ON_PROFILE_UPDATE_CALLBACK] = [] + self._on_user_deactivation_status_changed_callbacks: List[ + ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK + ] = [] + self._on_threepid_bind_callbacks: List[ON_THREEPID_BIND_CALLBACK] = [] + self._on_add_user_third_party_identifier_callbacks: List[ + ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK + ] = [] + self._on_remove_user_third_party_identifier_callbacks: List[ + ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK + ] = [] + + def register_third_party_rules_callbacks( + self, + check_event_allowed: Optional[CHECK_EVENT_ALLOWED_CALLBACK] = None, + on_create_room: Optional[ON_CREATE_ROOM_CALLBACK] = None, + check_threepid_can_be_invited: Optional[ + CHECK_THREEPID_CAN_BE_INVITED_CALLBACK + ] = None, + check_visibility_can_be_modified: Optional[ + CHECK_VISIBILITY_CAN_BE_MODIFIED_CALLBACK + ] = None, + on_new_event: Optional[ON_NEW_EVENT_CALLBACK] = None, + check_can_shutdown_room: Optional[CHECK_CAN_SHUTDOWN_ROOM_CALLBACK] = None, + check_can_deactivate_user: Optional[CHECK_CAN_DEACTIVATE_USER_CALLBACK] = None, + on_profile_update: Optional[ON_PROFILE_UPDATE_CALLBACK] = None, + on_user_deactivation_status_changed: Optional[ + ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK + ] = None, + on_threepid_bind: Optional[ON_THREEPID_BIND_CALLBACK] = None, + on_add_user_third_party_identifier: Optional[ + ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK + ] = None, + on_remove_user_third_party_identifier: Optional[ + ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK + ] = None, + ) -> None: + """Register callbacks from modules for each hook.""" + if check_event_allowed is not None: + self._check_event_allowed_callbacks.append(check_event_allowed) + + if on_create_room is not None: + self._on_create_room_callbacks.append(on_create_room) + + if check_threepid_can_be_invited is not None: + self._check_threepid_can_be_invited_callbacks.append( + check_threepid_can_be_invited, + ) + + if check_visibility_can_be_modified is not None: + self._check_visibility_can_be_modified_callbacks.append( + check_visibility_can_be_modified, + ) + + if on_new_event is not None: + self._on_new_event_callbacks.append(on_new_event) + + if check_can_shutdown_room is not None: + self._check_can_shutdown_room_callbacks.append(check_can_shutdown_room) + + if check_can_deactivate_user is not None: + self._check_can_deactivate_user_callbacks.append(check_can_deactivate_user) + if on_profile_update is not None: + self._on_profile_update_callbacks.append(on_profile_update) + + if on_user_deactivation_status_changed is not None: + self._on_user_deactivation_status_changed_callbacks.append( + on_user_deactivation_status_changed, + ) + + if on_threepid_bind is not None: + 
self._on_threepid_bind_callbacks.append(on_threepid_bind) + + if on_add_user_third_party_identifier is not None: + self._on_add_user_third_party_identifier_callbacks.append( + on_add_user_third_party_identifier + ) + + if on_remove_user_third_party_identifier is not None: + self._on_remove_user_third_party_identifier_callbacks.append( + on_remove_user_third_party_identifier + ) + + async def check_event_allowed( + self, + event: EventBase, + context: UnpersistedEventContextBase, + ) -> Tuple[bool, Optional[dict]]: + """Check if a provided event should be allowed in the given context. + + The module can return: + * True: the event is allowed. + * False: the event is not allowed, and should be rejected with M_FORBIDDEN. + + If the event is allowed, the module can also return a dictionary to use as a + replacement for the event. + + Args: + event: The event to be checked. + context: The context of the event. + + Returns: + The result from the ThirdPartyRules module, as above. + """ + # Bail out early without hitting the store if we don't have any callbacks to run. + if len(self._check_event_allowed_callbacks) == 0: + return True, None + + prev_state_ids = await context.get_prev_state_ids() + + # Retrieve the state events from the database. + events = await self.store.get_events(prev_state_ids.values()) + state_events = {(ev.type, ev.state_key): ev for ev in events.values()} + + # Ensure that the event is frozen, to make sure that the module is not tempted + # to try to modify it. Any attempt to modify it at this point will invalidate + # the hashes and signatures. + event.freeze() + + for callback in self._check_event_allowed_callbacks: + try: + res, replacement_data = await delay_cancellation( + callback(event, state_events) + ) + except CancelledError: + raise + except SynapseError as e: + # FIXME: Being able to throw SynapseErrors is relied upon by + # some modules. PR #10386 accidentally broke this ability. + # That said, we aren't keen on exposing this implementation detail + # to modules and we should one day have a proper way to do what + # is wanted. + # This module callback needs a rework so that hacks such as + # this one are not necessary. + raise e + except Exception: + raise ModuleFailedException( + "Failed to run `check_event_allowed` module API callback" + ) + + # Return if the event shouldn't be allowed or if the module came up with a + # replacement dict for the event. + if res is False: + return res, None + elif isinstance(replacement_data, dict): + return True, replacement_data + + return True, None + + async def on_create_room( + self, requester: Requester, config: dict, is_requester_admin: bool + ) -> None: + """Intercept requests to create room to maybe deny it (via an exception) or + update the request config. + + Args: + requester + config: The creation config from the client. + is_requester_admin: If the requester is an admin + """ + for callback in self._on_create_room_callbacks: + try: + await callback(requester, config, is_requester_admin) + except Exception as e: + # Don't silence the errors raised by this callback since we expect it to + # raise an exception to deny the creation of the room; instead make sure + # it's a SynapseError we can send to clients. + if not isinstance(e, SynapseError): + e = SynapseError( + 403, "Room creation forbidden with these parameters" + ) + + raise e + + async def check_threepid_can_be_invited( + self, medium: str, address: str, room_id: str + ) -> bool: + """Check if a provided 3PID can be invited in the given room. 
+
+        Args:
+            medium: The 3PID's medium.
+            address: The 3PID's address.
+            room_id: The room we want to invite the threepid to.
+
+        Returns:
+            True if the 3PID can be invited, False if not.
+        """
+        # Bail out early without hitting the store if we don't have any callbacks to run.
+        if len(self._check_threepid_can_be_invited_callbacks) == 0:
+            return True
+
+        state_events = await self._get_state_map_for_room(room_id)
+
+        for callback in self._check_threepid_can_be_invited_callbacks:
+            try:
+                threepid_can_be_invited = await delay_cancellation(
+                    callback(medium, address, state_events)
+                )
+                if threepid_can_be_invited is False:
+                    return False
+            except CancelledError:
+                raise
+            except Exception as e:
+                logger.warning("Failed to run module API callback %s: %s", callback, e)
+
+        return True
+
+    async def check_visibility_can_be_modified(
+        self, room_id: str, new_visibility: str
+    ) -> bool:
+        """Check if a room is allowed to be published to, or removed from, the public room
+        list.
+
+        Args:
+            room_id: The ID of the room.
+            new_visibility: The new visibility state. Either "public" or "private".
+
+        Returns:
+            True if the room's visibility can be modified, False if not.
+        """
+        # Bail out early without hitting the store if we don't have any callback
+        if len(self._check_visibility_can_be_modified_callbacks) == 0:
+            return True
+
+        state_events = await self._get_state_map_for_room(room_id)
+
+        for callback in self._check_visibility_can_be_modified_callbacks:
+            try:
+                visibility_can_be_modified = await delay_cancellation(
+                    callback(room_id, state_events, new_visibility)
+                )
+                if visibility_can_be_modified is False:
+                    return False
+            except CancelledError:
+                raise
+            except Exception as e:
+                logger.warning("Failed to run module API callback %s: %s", callback, e)
+
+        return True
+
+    async def on_new_event(self, event_id: str) -> None:
+        """Let modules act on events after they've been sent (e.g. auto-accepting
+        invites, etc.)
+
+        Args:
+            event_id: The ID of the event.
+        """
+        # Bail out early without hitting the store if we don't have any callbacks
+        if len(self._on_new_event_callbacks) == 0:
+            return
+
+        event = await self.store.get_event(event_id)
+        state_events = await self._get_state_map_for_room(event.room_id)
+
+        for callback in self._on_new_event_callbacks:
+            try:
+                await callback(event, state_events)
+            except Exception as e:
+                logger.exception(
+                    "Failed to run module API callback %s: %s", callback, e
+                )
+
+    async def check_can_shutdown_room(self, user_id: str, room_id: str) -> bool:
+        """Intercept requests to shutdown a room. If `False` is returned, the
+        room must not be shut down.
+
+        Args:
+            user_id: The ID of the user requesting the shutdown.
+            room_id: The ID of the room.
+        """
+        for callback in self._check_can_shutdown_room_callbacks:
+            try:
+                can_shutdown_room = await delay_cancellation(callback(user_id, room_id))
+                if can_shutdown_room is False:
+                    return False
+            except CancelledError:
+                raise
+            except Exception as e:
+                logger.exception(
+                    "Failed to run module API callback %s: %s", callback, e
+                )
+        return True
+
+    async def check_can_deactivate_user(
+        self,
+        user_id: str,
+        by_admin: bool,
+    ) -> bool:
+        """Intercept requests to deactivate a user. If `False` is returned, the
+        user should not be deactivated.
+
+        Args:
+            user_id: The ID of the user to be deactivated.
+            by_admin: Whether the deactivation was requested by a server admin.
+ """ + for callback in self._check_can_deactivate_user_callbacks: + try: + can_deactivate_user = await delay_cancellation( + callback(user_id, by_admin) + ) + if can_deactivate_user is False: + return False + except CancelledError: + raise + except Exception as e: + logger.exception( + "Failed to run module API callback %s: %s", callback, e + ) + return True + + async def _get_state_map_for_room(self, room_id: str) -> StateMap[EventBase]: + """Given a room ID, return the state events of that room. + + Args: + room_id: The ID of the room. + + Returns: + A dict mapping (event type, state key) to state event. + """ + return await self._storage_controllers.state.get_current_state(room_id) + + async def on_profile_update( + self, user_id: str, new_profile: ProfileInfo, by_admin: bool, deactivation: bool + ) -> None: + """Called after the global profile of a user has been updated. Does not include + per-room profile changes. + + Args: + user_id: The user whose profile was changed. + new_profile: The updated profile for the user. + by_admin: Whether the profile update was performed by a server admin. + deactivation: Whether this change was made while deactivating the user. + """ + for callback in self._on_profile_update_callbacks: + try: + await callback(user_id, new_profile, by_admin, deactivation) + except Exception as e: + logger.exception( + "Failed to run module API callback %s: %s", callback, e + ) + + async def on_user_deactivation_status_changed( + self, user_id: str, deactivated: bool, by_admin: bool + ) -> None: + """Called after a user has been deactivated or reactivated. + + Args: + user_id: The deactivated user. + deactivated: Whether the user is now deactivated. + by_admin: Whether the deactivation was performed by a server admin. + """ + for callback in self._on_user_deactivation_status_changed_callbacks: + try: + await callback(user_id, deactivated, by_admin) + except Exception as e: + logger.exception( + "Failed to run module API callback %s: %s", callback, e + ) + + async def on_threepid_bind(self, user_id: str, medium: str, address: str) -> None: + """Called after a threepid association has been verified and stored. + + Note that this callback is called when an association is created on the + local homeserver, not when it's created on an identity server (and then kept track + of so that it can be unbound on the same IS later on). + + THIS MODULE CALLBACK METHOD HAS BEEN DEPRECATED. Please use the + `on_add_user_third_party_identifier` callback method instead. + + Args: + user_id: the user being associated with the threepid. + medium: the threepid's medium. + address: the threepid's address. + """ + for callback in self._on_threepid_bind_callbacks: + try: + await callback(user_id, medium, address) + except Exception as e: + logger.exception( + "Failed to run module API callback %s: %s", callback, e + ) + + async def on_add_user_third_party_identifier( + self, user_id: str, medium: str, address: str + ) -> None: + """Called when an association between a user's Matrix ID and a third-party ID + (email, phone number) has successfully been registered on the homeserver. + + Args: + user_id: The User ID included in the association. + medium: The medium of the third-party ID (email, msisdn). + address: The address of the third-party ID (i.e. an email address). 
+ """ + for callback in self._on_add_user_third_party_identifier_callbacks: + try: + await callback(user_id, medium, address) + except Exception as e: + logger.exception( + "Failed to run module API callback %s: %s", callback, e + ) + + async def on_remove_user_third_party_identifier( + self, user_id: str, medium: str, address: str + ) -> None: + """Called when an association between a user's Matrix ID and a third-party ID + (email, phone number) has been successfully removed on the homeserver. + + This is called *after* any known bindings on identity servers for this + association have been removed. + + Args: + user_id: The User ID included in the removed association. + medium: The medium of the third-party ID (email, msisdn). + address: The address of the third-party ID (i.e. an email address). + """ + for callback in self._on_remove_user_third_party_identifier_callbacks: + try: + await callback(user_id, medium, address) + except Exception as e: + logger.exception( + "Failed to run module API callback %s: %s", callback, e + ) diff --git a/synapse/notifier.py b/synapse/notifier.py index a8832a3f8e..897272ad5b 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -232,7 +232,7 @@ class Notifier: self._federation_client = hs.get_federation_http_client() - self._third_party_rules = hs.get_third_party_event_rules() + self._third_party_rules = hs.get_module_api_callbacks().third_party_event_rules self.clock = hs.get_clock() self.appservice_handler = hs.get_application_service_handler() diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py index 4de56bf13f..1d65560265 100644 --- a/synapse/rest/admin/rooms.py +++ b/synapse/rest/admin/rooms.py @@ -70,7 +70,7 @@ class RoomRestV2Servlet(RestServlet): self._auth = hs.get_auth() self._store = hs.get_datastores().main self._pagination_handler = hs.get_pagination_handler() - self._third_party_rules = hs.get_third_party_event_rules() + self._third_party_rules = hs.get_module_api_callbacks().third_party_event_rules async def on_DELETE( self, request: SynapseRequest, room_id: str diff --git a/synapse/server.py b/synapse/server.py index e597627a6d..c557c60482 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -42,7 +42,6 @@ from synapse.crypto.context_factory import RegularPolicyForHTTPS from synapse.crypto.keyring import Keyring from synapse.events.builder import EventBuilderFactory from synapse.events.presence_router import PresenceRouter -from synapse.events.third_party_rules import ThirdPartyEventRules from synapse.events.utils import EventClientSerializer from synapse.federation.federation_client import FederationClient from synapse.federation.federation_server import ( @@ -691,10 +690,6 @@ class HomeServer(metaclass=abc.ABCMeta): def get_stats_handler(self) -> StatsHandler: return StatsHandler(self) - @cache_in_self - def get_third_party_event_rules(self) -> ThirdPartyEventRules: - return ThirdPartyEventRules(self) - @cache_in_self def get_password_auth_provider(self) -> PasswordAuthProvider: return PasswordAuthProvider() diff --git a/tests/rest/client/test_third_party_rules.py b/tests/rest/client/test_third_party_rules.py index 753ecc8d16..e5ba5a9706 100644 --- a/tests/rest/client/test_third_party_rules.py +++ b/tests/rest/client/test_third_party_rules.py @@ -22,7 +22,9 @@ from synapse.api.errors import SynapseError from synapse.api.room_versions import RoomVersion from synapse.config.homeserver import HomeServerConfig from synapse.events import EventBase -from synapse.events.third_party_rules import 
load_legacy_third_party_event_rules +from synapse.module_api.callbacks.third_party_event_rules_callbacks import ( + load_legacy_third_party_event_rules, +) from synapse.rest import admin from synapse.rest.client import account, login, profile, room from synapse.server import HomeServer @@ -146,7 +148,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): return ev.type != "foo.bar.forbidden", None callback = Mock(spec=[], side_effect=check) - self.hs.get_third_party_event_rules()._check_event_allowed_callbacks = [ + self.hs.get_module_api_callbacks().third_party_event_rules._check_event_allowed_callbacks = [ callback ] @@ -202,7 +204,9 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): ) -> Tuple[bool, Optional[JsonDict]]: raise NastyHackException(429, "message") - self.hs.get_third_party_event_rules()._check_event_allowed_callbacks = [check] + self.hs.get_module_api_callbacks().third_party_event_rules._check_event_allowed_callbacks = [ + check + ] # Make a request channel = self.make_request( @@ -229,7 +233,9 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): ev.content = {"x": "y"} return True, None - self.hs.get_third_party_event_rules()._check_event_allowed_callbacks = [check] + self.hs.get_module_api_callbacks().third_party_event_rules._check_event_allowed_callbacks = [ + check + ] # now send the event channel = self.make_request( @@ -253,7 +259,9 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): d["content"] = {"x": "y"} return True, d - self.hs.get_third_party_event_rules()._check_event_allowed_callbacks = [check] + self.hs.get_module_api_callbacks().third_party_event_rules._check_event_allowed_callbacks = [ + check + ] # now send the event channel = self.make_request( @@ -289,7 +297,9 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): } return True, d - self.hs.get_third_party_event_rules()._check_event_allowed_callbacks = [check] + self.hs.get_module_api_callbacks().third_party_event_rules._check_event_allowed_callbacks = [ + check + ] # Send an event, then edit it. channel = self.make_request( @@ -440,7 +450,9 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): ) return True, None - self.hs.get_third_party_event_rules()._check_event_allowed_callbacks = [test_fn] + self.hs.get_module_api_callbacks().third_party_event_rules._check_event_allowed_callbacks = [ + test_fn + ] # Sometimes the bug might not happen the first time the event type is added # to the state but might happen when an event updates the state of the room for @@ -466,7 +478,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): def test_on_new_event(self) -> None: """Test that the on_new_event callback is called on new events""" on_new_event = Mock(make_awaitable(None)) - self.hs.get_third_party_event_rules()._on_new_event_callbacks.append( + self.hs.get_module_api_callbacks().third_party_event_rules._on_new_event_callbacks.append( on_new_event ) @@ -569,7 +581,9 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): # Register a mock callback. m = Mock(return_value=make_awaitable(None)) - self.hs.get_third_party_event_rules()._on_profile_update_callbacks.append(m) + self.hs.get_module_api_callbacks().third_party_event_rules._on_profile_update_callbacks.append( + m + ) # Change the display name. channel = self.make_request( @@ -628,7 +642,9 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): # Register a mock callback. 
m = Mock(return_value=make_awaitable(None)) - self.hs.get_third_party_event_rules()._on_profile_update_callbacks.append(m) + self.hs.get_module_api_callbacks().third_party_event_rules._on_profile_update_callbacks.append( + m + ) # Register an admin user. self.register_user("admin", "password", admin=True) @@ -667,7 +683,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): """ # Register a mocked callback. deactivation_mock = Mock(return_value=make_awaitable(None)) - third_party_rules = self.hs.get_third_party_event_rules() + third_party_rules = self.hs.get_module_api_callbacks().third_party_event_rules third_party_rules._on_user_deactivation_status_changed_callbacks.append( deactivation_mock, ) @@ -675,7 +691,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): # deactivation code calls it in a way that let modules know the user is being # deactivated. profile_mock = Mock(return_value=make_awaitable(None)) - self.hs.get_third_party_event_rules()._on_profile_update_callbacks.append( + self.hs.get_module_api_callbacks().third_party_event_rules._on_profile_update_callbacks.append( profile_mock, ) @@ -725,7 +741,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): """ # Register a mock callback. m = Mock(return_value=make_awaitable(None)) - third_party_rules = self.hs.get_third_party_event_rules() + third_party_rules = self.hs.get_module_api_callbacks().third_party_event_rules third_party_rules._on_user_deactivation_status_changed_callbacks.append(m) # Register an admin user. @@ -779,7 +795,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): """ # Register a mocked callback. deactivation_mock = Mock(return_value=make_awaitable(False)) - third_party_rules = self.hs.get_third_party_event_rules() + third_party_rules = self.hs.get_module_api_callbacks().third_party_event_rules third_party_rules._check_can_deactivate_user_callbacks.append( deactivation_mock, ) @@ -825,7 +841,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): """ # Register a mocked callback. deactivation_mock = Mock(return_value=make_awaitable(False)) - third_party_rules = self.hs.get_third_party_event_rules() + third_party_rules = self.hs.get_module_api_callbacks().third_party_event_rules third_party_rules._check_can_deactivate_user_callbacks.append( deactivation_mock, ) @@ -864,7 +880,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): """ # Register a mocked callback. shutdown_mock = Mock(return_value=make_awaitable(False)) - third_party_rules = self.hs.get_third_party_event_rules() + third_party_rules = self.hs.get_module_api_callbacks().third_party_event_rules third_party_rules._check_can_shutdown_room_callbacks.append( shutdown_mock, ) @@ -900,7 +916,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): """ # Register a mocked callback. threepid_bind_mock = Mock(return_value=make_awaitable(None)) - third_party_rules = self.hs.get_third_party_event_rules() + third_party_rules = self.hs.get_module_api_callbacks().third_party_event_rules third_party_rules._on_threepid_bind_callbacks.append(threepid_bind_mock) # Register an admin user. 
@@ -947,8 +963,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): on_remove_user_third_party_identifier_callback_mock = Mock( return_value=make_awaitable(None) ) - third_party_rules = self.hs.get_third_party_event_rules() - third_party_rules.register_third_party_rules_callbacks( + self.hs.get_module_api().register_third_party_rules_callbacks( on_add_user_third_party_identifier=on_add_user_third_party_identifier_callback_mock, on_remove_user_third_party_identifier=on_remove_user_third_party_identifier_callback_mock, ) @@ -1009,8 +1024,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase): on_remove_user_third_party_identifier_callback_mock = Mock( return_value=make_awaitable(None) ) - third_party_rules = self.hs.get_third_party_event_rules() - third_party_rules.register_third_party_rules_callbacks( + self.hs.get_module_api().register_third_party_rules_callbacks( on_remove_user_third_party_identifier=on_remove_user_third_party_identifier_callback_mock, ) diff --git a/tests/server.py b/tests/server.py index a49dc90e32..7296f0a552 100644 --- a/tests/server.py +++ b/tests/server.py @@ -73,11 +73,13 @@ from twisted.web.server import Request, Site from synapse.config.database import DatabaseConnectionConfig from synapse.config.homeserver import HomeServerConfig from synapse.events.presence_router import load_legacy_presence_router -from synapse.events.third_party_rules import load_legacy_third_party_event_rules from synapse.handlers.auth import load_legacy_password_auth_providers from synapse.http.site import SynapseRequest from synapse.logging.context import ContextResourceUsage from synapse.module_api.callbacks.spamchecker_callbacks import load_legacy_spam_checkers +from synapse.module_api.callbacks.third_party_event_rules_callbacks import ( + load_legacy_third_party_event_rules, +) from synapse.server import HomeServer from synapse.storage import DataStore from synapse.storage.database import LoggingDatabaseConnection -- cgit 1.5.1 From e46d5f3586025a491d11a31ce2be4c540c38d404 Mon Sep 17 00:00:00 2001 From: Sean Quah <8349537+squahtx@users.noreply.github.com> Date: Fri, 5 May 2023 15:06:22 +0100 Subject: Factor out an `is_mine_server_name` method (#15542) Add an `is_mine_server_name` method, similar to `is_mine_id`. Ideally we would use this consistently, instead of sometimes comparing against `hs.hostname` and other times reaching into `hs.config.server.server_name`. Also fix a bug in the tests where `hs.hostname` would sometimes differ from `hs.config.server.server_name`. 
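As an illustrative aside (not part of the patch itself), here is a minimal sketch of the new helper and a typical call-site change, using only names that appear in the diff below:

```python
class HomeServer:
    """Sketch of the relevant slice of synapse.server.HomeServer."""

    hostname: str  # this homeserver's own server name, e.g. "example.com"

    def is_mine_server_name(self, server_name: str) -> bool:
        """Determines whether a server name refers to this homeserver."""
        return server_name == self.hostname


# Call sites then ask the homeserver rather than comparing strings by hand:
#
#     before: if destination == self.server_name: ...
#     after:  if self._is_mine_server_name(destination): ...
```
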
Signed-off-by: Sean Quah --- changelog.d/15542.misc | 1 + synapse/api/auth_blocking.py | 4 ++-- synapse/crypto/keyring.py | 4 ++-- synapse/federation/federation_base.py | 2 +- synapse/federation/federation_client.py | 4 ++-- synapse/federation/federation_server.py | 3 ++- synapse/federation/send_queue.py | 3 ++- synapse/federation/sender/__init__.py | 11 ++++++----- synapse/federation/transport/client.py | 4 ++-- synapse/federation/transport/server/_base.py | 5 ++++- synapse/handlers/event_auth.py | 5 +++-- synapse/handlers/federation.py | 3 ++- synapse/handlers/federation_event.py | 3 ++- synapse/handlers/profile.py | 4 ++-- synapse/handlers/sso.py | 3 ++- synapse/handlers/typing.py | 3 ++- synapse/rest/admin/media.py | 4 ++-- synapse/rest/client/room.py | 4 ++-- synapse/rest/media/download_resource.py | 4 ++-- synapse/rest/media/thumbnail_resource.py | 4 ++-- synapse/server.py | 4 ++++ synapse/storage/databases/main/room.py | 2 +- tests/unittest.py | 16 ++++++++++++++-- 23 files changed, 64 insertions(+), 36 deletions(-) create mode 100644 changelog.d/15542.misc (limited to 'synapse/rest') diff --git a/changelog.d/15542.misc b/changelog.d/15542.misc new file mode 100644 index 0000000000..32e3d678a1 --- /dev/null +++ b/changelog.d/15542.misc @@ -0,0 +1 @@ +Factor out an `is_mine_server_name` method. diff --git a/synapse/api/auth_blocking.py b/synapse/api/auth_blocking.py index 22348d2d86..fcf5b842c6 100644 --- a/synapse/api/auth_blocking.py +++ b/synapse/api/auth_blocking.py @@ -39,7 +39,7 @@ class AuthBlocking: self._mau_limits_reserved_threepids = ( hs.config.server.mau_limits_reserved_threepids ) - self._server_name = hs.hostname + self._is_mine_server_name = hs.is_mine_server_name self._track_appservice_user_ips = hs.config.appservice.track_appservice_user_ips async def check_auth_blocking( @@ -77,7 +77,7 @@ class AuthBlocking: if requester: if requester.authenticated_entity.startswith("@"): user_id = requester.authenticated_entity - elif requester.authenticated_entity == self._server_name: + elif self._is_mine_server_name(requester.authenticated_entity): # We never block the server from doing actions on behalf of # users. return diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index afdf6863d6..260aab3241 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -173,7 +173,7 @@ class Keyring: process_batch_callback=self._inner_fetch_key_requests, ) - self._hostname = hs.hostname + self._is_mine_server_name = hs.is_mine_server_name # build a FetchKeyResult for each of our own keys, to shortcircuit the # fetcher. 
@@ -277,7 +277,7 @@ class Keyring: # If we are the originating server, short-circuit the key-fetch for any keys # we already have - if verify_request.server_name == self._hostname: + if self._is_mine_server_name(verify_request.server_name): for key_id in verify_request.key_ids: if key_id in self._local_verify_keys: found_keys[key_id] = self._local_verify_keys[key_id] diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index 3df975958d..b77022b406 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -49,7 +49,7 @@ class FederationBase: def __init__(self, hs: "HomeServer"): self.hs = hs - self.server_name = hs.hostname + self._is_mine_server_name = hs.is_mine_server_name self.keyring = hs.get_keyring() self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker self.store = hs.get_datastores().main diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 0b2d1a78f7..076b9287c6 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -854,7 +854,7 @@ class FederationClient(FederationBase): for destination in destinations: # We don't want to ask our own server for information we don't have - if destination == self.server_name: + if self._is_mine_server_name(destination): continue try: @@ -1536,7 +1536,7 @@ class FederationClient(FederationBase): self, destinations: Iterable[str], room_id: str, event_dict: JsonDict ) -> None: for destination in destinations: - if destination == self.server_name: + if self._is_mine_server_name(destination): continue try: diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index ca43c7bfc0..c590d8f96f 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -129,6 +129,7 @@ class FederationServer(FederationBase): def __init__(self, hs: "HomeServer"): super().__init__(hs) + self.server_name = hs.hostname self.handler = hs.get_federation_handler() self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker self._federation_event_handler = hs.get_federation_event_handler() @@ -942,7 +943,7 @@ class FederationServer(FederationBase): authorising_server = get_domain_from_id( event.content[EventContentFields.AUTHORISING_USER] ) - if authorising_server != self.server_name: + if not self._is_mine_server_name(authorising_server): raise SynapseError( 400, f"Cannot authorise request from resident server: {authorising_server}", diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index 0b7c81677e..fb448f2155 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -68,6 +68,7 @@ class FederationRemoteSendQueue(AbstractFederationSender): self.clock = hs.get_clock() self.notifier = hs.get_notifier() self.is_mine_id = hs.is_mine_id + self.is_mine_server_name = hs.is_mine_server_name # We may have multiple federation sender instances, so we need to track # their positions separately. 
@@ -198,7 +199,7 @@ class FederationRemoteSendQueue(AbstractFederationSender): key: Optional[Hashable] = None, ) -> None: """As per FederationSender""" - if destination == self.server_name: + if self.is_mine_server_name(destination): logger.info("Not sending EDU to ourselves") return diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py index edc4b1768c..f3bdc5a4d2 100644 --- a/synapse/federation/sender/__init__.py +++ b/synapse/federation/sender/__init__.py @@ -362,6 +362,7 @@ class FederationSender(AbstractFederationSender): self.clock = hs.get_clock() self.is_mine_id = hs.is_mine_id + self.is_mine_server_name = hs.is_mine_server_name self._presence_router: Optional["PresenceRouter"] = None self._transaction_manager = TransactionManager(hs) @@ -766,7 +767,7 @@ class FederationSender(AbstractFederationSender): domains = [ d for d in domains_set - if d != self.server_name + if not self.is_mine_server_name(d) and self._federation_shard_config.should_handle(self._instance_name, d) ] if not domains: @@ -832,7 +833,7 @@ class FederationSender(AbstractFederationSender): assert self.is_mine_id(state.user_id) for destination in destinations: - if destination == self.server_name: + if self.is_mine_server_name(destination): continue if not self._federation_shard_config.should_handle( self._instance_name, destination @@ -860,7 +861,7 @@ class FederationSender(AbstractFederationSender): content: content of EDU key: clobbering key for this edu """ - if destination == self.server_name: + if self.is_mine_server_name(destination): logger.info("Not sending EDU to ourselves") return @@ -897,7 +898,7 @@ class FederationSender(AbstractFederationSender): queue.send_edu(edu) def send_device_messages(self, destination: str, immediate: bool = True) -> None: - if destination == self.server_name: + if self.is_mine_server_name(destination): logger.warning("Not sending device update to ourselves") return @@ -919,7 +920,7 @@ class FederationSender(AbstractFederationSender): might have come back. """ - if destination == self.server_name: + if self.is_mine_server_name(destination): logger.warning("Not waking up ourselves") return diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index bc70b94f68..d2fa9976da 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -58,9 +58,9 @@ class TransportLayerClient: """Sends federation HTTP requests to other servers""" def __init__(self, hs: "HomeServer"): - self.server_name = hs.hostname self.client = hs.get_federation_http_client() self._faster_joins_enabled = hs.config.experimental.faster_joins_enabled + self._is_mine_server_name = hs.is_mine_server_name async def get_room_state_ids( self, destination: str, room_id: str, event_id: str @@ -235,7 +235,7 @@ class TransportLayerClient: transaction.transaction_id, ) - if transaction.destination == self.server_name: + if self._is_mine_server_name(transaction.destination): raise RuntimeError("Transport layer cannot send to itself!") # FIXME: This is only used by the tests. 
The actual json sent is diff --git a/synapse/federation/transport/server/_base.py b/synapse/federation/transport/server/_base.py index cdaf0d5de7..b6e9c58760 100644 --- a/synapse/federation/transport/server/_base.py +++ b/synapse/federation/transport/server/_base.py @@ -57,6 +57,7 @@ class Authenticator: self._clock = hs.get_clock() self.keyring = hs.get_keyring() self.server_name = hs.hostname + self._is_mine_server_name = hs.is_mine_server_name self.store = hs.get_datastores().main self.federation_domain_whitelist = ( hs.config.federation.federation_domain_whitelist @@ -100,7 +101,9 @@ class Authenticator: json_request["signatures"].setdefault(origin, {})[key] = sig # if the origin_server sent a destination along it needs to match our own server_name - if destination is not None and destination != self.server_name: + if destination is not None and not self._is_mine_server_name( + destination + ): raise AuthenticationError( HTTPStatus.UNAUTHORIZED, "Destination mismatch in auth header", diff --git a/synapse/handlers/event_auth.py b/synapse/handlers/event_auth.py index 0db0bd7304..3e37c0cbe2 100644 --- a/synapse/handlers/event_auth.py +++ b/synapse/handlers/event_auth.py @@ -29,7 +29,7 @@ from synapse.event_auth import ( ) from synapse.events import EventBase from synapse.events.builder import EventBuilder -from synapse.types import StateMap, StrCollection, get_domain_from_id +from synapse.types import StateMap, StrCollection if TYPE_CHECKING: from synapse.server import HomeServer @@ -47,6 +47,7 @@ class EventAuthHandler: self._store = hs.get_datastores().main self._state_storage_controller = hs.get_storage_controllers().state self._server_name = hs.hostname + self._is_mine_id = hs.is_mine_id async def check_auth_rules_from_context( self, @@ -247,7 +248,7 @@ class EventAuthHandler: if not await self.is_user_in_rooms(allowed_rooms, user_id): # If this is a remote request, the user might be in an allowed room # that we do not know about. 
- if get_domain_from_id(user_id) != self._server_name: + if not self._is_mine_id(user_id): for room_id in allowed_rooms: if not await self._store.is_host_joined(room_id, self._server_name): raise SynapseError( diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 4ad808a5b4..19dec4812f 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -141,6 +141,7 @@ class FederationHandler: self.server_name = hs.hostname self.keyring = hs.get_keyring() self.is_mine_id = hs.is_mine_id + self.is_mine_server_name = hs.is_mine_server_name self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker self.event_creation_handler = hs.get_event_creation_handler() self.event_builder_factory = hs.get_event_builder_factory() @@ -453,7 +454,7 @@ class FederationHandler: for dom in domains: # We don't want to ask our own server for information we don't have - if dom == self.server_name: + if self.is_mine_server_name(dom): continue try: diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py index fc15024166..06343d40e4 100644 --- a/synapse/handlers/federation_event.py +++ b/synapse/handlers/federation_event.py @@ -163,6 +163,7 @@ class FederationEventHandler: self._notifier = hs.get_notifier() self._is_mine_id = hs.is_mine_id + self._is_mine_server_name = hs.is_mine_server_name self._server_name = hs.hostname self._instance_name = hs.get_instance_name() @@ -688,7 +689,7 @@ class FederationEventHandler: server from invalid events (there is probably no point in trying to re-fetch invalid events from every other HS in the room.) """ - if dest == self._server_name: + if self._is_mine_server_name(dest): raise SynapseError(400, "Can't backfill from self.") events = await self._federation_client.backfill( diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 983b9b66fb..48f9858931 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -59,7 +59,7 @@ class ProfileHandler: self.max_avatar_size = hs.config.server.max_avatar_size self.allowed_avatar_mimetypes = hs.config.server.allowed_avatar_mimetypes - self.server_name = hs.config.server.server_name + self._is_mine_server_name = hs.is_mine_server_name self._third_party_rules = hs.get_module_api_callbacks().third_party_event_rules @@ -309,7 +309,7 @@ class ProfileHandler: else: server_name = host - if server_name == self.server_name: + if self._is_mine_server_name(server_name): media_info = await self.store.get_local_media(media_id) else: media_info = await self.store.get_cached_remote_media(server_name, media_id) diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py index c28325323c..92c3742625 100644 --- a/synapse/handlers/sso.py +++ b/synapse/handlers/sso.py @@ -194,6 +194,7 @@ class SsoHandler: self._clock = hs.get_clock() self._store = hs.get_datastores().main self._server_name = hs.hostname + self._is_mine_server_name = hs.is_mine_server_name self._registration_handler = hs.get_registration_handler() self._auth_handler = hs.get_auth_handler() self._device_handler = hs.get_device_handler() @@ -802,7 +803,7 @@ class SsoHandler: if profile["avatar_url"] is not None: server_name = profile["avatar_url"].split("/")[-2] media_id = profile["avatar_url"].split("/")[-1] - if server_name == self._server_name: + if self._is_mine_server_name(server_name): media = await self._media_repo.store.get_local_media(media_id) if media is not None and upload_name == media["upload_name"]: logger.info("skipping saving the user 
avatar") diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index 39ae44ea95..7aeae5319c 100644 --- a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -68,6 +68,7 @@ class FollowerTypingHandler: self.server_name = hs.config.server.server_name self.clock = hs.get_clock() self.is_mine_id = hs.is_mine_id + self.is_mine_server_name = hs.is_mine_server_name self.federation = None if hs.should_send_federation(): @@ -153,7 +154,7 @@ class FollowerTypingHandler: member.room_id ) for domain in hosts: - if domain != self.server_name: + if not self.is_mine_server_name(domain): logger.debug("sending typing update to %s", domain) self.federation.build_and_send_edu( destination=domain, diff --git a/synapse/rest/admin/media.py b/synapse/rest/admin/media.py index c134ccfb3d..b7637dff0b 100644 --- a/synapse/rest/admin/media.py +++ b/synapse/rest/admin/media.py @@ -258,7 +258,7 @@ class DeleteMediaByID(RestServlet): def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main self.auth = hs.get_auth() - self.server_name = hs.hostname + self._is_mine_server_name = hs.is_mine_server_name self.media_repository = hs.get_media_repository() async def on_DELETE( @@ -266,7 +266,7 @@ class DeleteMediaByID(RestServlet): ) -> Tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) - if self.server_name != server_name: + if not self._is_mine_server_name(server_name): raise SynapseError(HTTPStatus.BAD_REQUEST, "Can only delete local media") if await self.store.get_local_media(media_id) is None: diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index 7699cc8d1b..951bd033f5 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -501,7 +501,7 @@ class PublicRoomListRestServlet(RestServlet): limit = None handler = self.hs.get_room_list_handler() - if server and server != self.hs.config.server.server_name: + if server and not self.hs.is_mine_server_name(server): # Ensure the server is valid. try: parse_and_validate_server_name(server) @@ -551,7 +551,7 @@ class PublicRoomListRestServlet(RestServlet): limit = None handler = self.hs.get_room_list_handler() - if server and server != self.hs.config.server.server_name: + if server and not self.hs.is_mine_server_name(server): # Ensure the server is valid. 
try: parse_and_validate_server_name(server) diff --git a/synapse/rest/media/download_resource.py b/synapse/rest/media/download_resource.py index 8f270cf4cc..3c618ef60a 100644 --- a/synapse/rest/media/download_resource.py +++ b/synapse/rest/media/download_resource.py @@ -37,7 +37,7 @@ class DownloadResource(DirectServeJsonResource): def __init__(self, hs: "HomeServer", media_repo: "MediaRepository"): super().__init__() self.media_repo = media_repo - self.server_name = hs.hostname + self._is_mine_server_name = hs.is_mine_server_name async def _async_render_GET(self, request: SynapseRequest) -> None: set_cors_headers(request) @@ -59,7 +59,7 @@ class DownloadResource(DirectServeJsonResource): b"no-referrer", ) server_name, media_id, name = parse_media_id(request) - if server_name == self.server_name: + if self._is_mine_server_name(server_name): await self.media_repo.get_local_media(request, media_id, name) else: allow_remote = parse_boolean(request, "allow_remote", default=True) diff --git a/synapse/rest/media/thumbnail_resource.py b/synapse/rest/media/thumbnail_resource.py index 4ee2a0dbda..a6396fb05a 100644 --- a/synapse/rest/media/thumbnail_resource.py +++ b/synapse/rest/media/thumbnail_resource.py @@ -59,7 +59,7 @@ class ThumbnailResource(DirectServeJsonResource): self.media_repo = media_repo self.media_storage = media_storage self.dynamic_thumbnails = hs.config.media.dynamic_thumbnails - self.server_name = hs.hostname + self._is_mine_server_name = hs.is_mine_server_name async def _async_render_GET(self, request: SynapseRequest) -> None: set_cors_headers(request) @@ -71,7 +71,7 @@ class ThumbnailResource(DirectServeJsonResource): # TODO Parse the Accept header to get an prioritised list of thumbnail types. m_type = "image/png" - if server_name == self.server_name: + if self._is_mine_server_name(server_name): if self.dynamic_thumbnails: await self._select_or_generate_local_thumbnail( request, media_id, width, height, method, m_type diff --git a/synapse/server.py b/synapse/server.py index c557c60482..fd29c28173 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -377,6 +377,10 @@ class HomeServer(metaclass=abc.ABCMeta): return False return localpart_hostname[1] == self.hostname + def is_mine_server_name(self, server_name: str) -> bool: + """Determines whether a server name refers to this homeserver.""" + return server_name == self.hostname + @cache_in_self def get_clock(self) -> Clock: return Clock(self._reactor) diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py index dd7dbb6901..ca8be8c80d 100644 --- a/synapse/storage/databases/main/room.py +++ b/synapse/storage/databases/main/room.py @@ -996,7 +996,7 @@ class RoomWorkerStore(CacheInvalidationWorkerStore): If it is `None` media will be removed from quarantine """ logger.info("Quarantining media: %s/%s", server_name, media_id) - is_local = server_name == self.config.server.server_name + is_local = self.hs.is_mine_server_name(server_name) def _quarantine_media_by_id_txn(txn: LoggingTransaction) -> int: local_mxcs = [media_id] if is_local else [] diff --git a/tests/unittest.py b/tests/unittest.py index ee2f78ab01..b6fdf69635 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -566,7 +566,9 @@ class HomeserverTestCase(TestCase): client_ip, ) - def setup_test_homeserver(self, *args: Any, **kwargs: Any) -> HomeServer: + def setup_test_homeserver( + self, name: Optional[str] = None, **kwargs: Any + ) -> HomeServer: """ Set up the test homeserver, meant to be called by the overridable 
make_homeserver. It automatically passes through the test class's @@ -585,15 +587,25 @@ class HomeserverTestCase(TestCase): else: config = kwargs["config"] + # The server name can be specified using either the `name` argument or a config + # override. The `name` argument takes precedence over any config overrides. + if name is not None: + config["server_name"] = name + # Parse the config from a config dict into a HomeServerConfig config_obj = make_homeserver_config_obj(config) kwargs["config"] = config_obj + # The server name in the config is now `name`, if provided, or the `server_name` + # from a config override, or the default of "test". Whichever it is, we + # construct a homeserver with a matching name. + kwargs["name"] = config_obj.server.server_name + async def run_bg_updates() -> None: with LoggingContext("run_bg_updates"): self.get_success(stor.db_pool.updates.run_background_updates(False)) - hs = setup_test_homeserver(self.addCleanup, *args, **kwargs) + hs = setup_test_homeserver(self.addCleanup, **kwargs) stor = hs.get_datastores().main # Run the database background updates, when running against "master". -- cgit 1.5.1 From 7c95b65873c7a858388b9c99c7e9e15dc5ccb2b5 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 5 May 2023 15:51:46 +0100 Subject: Clean up and clarify "Create or modify Account" Admin API documentation (#15544) --- changelog.d/15544.doc | 1 + docs/admin_api/user_admin_api.md | 87 +++++++++++++++++-------------- synapse/handlers/profile.py | 4 +- synapse/rest/admin/users.py | 2 +- synapse/storage/databases/main/profile.py | 16 ++++++ synapse/util/msisdn.py | 6 ++- 6 files changed, 74 insertions(+), 42 deletions(-) create mode 100644 changelog.d/15544.doc (limited to 'synapse/rest') diff --git a/changelog.d/15544.doc b/changelog.d/15544.doc new file mode 100644 index 0000000000..a6d1e96900 --- /dev/null +++ b/changelog.d/15544.doc @@ -0,0 +1 @@ +Clarify documentation of the "Create or modify account" Admin API. \ No newline at end of file diff --git a/docs/admin_api/user_admin_api.md b/docs/admin_api/user_admin_api.md index 86c29ab380..6b952ba396 100644 --- a/docs/admin_api/user_admin_api.md +++ b/docs/admin_api/user_admin_api.md @@ -62,7 +62,7 @@ URL parameters: - `user_id`: fully-qualified user id: for example, `@user:server.com`. -## Create or modify Account +## Create or modify account This API allows an administrator to create or modify a user account with a specific `user_id`. @@ -78,28 +78,29 @@ with a body of: ```json { "password": "user_password", - "displayname": "User", + "logout_devices": false, + "displayname": "Alice Marigold", + "avatar_url": "mxc://example.com/abcde12345", "threepids": [ { "medium": "email", - "address": "" + "address": "alice@example.com" }, { "medium": "email", - "address": "" + "address": "alice@domain.org" } ], "external_ids": [ { - "auth_provider": "", - "external_id": "" + "auth_provider": "example", + "external_id": "12345" }, { - "auth_provider": "", - "external_id": "" + "auth_provider": "example2", + "external_id": "abc54321" } ], - "avatar_url": "", "admin": false, "deactivated": false, "user_type": null @@ -112,41 +113,51 @@ Returns HTTP status code: URL parameters: -- `user_id`: fully-qualified user id: for example, `@user:server.com`. +- `user_id` - A fully-qualified user id. For example, `@user:server.com`. Body parameters: -- `password` - string, optional. If provided, the user's password is updated and all +- `password` - **string**, optional. 
If provided, the user's password is updated and all devices are logged out, unless `logout_devices` is set to `false`. -- `logout_devices` - bool, optional, defaults to `true`. If set to false, devices aren't +- `logout_devices` - **bool**, optional, defaults to `true`. If set to `false`, devices aren't logged out even when `password` is provided. -- `displayname` - string, optional, defaults to the value of `user_id`. -- `threepids` - array, optional, allows setting the third-party IDs (email, msisdn) - - `medium` - string. Kind of third-party ID, either `email` or `msisdn`. - - `address` - string. Value of third-party ID. - belonging to a user. -- `external_ids` - array, optional. Allow setting the identifier of the external identity - provider for SSO (Single sign-on). Details in the configuration manual under the - sections [sso](../usage/configuration/config_documentation.md#sso) and [oidc_providers](../usage/configuration/config_documentation.md#oidc_providers). - - `auth_provider` - string. ID of the external identity provider. Value of `idp_id` - in the homeserver configuration. Note that no error is raised if the provided - value is not in the homeserver configuration. - - `external_id` - string, user ID in the external identity provider. -- `avatar_url` - string, optional, must be a +- `displayname` - **string**, optional. If set to an empty string (`""`), the user's display name + will be removed. +- `avatar_url` - **string**, optional. Must be a [MXC URI](https://matrix.org/docs/spec/client_server/r0.6.0#matrix-content-mxc-uris). -- `admin` - bool, optional, defaults to `false`. -- `deactivated` - bool, optional. If unspecified, deactivation state will be left - unchanged on existing accounts and set to `false` for new accounts. - A user cannot be erased by deactivating with this API. For details on - deactivating users see [Deactivate Account](#deactivate-account). -- `user_type` - string or null, optional. If provided, the user type will be - adjusted. If `null` given, the user type will be cleared. Other - allowed options are: `bot` and `support`. - -If the user already exists then optional parameters default to the current value. - -In order to re-activate an account `deactivated` must be set to `false`. If -users do not login via single-sign-on, a new `password` must be provided. + If set to an empty string (`""`), the user's avatar is removed. +- `threepids` - **array**, optional. If provided, the user's third-party IDs (email, msisdn) are + entirely replaced with the given list. Each item in the array is an object with the following + fields: + - `medium` - **string**, required. The type of third-party ID, either `email` or `msisdn` (phone number). + - `address` - **string**, required. The third-party ID itself, e.g. `alice@example.com` for `email` or + `447470274584` (for a phone number with country code "44") and `19254857364` (for a phone number + with country code "1") for `msisdn`. + Note: If a threepid is removed from a user via this option, Synapse will also attempt to remove + that threepid from any identity servers it is aware has a binding for it. +- `external_ids` - **array**, optional. Allow setting the identifier of the external identity + provider for SSO (Single sign-on). More details are in the configuration manual under the + sections [sso](../usage/configuration/config_documentation.md#sso) and [oidc_providers](../usage/configuration/config_documentation.md#oidc_providers). + - `auth_provider` - **string**, required. 
The unique, internal ID of the external identity provider.
+    The same as `idp_id` from the homeserver configuration. Note that no error is raised if the
+    provided value is not in the homeserver configuration.
+  - `external_id` - **string**, required. An identifier for the user in the external identity provider.
+    When the user logs in to the identity provider, this must be the unique ID that they map to.
+- `admin` - **bool**, optional, defaults to `false`. Whether the user is a homeserver administrator,
+  granting them access to the Admin API, among other things.
+- `deactivated` - **bool**, optional. If unspecified, deactivation state will be left unchanged.
+
+  Note: the `password` field must also be set if both of the following are true:
+  - `deactivated` is set to `false` and the user was previously deactivated (you are reactivating this user)
+  - Users are allowed to set their password on this homeserver (both `password_config.enabled` and
+    `password_config.localdb_enabled` config options are set to `true`).
+  Users' passwords are wiped upon account deactivation, hence the need to set a new one here.
+
+  Note: a user cannot be erased with this API. For more details on
+  deactivating and erasing users see [Deactivate Account](#deactivate-account).
+- `user_type` - **string** or null, optional. If not provided, the user type will
+  not be changed. If `null` is given, the user type will be cleared.
+  Other allowed options are: `bot` and `support`.
 
 ## List Accounts
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index 48f9858931..a9160c87e3 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -170,8 +170,8 @@ class ProfileHandler:
             displayname_to_set = None
         # If the admin changes the display name of a user, the requesting user cannot send
-        # the join event to update the displayname in the rooms.
-        # This must be done by the target user himself.
+        # the join event to update the display name in the rooms.
+        # This must be done by the target user themselves.
         if by_admin:
             requester = create_requester(
                 target_user,
diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py
index 331f225116..932333ae57 100644
--- a/synapse/rest/admin/users.py
+++ b/synapse/rest/admin/users.py
@@ -336,7 +336,7 @@ class UserRestServletV2(RestServlet):
                         HTTPStatus.CONFLICT, "External id is already in use."
                     )
-            if "avatar_url" in body and isinstance(body["avatar_url"], str):
+            if "avatar_url" in body:
                 await self.profile_handler.set_avatar_url(
                     target_user, requester, body["avatar_url"], True
                 )
diff --git a/synapse/storage/databases/main/profile.py b/synapse/storage/databases/main/profile.py
index b109f8c07f..c4022d2427 100644
--- a/synapse/storage/databases/main/profile.py
+++ b/synapse/storage/databases/main/profile.py
@@ -85,6 +85,14 @@ class ProfileWorkerStore(SQLBaseStore):
     async def set_profile_displayname(
         self, user_id: UserID, new_displayname: Optional[str]
     ) -> None:
+        """
+        Set the display name of a user.
+
+        Args:
+            user_id: The user's ID.
+            new_displayname: The new display name. If this is None, the user's display
+                name is removed.
+        """
         user_localpart = user_id.localpart
         await self.db_pool.simple_upsert(
             table="profiles",
@@ -99,6 +107,14 @@ class ProfileWorkerStore(SQLBaseStore):
     async def set_profile_avatar_url(
         self, user_id: UserID, new_avatar_url: Optional[str]
     ) -> None:
+        """
+        Set the avatar of a user.
+
+        Args:
+            user_id: The user's ID.
+            new_avatar_url: The new avatar URL. If this is None, the user's avatar is
+                removed.
+ """ user_localpart = user_id.localpart await self.db_pool.simple_upsert( table="profiles", diff --git a/synapse/util/msisdn.py b/synapse/util/msisdn.py index 1046224f15..3721a1558e 100644 --- a/synapse/util/msisdn.py +++ b/synapse/util/msisdn.py @@ -22,12 +22,16 @@ def phone_number_to_msisdn(country: str, number: str) -> str: Takes an ISO-3166-1 2 letter country code and phone number and returns an msisdn representing the canonical version of that phone number. + + As an example, if `country` is "GB" and `number` is "7470674927", this + function will return "447470674927". + Args: country: ISO-3166-1 2 letter country code number: Phone number in a national or international format Returns: - The canonical form of the phone number, as an msisdn + The canonical form of the phone number, as an msisdn. Raises: SynapseError if the number could not be parsed. """ -- cgit 1.5.1 From 2bfe3f0b8193b62a92975b1f89f6b2e0eb643091 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 9 May 2023 07:23:27 -0400 Subject: Use account data constants in more places. (#15554) --- changelog.d/15554.misc | 1 + synapse/api/constants.py | 1 + synapse/handlers/read_marker.py | 5 +++-- synapse/handlers/sync.py | 12 ++++++------ synapse/rest/client/account_data.py | 3 ++- 5 files changed, 13 insertions(+), 9 deletions(-) create mode 100644 changelog.d/15554.misc (limited to 'synapse/rest') diff --git a/changelog.d/15554.misc b/changelog.d/15554.misc new file mode 100644 index 0000000000..002e3f5315 --- /dev/null +++ b/changelog.d/15554.misc @@ -0,0 +1 @@ +Use account data constants in more places. diff --git a/synapse/api/constants.py b/synapse/api/constants.py index c56b2f2561..cde9a2ecef 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -257,6 +257,7 @@ class AccountDataTypes: DIRECT: Final = "m.direct" IGNORED_USER_LIST: Final = "m.ignored_user_list" TAG: Final = "m.tag" + PUSH_RULES: Final = "m.push_rules" class HistoryVisibility: diff --git a/synapse/handlers/read_marker.py b/synapse/handlers/read_marker.py index 05122fd5a6..6d35e61880 100644 --- a/synapse/handlers/read_marker.py +++ b/synapse/handlers/read_marker.py @@ -15,6 +15,7 @@ import logging from typing import TYPE_CHECKING +from synapse.api.constants import ReceiptTypes from synapse.util.async_helpers import Linearizer if TYPE_CHECKING: @@ -42,7 +43,7 @@ class ReadMarkerHandler: async with self.read_marker_linearizer.queue((room_id, user_id)): existing_read_marker = await self.store.get_account_data_for_room_and_type( - user_id, room_id, "m.fully_read" + user_id, room_id, ReceiptTypes.FULLY_READ ) should_update = True @@ -56,5 +57,5 @@ class ReadMarkerHandler: if should_update: content = {"event_id": event_id} await self.account_data_handler.add_account_data_to_room( - user_id, room_id, "m.fully_read", content + user_id, room_id, ReceiptTypes.FULLY_READ, content ) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 64d298408d..cc05b0afa0 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1777,18 +1777,18 @@ class SyncHandler: if push_rules_changed: global_account_data = dict(global_account_data) - global_account_data["m.push_rules"] = await self.push_rules_for_user( - sync_config.user - ) + global_account_data[ + AccountDataTypes.PUSH_RULES + ] = await self.push_rules_for_user(sync_config.user) else: all_global_account_data = await self.store.get_global_account_data_for_user( user_id ) global_account_data = dict(all_global_account_data) - global_account_data["m.push_rules"] = await 
self.push_rules_for_user( - sync_config.user - ) + global_account_data[ + AccountDataTypes.PUSH_RULES + ] = await self.push_rules_for_user(sync_config.user) account_data_for_user = ( await sync_config.filter_collection.filter_global_account_data( diff --git a/synapse/rest/client/account_data.py b/synapse/rest/client/account_data.py index 43193ad086..8eebb21c76 100644 --- a/synapse/rest/client/account_data.py +++ b/synapse/rest/client/account_data.py @@ -15,6 +15,7 @@ import logging from typing import TYPE_CHECKING, Tuple +from synapse.api.constants import ReceiptTypes from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_json_object_from_request @@ -166,7 +167,7 @@ class RoomAccountDataServlet(RestServlet): body = parse_json_object_from_request(request) - if account_data_type == "m.fully_read": + if account_data_type == ReceiptTypes.FULLY_READ: raise SynapseError( 405, "Cannot set m.fully_read through this API." -- cgit 1.5.1 From 4b4e0dc3cecbe9ad65c4728c1ec461321d15789f Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 9 May 2023 10:34:10 -0400 Subject: Error if attempting to set m.push_rules account data, per MSC4010. (#15555) m.push_rules, like m.fully_read, is a special account data type that cannot be set using the normal /account_data endpoint. Return an error instead of allowing data that will not be used to be stored. --- changelog.d/15554.bugfix | 1 + changelog.d/15554.misc | 1 - changelog.d/15555.bugfix | 1 + synapse/config/experimental.py | 5 +++ synapse/handlers/push_rules.py | 16 ++++++- synapse/handlers/sync.py | 12 ++---- synapse/push/clientformat.py | 2 +- synapse/rest/client/account_data.py | 85 +++++++++++++++++++++++++++++-------- synapse/rest/client/push_rule.py | 7 +-- 9 files changed, 95 insertions(+), 35 deletions(-) create mode 100644 changelog.d/15554.bugfix delete mode 100644 changelog.d/15554.misc create mode 100644 changelog.d/15555.bugfix (limited to 'synapse/rest') diff --git a/changelog.d/15554.bugfix b/changelog.d/15554.bugfix new file mode 100644 index 0000000000..0fd9de8c65 --- /dev/null +++ b/changelog.d/15554.bugfix @@ -0,0 +1 @@ +Experimental support for [MSC4010](https://github.com/matrix-org/matrix-spec-proposals/pull/4010) which rejects setting the `"m.push_rules"` via account data. diff --git a/changelog.d/15554.misc b/changelog.d/15554.misc deleted file mode 100644 index 002e3f5315..0000000000 --- a/changelog.d/15554.misc +++ /dev/null @@ -1 +0,0 @@ -Use account data constants in more places. diff --git a/changelog.d/15555.bugfix b/changelog.d/15555.bugfix new file mode 100644 index 0000000000..0fd9de8c65 --- /dev/null +++ b/changelog.d/15555.bugfix @@ -0,0 +1 @@ +Experimental support for [MSC4010](https://github.com/matrix-org/matrix-spec-proposals/pull/4010) which rejects setting the `"m.push_rules"` via account data. diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index 514d87cb2c..7af6dbcd09 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -202,3 +202,8 @@ class ExperimentalConfig(Config): # MSC4009: E.164 Matrix IDs self.msc4009_e164_mxids = experimental.get("msc4009_e164_mxids", False) + + # MSC4010: Do not allow setting m.push_rules account data. 
+ self.msc4010_push_rules_account_data = experimental.get( + "msc4010_push_rules_account_data", False + ) diff --git a/synapse/handlers/push_rules.py b/synapse/handlers/push_rules.py index 813f3aa2d5..7ed88a3611 100644 --- a/synapse/handlers/push_rules.py +++ b/synapse/handlers/push_rules.py @@ -11,14 +11,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import TYPE_CHECKING, List, Optional, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union import attr from synapse.api.errors import SynapseError, UnrecognizedRequestError +from synapse.push.clientformat import format_push_rules_for_user from synapse.storage.push_rule import RuleNotFoundException from synapse.synapse_rust.push import get_base_rule_ids -from synapse.types import JsonDict +from synapse.types import JsonDict, UserID if TYPE_CHECKING: from synapse.server import HomeServer @@ -115,6 +116,17 @@ class PushRulesHandler: stream_id = self._main_store.get_max_push_rules_stream_id() self._notifier.on_new_event("push_rules_key", stream_id, users=[user_id]) + async def push_rules_for_user( + self, user: UserID + ) -> Dict[str, Dict[str, List[Dict[str, Any]]]]: + """ + Push rules aren't really account data, but get formatted as such for /sync. + """ + user_id = user.to_string() + rules_raw = await self._main_store.get_push_rules_for_user(user_id) + rules = format_push_rules_for_user(user, rules_raw) + return rules + def check_actions(actions: List[Union[str, JsonDict]]) -> None: """Check if the given actions are spec compliant. diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index cc05b0afa0..c010405be6 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -50,7 +50,6 @@ from synapse.logging.opentracing import ( start_active_span, trace, ) -from synapse.push.clientformat import format_push_rules_for_user from synapse.storage.databases.main.event_push_actions import RoomNotifCounts from synapse.storage.databases.main.roommember import extract_heroes_from_room_summary from synapse.storage.roommember import MemberSummary @@ -261,6 +260,7 @@ class SyncHandler: self.notifier = hs.get_notifier() self.presence_handler = hs.get_presence_handler() self._relations_handler = hs.get_relations_handler() + self._push_rules_handler = hs.get_push_rules_handler() self.event_sources = hs.get_event_sources() self.clock = hs.get_clock() self.state = hs.get_state_handler() @@ -428,12 +428,6 @@ class SyncHandler: set_tag(SynapseTags.SYNC_RESULT, bool(sync_result)) return sync_result - async def push_rules_for_user(self, user: UserID) -> Dict[str, Dict[str, list]]: - user_id = user.to_string() - rules_raw = await self.store.get_push_rules_for_user(user_id) - rules = format_push_rules_for_user(user, rules_raw) - return rules - async def ephemeral_by_room( self, sync_result_builder: "SyncResultBuilder", @@ -1779,7 +1773,7 @@ class SyncHandler: global_account_data = dict(global_account_data) global_account_data[ AccountDataTypes.PUSH_RULES - ] = await self.push_rules_for_user(sync_config.user) + ] = await self._push_rules_handler.push_rules_for_user(sync_config.user) else: all_global_account_data = await self.store.get_global_account_data_for_user( user_id @@ -1788,7 +1782,7 @@ class SyncHandler: global_account_data = dict(all_global_account_data) global_account_data[ AccountDataTypes.PUSH_RULES - ] = await self.push_rules_for_user(sync_config.user) + ] = await 
self._push_rules_handler.push_rules_for_user(sync_config.user) account_data_for_user = ( await sync_config.filter_collection.filter_global_account_data( diff --git a/synapse/push/clientformat.py b/synapse/push/clientformat.py index 222afbdcc8..88b52c26a0 100644 --- a/synapse/push/clientformat.py +++ b/synapse/push/clientformat.py @@ -22,7 +22,7 @@ from synapse.types import UserID def format_push_rules_for_user( user: UserID, ruleslist: FilteredPushRules -) -> Dict[str, Dict[str, list]]: +) -> Dict[str, Dict[str, List[Dict[str, Any]]]]: """Converts a list of rawrules and a enabled map into nested dictionaries to match the Matrix client-server format for push rules""" diff --git a/synapse/rest/client/account_data.py b/synapse/rest/client/account_data.py index 8eebb21c76..b1f9e9dc9b 100644 --- a/synapse/rest/client/account_data.py +++ b/synapse/rest/client/account_data.py @@ -13,9 +13,9 @@ # limitations under the License. import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING, Optional, Tuple -from synapse.api.constants import ReceiptTypes +from synapse.api.constants import AccountDataTypes, ReceiptTypes from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_json_object_from_request @@ -30,6 +30,23 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) +def _check_can_set_account_data_type(account_data_type: str) -> None: + """The fully read marker and push rules cannot be directly set via /account_data.""" + if account_data_type == ReceiptTypes.FULLY_READ: + raise SynapseError( + 405, + "Cannot set m.fully_read through this API." + " Use /rooms/!roomId:server.name/read_markers", + Codes.BAD_JSON, + ) + elif account_data_type == AccountDataTypes.PUSH_RULES: + raise SynapseError( + 405, + "Cannot set m.push_rules through this API. Use /pushrules", + Codes.BAD_JSON, + ) + + class AccountDataServlet(RestServlet): """ PUT /user/{user_id}/account_data/{account_dataType} HTTP/1.1 @@ -47,6 +64,7 @@ class AccountDataServlet(RestServlet): self.auth = hs.get_auth() self.store = hs.get_datastores().main self.handler = hs.get_account_data_handler() + self._push_rules_handler = hs.get_push_rules_handler() async def on_PUT( self, request: SynapseRequest, user_id: str, account_data_type: str @@ -55,6 +73,10 @@ class AccountDataServlet(RestServlet): if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add account data for other users.") + # Raise an error if the account data type cannot be set directly. + if self._hs.config.experimental.msc4010_push_rules_account_data: + _check_can_set_account_data_type(account_data_type) + body = parse_json_object_from_request(request) # If experimental support for MSC3391 is enabled, then providing an empty dict @@ -78,19 +100,28 @@ class AccountDataServlet(RestServlet): if user_id != requester.user.to_string(): raise AuthError(403, "Cannot get account data for other users.") - event = await self.store.get_global_account_data_by_type_for_user( - user_id, account_data_type - ) + # Push rules are stored in a separate table and must be queried separately. 
+ if ( + self._hs.config.experimental.msc4010_push_rules_account_data + and account_data_type == AccountDataTypes.PUSH_RULES + ): + account_data: Optional[ + JsonDict + ] = await self._push_rules_handler.push_rules_for_user(requester.user) + else: + account_data = await self.store.get_global_account_data_by_type_for_user( + user_id, account_data_type + ) - if event is None: + if account_data is None: raise NotFoundError("Account data not found") # If experimental support for MSC3391 is enabled, then this endpoint should # return a 404 if the content for an account data type is an empty dict. - if self._hs.config.experimental.msc3391_enabled and event == {}: + if self._hs.config.experimental.msc3391_enabled and account_data == {}: raise NotFoundError("Account data not found") - return 200, event + return 200, account_data class UnstableAccountDataServlet(RestServlet): @@ -109,6 +140,7 @@ class UnstableAccountDataServlet(RestServlet): def __init__(self, hs: "HomeServer"): super().__init__() + self._hs = hs self.auth = hs.get_auth() self.handler = hs.get_account_data_handler() @@ -122,6 +154,10 @@ class UnstableAccountDataServlet(RestServlet): if user_id != requester.user.to_string(): raise AuthError(403, "Cannot delete account data for other users.") + # Raise an error if the account data type cannot be set directly. + if self._hs.config.experimental.msc4010_push_rules_account_data: + _check_can_set_account_data_type(account_data_type) + await self.handler.remove_account_data_for_user(user_id, account_data_type) return 200, {} @@ -165,9 +201,10 @@ class RoomAccountDataServlet(RestServlet): Codes.INVALID_PARAM, ) - body = parse_json_object_from_request(request) - - if account_data_type == ReceiptTypes.FULLY_READ: + # Raise an error if the account data type cannot be set directly. + if self._hs.config.experimental.msc4010_push_rules_account_data: + _check_can_set_account_data_type(account_data_type) + elif account_data_type == ReceiptTypes.FULLY_READ: raise SynapseError( 405, "Cannot set m.fully_read through this API." @@ -175,6 +212,8 @@ class RoomAccountDataServlet(RestServlet): Codes.BAD_JSON, ) + body = parse_json_object_from_request(request) + # If experimental support for MSC3391 is enabled, then providing an empty dict # as the value for an account data type should be functionally equivalent to # calling the DELETE method on the same type. @@ -209,19 +248,26 @@ class RoomAccountDataServlet(RestServlet): Codes.INVALID_PARAM, ) - event = await self.store.get_account_data_for_room_and_type( - user_id, room_id, account_data_type - ) + # Room-specific push rules are not currently supported. + if ( + self._hs.config.experimental.msc4010_push_rules_account_data + and account_data_type == AccountDataTypes.PUSH_RULES + ): + account_data: Optional[JsonDict] = {} + else: + account_data = await self.store.get_account_data_for_room_and_type( + user_id, room_id, account_data_type + ) - if event is None: + if account_data is None: raise NotFoundError("Room account data not found") # If experimental support for MSC3391 is enabled, then this endpoint should # return a 404 if the content for an account data type is an empty dict. 
- if self._hs.config.experimental.msc3391_enabled and event == {}: + if self._hs.config.experimental.msc3391_enabled and account_data == {}: raise NotFoundError("Room account data not found") - return 200, event + return 200, account_data class UnstableRoomAccountDataServlet(RestServlet): @@ -241,6 +287,7 @@ class UnstableRoomAccountDataServlet(RestServlet): def __init__(self, hs: "HomeServer"): super().__init__() + self._hs = hs self.auth = hs.get_auth() self.handler = hs.get_account_data_handler() @@ -262,6 +309,10 @@ class UnstableRoomAccountDataServlet(RestServlet): Codes.INVALID_PARAM, ) + # Raise an error if the account data type cannot be set directly. + if self._hs.config.experimental.msc4010_push_rules_account_data: + _check_can_set_account_data_type(account_data_type) + await self.handler.remove_account_data_for_room( user_id, room_id, account_data_type ) diff --git a/synapse/rest/client/push_rule.py b/synapse/rest/client/push_rule.py index 1147b6f8ec..5c9fece3ba 100644 --- a/synapse/rest/client/push_rule.py +++ b/synapse/rest/client/push_rule.py @@ -28,7 +28,6 @@ from synapse.http.servlet import ( parse_string, ) from synapse.http.site import SynapseRequest -from synapse.push.clientformat import format_push_rules_for_user from synapse.push.rulekinds import PRIORITY_CLASS_MAP from synapse.rest.client._base import client_patterns from synapse.storage.push_rule import InconsistentRuleException, RuleNotFoundException @@ -146,14 +145,12 @@ class PushRuleRestServlet(RestServlet): async def on_GET(self, request: SynapseRequest, path: str) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) - user_id = requester.user.to_string() + requester.user.to_string() # we build up the full structure and then decide which bits of it # to send which means doing unnecessary work sometimes but is # is probably not going to make a whole lot of difference - rules_raw = await self.store.get_push_rules_for_user(user_id) - - rules = format_push_rules_for_user(requester.user, rules_raw) + rules = await self._push_rules_handler.push_rules_for_user(requester.user) path_parts = path.split("/")[1:] -- cgit 1.5.1 From ab4535b6082db97e8c48a69ea6674fe3b7c5e956 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Tue, 9 May 2023 12:08:51 -0600 Subject: Add config option to prevent media downloads from listed domains. (#15197) This stops media (and thumbnails) from being accessed from the listed domains. It does not delete any already locally cached media, but will prevent accessing it. Note that admin APIs are unaffected by this change. --- changelog.d/15197.feature | 1 + docs/usage/configuration/config_documentation.md | 24 ++++ synapse/config/repository.py | 4 + synapse/media/media_repository.py | 9 ++ synapse/rest/media/thumbnail_resource.py | 9 ++ tests/rest/media/test_domain_blocking.py | 139 +++++++++++++++++++++++ 6 files changed, 186 insertions(+) create mode 100644 changelog.d/15197.feature create mode 100644 tests/rest/media/test_domain_blocking.py (limited to 'synapse/rest') diff --git a/changelog.d/15197.feature b/changelog.d/15197.feature new file mode 100644 index 0000000000..c8a6f114e8 --- /dev/null +++ b/changelog.d/15197.feature @@ -0,0 +1 @@ +Add an option to prevent media downloads from configured domains. 
\ No newline at end of file diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 14c21f73fe..6dd1a639ed 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -1768,6 +1768,30 @@ Example configuration: max_image_pixels: 35M ``` --- +### `prevent_media_downloads_from` + +A list of domains to never download media from. Media from these +domains that is already downloaded will not be deleted, but will be +inaccessible to users. This option does not affect admin APIs trying +to download/operate on media. + +This will not prevent the listed domains from accessing media themselves. +It simply prevents users on this server from downloading media originating +from the listed servers. + +This will have no effect on media originating from the local server. +This only affects media downloaded from other Matrix servers, to +block domains from URL previews see [`url_preview_url_blacklist`](#url_preview_url_blacklist). + +Defaults to an empty list (nothing blocked). + +Example configuration: +```yaml +prevent_media_downloads_from: + - evil.example.org + - evil2.example.org +``` +--- ### `dynamic_thumbnails` Whether to generate new thumbnails on the fly to precisely match diff --git a/synapse/config/repository.py b/synapse/config/repository.py index ecb3edbe3a..655f06505b 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -137,6 +137,10 @@ class ContentRepositoryConfig(Config): self.max_image_pixels = self.parse_size(config.get("max_image_pixels", "32M")) self.max_spider_size = self.parse_size(config.get("max_spider_size", "10M")) + self.prevent_media_downloads_from = config.get( + "prevent_media_downloads_from", [] + ) + self.media_store_path = self.ensure_directory( config.get("media_store_path", "media_store") ) diff --git a/synapse/media/media_repository.py b/synapse/media/media_repository.py index b81e3c2b0c..e81c987b10 100644 --- a/synapse/media/media_repository.py +++ b/synapse/media/media_repository.py @@ -93,6 +93,7 @@ class MediaRepository: self.federation_domain_whitelist = ( hs.config.federation.federation_domain_whitelist ) + self.prevent_media_downloads_from = hs.config.media.prevent_media_downloads_from # List of StorageProviders where we should search for media and # potentially upload to. @@ -276,6 +277,14 @@ class MediaRepository: ): raise FederationDeniedError(server_name) + # Don't let users download media from domains listed in the config, even + # if we might have the media to serve. This is Trust & Safety tooling to + # block some servers' media from being accessible to local users. + # See `prevent_media_downloads_from` config docs for more info. 
+ if server_name in self.prevent_media_downloads_from: + respond_404(request) + return + self.mark_recently_accessed(server_name, media_id) # We linearize here to ensure that we don't try and download remote diff --git a/synapse/rest/media/thumbnail_resource.py b/synapse/rest/media/thumbnail_resource.py index a6396fb05a..661e604b85 100644 --- a/synapse/rest/media/thumbnail_resource.py +++ b/synapse/rest/media/thumbnail_resource.py @@ -60,6 +60,7 @@ class ThumbnailResource(DirectServeJsonResource): self.media_storage = media_storage self.dynamic_thumbnails = hs.config.media.dynamic_thumbnails self._is_mine_server_name = hs.is_mine_server_name + self.prevent_media_downloads_from = hs.config.media.prevent_media_downloads_from async def _async_render_GET(self, request: SynapseRequest) -> None: set_cors_headers(request) @@ -82,6 +83,14 @@ class ThumbnailResource(DirectServeJsonResource): ) self.media_repo.mark_recently_accessed(None, media_id) else: + # Don't let users download media from configured domains, even if it + # is already downloaded. This is Trust & Safety tooling to make some + # media inaccessible to local users. + # See `prevent_media_downloads_from` config docs for more info. + if server_name in self.prevent_media_downloads_from: + respond_404(request) + return + if self.dynamic_thumbnails: await self._select_or_generate_remote_thumbnail( request, server_name, media_id, width, height, method, m_type diff --git a/tests/rest/media/test_domain_blocking.py b/tests/rest/media/test_domain_blocking.py new file mode 100644 index 0000000000..9beeeab843 --- /dev/null +++ b/tests/rest/media/test_domain_blocking.py @@ -0,0 +1,139 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Dict + +from twisted.test.proto_helpers import MemoryReactor +from twisted.web.resource import Resource + +from synapse.media._base import FileInfo +from synapse.server import HomeServer +from synapse.util import Clock + +from tests import unittest +from tests.test_utils import SMALL_PNG +from tests.unittest import override_config + + +class MediaDomainBlockingTests(unittest.HomeserverTestCase): + remote_media_id = "doesnotmatter" + remote_server_name = "evil.com" + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastores().main + + # Inject a piece of media. We'll use this to ensure we're returning a sane + # response when we're not supposed to block it, distinguishing a media block + # from a regular 404. 
+ file_id = "abcdefg12345" + file_info = FileInfo(server_name=self.remote_server_name, file_id=file_id) + with hs.get_media_repository().media_storage.store_into_file(file_info) as ( + f, + fname, + finish, + ): + f.write(SMALL_PNG) + self.get_success(finish()) + + self.get_success( + self.store.store_cached_remote_media( + origin=self.remote_server_name, + media_id=self.remote_media_id, + media_type="image/png", + media_length=1, + time_now_ms=clock.time_msec(), + upload_name="test.png", + filesystem_id=file_id, + ) + ) + + def create_resource_dict(self) -> Dict[str, Resource]: + # We need to manually set the resource tree to include media, the + # default only does `/_matrix/client` APIs. + return {"/_matrix/media": self.hs.get_media_repository_resource()} + + @override_config( + { + # Disable downloads from the domain we'll be trying to download from. + # Should result in a 404. + "prevent_media_downloads_from": ["evil.com"] + } + ) + def test_cannot_download_blocked_media(self) -> None: + """ + Tests to ensure that remote media which is blocked cannot be downloaded. + """ + response = self.make_request( + "GET", + f"/_matrix/media/v3/download/evil.com/{self.remote_media_id}", + shorthand=False, + ) + self.assertEqual(response.code, 404) + + @override_config( + { + # Disable downloads from a domain we won't be requesting downloads from. + # This proves we haven't broken anything. + "prevent_media_downloads_from": ["not-listed.com"] + } + ) + def test_remote_media_normally_unblocked(self) -> None: + """ + Tests to ensure that remote media is normally able to be downloaded + when no domain block is in place. + """ + response = self.make_request( + "GET", + f"/_matrix/media/v3/download/evil.com/{self.remote_media_id}", + shorthand=False, + ) + self.assertEqual(response.code, 200) + + @override_config( + { + # Disable downloads from the domain we'll be trying to download from. + # Should result in a 404. + "prevent_media_downloads_from": ["evil.com"], + "dynamic_thumbnails": True, + } + ) + def test_cannot_download_blocked_media_thumbnail(self) -> None: + """ + Same test as test_cannot_download_blocked_media but for thumbnails. + """ + response = self.make_request( + "GET", + f"/_matrix/media/v3/thumbnail/evil.com/{self.remote_media_id}?width=100&height=100", + shorthand=False, + content={"width": 100, "height": 100}, + ) + self.assertEqual(response.code, 404) + + @override_config( + { + # Disable downloads from a domain we won't be requesting downloads from. + # This proves we haven't broken anything. + "prevent_media_downloads_from": ["not-listed.com"], + "dynamic_thumbnails": True, + } + ) + def test_remote_media_thumbnail_normally_unblocked(self) -> None: + """ + Same test as test_remote_media_normally_unblocked but for thumbnails. + """ + response = self.make_request( + "GET", + f"/_matrix/media/v3/thumbnail/evil.com/{self.remote_media_id}?width=100&height=100", + shorthand=False, + ) + self.assertEqual(response.code, 200) -- cgit 1.5.1 From 86d541f37c1bc9197a6f561b31f3aa359740b4bd Mon Sep 17 00:00:00 2001 From: Tulir Asokan Date: Tue, 9 May 2023 22:02:36 +0300 Subject: Stabilize MSC2659 support for AS ping endpoint. 
(#15528) --- changelog.d/15528.feature | 1 + synapse/api/errors.py | 8 ++++---- synapse/appservice/api.py | 2 +- synapse/config/experimental.py | 3 --- synapse/rest/client/appservice_ping.py | 10 ++++------ synapse/rest/client/versions.py | 2 +- 6 files changed, 11 insertions(+), 15 deletions(-) create mode 100644 changelog.d/15528.feature (limited to 'synapse/rest') diff --git a/changelog.d/15528.feature b/changelog.d/15528.feature new file mode 100644 index 0000000000..aae9fa1ecf --- /dev/null +++ b/changelog.d/15528.feature @@ -0,0 +1 @@ +Stabilize support for [MSC2659](https://github.com/matrix-org/matrix-spec-proposals/pull/2659): application service ping endpoint. Contributed by Tulir @ Beeper. diff --git a/synapse/api/errors.py b/synapse/api/errors.py index f2d6f9ab2d..8c7c94b045 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -108,10 +108,10 @@ class Codes(str, Enum): USER_AWAITING_APPROVAL = "ORG.MATRIX.MSC3866_USER_AWAITING_APPROVAL" - AS_PING_URL_NOT_SET = "FI.MAU.MSC2659_URL_NOT_SET" - AS_PING_BAD_STATUS = "FI.MAU.MSC2659_BAD_STATUS" - AS_PING_CONNECTION_TIMEOUT = "FI.MAU.MSC2659_CONNECTION_TIMEOUT" - AS_PING_CONNECTION_FAILED = "FI.MAU.MSC2659_CONNECTION_FAILED" + AS_PING_URL_NOT_SET = "M_URL_NOT_SET" + AS_PING_BAD_STATUS = "M_BAD_STATUS" + AS_PING_CONNECTION_TIMEOUT = "M_CONNECTION_TIMEOUT" + AS_PING_CONNECTION_FAILED = "M_CONNECTION_FAILED" # Attempt to send a second annotation with the same event type & annotation key # MSC2677 diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 024098e9cb..5fb3d5083d 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -326,7 +326,7 @@ class ApplicationServiceApi(SimpleHttpClient): assert service.hs_token is not None await self.post_json_get_json( - uri=f"{service.url}{APP_SERVICE_UNSTABLE_PREFIX}/fi.mau.msc2659/ping", + uri=f"{service.url}{APP_SERVICE_PREFIX}/ping", post_json={"transaction_id": txn_id}, headers={"Authorization": [f"Bearer {service.hs_token}"]}, ) diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index 7af6dbcd09..6e453bd963 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -189,9 +189,6 @@ class ExperimentalConfig(Config): # MSC3967: Do not require UIA when first uploading cross signing keys self.msc3967_enabled = experimental.get("msc3967_enabled", False) - # MSC2659: Application service ping endpoint - self.msc2659_enabled = experimental.get("msc2659_enabled", False) - # MSC3981: Recurse relations self.msc3981_recurse_relations = experimental.get( "msc3981_recurse_relations", False diff --git a/synapse/rest/client/appservice_ping.py b/synapse/rest/client/appservice_ping.py index 31466a4ad4..3f553d14d1 100644 --- a/synapse/rest/client/appservice_ping.py +++ b/synapse/rest/client/appservice_ping.py @@ -39,9 +39,8 @@ logger = logging.getLogger(__name__) class AppservicePingRestServlet(RestServlet): PATTERNS = client_patterns( - "/fi.mau.msc2659/appservice/(?P[^/]*)/ping", - unstable=True, - releases=(), + "/appservice/(?P[^/]*)/ping", + releases=("v1",), ) def __init__(self, hs: "HomeServer"): @@ -107,9 +106,8 @@ class AppservicePingRestServlet(RestServlet): duration = time.monotonic() - start - return HTTPStatus.OK, {"duration": int(duration * 1000)} + return HTTPStatus.OK, {"duration_ms": int(duration * 1000)} def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - if hs.config.experimental.msc2659_enabled: - AppservicePingRestServlet(hs).register(http_server) + 
AppservicePingRestServlet(hs).register(http_server)
diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py
index 59aed66464..5c98916ec2 100644
--- a/synapse/rest/client/versions.py
+++ b/synapse/rest/client/versions.py
@@ -111,7 +111,7 @@ class VersionsRestServlet(RestServlet):
                     # Allows moderators to fetch redacted event content as described in MSC2815
                     "fi.mau.msc2815": self.config.experimental.msc2815_enabled,
                     # Adds a ping endpoint for appservices to check HS->AS connection
-                    "fi.mau.msc2659": self.config.experimental.msc2659_enabled,
+                    "fi.mau.msc2659.stable": True,  # TODO: remove when "v1.7" is added above
                     # Adds support for login token requests as per MSC3882
                     "org.matrix.msc3882": self.config.experimental.msc3882_enabled,
                     # Adds support for remotely enabling/disabling pushers, as per MSC3881
-- cgit 1.5.1

From 722ccc30b5b66592099c39c3622e48fcf552d2e2 Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Thu, 11 May 2023 10:38:32 +0100
Subject: Add an unstable feature flag for MSC3391 to the /versions endpoint (#15562)

---
 changelog.d/15562.misc          | 1 +
 synapse/rest/client/versions.py | 2 ++
 2 files changed, 3 insertions(+)
 create mode 100644 changelog.d/15562.misc

(limited to 'synapse/rest')

diff --git a/changelog.d/15562.misc b/changelog.d/15562.misc
new file mode 100644
index 0000000000..eeeb553d8f
--- /dev/null
+++ b/changelog.d/15562.misc
@@ -0,0 +1 @@
+Declare unstable support for [MSC3391](https://github.com/matrix-org/matrix-spec-proposals/pull/3391) under `/_matrix/client/versions` if the experimental implementation is enabled.
\ No newline at end of file
diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py
index 5c98916ec2..2d2be6ef38 100644
--- a/synapse/rest/client/versions.py
+++ b/synapse/rest/client/versions.py
@@ -125,6 +125,8 @@ class VersionsRestServlet(RestServlet):
                     "org.matrix.msc3912": self.config.experimental.msc3912_enabled,
                     # Adds support for unstable "intentional mentions" behaviour.
                     "org.matrix.msc3952_intentional_mentions": self.config.experimental.msc3952_intentional_mentions,
+                    # Adds support for deleting account data.
+                    "org.matrix.msc3391": self.config.experimental.msc3391_enabled,
                 },
             },
        )
-- cgit 1.5.1

From def480442d752f1951cf7f790be873489a09c432 Mon Sep 17 00:00:00 2001
From: Patrick Cloke
Date: Fri, 12 May 2023 07:31:50 -0400
Subject: Declare support for Matrix 1.6 (#15559)

Adds logging for key server requests which include a key ID. This is
technically in violation of the 1.6 spec, but is the only way to remain
backwards compatible with earlier versions of Synapse (and possibly
other homeservers) which *did* include the key ID.

---
 changelog.d/15559.feature                  | 1 +
 synapse/rest/client/versions.py            | 1 +
 synapse/rest/key/v2/local_key_resource.py  | 11 +++++++++++
 synapse/rest/key/v2/remote_key_resource.py | 11 ++++++++++-
 4 files changed, 23 insertions(+), 1 deletion(-)
 create mode 100644 changelog.d/15559.feature

(limited to 'synapse/rest')

diff --git a/changelog.d/15559.feature b/changelog.d/15559.feature
new file mode 100644
index 0000000000..07f729e38c
--- /dev/null
+++ b/changelog.d/15559.feature
@@ -0,0 +1 @@
+Advertise support for Matrix 1.6 on `/_matrix/client/versions`.
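For context on the key-ID deprecation this commit logs: under Matrix 1.6 a caller requests all of a server's signing keys in one go rather than naming a key ID in the path. A minimal sketch of the 1.6-style fetch, assuming a hypothetical homeserver URL (not part of the commit itself):

```python
import json
import urllib.request

SERVER = "https://matrix.example.org"  # hypothetical homeserver

# Matrix 1.6 style: no trailing key ID on the path. All currently valid
# signing keys come back in a single response.
with urllib.request.urlopen(f"{SERVER}/_matrix/key/v2/server") as resp:
    body = json.load(resp)

for key_id, key in body.get("verify_keys", {}).items():
    print(f"{key_id}: {key['key']}")
print("valid_until_ts:", body.get("valid_until_ts"))
```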
diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 2d2be6ef38..e9b56fc3f8 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -79,6 +79,7 @@ class VersionsRestServlet(RestServlet): "v1.3", "v1.4", "v1.5", + "v1.6", ], # as per MSC1497: "unstable_features": { diff --git a/synapse/rest/key/v2/local_key_resource.py b/synapse/rest/key/v2/local_key_resource.py index d03e728d42..22e7bf9d86 100644 --- a/synapse/rest/key/v2/local_key_resource.py +++ b/synapse/rest/key/v2/local_key_resource.py @@ -34,6 +34,8 @@ class LocalKey(RestServlet): """HTTP resource containing encoding the TLS X.509 certificate and NACL signature verification keys for this server:: + GET /_matrix/key/v2/server HTTP/1.1 + GET /_matrix/key/v2/server/a.key.id HTTP/1.1 HTTP/1.1 200 OK @@ -100,6 +102,15 @@ class LocalKey(RestServlet): def on_GET( self, request: Request, key_id: Optional[str] = None ) -> Tuple[int, JsonDict]: + # Matrix 1.6 drops support for passing the key_id, this is incompatible + # with earlier versions and is allowed in order to support both. + # A warning is issued to help determine when it is safe to drop this. + if key_id: + logger.warning( + "Request for local server key with deprecated key ID (logging to determine usage level for future removal): %s", + key_id, + ) + time_now = self.clock.time_msec() # Update the expiry time if less than half the interval remains. if time_now + self.config.key.key_refresh_interval / 2 > self.valid_until_ts: diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py index ff0454ca57..8f3865d412 100644 --- a/synapse/rest/key/v2/remote_key_resource.py +++ b/synapse/rest/key/v2/remote_key_resource.py @@ -126,6 +126,15 @@ class RemoteKey(RestServlet): self, request: Request, server: str, key_id: Optional[str] = None ) -> Tuple[int, JsonDict]: if server and key_id: + # Matrix 1.6 drops support for passing the key_id, this is incompatible + # with earlier versions and is allowed in order to support both. + # A warning is issued to help determine when it is safe to drop this. + logger.warning( + "Request for remote server key with deprecated key ID (logging to determine usage level for future removal): %s / %s", + server, + key_id, + ) + minimum_valid_until_ts = parse_integer(request, "minimum_valid_until_ts") arguments = {} if minimum_valid_until_ts is not None: @@ -161,7 +170,7 @@ class RemoteKey(RestServlet): time_now_ms = self.clock.time_msec() - # Map server_name->key_id->int. Note that the value of the init is unused. + # Map server_name->key_id->int. Note that the value of the int is unused. # XXX: why don't we just use a set? cache_misses: Dict[str, Dict[str, int]] = {} for (server_name, key_id, _), key_results in cached.items(): -- cgit 1.5.1 From 3690d5bd89e696264ed2d56759c216f47bf23fca Mon Sep 17 00:00:00 2001 From: Michael Weimann Date: Mon, 15 May 2023 10:54:49 +0200 Subject: Add an unstable feature flag for MSC3981 to the /versions endpoint (#15558) Signed-off-by: Michael Weimann Co-authored-by: Patrick Cloke --- changelog.d/15558.misc | 1 + synapse/rest/client/versions.py | 2 ++ 2 files changed, 3 insertions(+) create mode 100644 changelog.d/15558.misc (limited to 'synapse/rest') diff --git a/changelog.d/15558.misc b/changelog.d/15558.misc new file mode 100644 index 0000000000..a7cfee2513 --- /dev/null +++ b/changelog.d/15558.misc @@ -0,0 +1 @@ +Add `org.matrix.msc3981` info to `client/versions`. 
diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index e9b56fc3f8..58c5b07390 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -126,6 +126,8 @@ class VersionsRestServlet(RestServlet): "org.matrix.msc3912": self.config.experimental.msc3912_enabled, # Adds support for unstable "intentional mentions" behaviour. "org.matrix.msc3952_intentional_mentions": self.config.experimental.msc3952_intentional_mentions, + # Whether recursively provide relations is supported. + "org.matrix.msc3981": self.config.experimental.msc3981_recurse_relations, # Adds support for deleting account data. "org.matrix.msc3391": self.config.experimental.msc3391_enabled, }, -- cgit 1.5.1 From 41b9def9f2c02118796e147f63abf23bc2d7dc04 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Wed, 17 May 2023 16:39:06 +0200 Subject: Add a new admin API to create a new device for a user. (#15611) This allows an external service (e.g. the matrix-authentication-service) to create devices for users. --- changelog.d/15611.feature | 1 + docs/admin_api/user_admin_api.md | 27 +++++++++++++++++++++++++++ synapse/rest/admin/devices.py | 29 +++++++++++++++++++++++++++++ 3 files changed, 57 insertions(+) create mode 100644 changelog.d/15611.feature (limited to 'synapse/rest') diff --git a/changelog.d/15611.feature b/changelog.d/15611.feature new file mode 100644 index 0000000000..7cfb46fd0a --- /dev/null +++ b/changelog.d/15611.feature @@ -0,0 +1 @@ +Add a new admin API to create a new device for a user. diff --git a/docs/admin_api/user_admin_api.md b/docs/admin_api/user_admin_api.md index 6b952ba396..229942b311 100644 --- a/docs/admin_api/user_admin_api.md +++ b/docs/admin_api/user_admin_api.md @@ -813,6 +813,33 @@ The following fields are returned in the JSON response body: - `total` - Total number of user's devices. +### Create a device + +Creates a new device for a specific `user_id` and `device_id`. Does nothing if the `device_id` +exists already. + +The API is: + +``` +POST /_synapse/admin/v2/users//devices + +{ + "device_id": "QBUAZIFURK" +} +``` + +An empty JSON dict is returned. + +**Parameters** + +The following parameters should be set in the URL: + +- `user_id` - fully qualified: for example, `@user:server.com`. + +The following fields are required in the JSON request body: + +- `device_id` - The device ID to create. + ### Delete multiple devices Deletes the given devices for a specific `user_id`, and invalidates any access token associated with them. 
diff --git a/synapse/rest/admin/devices.py b/synapse/rest/admin/devices.py index 3b2f2d9abb..11ebed9bfd 100644 --- a/synapse/rest/admin/devices.py +++ b/synapse/rest/admin/devices.py @@ -137,6 +137,35 @@ class DevicesRestServlet(RestServlet): devices = await self.device_handler.get_devices_by_user(target_user.to_string()) return HTTPStatus.OK, {"devices": devices, "total": len(devices)} + async def on_POST( + self, request: SynapseRequest, user_id: str + ) -> Tuple[int, JsonDict]: + """Creates a new device for the user.""" + await assert_requester_is_admin(self.auth, request) + + target_user = UserID.from_string(user_id) + if not self.is_mine(target_user): + raise SynapseError( + HTTPStatus.BAD_REQUEST, "Can only create devices for local users" + ) + + u = await self.store.get_user_by_id(target_user.to_string()) + if u is None: + raise NotFoundError("Unknown user") + + body = parse_json_object_from_request(request) + device_id = body.get("device_id") + if not device_id: + raise SynapseError(HTTPStatus.BAD_REQUEST, "Missing device_id") + if not isinstance(device_id, str): + raise SynapseError(HTTPStatus.BAD_REQUEST, "device_id must be a string") + + await self.device_handler.check_device_registered( + user_id=user_id, device_id=device_id + ) + + return HTTPStatus.CREATED, {} + class DeleteDevicesRestServlet(RestServlet): """ -- cgit 1.5.1 From e5b4d93770fe5cfc45f1e769d8cb00a2075d68fa Mon Sep 17 00:00:00 2001 From: Jonathan de Jong Date: Thu, 18 May 2023 18:49:12 +0200 Subject: Update Mutual Rooms (MSC2666) implementation (#15621) To track changes in MSC2666: - The change from `/mutual_rooms/{user_id}` to `/mutual_rooms?user_id={user_id}`. - The addition of `next_batch_token` (and logic). - Unstable flag now being `uk.half-shot.msc2666.query_mutual_rooms`. - The error code when your own user is requested. --- changelog.d/15621.misc | 1 + synapse/rest/client/mutual_rooms.py | 43 ++++++++++++++++++++++++---------- synapse/rest/client/versions.py | 2 +- tests/rest/client/test_mutual_rooms.py | 6 +++-- 4 files changed, 37 insertions(+), 15 deletions(-) create mode 100644 changelog.d/15621.misc (limited to 'synapse/rest') diff --git a/changelog.d/15621.misc b/changelog.d/15621.misc new file mode 100644 index 0000000000..5d060f4dbc --- /dev/null +++ b/changelog.d/15621.misc @@ -0,0 +1 @@ +Update Mutual Rooms (MSC2666) implementation to match new proposal text. \ No newline at end of file diff --git a/synapse/rest/client/mutual_rooms.py b/synapse/rest/client/mutual_rooms.py index 38ef4e459f..c99445da30 100644 --- a/synapse/rest/client/mutual_rooms.py +++ b/synapse/rest/client/mutual_rooms.py @@ -12,13 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging -from typing import TYPE_CHECKING, Tuple +from http import HTTPStatus +from typing import TYPE_CHECKING, Dict, List, Tuple from synapse.api.errors import Codes, SynapseError from synapse.http.server import HttpServer -from synapse.http.servlet import RestServlet +from synapse.http.servlet import RestServlet, parse_strings_from_args from synapse.http.site import SynapseRequest -from synapse.types import JsonDict, UserID +from synapse.types import JsonDict from ._base import client_patterns @@ -30,11 +31,11 @@ logger = logging.getLogger(__name__) class UserMutualRoomsServlet(RestServlet): """ - GET /uk.half-shot.msc2666/user/mutual_rooms/{user_id} HTTP/1.1 + GET /uk.half-shot.msc2666/user/mutual_rooms?user_id={user_id} HTTP/1.1 """ PATTERNS = client_patterns( - "/uk.half-shot.msc2666/user/mutual_rooms/(?P[^/]*)", + "/uk.half-shot.msc2666/user/mutual_rooms$", releases=(), # This is an unstable feature ) @@ -43,17 +44,35 @@ class UserMutualRoomsServlet(RestServlet): self.auth = hs.get_auth() self.store = hs.get_datastores().main - async def on_GET( - self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: - UserID.from_string(user_id) + async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + # twisted.web.server.Request.args is incorrectly defined as Optional[Any] + args: Dict[bytes, List[bytes]] = request.args # type: ignore + + user_ids = parse_strings_from_args(args, "user_id", required=True) + + if len(user_ids) > 1: + raise SynapseError( + HTTPStatus.BAD_REQUEST, + "Duplicate user_id query parameter", + errcode=Codes.INVALID_PARAM, + ) + + # We don't do batching, so a batch token is illegal by default + if b"batch_token" in args: + raise SynapseError( + HTTPStatus.BAD_REQUEST, + "Unknown batch_token", + errcode=Codes.INVALID_PARAM, + ) + + user_id = user_ids[0] requester = await self.auth.get_user_by_req(request) if user_id == requester.user.to_string(): raise SynapseError( - code=400, - msg="You cannot request a list of shared rooms with yourself", - errcode=Codes.FORBIDDEN, + HTTPStatus.UNPROCESSABLE_ENTITY, + "You cannot request a list of shared rooms with yourself", + errcode=Codes.INVALID_PARAM, ) rooms = await self.store.get_mutual_rooms_between_users( diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 58c5b07390..32df054f56 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -91,7 +91,7 @@ class VersionsRestServlet(RestServlet): # Implements additional endpoints as described in MSC2432 "org.matrix.msc2432": True, # Implements additional endpoints as described in MSC2666 - "uk.half-shot.msc2666.mutual_rooms": True, + "uk.half-shot.msc2666.query_mutual_rooms": True, # Whether new rooms will be set to encrypted or not (based on presets). "io.element.e2ee_forced.public": self.e2ee_forced_public, "io.element.e2ee_forced.private": self.e2ee_forced_private, diff --git a/tests/rest/client/test_mutual_rooms.py b/tests/rest/client/test_mutual_rooms.py index a4327f7ace..22fddbd6d6 100644 --- a/tests/rest/client/test_mutual_rooms.py +++ b/tests/rest/client/test_mutual_rooms.py @@ -11,6 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from urllib.parse import quote + from twisted.test.proto_helpers import MemoryReactor import synapse.rest.admin @@ -44,8 +46,8 @@ class UserMutualRoomsTest(unittest.HomeserverTestCase): def _get_mutual_rooms(self, token: str, other_user: str) -> FakeChannel: return self.make_request( "GET", - "/_matrix/client/unstable/uk.half-shot.msc2666/user/mutual_rooms/%s" - % other_user, + "/_matrix/client/unstable/uk.half-shot.msc2666/user/mutual_rooms" + f"?user_id={quote(other_user)}", access_token=token, ) -- cgit 1.5.1 From 89a23c940672944acd98db58085cdc38191515a8 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Fri, 19 May 2023 08:06:54 -0400 Subject: Do not allow deactivated users to login with JWT. (#15624) To improve the organization of this code it moves the JWT login checks to a separate handler and then fixes the bug (and a deprecation warning). --- changelog.d/15624.bugfix | 1 + synapse/handlers/jwt.py | 118 ++++++++++++++++++++++++++++++++++++++++ synapse/rest/client/login.py | 77 ++++---------------------- synapse/server.py | 7 +++ tests/rest/client/test_login.py | 20 ++++++- 5 files changed, 156 insertions(+), 67 deletions(-) create mode 100644 changelog.d/15624.bugfix create mode 100644 synapse/handlers/jwt.py (limited to 'synapse/rest') diff --git a/changelog.d/15624.bugfix b/changelog.d/15624.bugfix new file mode 100644 index 0000000000..fde515ba62 --- /dev/null +++ b/changelog.d/15624.bugfix @@ -0,0 +1 @@ +Fix a long-standing bug where deactivated users were still able to login using the custom `org.matrix.login.jwt` login type (if enabled). diff --git a/synapse/handlers/jwt.py b/synapse/handlers/jwt.py new file mode 100644 index 0000000000..5fddc0e315 --- /dev/null +++ b/synapse/handlers/jwt.py @@ -0,0 +1,118 @@ +# Copyright 2023 Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import TYPE_CHECKING + +from authlib.jose import JsonWebToken, JWTClaims +from authlib.jose.errors import BadSignatureError, InvalidClaimError, JoseError + +from synapse.api.errors import Codes, LoginError, StoreError, UserDeactivatedError +from synapse.types import JsonDict, UserID + +if TYPE_CHECKING: + from synapse.server import HomeServer + + +class JwtHandler: + def __init__(self, hs: "HomeServer"): + self.hs = hs + self._main_store = hs.get_datastores().main + + self.jwt_secret = hs.config.jwt.jwt_secret + self.jwt_subject_claim = hs.config.jwt.jwt_subject_claim + self.jwt_algorithm = hs.config.jwt.jwt_algorithm + self.jwt_issuer = hs.config.jwt.jwt_issuer + self.jwt_audiences = hs.config.jwt.jwt_audiences + + async def validate_login(self, login_submission: JsonDict) -> str: + """ + Authenticates the user for the /login API + + Args: + login_submission: the whole of the login submission + (including 'type' and other relevant fields) + + Returns: + The user ID that is logging in. + + Raises: + LoginError if there was an authentication problem. 
+ """ + token = login_submission.get("token", None) + if token is None: + raise LoginError( + 403, "Token field for JWT is missing", errcode=Codes.FORBIDDEN + ) + + jwt = JsonWebToken([self.jwt_algorithm]) + claim_options = {} + if self.jwt_issuer is not None: + claim_options["iss"] = {"value": self.jwt_issuer, "essential": True} + if self.jwt_audiences is not None: + claim_options["aud"] = {"values": self.jwt_audiences, "essential": True} + + try: + claims = jwt.decode( + token, + key=self.jwt_secret, + claims_cls=JWTClaims, + claims_options=claim_options, + ) + except BadSignatureError: + # We handle this case separately to provide a better error message + raise LoginError( + 403, + "JWT validation failed: Signature verification failed", + errcode=Codes.FORBIDDEN, + ) + except JoseError as e: + # A JWT error occurred, return some info back to the client. + raise LoginError( + 403, + "JWT validation failed: %s" % (str(e),), + errcode=Codes.FORBIDDEN, + ) + + try: + claims.validate(leeway=120) # allows 2 min of clock skew + + # Enforce the old behavior which is rolled out in productive + # servers: if the JWT contains an 'aud' claim but none is + # configured, the login attempt will fail + if claims.get("aud") is not None: + if self.jwt_audiences is None or len(self.jwt_audiences) == 0: + raise InvalidClaimError("aud") + except JoseError as e: + raise LoginError( + 403, + "JWT validation failed: %s" % (str(e),), + errcode=Codes.FORBIDDEN, + ) + + user = claims.get(self.jwt_subject_claim, None) + if user is None: + raise LoginError(403, "Invalid JWT", errcode=Codes.FORBIDDEN) + + user_id = UserID(user, self.hs.hostname).to_string() + + # If the account has been deactivated, do not proceed with the login + # flow. + try: + deactivated = await self._main_store.get_user_deactivated_status(user_id) + except StoreError: + # JWT lazily creates users, so they may not exist in the database yet. + deactivated = False + if deactivated: + raise UserDeactivatedError("This account has been deactivated") + + return user_id diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index a348720131..afdbf821b5 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -87,11 +87,6 @@ class LoginRestServlet(RestServlet): # JWT configuration variables. self.jwt_enabled = hs.config.jwt.jwt_enabled - self.jwt_secret = hs.config.jwt.jwt_secret - self.jwt_subject_claim = hs.config.jwt.jwt_subject_claim - self.jwt_algorithm = hs.config.jwt.jwt_algorithm - self.jwt_issuer = hs.config.jwt.jwt_issuer - self.jwt_audiences = hs.config.jwt.jwt_audiences # SSO configuration. self.saml2_enabled = hs.config.saml2.saml2_enabled @@ -427,7 +422,7 @@ class LoginRestServlet(RestServlet): self, login_submission: JsonDict, should_issue_refresh_token: bool = False ) -> LoginResponse: """ - Handle the final stage of SSO login. + Handle token login. Args: login_submission: The JSON request body. 
@@ -452,72 +447,24 @@ class LoginRestServlet(RestServlet): async def _do_jwt_login( self, login_submission: JsonDict, should_issue_refresh_token: bool = False ) -> LoginResponse: - token = login_submission.get("token", None) - if token is None: - raise LoginError( - 403, "Token field for JWT is missing", errcode=Codes.FORBIDDEN - ) - - from authlib.jose import JsonWebToken, JWTClaims - from authlib.jose.errors import BadSignatureError, InvalidClaimError, JoseError - - jwt = JsonWebToken([self.jwt_algorithm]) - claim_options = {} - if self.jwt_issuer is not None: - claim_options["iss"] = {"value": self.jwt_issuer, "essential": True} - if self.jwt_audiences is not None: - claim_options["aud"] = {"values": self.jwt_audiences, "essential": True} - - try: - claims = jwt.decode( - token, - key=self.jwt_secret, - claims_cls=JWTClaims, - claims_options=claim_options, - ) - except BadSignatureError: - # We handle this case separately to provide a better error message - raise LoginError( - 403, - "JWT validation failed: Signature verification failed", - errcode=Codes.FORBIDDEN, - ) - except JoseError as e: - # A JWT error occurred, return some info back to the client. - raise LoginError( - 403, - "JWT validation failed: %s" % (str(e),), - errcode=Codes.FORBIDDEN, - ) - - try: - claims.validate(leeway=120) # allows 2 min of clock skew - - # Enforce the old behavior which is rolled out in productive - # servers: if the JWT contains an 'aud' claim but none is - # configured, the login attempt will fail - if claims.get("aud") is not None: - if self.jwt_audiences is None or len(self.jwt_audiences) == 0: - raise InvalidClaimError("aud") - except JoseError as e: - raise LoginError( - 403, - "JWT validation failed: %s" % (str(e),), - errcode=Codes.FORBIDDEN, - ) + """ + Handle the custom JWT login. - user = claims.get(self.jwt_subject_claim, None) - if user is None: - raise LoginError(403, "Invalid JWT", errcode=Codes.FORBIDDEN) + Args: + login_submission: The JSON request body. + should_issue_refresh_token: True if this login should issue + a refresh token alongside the access token. - user_id = UserID(user, self.hs.hostname).to_string() - result = await self._complete_login( + Returns: + The body of the JSON response. 
+ """ + user_id = await self.hs.get_jwt_handler().validate_login(login_submission) + return await self._complete_login( user_id, login_submission, create_non_existent_users=True, should_issue_refresh_token=should_issue_refresh_token, ) - return result def _get_auth_flow_dict_for_idp(idp: SsoIdentityProvider) -> JsonDict: diff --git a/synapse/server.py b/synapse/server.py index b307295789..aa90465047 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -147,6 +147,7 @@ logger = logging.getLogger(__name__) if TYPE_CHECKING: from txredisapi import ConnectionHandler + from synapse.handlers.jwt import JwtHandler from synapse.handlers.oidc import OidcHandler from synapse.handlers.saml import SamlHandler @@ -533,6 +534,12 @@ class HomeServer(metaclass=abc.ABCMeta): def get_sso_handler(self) -> SsoHandler: return SsoHandler(self) + @cache_in_self + def get_jwt_handler(self) -> "JwtHandler": + from synapse.handlers.jwt import JwtHandler + + return JwtHandler(self) + @cache_in_self def get_sync_handler(self) -> SyncHandler: return SyncHandler(self) diff --git a/tests/rest/client/test_login.py b/tests/rest/client/test_login.py index 62acf4f44e..dc32982e22 100644 --- a/tests/rest/client/test_login.py +++ b/tests/rest/client/test_login.py @@ -42,7 +42,7 @@ from tests.test_utils.html_parsers import TestHtmlParser from tests.unittest import HomeserverTestCase, override_config, skip_unless try: - from authlib.jose import jwk, jwt + from authlib.jose import JsonWebKey, jwt HAS_JWT = True except ImportError: @@ -1054,6 +1054,22 @@ class JWTTestCase(unittest.HomeserverTestCase): self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN") self.assertEqual(channel.json_body["error"], "Token field for JWT is missing") + def test_deactivated_user(self) -> None: + """Logging in as a deactivated account should error.""" + user_id = self.register_user("kermit", "monkey") + self.get_success( + self.hs.get_deactivate_account_handler().deactivate_account( + user_id, erase_data=False, requester=create_requester(user_id) + ) + ) + + channel = self.jwt_login({"sub": "kermit"}) + self.assertEqual(channel.code, 403, msg=channel.result) + self.assertEqual(channel.json_body["errcode"], "M_USER_DEACTIVATED") + self.assertEqual( + channel.json_body["error"], "This account has been deactivated" + ) + # The JWTPubKeyTestCase is a complement to JWTTestCase where we instead use # RSS256, with a public key configured in synapse as "jwt_secret", and tokens @@ -1121,7 +1137,7 @@ class JWTPubKeyTestCase(unittest.HomeserverTestCase): def jwt_encode(self, payload: Dict[str, Any], secret: str = jwt_privatekey) -> str: header = {"alg": "RS256"} if secret.startswith("-----BEGIN RSA PRIVATE KEY-----"): - secret = jwk.dumps(secret, kty="RSA") + secret = JsonWebKey.import_key(secret, {"kty": "RSA"}) result: bytes = jwt.encode(header, payload, secret) return result.decode("ascii") -- cgit 1.5.1 From 7c9b91790c013d11ca88a9d01e0054939eda8523 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 23 May 2023 10:35:43 -0400 Subject: Consolidate logic to check for deactivated users. (#15634) This moves the deactivated user check to the method which all login types call. Additionally updates the application service tests to be more realistic by removing invalid tests and fixing server names. 
--- changelog.d/15634.bugfix | 1 + docs/modules/password_auth_provider_callbacks.md | 3 ++ synapse/appservice/__init__.py | 3 +- synapse/handlers/auth.py | 14 ++---- synapse/handlers/jwt.py | 19 ++------ synapse/rest/client/login.py | 23 +++++++-- tests/handlers/test_password_providers.py | 59 +++++++++--------------- 7 files changed, 55 insertions(+), 67 deletions(-) create mode 100644 changelog.d/15634.bugfix (limited to 'synapse/rest') diff --git a/changelog.d/15634.bugfix b/changelog.d/15634.bugfix new file mode 100644 index 0000000000..ef39e8a689 --- /dev/null +++ b/changelog.d/15634.bugfix @@ -0,0 +1 @@ +Fix a long-standing bug where deactivated users were able to login in uncommon situations. diff --git a/docs/modules/password_auth_provider_callbacks.md b/docs/modules/password_auth_provider_callbacks.md index 8275f7ebdc..d66ac7df31 100644 --- a/docs/modules/password_auth_provider_callbacks.md +++ b/docs/modules/password_auth_provider_callbacks.md @@ -46,6 +46,9 @@ instead. If the authentication is unsuccessful, the module must return `None`. +Note that the user is not automatically registered, the `register_user(..)` method of +the [module API](writing_a_module.html) can be used to lazily create users. + If multiple modules register an auth checker for the same login type but with different fields, Synapse will refuse to start. diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index 35c330a3c4..2260a8f589 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -86,6 +86,7 @@ class ApplicationService: url.rstrip("/") if isinstance(url, str) else None ) # url must not end with a slash self.hs_token = hs_token + # The full Matrix ID for this application service's sender. self.sender = sender self.namespaces = self._check_namespaces(namespaces) self.id = id @@ -212,7 +213,7 @@ class ApplicationService: True if the application service is interested in the user, False if not. """ return ( - # User is the appservice's sender_localpart user + # User is the appservice's configured sender_localpart user user_id == self.sender # User is in the appservice's user namespace or self.is_user_in_namespace(user_id) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 59e340974d..d001f2fb2f 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -52,7 +52,6 @@ from synapse.api.errors import ( NotFoundError, StoreError, SynapseError, - UserDeactivatedError, ) from synapse.api.ratelimiting import Ratelimiter from synapse.handlers.ui_auth import ( @@ -1419,12 +1418,6 @@ class AuthHandler: return None (user_id, password_hash) = lookupres - # If the password hash is None, the account has likely been deactivated - if not password_hash: - deactivated = await self.store.get_user_deactivated_status(user_id) - if deactivated: - raise UserDeactivatedError("This account has been deactivated") - result = await self.validate_hash(password, password_hash) if not result: logger.warning("Failed password login for user %s", user_id) @@ -1749,8 +1742,11 @@ class AuthHandler: registered. auth_provider_session_id: The session ID from the SSO IdP received during login. """ - # If the account has been deactivated, do not proceed with the login - # flow. + # If the account has been deactivated, do not proceed with the login. + # + # This gets checked again when the token is submitted but this lets us + # provide an HTML error page to the user (instead of issuing a token and + # having it error later). 
deactivated = await self.store.get_user_deactivated_status(registered_user_id) if deactivated: respond_with_html(request, 403, self._sso_account_deactivated_template) diff --git a/synapse/handlers/jwt.py b/synapse/handlers/jwt.py index 5fddc0e315..740bf9b3c4 100644 --- a/synapse/handlers/jwt.py +++ b/synapse/handlers/jwt.py @@ -16,7 +16,7 @@ from typing import TYPE_CHECKING from authlib.jose import JsonWebToken, JWTClaims from authlib.jose.errors import BadSignatureError, InvalidClaimError, JoseError -from synapse.api.errors import Codes, LoginError, StoreError, UserDeactivatedError +from synapse.api.errors import Codes, LoginError from synapse.types import JsonDict, UserID if TYPE_CHECKING: @@ -26,7 +26,6 @@ if TYPE_CHECKING: class JwtHandler: def __init__(self, hs: "HomeServer"): self.hs = hs - self._main_store = hs.get_datastores().main self.jwt_secret = hs.config.jwt.jwt_secret self.jwt_subject_claim = hs.config.jwt.jwt_subject_claim @@ -34,7 +33,7 @@ class JwtHandler: self.jwt_issuer = hs.config.jwt.jwt_issuer self.jwt_audiences = hs.config.jwt.jwt_audiences - async def validate_login(self, login_submission: JsonDict) -> str: + def validate_login(self, login_submission: JsonDict) -> str: """ Authenticates the user for the /login API @@ -103,16 +102,4 @@ class JwtHandler: if user is None: raise LoginError(403, "Invalid JWT", errcode=Codes.FORBIDDEN) - user_id = UserID(user, self.hs.hostname).to_string() - - # If the account has been deactivated, do not proceed with the login - # flow. - try: - deactivated = await self._main_store.get_user_deactivated_status(user_id) - except StoreError: - # JWT lazily creates users, so they may not exist in the database yet. - deactivated = False - if deactivated: - raise UserDeactivatedError("This account has been deactivated") - - return user_id + return UserID(user, self.hs.hostname).to_string() diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index afdbf821b5..6ca61ffbd0 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -35,6 +35,7 @@ from synapse.api.errors import ( LoginError, NotApprovedError, SynapseError, + UserDeactivatedError, ) from synapse.api.ratelimiting import Ratelimiter from synapse.api.urls import CLIENT_API_PREFIX @@ -84,6 +85,7 @@ class LoginRestServlet(RestServlet): def __init__(self, hs: "HomeServer"): super().__init__() self.hs = hs + self._main_store = hs.get_datastores().main # JWT configuration variables. self.jwt_enabled = hs.config.jwt.jwt_enabled @@ -112,13 +114,13 @@ class LoginRestServlet(RestServlet): self._well_known_builder = WellKnownBuilder(hs) self._address_ratelimiter = Ratelimiter( - store=hs.get_datastores().main, + store=self._main_store, clock=hs.get_clock(), rate_hz=self.hs.config.ratelimiting.rc_login_address.per_second, burst_count=self.hs.config.ratelimiting.rc_login_address.burst_count, ) self._account_ratelimiter = Ratelimiter( - store=hs.get_datastores().main, + store=self._main_store, clock=hs.get_clock(), rate_hz=self.hs.config.ratelimiting.rc_login_account.per_second, burst_count=self.hs.config.ratelimiting.rc_login_account.burst_count, @@ -280,6 +282,9 @@ class LoginRestServlet(RestServlet): login_submission, ratelimit=appservice.is_rate_limited(), should_issue_refresh_token=should_issue_refresh_token, + # The user represented by an appservice's configured sender_localpart + # is not actually created in Synapse. 
+ should_check_deactivated=qualified_user_id != appservice.sender, ) async def _do_other_login( @@ -326,6 +331,7 @@ class LoginRestServlet(RestServlet): auth_provider_id: Optional[str] = None, should_issue_refresh_token: bool = False, auth_provider_session_id: Optional[str] = None, + should_check_deactivated: bool = True, ) -> LoginResponse: """Called when we've successfully authed the user and now need to actually login them in (e.g. create devices). This gets called on @@ -345,6 +351,11 @@ class LoginRestServlet(RestServlet): should_issue_refresh_token: True if this login should issue a refresh token alongside the access token. auth_provider_session_id: The session ID got during login from the SSO IdP. + should_check_deactivated: True if the user should be checked for + deactivation status before logging in. + + This exists purely for appservice's configured sender_localpart + which doesn't have an associated user in the database. Returns: Dictionary of account information after successful login. @@ -364,6 +375,12 @@ class LoginRestServlet(RestServlet): ) user_id = canonical_uid + # If the account has been deactivated, do not proceed with the login. + if should_check_deactivated: + deactivated = await self._main_store.get_user_deactivated_status(user_id) + if deactivated: + raise UserDeactivatedError("This account has been deactivated") + device_id = login_submission.get("device_id") # If device_id is present, check that device_id is not longer than a reasonable 512 characters @@ -458,7 +475,7 @@ class LoginRestServlet(RestServlet): Returns: The body of the JSON response. """ - user_id = await self.hs.get_jwt_handler().validate_login(login_submission) + user_id = self.hs.get_jwt_handler().validate_login(login_submission) return await self._complete_login( user_id, login_submission, diff --git a/tests/handlers/test_password_providers.py b/tests/handlers/test_password_providers.py index aa91bc0a3d..394006f5f3 100644 --- a/tests/handlers/test_password_providers.py +++ b/tests/handlers/test_password_providers.py @@ -18,13 +18,17 @@ from http import HTTPStatus from typing import Any, Dict, List, Optional, Type, Union from unittest.mock import Mock +from twisted.test.proto_helpers import MemoryReactor + import synapse from synapse.api.constants import LoginType from synapse.api.errors import Codes from synapse.handlers.account import AccountHandler from synapse.module_api import ModuleApi from synapse.rest.client import account, devices, login, logout, register +from synapse.server import HomeServer from synapse.types import JsonDict, UserID +from synapse.util import Clock from tests import unittest from tests.server import FakeChannel @@ -162,10 +166,16 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): CALLBACK_USERNAME = "get_username_for_registration" CALLBACK_DISPLAYNAME = "get_displayname_for_registration" - def setUp(self) -> None: + def prepare( + self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer + ) -> None: # we use a global mock device, so make sure we are starting with a clean slate mock_password_provider.reset_mock() - super().setUp() + + # The mock password provider doesn't register the users, so ensure they + # are registered first. 
+        self.register_user("u", "not-the-tested-password")
+        self.register_user("user", "not-the-tested-password")
 
     @override_config(legacy_providers_config(LegacyPasswordOnlyAuthProvider))
     def test_password_only_auth_progiver_login_legacy(self) -> None:
@@ -185,22 +195,12 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase):
         mock_password_provider.reset_mock()
 
         # login with mxid should work too
-        channel = self._send_password_login("@u:bz", "p")
+        channel = self._send_password_login("@u:test", "p")
         self.assertEqual(channel.code, HTTPStatus.OK, channel.result)
-        self.assertEqual("@u:bz", channel.json_body["user_id"])
-        mock_password_provider.check_password.assert_called_once_with("@u:bz", "p")
+        self.assertEqual("@u:test", channel.json_body["user_id"])
+        mock_password_provider.check_password.assert_called_once_with("@u:test", "p")
         mock_password_provider.reset_mock()
 
-        # try a weird username / pass. Honestly it's unclear what we *expect* to happen
-        # in these cases, but at least we can guard against the API changing
-        # unexpectedly
-        channel = self._send_password_login(" USER🙂NAME ", " pASS\U0001F622word ")
-        self.assertEqual(channel.code, HTTPStatus.OK, channel.result)
-        self.assertEqual("@ USER🙂NAME :test", channel.json_body["user_id"])
-        mock_password_provider.check_password.assert_called_once_with(
-            "@ USER🙂NAME :test", " pASS😢word "
-        )
-
     @override_config(legacy_providers_config(LegacyPasswordOnlyAuthProvider))
     def test_password_only_auth_provider_ui_auth_legacy(self) -> None:
         self.password_only_auth_provider_ui_auth_test_body()
@@ -208,10 +208,6 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase):
     def password_only_auth_provider_ui_auth_test_body(self) -> None:
         """UI Auth should delegate correctly to the password provider"""
 
-        # create the user, otherwise access doesn't work
-        module_api = self.hs.get_module_api()
-        self.get_success(module_api.register_user("u"))
-
         # log in twice, to get two devices
         mock_password_provider.check_password.return_value = make_awaitable(True)
         tok1 = self.login("u", "p")
@@ -401,29 +397,16 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase):
         mock_password_provider.check_auth.assert_not_called()
 
         mock_password_provider.check_auth.return_value = make_awaitable(
-            ("@user:bz", None)
+            ("@user:test", None)
        )
         channel = self._send_login("test.login_type", "u", test_field="y")
         self.assertEqual(channel.code, HTTPStatus.OK, channel.result)
-        self.assertEqual("@user:bz", channel.json_body["user_id"])
+        self.assertEqual("@user:test", channel.json_body["user_id"])
         mock_password_provider.check_auth.assert_called_once_with(
             "u", "test.login_type", {"test_field": "y"}
         )
         mock_password_provider.reset_mock()
 
-        # try a weird username. Again, it's unclear what we *expect* to happen
-        # in these cases, but at least we can guard against the API changing
-        # unexpectedly
-        mock_password_provider.check_auth.return_value = make_awaitable(
-            ("@ MALFORMED! :bz", None)
-        )
-        channel = self._send_login("test.login_type", " USER🙂NAME ", test_field=" abc ")
-        self.assertEqual(channel.code, HTTPStatus.OK, channel.result)
-        self.assertEqual("@ MALFORMED! :bz", channel.json_body["user_id"])
-        mock_password_provider.check_auth.assert_called_once_with(
-            " USER🙂NAME ", "test.login_type", {"test_field": " abc "}
-        )
-
     @override_config(legacy_providers_config(LegacyCustomAuthProvider))
     def test_custom_auth_provider_ui_auth_legacy(self) -> None:
         self.custom_auth_provider_ui_auth_test_body()
@@ -465,7 +448,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase):
 
         # right params, but authing as the wrong user
         mock_password_provider.check_auth.return_value = make_awaitable(
-            ("@user:bz", None)
+            ("@user:test", None)
         )
         body["auth"]["test_field"] = "foo"
         channel = self._delete_device(tok1, "dev2", body)
@@ -498,11 +481,11 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase):
 
         callback = Mock(return_value=make_awaitable(None))
 
         mock_password_provider.check_auth.return_value = make_awaitable(
-            ("@user:bz", callback)
+            ("@user:test", callback)
         )
         channel = self._send_login("test.login_type", "u", test_field="y")
         self.assertEqual(channel.code, HTTPStatus.OK, channel.result)
-        self.assertEqual("@user:bz", channel.json_body["user_id"])
+        self.assertEqual("@user:test", channel.json_body["user_id"])
         mock_password_provider.check_auth.assert_called_once_with(
             "u", "test.login_type", {"test_field": "y"}
         )
@@ -512,7 +495,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase):
         call_args, call_kwargs = callback.call_args
         # should be one positional arg
         self.assertEqual(len(call_args), 1)
-        self.assertEqual(call_args[0]["user_id"], "@user:bz")
+        self.assertEqual(call_args[0]["user_id"], "@user:test")
 
         for p in ["user_id", "access_token", "device_id", "home_server"]:
             self.assertIn(p, call_args[0])
-- 
cgit 1.5.1
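
Taken together, the hunks above centralise the deactivated-account check: JwtHandler.validate_login becomes synchronous and loses its datastore dependency, while LoginRestServlet._complete_login now performs the check for every login flow, with should_check_deactivated as the single escape hatch for the appservice sender_localpart user, which is never created in the database. Below is a minimal runnable sketch of the resulting behaviour, not the Synapse code itself: FakeStore, complete_login and the example user IDs are hypothetical stand-ins, and only the get_user_deactivated_status call, UserDeactivatedError and the should_check_deactivated flag mirror the diff.

import asyncio


class UserDeactivatedError(Exception):
    """Stand-in for synapse.api.errors.UserDeactivatedError."""


class FakeStore:
    """Stand-in for the main datastore; only the one call used above is modelled."""

    def __init__(self, deactivated_users):
        self._deactivated = set(deactivated_users)

    async def get_user_deactivated_status(self, user_id: str) -> bool:
        return user_id in self._deactivated


async def complete_login(store, user_id, should_check_deactivated=True):
    # Mirrors the new guard in _complete_login: every flow that reaches this
    # point is checked, unless the caller opts out (as it does for the
    # appservice sender_localpart user, which has no database row).
    if should_check_deactivated:
        if await store.get_user_deactivated_status(user_id):
            raise UserDeactivatedError("This account has been deactivated")
    return {"user_id": user_id}


async def main():
    store = FakeStore({"@retired:test"})
    print(await complete_login(store, "@alice:test"))  # normal login succeeds
    print(await complete_login(store, "@as-sender:test", False))  # check skipped
    try:
        await complete_login(store, "@retired:test")
    except UserDeactivatedError as exc:
        print("login rejected:", exc)


asyncio.run(main())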
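
The test changes follow from the same refactor: because every login now hits get_user_deactivated_status, the users the mocked provider claims to authenticate must actually exist, so the suite seeds them in HomeserverTestCase.prepare and rewrites the mocked @...:bz identifiers to @...:test, the test homeserver's own domain; the weird-username cases are dropped, presumably because such users cannot be registered. A sketch of the setup pattern follows, with a hypothetical test class name; the servlets list follows the usual Synapse test convention (register_user drives the admin registration endpoint), and the password value is arbitrary since the provider, not Synapse's password store, is what these tests exercise.

from twisted.test.proto_helpers import MemoryReactor

import synapse.rest.admin
from synapse.rest.client import login
from synapse.server import HomeServer
from synapse.util import Clock

from tests import unittest


class ExamplePasswordProviderTests(unittest.HomeserverTestCase):
    servlets = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
    ]

    def prepare(
        self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer
    ) -> None:
        # prepare() runs after the homeserver is built, so register_user()
        # can create the account that the mocked provider will later claim
        # to have authenticated; without it, the deactivation lookup in
        # _complete_login would find no user.
        self.register_user("u", "not-the-tested-password")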