From e83520cc42cb174a5d3dc5ca1dcce299ad4abb25 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Fri, 21 Jan 2022 08:01:37 +0000 Subject: Make `get_account_data_for_room_and_type` a tree cache (#11789) --- synapse/storage/databases/main/account_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse') diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py index ef475e18c7..bb3740711e 100644 --- a/synapse/storage/databases/main/account_data.py +++ b/synapse/storage/databases/main/account_data.py @@ -210,7 +210,7 @@ class AccountDataWorkerStore(CacheInvalidationWorkerStore): "get_account_data_for_room", get_account_data_for_room_txn ) - @cached(num_args=3, max_entries=5000) + @cached(num_args=3, max_entries=5000, tree=True) async def get_account_data_for_room_and_type( self, user_id: str, room_id: str, account_data_type: str ) -> Optional[JsonDict]: -- cgit 1.4.1 From 4c2096599c9780290703e14df63963e77d058dda Mon Sep 17 00:00:00 2001 From: reivilibre Date: Fri, 21 Jan 2022 08:38:36 +0000 Subject: Make the `get_global_account_data_by_type_for_user` cache be a tree-cache whose key is prefixed with the user ID (#11788) --- changelog.d/11788.feature | 1 + synapse/handlers/sync.py | 2 +- synapse/rest/client/account_data.py | 2 +- synapse/storage/databases/main/account_data.py | 8 ++++---- synapse/visibility.py | 2 +- tests/replication/slave/storage/test_account_data.py | 4 ++-- 6 files changed, 10 insertions(+), 9 deletions(-) create mode 100644 changelog.d/11788.feature (limited to 'synapse') diff --git a/changelog.d/11788.feature b/changelog.d/11788.feature new file mode 100644 index 0000000000..dc426fb658 --- /dev/null +++ b/changelog.d/11788.feature @@ -0,0 +1 @@ +Remove account data (including client config, push rules and ignored users) upon user deactivation. \ No newline at end of file diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index ffc6b748e8..7e2a892b63 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1619,7 +1619,7 @@ class SyncHandler: # TODO: Can we `SELECT ignored_user_id FROM ignored_users WHERE ignorer_user_id=?;` instead? 
ignored_account_data = ( await self.store.get_global_account_data_by_type_for_user( - AccountDataTypes.IGNORED_USER_LIST, user_id=user_id + user_id=user_id, data_type=AccountDataTypes.IGNORED_USER_LIST ) ) diff --git a/synapse/rest/client/account_data.py b/synapse/rest/client/account_data.py index d1badbdf3b..58b8adbd32 100644 --- a/synapse/rest/client/account_data.py +++ b/synapse/rest/client/account_data.py @@ -66,7 +66,7 @@ class AccountDataServlet(RestServlet): raise AuthError(403, "Cannot get account data for other users.") event = await self.store.get_global_account_data_by_type_for_user( - account_data_type, user_id + user_id, account_data_type ) if event is None: diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py index bb3740711e..9c19f0965f 100644 --- a/synapse/storage/databases/main/account_data.py +++ b/synapse/storage/databases/main/account_data.py @@ -158,9 +158,9 @@ class AccountDataWorkerStore(CacheInvalidationWorkerStore): "get_account_data_for_user", get_account_data_for_user_txn ) - @cached(num_args=2, max_entries=5000) + @cached(num_args=2, max_entries=5000, tree=True) async def get_global_account_data_by_type_for_user( - self, data_type: str, user_id: str + self, user_id: str, data_type: str ) -> Optional[JsonDict]: """ Returns: @@ -392,7 +392,7 @@ class AccountDataWorkerStore(CacheInvalidationWorkerStore): for row in rows: if not row.room_id: self.get_global_account_data_by_type_for_user.invalidate( - (row.data_type, row.user_id) + (row.user_id, row.data_type) ) self.get_account_data_for_user.invalidate((row.user_id,)) self.get_account_data_for_room.invalidate((row.user_id, row.room_id)) @@ -476,7 +476,7 @@ class AccountDataWorkerStore(CacheInvalidationWorkerStore): self._account_data_stream_cache.entity_has_changed(user_id, next_id) self.get_account_data_for_user.invalidate((user_id,)) self.get_global_account_data_by_type_for_user.invalidate( - (account_data_type, user_id) + (user_id, account_data_type) ) return self._account_data_id_gen.get_current_token() diff --git a/synapse/visibility.py b/synapse/visibility.py index 17532059e9..1b970ce479 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -87,7 +87,7 @@ async def filter_events_for_client( ) ignore_dict_content = await storage.main.get_global_account_data_by_type_for_user( - AccountDataTypes.IGNORED_USER_LIST, user_id + user_id, AccountDataTypes.IGNORED_USER_LIST ) ignore_list: FrozenSet[str] = frozenset() diff --git a/tests/replication/slave/storage/test_account_data.py b/tests/replication/slave/storage/test_account_data.py index 43e3248703..1524087c43 100644 --- a/tests/replication/slave/storage/test_account_data.py +++ b/tests/replication/slave/storage/test_account_data.py @@ -30,7 +30,7 @@ class SlavedAccountDataStoreTestCase(BaseSlavedStoreTestCase): ) self.replicate() self.check( - "get_global_account_data_by_type_for_user", [TYPE, USER_ID], {"a": 1} + "get_global_account_data_by_type_for_user", [USER_ID, TYPE], {"a": 1} ) self.get_success( @@ -38,5 +38,5 @@ class SlavedAccountDataStoreTestCase(BaseSlavedStoreTestCase): ) self.replicate() self.check( - "get_global_account_data_by_type_for_user", [TYPE, USER_ID], {"a": 2} + "get_global_account_data_by_type_for_user", [USER_ID, TYPE], {"a": 2} ) -- cgit 1.4.1 From c027bc0e4b071d3d40c7b0c1e7011ad5c8c3d0a0 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Fri, 21 Jan 2022 09:10:01 +0000 Subject: Add `FrozenEvent.get_state_key` and use it in 
a couple of places (#11793) This is more efficient, since we only have to look up `state_key` in the event dict once, rather than three (!) times. --- changelog.d/11793.misc | 1 + synapse/events/__init__.py | 13 +++++++++---- synapse/events/snapshot.py | 2 +- 3 files changed, 11 insertions(+), 5 deletions(-) create mode 100644 changelog.d/11793.misc (limited to 'synapse') diff --git a/changelog.d/11793.misc b/changelog.d/11793.misc new file mode 100644 index 0000000000..fc0530bf2c --- /dev/null +++ b/changelog.d/11793.misc @@ -0,0 +1 @@ +Add `FrozenEvent.get_state_key` and use it in a couple of places. diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 38f3cf4d33..9acb3c0cc4 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -315,10 +315,11 @@ class EventBase(metaclass=abc.ABCMeta): redacts: DefaultDictProperty[Optional[str]] = DefaultDictProperty("redacts", None) room_id: DictProperty[str] = DictProperty("room_id") sender: DictProperty[str] = DictProperty("sender") - # TODO state_key should be Optional[str], this is generally asserted in Synapse - # by calling is_state() first (which ensures this), but it is hard (not possible?) + # TODO state_key should be Optional[str]. This is generally asserted in Synapse + # by calling is_state() first (which ensures it is not None), but it is hard (not possible?) # to properly annotate that calling is_state() asserts that state_key exists - # and is non-None. + # and is non-None. It would be better to replace such direct references with + # get_state_key() (and a check for None). state_key: DictProperty[str] = DictProperty("state_key") type: DictProperty[str] = DictProperty("type") user_id: DictProperty[str] = DictProperty("sender") @@ -332,7 +333,11 @@ class EventBase(metaclass=abc.ABCMeta): return self.content["membership"] def is_state(self) -> bool: - return hasattr(self, "state_key") and self.state_key is not None + return self.get_state_key() is not None + + def get_state_key(self) -> Optional[str]: + """Get the state key of this event, or None if it's not a state event""" + return self._dict.get("state_key") def get_dict(self) -> JsonDict: d = dict(self._dict) diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py index 0eab1aefd6..5833fee25f 100644 --- a/synapse/events/snapshot.py +++ b/synapse/events/snapshot.py @@ -163,7 +163,7 @@ class EventContext: return { "prev_state_id": prev_state_id, "event_type": event.type, - "event_state_key": event.state_key if event.is_state() else None, + "event_state_key": event.get_state_key(), "state_group": self._state_group, "state_group_before_event": self.state_group_before_event, "rejected": self.rejected, -- cgit 1.4.1 From 227727548546c12e644721d0380be056eead48b0 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Fri, 21 Jan 2022 09:18:10 +0000 Subject: Stop reading from `event_reference_hashes` (#11794) Preparation for dropping this table altogether. Part of #6574. 
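Context for the change below: an event's reference hash is derived from the event itself — essentially a SHA-256 over the canonical JSON of the redacted event — so it can always be recomputed on demand and never needs to be read back from a table. A rough sketch of that computation, simplified from `synapse.crypto.event_signing.compute_event_reference_hash` (the full redaction algorithm is skipped here, so treat this as an approximation rather than the real helper):

```python
import hashlib
import json
from typing import Tuple


def compute_reference_hash_sketch(event_dict: dict) -> Tuple[str, bytes]:
    """Approximate an event reference hash: SHA-256 over the canonical JSON
    of the event, minus the fields the hash does not cover. The real helper
    also applies the full event-redaction algorithm first."""
    event = dict(event_dict)
    # Signatures, existing hashes and unsigned data are not hashed.
    for key in ("signatures", "hashes", "unsigned"):
        event.pop(key, None)
    # Canonical JSON: sorted keys, minimal separators, raw UTF-8.
    canonical = json.dumps(
        event, sort_keys=True, separators=(",", ":"), ensure_ascii=False
    )
    return "sha256", hashlib.sha256(canonical.encode("utf-8")).digest()
```

With that in mind, the patch simply loads each requested event and recomputes its hash, returning an empty mapping for event IDs it does not know about.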
--- changelog.d/11794.misc | 1 + synapse/replication/slave/storage/events.py | 2 +- synapse/storage/databases/main/event_federation.py | 2 +- synapse/storage/databases/main/signatures.py | 54 ++++++++++------------ synapse/storage/schema/__init__.py | 5 +- 5 files changed, 31 insertions(+), 33 deletions(-) create mode 100644 changelog.d/11794.misc (limited to 'synapse') diff --git a/changelog.d/11794.misc b/changelog.d/11794.misc new file mode 100644 index 0000000000..29826bc0e5 --- /dev/null +++ b/changelog.d/11794.misc @@ -0,0 +1 @@ +Preparation for database schema simplifications: stop reading from `event_reference_hashes`. diff --git a/synapse/replication/slave/storage/events.py b/synapse/replication/slave/storage/events.py index 0f08372694..a72dad7464 100644 --- a/synapse/replication/slave/storage/events.py +++ b/synapse/replication/slave/storage/events.py @@ -52,8 +52,8 @@ class SlavedEventStore( EventPushActionsWorkerStore, StreamWorkerStore, StateGroupWorkerStore, - EventsWorkerStore, SignatureWorkerStore, + EventsWorkerStore, UserErasureWorkerStore, RelationsWorkerStore, BaseSlavedStore, diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index 270b30800b..0856a9332a 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -65,7 +65,7 @@ class _NoChainCoverIndex(Exception): super().__init__("Unexpectedly no chain cover for events in %s" % (room_id,)) -class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBaseStore): +class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBaseStore): def __init__( self, database: DatabasePool, diff --git a/synapse/storage/databases/main/signatures.py b/synapse/storage/databases/main/signatures.py index 3201623fe4..0518b8b910 100644 --- a/synapse/storage/databases/main/signatures.py +++ b/synapse/storage/databases/main/signatures.py @@ -12,16 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, Iterable, List, Tuple +from typing import Collection, Dict, List, Tuple from unpaddedbase64 import encode_base64 -from synapse.storage._base import SQLBaseStore -from synapse.storage.types import Cursor +from synapse.crypto.event_signing import compute_event_reference_hash +from synapse.storage.databases.main.events_worker import ( + EventRedactBehaviour, + EventsWorkerStore, +) from synapse.util.caches.descriptors import cached, cachedList -class SignatureWorkerStore(SQLBaseStore): +class SignatureWorkerStore(EventsWorkerStore): @cached() def get_event_reference_hash(self, event_id): # This is a dummy function to allow get_event_reference_hashes @@ -32,7 +35,7 @@ class SignatureWorkerStore(SQLBaseStore): cached_method_name="get_event_reference_hash", list_name="event_ids", num_args=1 ) async def get_event_reference_hashes( - self, event_ids: Iterable[str] + self, event_ids: Collection[str] ) -> Dict[str, Dict[str, bytes]]: """Get all hashes for given events. @@ -41,18 +44,27 @@ class SignatureWorkerStore(SQLBaseStore): Returns: A mapping of event ID to a mapping of algorithm to hash. + Returns an empty dict for a given event id if that event is unknown. 
""" + events = await self.get_events( + event_ids, + redact_behaviour=EventRedactBehaviour.AS_IS, + allow_rejected=True, + ) - def f(txn): - return { - event_id: self._get_event_reference_hashes_txn(txn, event_id) - for event_id in event_ids - } + hashes: Dict[str, Dict[str, bytes]] = {} + for event_id in event_ids: + event = events.get(event_id) + if event is None: + hashes[event_id] = {} + else: + ref_alg, ref_hash_bytes = compute_event_reference_hash(event) + hashes[event_id] = {ref_alg: ref_hash_bytes} - return await self.db_pool.runInteraction("get_event_reference_hashes", f) + return hashes async def add_event_hashes( - self, event_ids: Iterable[str] + self, event_ids: Collection[str] ) -> List[Tuple[str, Dict[str, str]]]: """ @@ -70,24 +82,6 @@ class SignatureWorkerStore(SQLBaseStore): return list(encoded_hashes.items()) - def _get_event_reference_hashes_txn( - self, txn: Cursor, event_id: str - ) -> Dict[str, bytes]: - """Get all the hashes for a given PDU. - Args: - txn: - event_id: Id for the Event. - Returns: - A mapping of algorithm -> hash. - """ - query = ( - "SELECT algorithm, hash" - " FROM event_reference_hashes" - " WHERE event_id = ?" - ) - txn.execute(query, (event_id,)) - return {k: v for k, v in txn} - class SignatureStore(SignatureWorkerStore): """Persistence for event signatures and hashes""" diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index 2a3d47185a..166173ba37 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -SCHEMA_VERSION = 67 # remember to update the list below when updating +SCHEMA_VERSION = 68 # remember to update the list below when updating """Represents the expectations made by the codebase about the database schema This should be incremented whenever the codebase changes its requirements on the @@ -53,6 +53,9 @@ Changes in SCHEMA_VERSION = 66: Changes in SCHEMA_VERSION = 67: - state_events.prev_state is no longer written to. + +Changes in SCHEMA_VERSION = 68: + - event_reference_hashes is no longer read. """ -- cgit 1.4.1 From 9f2016e96e800460c390c2f2de85797910954ca6 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Fri, 21 Jan 2022 09:19:56 +0000 Subject: Drop unused table `public_room_list_stream`. (#11795) This is a follow-up to #10565. --- changelog.d/11795.misc | 1 + synapse/storage/databases/main/purge_events.py | 1 - synapse/storage/schema/__init__.py | 4 +++- .../main/delta/67/01drop_public_room_list_stream.sql | 18 ++++++++++++++++++ tests/rest/admin/test_room.py | 1 - 5 files changed, 22 insertions(+), 3 deletions(-) create mode 100644 changelog.d/11795.misc create mode 100644 synapse/storage/schema/main/delta/67/01drop_public_room_list_stream.sql (limited to 'synapse') diff --git a/changelog.d/11795.misc b/changelog.d/11795.misc new file mode 100644 index 0000000000..aeba317670 --- /dev/null +++ b/changelog.d/11795.misc @@ -0,0 +1 @@ +Drop unused table `public_room_list_stream`. 
diff --git a/synapse/storage/databases/main/purge_events.py b/synapse/storage/databases/main/purge_events.py index 91b0576b85..e87a8fb85d 100644 --- a/synapse/storage/databases/main/purge_events.py +++ b/synapse/storage/databases/main/purge_events.py @@ -390,7 +390,6 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore): "event_search", "events", "group_rooms", - "public_room_list_stream", "receipts_graph", "receipts_linearized", "room_aliases", diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index 166173ba37..75659f931c 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -60,7 +60,9 @@ Changes in SCHEMA_VERSION = 68: SCHEMA_COMPAT_VERSION = ( - 61 # 61: Remove unused tables `user_stats_historical` and `room_stats_historical` + # we have removed the public_room_list_stream table, so are now incompatible with + # synapses wth SCHEMA_VERSION < 63. + 63 ) """Limit on how far the synapse codebase can be rolled back without breaking db compat diff --git a/synapse/storage/schema/main/delta/67/01drop_public_room_list_stream.sql b/synapse/storage/schema/main/delta/67/01drop_public_room_list_stream.sql new file mode 100644 index 0000000000..1eb8de9907 --- /dev/null +++ b/synapse/storage/schema/main/delta/67/01drop_public_room_list_stream.sql @@ -0,0 +1,18 @@ +/* Copyright 2022 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- this table is unused as of Synapse 1.41 +DROP TABLE public_room_list_stream; + diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py index 3495a0366a..23da0ad736 100644 --- a/tests/rest/admin/test_room.py +++ b/tests/rest/admin/test_room.py @@ -2468,7 +2468,6 @@ PURGE_TABLES = [ "event_search", "events", "group_rooms", - "public_room_list_stream", "receipts_graph", "receipts_linearized", "room_aliases", -- cgit 1.4.1 From b784299cbc121d27d7dadd0a4a96f4657244a4e9 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Fri, 21 Jan 2022 05:31:31 -0500 Subject: Do not try to serialize raw aggregations dict. (#11791) --- changelog.d/11612.bugfix | 1 + changelog.d/11612.misc | 1 - changelog.d/11791.bugfix | 1 + synapse/events/utils.py | 4 +- synapse/rest/admin/rooms.py | 13 ++--- synapse/rest/client/room.py | 11 ++-- tests/rest/client/test_relations.py | 108 ++++++++++++++++++++++++------------ 7 files changed, 85 insertions(+), 54 deletions(-) create mode 100644 changelog.d/11612.bugfix delete mode 100644 changelog.d/11612.misc create mode 100644 changelog.d/11791.bugfix (limited to 'synapse') diff --git a/changelog.d/11612.bugfix b/changelog.d/11612.bugfix new file mode 100644 index 0000000000..842f6892fd --- /dev/null +++ b/changelog.d/11612.bugfix @@ -0,0 +1 @@ +Include the bundled aggregations in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). 
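The substance of the fix: bundled aggregations are not part of the event body, so they have to be attached under `unsigned["m.relations"]` while each event is serialized, not returned to clients as a raw dict. A minimal sketch of that injection step over plain-dict events (the real logic lives in `EventClientSerializer._inject_bundled_aggregations` and also handles edits, threads and server-computed fields):

```python
from typing import Any, Dict, Optional

JsonDict = Dict[str, Any]


def serialize_with_aggregations(
    event: JsonDict, aggregations: Optional[JsonDict]
) -> JsonDict:
    """Attach any bundled aggregations for an event under
    unsigned["m.relations"], per MSC2675. Sketch only."""
    serialized = dict(event)
    if aggregations:
        serialized.setdefault("unsigned", {})["m.relations"] = aggregations
    return serialized
```

Hence the servlet changes below: each handler result's `aggregations` entry is popped out and passed to the serializer as `bundle_aggregations` instead of being serialized verbatim.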
diff --git a/changelog.d/11612.misc b/changelog.d/11612.misc deleted file mode 100644 index 2d886169c5..0000000000 --- a/changelog.d/11612.misc +++ /dev/null @@ -1 +0,0 @@ -Avoid database access in the JSON serialization process. diff --git a/changelog.d/11791.bugfix b/changelog.d/11791.bugfix new file mode 100644 index 0000000000..842f6892fd --- /dev/null +++ b/changelog.d/11791.bugfix @@ -0,0 +1 @@ +Include the bundled aggregations in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). diff --git a/synapse/events/utils.py b/synapse/events/utils.py index de0e0c1731..918adeecf8 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -402,7 +402,7 @@ class EventClientSerializer: if bundle_aggregations: event_aggregations = bundle_aggregations.get(event.event_id) if event_aggregations: - self._injected_bundled_aggregations( + self._inject_bundled_aggregations( event, time_now, bundle_aggregations[event.event_id], @@ -411,7 +411,7 @@ class EventClientSerializer: return serialized_event - def _injected_bundled_aggregations( + def _inject_bundled_aggregations( self, event: EventBase, time_now: int, diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py index 2e714ac87b..efe25fe7eb 100644 --- a/synapse/rest/admin/rooms.py +++ b/synapse/rest/admin/rooms.py @@ -744,20 +744,15 @@ class RoomEventContextServlet(RestServlet): ) time_now = self.clock.time_msec() + aggregations = results.pop("aggregations", None) results["events_before"] = self._event_serializer.serialize_events( - results["events_before"], - time_now, - bundle_aggregations=results["aggregations"], + results["events_before"], time_now, bundle_aggregations=aggregations ) results["event"] = self._event_serializer.serialize_event( - results["event"], - time_now, - bundle_aggregations=results["aggregations"], + results["event"], time_now, bundle_aggregations=aggregations ) results["events_after"] = self._event_serializer.serialize_events( - results["events_after"], - time_now, - bundle_aggregations=results["aggregations"], + results["events_after"], time_now, bundle_aggregations=aggregations ) results["state"] = self._event_serializer.serialize_events( results["state"], time_now diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index 31fd329a38..90bb9142a0 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -714,18 +714,15 @@ class RoomEventContextServlet(RestServlet): raise SynapseError(404, "Event not found.", errcode=Codes.NOT_FOUND) time_now = self.clock.time_msec() + aggregations = results.pop("aggregations", None) results["events_before"] = self._event_serializer.serialize_events( - results["events_before"], - time_now, - bundle_aggregations=results["aggregations"], + results["events_before"], time_now, bundle_aggregations=aggregations ) results["event"] = self._event_serializer.serialize_event( - results["event"], time_now, bundle_aggregations=results["aggregations"] + results["event"], time_now, bundle_aggregations=aggregations ) results["events_after"] = self._event_serializer.serialize_events( - results["events_after"], - time_now, - bundle_aggregations=results["aggregations"], + results["events_after"], time_now, bundle_aggregations=aggregations ) results["state"] = self._event_serializer.serialize_events( results["state"], time_now diff --git a/tests/rest/client/test_relations.py b/tests/rest/client/test_relations.py index 4b20ab0e3e..c9b220e73d 100644 --- a/tests/rest/client/test_relations.py +++ 
b/tests/rest/client/test_relations.py @@ -21,6 +21,7 @@ from unittest.mock import patch from synapse.api.constants import EventTypes, RelationTypes from synapse.rest import admin from synapse.rest.client import login, register, relations, room, sync +from synapse.types import JsonDict from tests import unittest from tests.server import FakeChannel @@ -454,7 +455,14 @@ class RelationsTestCase(unittest.HomeserverTestCase): @unittest.override_config({"experimental_features": {"msc3440_enabled": True}}) def test_bundled_aggregations(self): - """Test that annotations, references, and threads get correctly bundled.""" + """ + Test that annotations, references, and threads get correctly bundled. + + Note that this doesn't test against /relations since only thread relations + get bundled via that API. See test_aggregation_get_event_for_thread. + + See test_edit for a similar test for edits. + """ # Setup by sending a variety of relations. channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", "a") self.assertEquals(200, channel.code, channel.json_body) @@ -482,12 +490,13 @@ class RelationsTestCase(unittest.HomeserverTestCase): self.assertEquals(200, channel.code, channel.json_body) thread_2 = channel.json_body["event_id"] - def assert_bundle(actual): + def assert_bundle(event_json: JsonDict) -> None: """Assert the expected values of the bundled aggregations.""" + relations_dict = event_json["unsigned"].get("m.relations") # Ensure the fields are as expected. self.assertCountEqual( - actual.keys(), + relations_dict.keys(), ( RelationTypes.ANNOTATION, RelationTypes.REFERENCE, @@ -503,20 +512,20 @@ class RelationsTestCase(unittest.HomeserverTestCase): {"type": "m.reaction", "key": "b", "count": 1}, ] }, - actual[RelationTypes.ANNOTATION], + relations_dict[RelationTypes.ANNOTATION], ) self.assertEquals( {"chunk": [{"event_id": reply_1}, {"event_id": reply_2}]}, - actual[RelationTypes.REFERENCE], + relations_dict[RelationTypes.REFERENCE], ) self.assertEquals( 2, - actual[RelationTypes.THREAD].get("count"), + relations_dict[RelationTypes.THREAD].get("count"), ) self.assertTrue( - actual[RelationTypes.THREAD].get("current_user_participated") + relations_dict[RelationTypes.THREAD].get("current_user_participated") ) # The latest thread event has some fields that don't matter. self.assert_dict( @@ -533,20 +542,9 @@ class RelationsTestCase(unittest.HomeserverTestCase): "type": "m.room.test", "user_id": self.user_id, }, - actual[RelationTypes.THREAD].get("latest_event"), + relations_dict[RelationTypes.THREAD].get("latest_event"), ) - def _find_and_assert_event(events): - """ - Find the parent event in a chunk of events and assert that it has the proper bundled aggregations. - """ - for event in events: - if event["event_id"] == self.parent_id: - break - else: - raise AssertionError(f"Event {self.parent_id} not found in chunk") - assert_bundle(event["unsigned"].get("m.relations")) - # Request the event directly. channel = self.make_request( "GET", @@ -554,7 +552,7 @@ class RelationsTestCase(unittest.HomeserverTestCase): access_token=self.user_token, ) self.assertEquals(200, channel.code, channel.json_body) - assert_bundle(channel.json_body["unsigned"].get("m.relations")) + assert_bundle(channel.json_body) # Request the room messages. 
channel = self.make_request( @@ -563,7 +561,7 @@ class RelationsTestCase(unittest.HomeserverTestCase): access_token=self.user_token, ) self.assertEquals(200, channel.code, channel.json_body) - _find_and_assert_event(channel.json_body["chunk"]) + assert_bundle(self._find_event_in_chunk(channel.json_body["chunk"])) # Request the room context. channel = self.make_request( @@ -572,17 +570,14 @@ class RelationsTestCase(unittest.HomeserverTestCase): access_token=self.user_token, ) self.assertEquals(200, channel.code, channel.json_body) - assert_bundle(channel.json_body["event"]["unsigned"].get("m.relations")) + assert_bundle(channel.json_body["event"]) # Request sync. channel = self.make_request("GET", "/sync", access_token=self.user_token) self.assertEquals(200, channel.code, channel.json_body) room_timeline = channel.json_body["rooms"]["join"][self.room]["timeline"] self.assertTrue(room_timeline["limited"]) - _find_and_assert_event(room_timeline["events"]) - - # Note that /relations is tested separately in test_aggregation_get_event_for_thread - # since it needs different data configured. + self._find_event_in_chunk(room_timeline["events"]) def test_aggregation_get_event_for_annotation(self): """Test that annotations do not get bundled aggregations included @@ -777,25 +772,58 @@ class RelationsTestCase(unittest.HomeserverTestCase): edit_event_id = channel.json_body["event_id"] + def assert_bundle(event_json: JsonDict) -> None: + """Assert the expected values of the bundled aggregations.""" + relations_dict = event_json["unsigned"].get("m.relations") + self.assertIn(RelationTypes.REPLACE, relations_dict) + + m_replace_dict = relations_dict[RelationTypes.REPLACE] + for key in ["event_id", "sender", "origin_server_ts"]: + self.assertIn(key, m_replace_dict) + + self.assert_dict( + {"event_id": edit_event_id, "sender": self.user_id}, m_replace_dict + ) + channel = self.make_request( "GET", - "/rooms/%s/event/%s" % (self.room, self.parent_id), + f"/rooms/{self.room}/event/{self.parent_id}", access_token=self.user_token, ) self.assertEquals(200, channel.code, channel.json_body) - self.assertEquals(channel.json_body["content"], new_body) + assert_bundle(channel.json_body) - relations_dict = channel.json_body["unsigned"].get("m.relations") - self.assertIn(RelationTypes.REPLACE, relations_dict) + # Request the room messages. + channel = self.make_request( + "GET", + f"/rooms/{self.room}/messages?dir=b", + access_token=self.user_token, + ) + self.assertEquals(200, channel.code, channel.json_body) + assert_bundle(self._find_event_in_chunk(channel.json_body["chunk"])) - m_replace_dict = relations_dict[RelationTypes.REPLACE] - for key in ["event_id", "sender", "origin_server_ts"]: - self.assertIn(key, m_replace_dict) + # Request the room context. + channel = self.make_request( + "GET", + f"/rooms/{self.room}/context/{self.parent_id}", + access_token=self.user_token, + ) + self.assertEquals(200, channel.code, channel.json_body) + assert_bundle(channel.json_body["event"]) - self.assert_dict( - {"event_id": edit_event_id, "sender": self.user_id}, m_replace_dict + # Request sync, but limit the timeline so it becomes limited (and includes + # bundled aggregations). 
+ filter = urllib.parse.quote_plus( + '{"room": {"timeline": {"limit": 2}}}'.encode() + ) + channel = self.make_request( + "GET", f"/sync?filter={filter}", access_token=self.user_token ) + self.assertEquals(200, channel.code, channel.json_body) + room_timeline = channel.json_body["rooms"]["join"][self.room]["timeline"] + self.assertTrue(room_timeline["limited"]) + assert_bundle(self._find_event_in_chunk(room_timeline["events"])) def test_multi_edit(self): """Test that multiple edits, including attempts by people who @@ -1102,6 +1130,16 @@ class RelationsTestCase(unittest.HomeserverTestCase): self.assertEquals(200, channel.code, channel.json_body) self.assertEquals(channel.json_body["chunk"], []) + def _find_event_in_chunk(self, events: List[JsonDict]) -> JsonDict: + """ + Find the parent event in a chunk of events and assert that it has the proper bundled aggregations. + """ + for event in events: + if event["event_id"] == self.parent_id: + return event + + raise AssertionError(f"Event {self.parent_id} not found in chunk") + def _send_relation( self, relation_type: str, -- cgit 1.4.1 From 2aa37a4250675f6d9feb57ec0dce65b2a6a3cde6 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Fri, 21 Jan 2022 12:21:28 +0000 Subject: Add `state_key` and `rejection_reason` to `events` (#11792) ... and start populating them for new events --- changelog.d/11792.misc | 1 + synapse/storage/databases/main/events.py | 7 +++++- synapse/storage/schema/__init__.py | 8 ++++--- .../schema/main/delta/68/01event_columns.sql | 26 ++++++++++++++++++++++ tests/storage/test_event_chain.py | 5 ++++- 5 files changed, 42 insertions(+), 5 deletions(-) create mode 100644 changelog.d/11792.misc create mode 100644 synapse/storage/schema/main/delta/68/01event_columns.sql (limited to 'synapse') diff --git a/changelog.d/11792.misc b/changelog.d/11792.misc new file mode 100644 index 0000000000..6aa1cd61c3 --- /dev/null +++ b/changelog.d/11792.misc @@ -0,0 +1 @@ +Preparation for database schema simplifications: add `state_key` and `rejection_reason` columns to `events` table. diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index 1ae1ebe108..b7554154ac 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -1389,6 +1389,8 @@ class PersistEventsStore: "received_ts", "sender", "contains_url", + "state_key", + "rejection_reason", ), values=( ( @@ -1405,8 +1407,10 @@ class PersistEventsStore: self._clock.time_msec(), event.sender, "url" in event.content and isinstance(event.content["url"], str), + event.get_state_key(), + context.rejected or None, ) - for event, _ in events_and_contexts + for event, context in events_and_contexts ), ) @@ -1456,6 +1460,7 @@ class PersistEventsStore: for event, context in events_and_contexts: if context.rejected: # Insert the event_id into the rejections table + # (events.rejection_reason has already been done) self._store_rejections_txn(txn, event.event_id, context.rejected) to_remove.add(event) diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index 75659f931c..7b21c1b96d 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -56,13 +56,15 @@ Changes in SCHEMA_VERSION = 67: Changes in SCHEMA_VERSION = 68: - event_reference_hashes is no longer read. + - `events` has `state_key` and `rejection_reason` columns, which are populated for + new events. 
""" SCHEMA_COMPAT_VERSION = ( - # we have removed the public_room_list_stream table, so are now incompatible with - # synapses wth SCHEMA_VERSION < 63. - 63 + # we now have `state_key` columns in both `events` and `state_events`, so + # now incompatible with synapses wth SCHEMA_VERSION < 66. + 66 ) """Limit on how far the synapse codebase can be rolled back without breaking db compat diff --git a/synapse/storage/schema/main/delta/68/01event_columns.sql b/synapse/storage/schema/main/delta/68/01event_columns.sql new file mode 100644 index 0000000000..7c072f972e --- /dev/null +++ b/synapse/storage/schema/main/delta/68/01event_columns.sql @@ -0,0 +1,26 @@ +/* Copyright 2022 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- Add new colums to the `events` table which will (one day) make the `state_events` +-- and `rejections` tables redundant. + +ALTER TABLE events + -- if this event is a state event, its state key + ADD COLUMN state_key TEXT DEFAULT NULL; + + +ALTER TABLE events + -- if this event was rejected, the reason it was rejected. + ADD COLUMN rejection_reason TEXT DEFAULT NULL; diff --git a/tests/storage/test_event_chain.py b/tests/storage/test_event_chain.py index 7b7f6c349e..e3273a93f9 100644 --- a/tests/storage/test_event_chain.py +++ b/tests/storage/test_event_chain.py @@ -19,6 +19,7 @@ from twisted.trial import unittest from synapse.api.constants import EventTypes from synapse.api.room_versions import RoomVersions from synapse.events import EventBase +from synapse.events.snapshot import EventContext from synapse.rest import admin from synapse.rest.client import login, room from synapse.storage.databases.main.events import _LinkMap @@ -391,7 +392,9 @@ class EventChainStoreTestCase(HomeserverTestCase): def _persist(txn): # We need to persist the events to the events and state_events # tables. - persist_events_store._store_event_txn(txn, [(e, {}) for e in events]) + persist_events_store._store_event_txn( + txn, [(e, EventContext()) for e in events] + ) # Actually call the function that calculates the auth chain stuff. persist_events_store._persist_event_auth_chain_txn(txn, events) -- cgit 1.4.1 From 9006ee36d1d3d83ffaf1cce2ac9d70ff2d29df51 Mon Sep 17 00:00:00 2001 From: Shay Date: Fri, 21 Jan 2022 14:23:26 -0800 Subject: Drop support for and remove references to EOL Python 3.6 (#11683) * remove reference in comments to python3.6 * upgrade tox python env in script * bump python version in example for completeness * upgrade python version requirement in setup doc * upgrade necessary python version in __init__.py * upgrade python version in setup.py * newsfragment * drops refs to bionic and replace with focal * bump refs to postgres 9.6 to 10 * fix hanging ci * try installing tzdata first * revert change made in b979f336 * ignore new random mypy error while debugging other error * fix lint error for temporary workaround * revert change to install list * try passing env var * export debian frontend var? 
* move line and add comment * bump pillow dependency * bump lxml depenency * install libjpeg-dev for pillow * bump automat version to one compatible with py3.8 * add libwebp for pillow * bump twisted trunk python version * change suffix of newsfragment * remove redundant python 3.7 checks * lint --- .ci/scripts/test_old_deps.sh | 8 +++++--- .github/workflows/tests.yml | 8 ++++---- .github/workflows/twisted_trunk.yml | 2 +- changelog.d/11683.removal | 1 + docker/Dockerfile-pgtests | 2 +- docker/run_pg_tests.sh | 2 +- docs/admin_api/version_api.md | 2 +- setup.py | 2 +- synapse/__init__.py | 4 ++-- synapse/app/_base.py | 11 +++-------- synapse/python_dependencies.py | 4 ++-- synapse/storage/engines/postgres.py | 4 ++-- tox.ini | 3 +-- 13 files changed, 25 insertions(+), 28 deletions(-) create mode 100644 changelog.d/11683.removal (limited to 'synapse') diff --git a/.ci/scripts/test_old_deps.sh b/.ci/scripts/test_old_deps.sh index 8b473936f8..a54aa86fbc 100755 --- a/.ci/scripts/test_old_deps.sh +++ b/.ci/scripts/test_old_deps.sh @@ -1,12 +1,14 @@ #!/usr/bin/env bash - -# this script is run by GitHub Actions in a plain `bionic` container; it installs the +# this script is run by GitHub Actions in a plain `focal` container; it installs the # minimal requirements for tox and hands over to the py3-old tox environment. +# Prevent tzdata from asking for user input +export DEBIAN_FRONTEND=noninteractive + set -ex apt-get update -apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox +apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox libjpeg-dev libwebp-dev export LANG="C.UTF-8" diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 4f58069702..e47671102e 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -141,7 +141,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Test with old deps - uses: docker://ubuntu:bionic # For old python and sqlite + uses: docker://ubuntu:focal # For old python and sqlite with: workdir: /github/workspace entrypoint: .ci/scripts/test_old_deps.sh @@ -213,15 +213,15 @@ jobs: fail-fast: false matrix: include: - - sytest-tag: bionic + - sytest-tag: focal - - sytest-tag: bionic + - sytest-tag: focal postgres: postgres - sytest-tag: testing postgres: postgres - - sytest-tag: bionic + - sytest-tag: focal postgres: multi-postgres workers: workers diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index e974ac7aba..fb9d46b7bf 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -25,7 +25,7 @@ jobs: - run: sudo apt-get -qq install xmlsec1 - uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: 3.7 - run: .ci/patch_for_twisted_trunk.sh - run: pip install tox - run: tox -e py diff --git a/changelog.d/11683.removal b/changelog.d/11683.removal new file mode 100644 index 0000000000..b1f048f7f5 --- /dev/null +++ b/changelog.d/11683.removal @@ -0,0 +1 @@ +Drop support for Python 3.6, which is EOL. 
\ No newline at end of file diff --git a/docker/Dockerfile-pgtests b/docker/Dockerfile-pgtests index 92b804d193..b94484ea7f 100644 --- a/docker/Dockerfile-pgtests +++ b/docker/Dockerfile-pgtests @@ -1,6 +1,6 @@ # Use the Sytest image that comes with a lot of the build dependencies # pre-installed -FROM matrixdotorg/sytest:bionic +FROM matrixdotorg/sytest:focal # The Sytest image doesn't come with python, so install that RUN apt-get update && apt-get -qq install -y python3 python3-dev python3-pip diff --git a/docker/run_pg_tests.sh b/docker/run_pg_tests.sh index 58e2177d34..b22b6ef16b 100755 --- a/docker/run_pg_tests.sh +++ b/docker/run_pg_tests.sh @@ -16,4 +16,4 @@ sudo -u postgres /usr/lib/postgresql/10/bin/pg_ctl -w -D /var/lib/postgresql/dat # Run the tests cd /src export TRIAL_FLAGS="-j 4" -tox --workdir=./.tox-pg-container -e py36-postgres "$@" +tox --workdir=./.tox-pg-container -e py37-postgres "$@" diff --git a/docs/admin_api/version_api.md b/docs/admin_api/version_api.md index efb4a0c0f7..27977de0d3 100644 --- a/docs/admin_api/version_api.md +++ b/docs/admin_api/version_api.md @@ -16,6 +16,6 @@ It returns a JSON body like the following: ```json { "server_version": "0.99.2rc1 (b=develop, abcdef123)", - "python_version": "3.6.8" + "python_version": "3.7.8" } ``` diff --git a/setup.py b/setup.py index e618ff898b..d0511c767f 100755 --- a/setup.py +++ b/setup.py @@ -150,7 +150,7 @@ setup( zip_safe=False, long_description=long_description, long_description_content_type="text/x-rst", - python_requires="~=3.6", + python_requires="~=3.7", entry_points={ "console_scripts": [ "synapse_homeserver = synapse.app.homeserver:main", diff --git a/synapse/__init__.py b/synapse/__init__.py index 3d0d165f48..4ef8728018 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -21,8 +21,8 @@ import os import sys # Check that we're not running on an unsupported Python version. -if sys.version_info < (3, 6): - print("Synapse requires Python 3.6 or above.") +if sys.version_info < (3, 7): + print("Synapse requires Python 3.7 or above.") sys.exit(1) # Twisted and canonicaljson will fail to import when this file is executed to diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 579adbbca0..e5ee03b79f 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -16,7 +16,6 @@ import atexit import gc import logging import os -import platform import signal import socket import sys @@ -468,16 +467,12 @@ async def start(hs: "HomeServer") -> None: # everything currently allocated are things that will be used for the # rest of time. Doing so means less work each GC (hopefully). # - # This only works on Python 3.7 - if platform.python_implementation() == "CPython" and sys.version_info >= (3, 7): - gc.collect() - gc.freeze() + gc.collect() + gc.freeze() # Speed up shutdowns by freezing all allocated objects. This moves everything # into the permanent generation and excludes them from the final GC. 
- # Unfortunately only works on Python 3.7 - if platform.python_implementation() == "CPython" and sys.version_info >= (3, 7): - atexit.register(gc.freeze) + atexit.register(gc.freeze) def setup_sentry(hs: "HomeServer") -> None: diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index d844fbb3b3..22b4606ae0 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -70,7 +70,7 @@ REQUIREMENTS = [ "pyasn1>=0.1.9", "pyasn1-modules>=0.0.7", "bcrypt>=3.1.0", - "pillow>=4.3.0", + "pillow>=5.4.0", "sortedcontainers>=1.4.4", "pymacaroons>=0.13.0", "msgpack>=0.5.2", @@ -107,7 +107,7 @@ CONDITIONAL_REQUIREMENTS = { # `systemd.journal.JournalHandler`, as is documented in # `contrib/systemd/log_config.yaml`. "systemd": ["systemd-python>=231"], - "url_preview": ["lxml>=3.5.0"], + "url_preview": ["lxml>=4.2.0"], "sentry": ["sentry-sdk>=0.7.2"], "opentracing": ["jaeger-client>=4.0.0", "opentracing>=2.2.0"], "jwt": ["pyjwt>=1.6.4"], diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py index 30f948a0f7..b3d71f661c 100644 --- a/synapse/storage/engines/postgres.py +++ b/synapse/storage/engines/postgres.py @@ -46,8 +46,8 @@ class PostgresEngine(BaseDatabaseEngine): self._version = db_conn.server_version # Are we on a supported PostgreSQL version? - if not allow_outdated_version and self._version < 90600: - raise RuntimeError("Synapse requires PostgreSQL 9.6 or above.") + if not allow_outdated_version and self._version < 100000: + raise RuntimeError("Synapse requires PostgreSQL 10 or above.") with db_conn.cursor() as txn: txn.execute("SHOW SERVER_ENCODING") diff --git a/tox.ini b/tox.ini index 2ffca14b22..32679e9106 100644 --- a/tox.ini +++ b/tox.ini @@ -117,8 +117,7 @@ usedevelop=true skip_install = true usedevelop = false deps = - # Old automat version for Twisted - Automat == 0.3.0 + Automat == 0.8.0 lxml {[base]deps} -- cgit 1.4.1 From df54c8485a286dbefaa038319399ef8985d5344e Mon Sep 17 00:00:00 2001 From: reivilibre Date: Mon, 24 Jan 2022 13:37:00 +0000 Subject: Remove account data (including client config, push rules and ignored users) upon user deactivation. (#11621) Co-authored-by: Patrick Cloke --- changelog.d/11621.feature | 1 + docs/admin_api/user_admin_api.md | 6 +- synapse/handlers/deactivate_account.py | 3 + synapse/storage/databases/main/account_data.py | 73 ++++++++- tests/handlers/test_deactivate_account.py | 219 +++++++++++++++++++++++++ 5 files changed, 299 insertions(+), 3 deletions(-) create mode 100644 changelog.d/11621.feature create mode 100644 tests/handlers/test_deactivate_account.py (limited to 'synapse') diff --git a/changelog.d/11621.feature b/changelog.d/11621.feature new file mode 100644 index 0000000000..dc426fb658 --- /dev/null +++ b/changelog.d/11621.feature @@ -0,0 +1 @@ +Remove account data (including client config, push rules and ignored users) upon user deactivation. \ No newline at end of file diff --git a/docs/admin_api/user_admin_api.md b/docs/admin_api/user_admin_api.md index c514cadb9d..fdc1f2d1cf 100644 --- a/docs/admin_api/user_admin_api.md +++ b/docs/admin_api/user_admin_api.md @@ -353,6 +353,11 @@ The following actions are performed when deactivating an user: - Remove the user from the user directory - Reject all pending invites - Remove all account validity information related to the user +- Remove the arbitrary data store known as *account data*. For example, this includes: + - list of ignored users; + - push rules; + - secret storage keys; and + - cross-signing keys. 
The following additional actions are performed during deactivation if `erase` is set to `true`: @@ -366,7 +371,6 @@ The following actions are **NOT** performed. The list may be incomplete. - Remove mappings of SSO IDs - [Delete media uploaded](#delete-media-uploaded-by-a-user) by user (included avatar images) - Delete sent and received messages -- Delete E2E cross-signing keys - Remove the user's creation (registration) timestamp - [Remove rate limit overrides](#override-ratelimiting-for-users) - Remove from monthly active users diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index bee62cf360..7a13d76a68 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -157,6 +157,9 @@ class DeactivateAccountHandler: # Mark the user as deactivated. await self.store.set_user_deactivated_status(user_id, True) + # Remove account data (including ignored users and push rules). + await self.store.purge_account_data_for_user(user_id) + return identity_server_supports_unbinding async def _reject_pending_invites_for_user(self, user_id: str) -> None: diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py index 9c19f0965f..5bfa408f74 100644 --- a/synapse/storage/databases/main/account_data.py +++ b/synapse/storage/databases/main/account_data.py @@ -26,6 +26,7 @@ from synapse.storage.database import ( LoggingTransaction, ) from synapse.storage.databases.main.cache import CacheInvalidationWorkerStore +from synapse.storage.databases.main.push_rule import PushRulesWorkerStore from synapse.storage.engines import PostgresEngine from synapse.storage.util.id_generators import ( AbstractStreamIdGenerator, @@ -44,7 +45,7 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) -class AccountDataWorkerStore(CacheInvalidationWorkerStore): +class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore): def __init__( self, database: DatabasePool, @@ -179,7 +180,7 @@ class AccountDataWorkerStore(CacheInvalidationWorkerStore): else: return None - @cached(num_args=2) + @cached(num_args=2, tree=True) async def get_account_data_for_room( self, user_id: str, room_id: str ) -> Dict[str, JsonDict]: @@ -546,6 +547,74 @@ class AccountDataWorkerStore(CacheInvalidationWorkerStore): for ignored_user_id in previously_ignored_users ^ currently_ignored_users: self._invalidate_cache_and_stream(txn, self.ignored_by, (ignored_user_id,)) + async def purge_account_data_for_user(self, user_id: str) -> None: + """ + Removes the account data for a user. + + This is intended to be used upon user deactivation and also removes any + derived information from account data (e.g. push rules and ignored users). + + Args: + user_id: The user ID to remove data for. + """ + + def purge_account_data_for_user_txn(txn: LoggingTransaction) -> None: + # Purge from the primary account_data tables. + self.db_pool.simple_delete_txn( + txn, table="account_data", keyvalues={"user_id": user_id} + ) + + self.db_pool.simple_delete_txn( + txn, table="room_account_data", keyvalues={"user_id": user_id} + ) + + # Purge from ignored_users where this user is the ignorer. + # N.B. We don't purge where this user is the ignoree, because that + # interferes with other users' account data. + # It's also not this user's data to delete! 
+ self.db_pool.simple_delete_txn( + txn, table="ignored_users", keyvalues={"ignorer_user_id": user_id} + ) + + # Remove the push rules + self.db_pool.simple_delete_txn( + txn, table="push_rules", keyvalues={"user_name": user_id} + ) + self.db_pool.simple_delete_txn( + txn, table="push_rules_enable", keyvalues={"user_name": user_id} + ) + self.db_pool.simple_delete_txn( + txn, table="push_rules_stream", keyvalues={"user_id": user_id} + ) + + # Invalidate caches as appropriate + self._invalidate_cache_and_stream( + txn, self.get_account_data_for_room_and_type, (user_id,) + ) + self._invalidate_cache_and_stream( + txn, self.get_account_data_for_user, (user_id,) + ) + self._invalidate_cache_and_stream( + txn, self.get_global_account_data_by_type_for_user, (user_id,) + ) + self._invalidate_cache_and_stream( + txn, self.get_account_data_for_room, (user_id,) + ) + self._invalidate_cache_and_stream( + txn, self.get_push_rules_for_user, (user_id,) + ) + self._invalidate_cache_and_stream( + txn, self.get_push_rules_enabled_for_user, (user_id,) + ) + # This user might be contained in the ignored_by cache for other users, + # so we have to invalidate it all. + self._invalidate_all_cache_and_stream(txn, self.ignored_by) + + await self.db_pool.runInteraction( + "purge_account_data_for_user_txn", + purge_account_data_for_user_txn, + ) + class AccountDataStore(AccountDataWorkerStore): pass diff --git a/tests/handlers/test_deactivate_account.py b/tests/handlers/test_deactivate_account.py new file mode 100644 index 0000000000..3da597768c --- /dev/null +++ b/tests/handlers/test_deactivate_account.py @@ -0,0 +1,219 @@ +# Copyright 2021 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from http import HTTPStatus +from typing import Any, Dict + +from twisted.test.proto_helpers import MemoryReactor + +from synapse.api.constants import AccountDataTypes +from synapse.push.rulekinds import PRIORITY_CLASS_MAP +from synapse.rest import admin +from synapse.rest.client import account, login +from synapse.server import HomeServer +from synapse.util import Clock + +from tests.unittest import HomeserverTestCase + + +class DeactivateAccountTestCase(HomeserverTestCase): + servlets = [ + login.register_servlets, + admin.register_servlets, + account.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self._store = hs.get_datastore() + + self.user = self.register_user("user", "pass") + self.token = self.login("user", "pass") + + def _deactivate_my_account(self): + """ + Deactivates the account `self.user` using `self.token` and asserts + that it returns a 200 success code. 
+ """ + req = self.get_success( + self.make_request( + "POST", + "account/deactivate", + { + "auth": { + "type": "m.login.password", + "user": self.user, + "password": "pass", + }, + "erase": True, + }, + access_token=self.token, + ) + ) + self.assertEqual(req.code, HTTPStatus.OK, req) + + def test_global_account_data_deleted_upon_deactivation(self) -> None: + """ + Tests that global account data is removed upon deactivation. + """ + # Add some account data + self.get_success( + self._store.add_account_data_for_user( + self.user, + AccountDataTypes.DIRECT, + {"@someone:remote": ["!somewhere:remote"]}, + ) + ) + + # Check that we actually added some. + self.assertIsNotNone( + self.get_success( + self._store.get_global_account_data_by_type_for_user( + self.user, AccountDataTypes.DIRECT + ) + ), + ) + + # Request the deactivation of our account + self._deactivate_my_account() + + # Check that the account data does not persist. + self.assertIsNone( + self.get_success( + self._store.get_global_account_data_by_type_for_user( + self.user, AccountDataTypes.DIRECT + ) + ), + ) + + def test_room_account_data_deleted_upon_deactivation(self) -> None: + """ + Tests that room account data is removed upon deactivation. + """ + room_id = "!room:test" + + # Add some room account data + self.get_success( + self._store.add_account_data_to_room( + self.user, + room_id, + "m.fully_read", + {"event_id": "$aaaa:test"}, + ) + ) + + # Check that we actually added some. + self.assertIsNotNone( + self.get_success( + self._store.get_account_data_for_room_and_type( + self.user, room_id, "m.fully_read" + ) + ), + ) + + # Request the deactivation of our account + self._deactivate_my_account() + + # Check that the account data does not persist. + self.assertIsNone( + self.get_success( + self._store.get_account_data_for_room_and_type( + self.user, room_id, "m.fully_read" + ) + ), + ) + + def _is_custom_rule(self, push_rule: Dict[str, Any]) -> bool: + """ + Default rules start with a dot: such as .m.rule and .im.vector. + This function returns true iff a rule is custom (not default). + """ + return "/." not in push_rule["rule_id"] + + def test_push_rules_deleted_upon_account_deactivation(self) -> None: + """ + Push rules are a special case of account data. + They are stored separately but get sent to the client as account data in /sync. + This tests that deactivating a user deletes push rules along with the rest + of their account data. 
+ """ + + # Add a push rule + self.get_success( + self._store.add_push_rule( + self.user, + "personal.override.rule1", + PRIORITY_CLASS_MAP["override"], + [], + [], + ) + ) + + # Test the rule exists + push_rules = self.get_success(self._store.get_push_rules_for_user(self.user)) + # Filter out default rules; we don't care + push_rules = list(filter(self._is_custom_rule, push_rules)) + # Check our rule made it + self.assertEqual( + push_rules, + [ + { + "user_name": "@user:test", + "rule_id": "personal.override.rule1", + "priority_class": 5, + "priority": 0, + "conditions": [], + "actions": [], + "default": False, + } + ], + push_rules, + ) + + # Request the deactivation of our account + self._deactivate_my_account() + + push_rules = self.get_success(self._store.get_push_rules_for_user(self.user)) + # Filter out default rules; we don't care + push_rules = list(filter(self._is_custom_rule, push_rules)) + # Check our rule no longer exists + self.assertEqual(push_rules, [], push_rules) + + def test_ignored_users_deleted_upon_deactivation(self) -> None: + """ + Ignored users are a special case of account data. + They get denormalised into the `ignored_users` table upon being stored as + account data. + Test that a user's list of ignored users is deleted upon deactivation. + """ + + # Add an ignored user + self.get_success( + self._store.add_account_data_for_user( + self.user, + AccountDataTypes.IGNORED_USER_LIST, + {"ignored_users": {"@sheltie:test": {}}}, + ) + ) + + # Test the user is ignored + self.assertEqual( + self.get_success(self._store.ignored_by("@sheltie:test")), {self.user} + ) + + # Request the deactivation of our account + self._deactivate_my_account() + + # Test the user is no longer ignored by the user that was deactivated + self.assertEqual( + self.get_success(self._store.ignored_by("@sheltie:test")), set() + ) -- cgit 1.4.1 From 807efd26aec9b65c6a2f02d10fd139095a5b3387 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Mon, 24 Jan 2022 08:58:18 -0500 Subject: Support rendering previews with data: URLs in them (#11767) Images which are data URLs will no longer break URL previews and will properly be "downloaded" and thumbnailed. --- changelog.d/11767.bugfix | 1 + synapse/rest/media/v1/preview_html.py | 31 +- synapse/rest/media/v1/preview_url_resource.py | 224 ++++++++---- tests/rest/media/v1/test_html_preview.py | 481 ++++++++++++++++++++++++++ tests/rest/media/v1/test_url_preview.py | 81 ++++- tests/server.py | 2 +- tests/test_preview.py | 449 ------------------------ 7 files changed, 747 insertions(+), 522 deletions(-) create mode 100644 changelog.d/11767.bugfix create mode 100644 tests/rest/media/v1/test_html_preview.py delete mode 100644 tests/test_preview.py (limited to 'synapse') diff --git a/changelog.d/11767.bugfix b/changelog.d/11767.bugfix new file mode 100644 index 0000000000..3e344747f4 --- /dev/null +++ b/changelog.d/11767.bugfix @@ -0,0 +1 @@ +Fix a long-standing bug when previewing Reddit URLs which do not contain an image. diff --git a/synapse/rest/media/v1/preview_html.py b/synapse/rest/media/v1/preview_html.py index 30b067dd42..872a9e72e8 100644 --- a/synapse/rest/media/v1/preview_html.py +++ b/synapse/rest/media/v1/preview_html.py @@ -321,14 +321,33 @@ def _iterate_over_text( def rebase_url(url: str, base: str) -> str: - base_parts = list(urlparse.urlparse(base)) + """ + Resolves a potentially relative `url` against an absolute `base` URL. 
+
+    For example:
+
+        >>> rebase_url("subpage", "https://example.com/foo/")
+        'https://example.com/foo/subpage'
+        >>> rebase_url("sibling", "https://example.com/foo")
+        'https://example.com/sibling'
+        >>> rebase_url("/bar", "https://example.com/foo/")
+        'https://example.com/bar'
+        >>> rebase_url("https://alice.com/a/", "https://example.com/foo/")
+        'https://alice.com/a/'
+    """
+    base_parts = urlparse.urlparse(base)
+    # Convert the parsed URL to a list for (potential) modification.
     url_parts = list(urlparse.urlparse(url))
-    if not url_parts[0]:  # fix up schema
-        url_parts[0] = base_parts[0] or "http"
-    if not url_parts[1]:  # fix up hostname
-        url_parts[1] = base_parts[1]
+    # Add a scheme, if one does not exist.
+    if not url_parts[0]:
+        url_parts[0] = base_parts.scheme or "http"
+    # Fix up the hostname, if this is not a data URL.
+    if url_parts[0] != "data" and not url_parts[1]:
+        url_parts[1] = base_parts.netloc
+    # If the path does not start with a /, nest it under the base path's last
+    # directory.
     if not url_parts[2].startswith("/"):
-        url_parts[2] = re.sub(r"/[^/]+$", "/", base_parts[2]) + url_parts[2]
+        url_parts[2] = re.sub(r"/[^/]+$", "/", base_parts.path) + url_parts[2]
     return urlparse.urlunparse(url_parts)

diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index e8881bc870..efd84ced8f 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -21,8 +21,9 @@ import re
 import shutil
 import sys
 import traceback
-from typing import TYPE_CHECKING, Iterable, Optional, Tuple
+from typing import TYPE_CHECKING, BinaryIO, Iterable, Optional, Tuple
 from urllib import parse as urlparse
+from urllib.request import urlopen

 import attr

@@ -70,6 +71,17 @@ ONE_DAY = 24 * ONE_HOUR
 IMAGE_CACHE_EXPIRY_MS = 2 * ONE_DAY


+@attr.s(slots=True, frozen=True, auto_attribs=True)
+class DownloadResult:
+    length: int
+    uri: str
+    response_code: int
+    media_type: str
+    download_name: Optional[str]
+    expires: int
+    etag: Optional[str]
+
+
 @attr.s(slots=True, frozen=True, auto_attribs=True)
 class MediaInfo:
     """
@@ -256,7 +268,7 @@ class PreviewUrlResource(DirectServeJsonResource):
         if oembed_url:
             url_to_download = oembed_url

-        media_info = await self._download_url(url_to_download, user)
+        media_info = await self._handle_url(url_to_download, user)

         logger.debug("got media_info of '%s'", media_info)

@@ -297,7 +309,9 @@ class PreviewUrlResource(DirectServeJsonResource):
             oembed_url = self._oembed.autodiscover_from_html(tree)
             og_from_oembed: JsonDict = {}
             if oembed_url:
-                oembed_info = await self._download_url(oembed_url, user)
+                oembed_info = await self._handle_url(
+                    oembed_url, user, allow_data_urls=True
+                )
                 (
                     og_from_oembed,
                     author_name,
@@ -367,7 +381,135 @@ class PreviewUrlResource(DirectServeJsonResource):

         return jsonog.encode("utf8")

-    async def _download_url(self, url: str, user: UserID) -> MediaInfo:
+    async def _download_url(self, url: str, output_stream: BinaryIO) -> DownloadResult:
+        """
+        Fetches a remote URL and parses the headers.
+
+        Args:
+            url: The URL to fetch.
+            output_stream: The stream to write the content to.
+
+        Returns:
+            A DownloadResult of:
+                Media length, URL downloaded, the HTTP response code,
+                the media type, the downloaded file name, the number of
+                milliseconds the result is valid for, the etag header.
+ """ + + try: + logger.debug("Trying to get preview for url '%s'", url) + length, headers, uri, code = await self.client.get_file( + url, + output_stream=output_stream, + max_size=self.max_spider_size, + headers={"Accept-Language": self.url_preview_accept_language}, + ) + except SynapseError: + # Pass SynapseErrors through directly, so that the servlet + # handler will return a SynapseError to the client instead of + # blank data or a 500. + raise + except DNSLookupError: + # DNS lookup returned no results + # Note: This will also be the case if one of the resolved IP + # addresses is blacklisted + raise SynapseError( + 502, + "DNS resolution failure during URL preview generation", + Codes.UNKNOWN, + ) + except Exception as e: + # FIXME: pass through 404s and other error messages nicely + logger.warning("Error downloading %s: %r", url, e) + + raise SynapseError( + 500, + "Failed to download content: %s" + % (traceback.format_exception_only(sys.exc_info()[0], e),), + Codes.UNKNOWN, + ) + + if b"Content-Type" in headers: + media_type = headers[b"Content-Type"][0].decode("ascii") + else: + media_type = "application/octet-stream" + + download_name = get_filename_from_headers(headers) + + # FIXME: we should calculate a proper expiration based on the + # Cache-Control and Expire headers. But for now, assume 1 hour. + expires = ONE_HOUR + etag = headers[b"ETag"][0].decode("ascii") if b"ETag" in headers else None + + return DownloadResult( + length, uri, code, media_type, download_name, expires, etag + ) + + async def _parse_data_url( + self, url: str, output_stream: BinaryIO + ) -> DownloadResult: + """ + Parses a data: URL. + + Args: + url: The URL to parse. + output_stream: The stream to write the content to. + + Returns: + A tuple of: + Media length, URL downloaded, the HTTP response code, + the media type, the downloaded file name, the number of + milliseconds the result is valid for, the etag header. + """ + + try: + logger.debug("Trying to parse data url '%s'", url) + with urlopen(url) as url_info: + # TODO Can this be more efficient. + output_stream.write(url_info.read()) + except Exception as e: + logger.warning("Error parsing data: URL %s: %r", url, e) + + raise SynapseError( + 500, + "Failed to parse data URL: %s" + % (traceback.format_exception_only(sys.exc_info()[0], e),), + Codes.UNKNOWN, + ) + + return DownloadResult( + # Read back the length that has been written. + length=output_stream.tell(), + uri=url, + # If it was parsed, consider this a 200 OK. + response_code=200, + # urlopen shoves the media-type from the data URL into the content type + # header object. + media_type=url_info.headers.get_content_type(), + # Some features are not supported by data: URLs. + download_name=None, + expires=ONE_HOUR, + etag=None, + ) + + async def _handle_url( + self, url: str, user: UserID, allow_data_urls: bool = False + ) -> MediaInfo: + """ + Fetches content from a URL and parses the result to generate a MediaInfo. + + It uses the media storage provider to persist the fetched content and + stores the mapping into the database. + + Args: + url: The URL to fetch. + user: The user who ahs requested this URL. + allow_data_urls: True if data URLs should be allowed. + + Returns: + A MediaInfo object describing the fetched content. + """ + # TODO: we should probably honour robots.txt... except in practice # we're most likely being explicitly triggered by a human rather than a # bot, so are we really a robot? 
@@ -377,61 +519,27 @@ class PreviewUrlResource(DirectServeJsonResource): file_info = FileInfo(server_name=None, file_id=file_id, url_cache=True) with self.media_storage.store_into_file(file_info) as (f, fname, finish): - try: - logger.debug("Trying to get preview for url '%s'", url) - length, headers, uri, code = await self.client.get_file( - url, - output_stream=f, - max_size=self.max_spider_size, - headers={"Accept-Language": self.url_preview_accept_language}, - ) - except SynapseError: - # Pass SynapseErrors through directly, so that the servlet - # handler will return a SynapseError to the client instead of - # blank data or a 500. - raise - except DNSLookupError: - # DNS lookup returned no results - # Note: This will also be the case if one of the resolved IP - # addresses is blacklisted - raise SynapseError( - 502, - "DNS resolution failure during URL preview generation", - Codes.UNKNOWN, - ) - except Exception as e: - # FIXME: pass through 404s and other error messages nicely - logger.warning("Error downloading %s: %r", url, e) - - raise SynapseError( - 500, - "Failed to download content: %s" - % (traceback.format_exception_only(sys.exc_info()[0], e),), - Codes.UNKNOWN, - ) - await finish() + if url.startswith("data:"): + if not allow_data_urls: + raise SynapseError( + 500, "Previewing of data: URLs is forbidden", Codes.UNKNOWN + ) - if b"Content-Type" in headers: - media_type = headers[b"Content-Type"][0].decode("ascii") + download_result = await self._parse_data_url(url, f) else: - media_type = "application/octet-stream" + download_result = await self._download_url(url, f) - download_name = get_filename_from_headers(headers) - - # FIXME: we should calculate a proper expiration based on the - # Cache-Control and Expire headers. But for now, assume 1 hour. - expires = ONE_HOUR - etag = headers[b"ETag"][0].decode("ascii") if b"ETag" in headers else None + await finish() try: time_now_ms = self.clock.time_msec() await self.store.store_local_media( media_id=file_id, - media_type=media_type, + media_type=download_result.media_type, time_now_ms=time_now_ms, - upload_name=download_name, - media_length=length, + upload_name=download_result.download_name, + media_length=download_result.length, user_id=user, url_cache=url, ) @@ -444,16 +552,16 @@ class PreviewUrlResource(DirectServeJsonResource): raise return MediaInfo( - media_type=media_type, - media_length=length, - download_name=download_name, + media_type=download_result.media_type, + media_length=download_result.length, + download_name=download_result.download_name, created_ts_ms=time_now_ms, filesystem_id=file_id, filename=fname, - uri=uri, - response_code=code, - expires=expires, - etag=etag, + uri=download_result.uri, + response_code=download_result.response_code, + expires=download_result.expires, + etag=download_result.etag, ) async def _precache_image_url( @@ -474,8 +582,8 @@ class PreviewUrlResource(DirectServeJsonResource): # FIXME: it might be cleaner to use the same flow as the main /preview_url # request itself and benefit from the same caching etc. But for now we # just rely on the caching on the master request to speed things up. 
- image_info = await self._download_url( - rebase_url(og["og:image"], media_info.uri), user + image_info = await self._handle_url( + rebase_url(og["og:image"], media_info.uri), user, allow_data_urls=True ) if _is_media(image_info.media_type): diff --git a/tests/rest/media/v1/test_html_preview.py b/tests/rest/media/v1/test_html_preview.py new file mode 100644 index 0000000000..a4b57e3d1f --- /dev/null +++ b/tests/rest/media/v1/test_html_preview.py @@ -0,0 +1,481 @@ +# Copyright 2014-2016 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from synapse.rest.media.v1.preview_html import ( + _get_html_media_encodings, + decode_body, + parse_html_to_open_graph, + rebase_url, + summarize_paragraphs, +) + +from tests import unittest + +try: + import lxml +except ImportError: + lxml = None + + +class SummarizeTestCase(unittest.TestCase): + if not lxml: + skip = "url preview feature requires lxml" + + def test_long_summarize(self): + example_paras = [ + """Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami: + Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in + Troms county, Norway. The administrative centre of the municipality is + the city of Tromsø. Outside of Norway, Tromso and Tromsö are + alternative spellings of the city.Tromsø is considered the northernmost + city in the world with a population above 50,000. The most populous town + north of it is Alta, Norway, with a population of 14,272 (2013).""", + """Tromsø lies in Northern Norway. The municipality has a population of + (2015) 72,066, but with an annual influx of students it has over 75,000 + most of the year. It is the largest urban area in Northern Norway and the + third largest north of the Arctic Circle (following Murmansk and Norilsk). + Most of Tromsø, including the city centre, is located on the island of + Tromsøya, 350 kilometres (217 mi) north of the Arctic Circle. In 2012, + Tromsøya had a population of 36,088. Substantial parts of the urban area + are also situated on the mainland to the east, and on parts of Kvaløya—a + large island to the west. Tromsøya is connected to the mainland by the Tromsø + Bridge and the Tromsøysund Tunnel, and to the island of Kvaløya by the + Sandnessund Bridge. Tromsø Airport connects the city to many destinations + in Europe. The city is warmer than most other places located on the same + latitude, due to the warming effect of the Gulf Stream.""", + """The city centre of Tromsø contains the highest number of old wooden + houses in Northern Norway, the oldest house dating from 1789. The Arctic + Cathedral, a modern church from 1965, is probably the most famous landmark + in Tromsø. The city is a cultural centre for its region, with several + festivals taking place in the summer. Some of Norway's best-known + musicians, Torbjørn Brundtland and Svein Berge of the electronica duo + Röyksopp and Lene Marlin grew up and started their careers in Tromsø. 
+ Noted electronic musician Geir Jenssen also hails from Tromsø.""", + ] + + desc = summarize_paragraphs(example_paras, min_size=200, max_size=500) + + self.assertEqual( + desc, + "Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:" + " Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in" + " Troms county, Norway. The administrative centre of the municipality is" + " the city of Tromsø. Outside of Norway, Tromso and Tromsö are" + " alternative spellings of the city.Tromsø is considered the northernmost" + " city in the world with a population above 50,000. The most populous town" + " north of it is Alta, Norway, with a population of 14,272 (2013).", + ) + + desc = summarize_paragraphs(example_paras[1:], min_size=200, max_size=500) + + self.assertEqual( + desc, + "Tromsø lies in Northern Norway. The municipality has a population of" + " (2015) 72,066, but with an annual influx of students it has over 75,000" + " most of the year. It is the largest urban area in Northern Norway and the" + " third largest north of the Arctic Circle (following Murmansk and Norilsk)." + " Most of Tromsø, including the city centre, is located on the island of" + " Tromsøya, 350 kilometres (217 mi) north of the Arctic Circle. In 2012," + " Tromsøya had a population of 36,088. Substantial parts of the urban…", + ) + + def test_short_summarize(self): + example_paras = [ + "Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:" + " Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in" + " Troms county, Norway.", + "Tromsø lies in Northern Norway. The municipality has a population of" + " (2015) 72,066, but with an annual influx of students it has over 75,000" + " most of the year.", + "The city centre of Tromsø contains the highest number of old wooden" + " houses in Northern Norway, the oldest house dating from 1789. The Arctic" + " Cathedral, a modern church from 1965, is probably the most famous landmark" + " in Tromsø.", + ] + + desc = summarize_paragraphs(example_paras, min_size=200, max_size=500) + + self.assertEqual( + desc, + "Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:" + " Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in" + " Troms county, Norway.\n" + "\n" + "Tromsø lies in Northern Norway. The municipality has a population of" + " (2015) 72,066, but with an annual influx of students it has over 75,000" + " most of the year.", + ) + + def test_small_then_large_summarize(self): + example_paras = [ + "Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:" + " Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in" + " Troms county, Norway.", + "Tromsø lies in Northern Norway. The municipality has a population of" + " (2015) 72,066, but with an annual influx of students it has over 75,000" + " most of the year." + " The city centre of Tromsø contains the highest number of old wooden" + " houses in Northern Norway, the oldest house dating from 1789. The Arctic" + " Cathedral, a modern church from 1965, is probably the most famous landmark" + " in Tromsø.", + ] + + desc = summarize_paragraphs(example_paras, min_size=200, max_size=500) + self.assertEqual( + desc, + "Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:" + " Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in" + " Troms county, Norway.\n" + "\n" + "Tromsø lies in Northern Norway. 
The municipality has a population of" + " (2015) 72,066, but with an annual influx of students it has over 75,000" + " most of the year. The city centre of Tromsø contains the highest number" + " of old wooden houses in Northern Norway, the oldest house dating from" + " 1789. The Arctic Cathedral, a modern church from…", + ) + + +class CalcOgTestCase(unittest.TestCase): + if not lxml: + skip = "url preview feature requires lxml" + + def test_simple(self): + html = b""" + + Foo + + Some text. + + + """ + + tree = decode_body(html, "http://example.com/test.html") + og = parse_html_to_open_graph(tree, "http://example.com/test.html") + + self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) + + def test_comment(self): + html = b""" + + Foo + + + Some text. + + + """ + + tree = decode_body(html, "http://example.com/test.html") + og = parse_html_to_open_graph(tree, "http://example.com/test.html") + + self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) + + def test_comment2(self): + html = b""" + + Foo + + Some text. + + Some more text. +
<p>Text</p>
+ More text + + + """ + + tree = decode_body(html, "http://example.com/test.html") + og = parse_html_to_open_graph(tree, "http://example.com/test.html") + + self.assertEqual( + og, + { + "og:title": "Foo", + "og:description": "Some text.\n\nSome more text.\n\nText\n\nMore text", + }, + ) + + def test_script(self): + html = b""" + + Foo + + + Some text. + + + """ + + tree = decode_body(html, "http://example.com/test.html") + og = parse_html_to_open_graph(tree, "http://example.com/test.html") + + self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) + + def test_missing_title(self): + html = b""" + + + Some text. + + + """ + + tree = decode_body(html, "http://example.com/test.html") + og = parse_html_to_open_graph(tree, "http://example.com/test.html") + + self.assertEqual(og, {"og:title": None, "og:description": "Some text."}) + + def test_h1_as_title(self): + html = b""" + + + +
<h1>Title</h1>
+ + + """ + + tree = decode_body(html, "http://example.com/test.html") + og = parse_html_to_open_graph(tree, "http://example.com/test.html") + + self.assertEqual(og, {"og:title": "Title", "og:description": "Some text."}) + + def test_missing_title_and_broken_h1(self): + html = b""" + + +
<h1><a href="foo"/></h1>
+ Some text. + + + """ + + tree = decode_body(html, "http://example.com/test.html") + og = parse_html_to_open_graph(tree, "http://example.com/test.html") + + self.assertEqual(og, {"og:title": None, "og:description": "Some text."}) + + def test_empty(self): + """Test a body with no data in it.""" + html = b"" + tree = decode_body(html, "http://example.com/test.html") + self.assertIsNone(tree) + + def test_no_tree(self): + """A valid body with no tree in it.""" + html = b"\x00" + tree = decode_body(html, "http://example.com/test.html") + self.assertIsNone(tree) + + def test_xml(self): + """Test decoding XML and ensure it works properly.""" + # Note that the strip() call is important to ensure the xml tag starts + # at the initial byte. + html = b""" + + + + + FooSome text. + """.strip() + tree = decode_body(html, "http://example.com/test.html") + og = parse_html_to_open_graph(tree, "http://example.com/test.html") + self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) + + def test_invalid_encoding(self): + """An invalid character encoding should be ignored and treated as UTF-8, if possible.""" + html = b""" + + Foo + + Some text. + + + """ + tree = decode_body(html, "http://example.com/test.html", "invalid-encoding") + og = parse_html_to_open_graph(tree, "http://example.com/test.html") + self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) + + def test_invalid_encoding2(self): + """A body which doesn't match the sent character encoding.""" + # Note that this contains an invalid UTF-8 sequence in the title. + html = b""" + + \xff\xff Foo + + Some text. + + + """ + tree = decode_body(html, "http://example.com/test.html") + og = parse_html_to_open_graph(tree, "http://example.com/test.html") + self.assertEqual(og, {"og:title": "ÿÿ Foo", "og:description": "Some text."}) + + def test_windows_1252(self): + """A body which uses cp1252, but doesn't declare that.""" + html = b""" + + \xf3 + + Some text. + + + """ + tree = decode_body(html, "http://example.com/test.html") + og = parse_html_to_open_graph(tree, "http://example.com/test.html") + self.assertEqual(og, {"og:title": "ó", "og:description": "Some text."}) + + +class MediaEncodingTestCase(unittest.TestCase): + def test_meta_charset(self): + """A character encoding is found via the meta tag.""" + encodings = _get_html_media_encodings( + b""" + + + + + """, + "text/html", + ) + self.assertEqual(list(encodings), ["ascii", "utf-8", "cp1252"]) + + # A less well-formed version. + encodings = _get_html_media_encodings( + b""" + + < meta charset = ascii> + + + """, + "text/html", + ) + self.assertEqual(list(encodings), ["ascii", "utf-8", "cp1252"]) + + def test_meta_charset_underscores(self): + """A character encoding contains underscore.""" + encodings = _get_html_media_encodings( + b""" + + + + + """, + "text/html", + ) + self.assertEqual(list(encodings), ["shift_jis", "utf-8", "cp1252"]) + + def test_xml_encoding(self): + """A character encoding is found via the meta tag.""" + encodings = _get_html_media_encodings( + b""" + + + + """, + "text/html", + ) + self.assertEqual(list(encodings), ["ascii", "utf-8", "cp1252"]) + + def test_meta_xml_encoding(self): + """Meta tags take precedence over XML encoding.""" + encodings = _get_html_media_encodings( + b""" + + + + + + """, + "text/html", + ) + self.assertEqual(list(encodings), ["utf-16", "ascii", "utf-8", "cp1252"]) + + def test_content_type(self): + """A character encoding is found via the Content-Type header.""" + # Test a few variations of the header. 
+ headers = ( + 'text/html; charset="ascii";', + "text/html;charset=ascii;", + 'text/html; charset="ascii"', + "text/html; charset=ascii", + 'text/html; charset="ascii;', + 'text/html; charset=ascii";', + ) + for header in headers: + encodings = _get_html_media_encodings(b"", header) + self.assertEqual(list(encodings), ["ascii", "utf-8", "cp1252"]) + + def test_fallback(self): + """A character encoding cannot be found in the body or header.""" + encodings = _get_html_media_encodings(b"", "text/html") + self.assertEqual(list(encodings), ["utf-8", "cp1252"]) + + def test_duplicates(self): + """Ensure each encoding is only attempted once.""" + encodings = _get_html_media_encodings( + b""" + + + + + + """, + 'text/html; charset="UTF_8"', + ) + self.assertEqual(list(encodings), ["utf-8", "cp1252"]) + + def test_unknown_invalid(self): + """A character encoding should be ignored if it is unknown or invalid.""" + encodings = _get_html_media_encodings( + b""" + + + + + """, + 'text/html; charset="invalid"', + ) + self.assertEqual(list(encodings), ["utf-8", "cp1252"]) + + +class RebaseUrlTestCase(unittest.TestCase): + def test_relative(self): + """Relative URLs should be resolved based on the context of the base URL.""" + self.assertEqual( + rebase_url("subpage", "https://example.com/foo/"), + "https://example.com/foo/subpage", + ) + self.assertEqual( + rebase_url("sibling", "https://example.com/foo"), + "https://example.com/sibling", + ) + self.assertEqual( + rebase_url("/bar", "https://example.com/foo/"), + "https://example.com/bar", + ) + + def test_absolute(self): + """Absolute URLs should not be modified.""" + self.assertEqual( + rebase_url("https://alice.com/a/", "https://example.com/foo/"), + "https://alice.com/a/", + ) + + def test_data(self): + """Data URLs should not be modified.""" + self.assertEqual( + rebase_url("data:,Hello%2C%20World%21", "https://example.com/foo/"), + "data:,Hello%2C%20World%21", + ) diff --git a/tests/rest/media/v1/test_url_preview.py b/tests/rest/media/v1/test_url_preview.py index 16e904f15b..53f6186213 100644 --- a/tests/rest/media/v1/test_url_preview.py +++ b/tests/rest/media/v1/test_url_preview.py @@ -12,9 +12,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import base64 import json import os import re +from urllib.parse import urlencode from twisted.internet._resolver import HostResolution from twisted.internet.address import IPv4Address, IPv6Address @@ -23,6 +25,7 @@ from twisted.test.proto_helpers import AccumulatingProtocol from synapse.config.oembed import OEmbedEndpointConfig from synapse.rest.media.v1.preview_url_resource import IMAGE_CACHE_EXPIRY_MS +from synapse.types import JsonDict from synapse.util.stringutils import parse_and_validate_mxc_uri from tests import unittest @@ -142,6 +145,14 @@ class URLPreviewTests(unittest.HomeserverTestCase): def create_test_resource(self): return self.hs.get_media_repository_resource() + def _assert_small_png(self, json_body: JsonDict) -> None: + """Assert properties from the SMALL_PNG test image.""" + self.assertTrue(json_body["og:image"].startswith("mxc://")) + self.assertEqual(json_body["og:image:height"], 1) + self.assertEqual(json_body["og:image:width"], 1) + self.assertEqual(json_body["og:image:type"], "image/png") + self.assertEqual(json_body["matrix:image:size"], 67) + def test_cache_returns_correct_type(self): self.lookups["matrix.org"] = [(IPv4Address, "10.1.2.3")] @@ -569,6 +580,66 @@ class URLPreviewTests(unittest.HomeserverTestCase): server.data, ) + def test_data_url(self): + """ + Requesting to preview a data URL is not supported. + """ + self.lookups["matrix.org"] = [(IPv4Address, "10.1.2.3")] + + data = base64.b64encode(SMALL_PNG).decode() + + query_params = urlencode( + { + "url": f'' + } + ) + + channel = self.make_request( + "GET", + f"preview_url?{query_params}", + shorthand=False, + ) + self.pump() + + self.assertEqual(channel.code, 500) + + def test_inline_data_url(self): + """ + An inline image (as a data URL) should be parsed properly. 
+ """ + self.lookups["matrix.org"] = [(IPv4Address, "10.1.2.3")] + + data = base64.b64encode(SMALL_PNG) + + end_content = ( + b"" b'' b"" + ) % (data,) + + channel = self.make_request( + "GET", + "preview_url?url=http://matrix.org", + shorthand=False, + await_result=False, + ) + self.pump() + + client = self.reactor.tcpClients[0][2].buildProtocol(None) + server = AccumulatingProtocol() + server.makeConnection(FakeTransport(client, self.reactor)) + client.makeConnection(FakeTransport(server, self.reactor)) + client.dataReceived( + ( + b"HTTP/1.0 200 OK\r\nContent-Length: %d\r\n" + b'Content-Type: text/html; charset="utf8"\r\n\r\n' + ) + % (len(end_content),) + + end_content + ) + + self.pump() + self.assertEqual(channel.code, 200) + self._assert_small_png(channel.json_body) + def test_oembed_photo(self): """Test an oEmbed endpoint which returns a 'photo' type which redirects the preview to a new URL.""" self.lookups["publish.twitter.com"] = [(IPv4Address, "10.1.2.3")] @@ -626,10 +697,7 @@ class URLPreviewTests(unittest.HomeserverTestCase): self.assertEqual(channel.code, 200) body = channel.json_body self.assertEqual(body["og:url"], "http://twitter.com/matrixdotorg/status/12345") - self.assertTrue(body["og:image"].startswith("mxc://")) - self.assertEqual(body["og:image:height"], 1) - self.assertEqual(body["og:image:width"], 1) - self.assertEqual(body["og:image:type"], "image/png") + self._assert_small_png(body) def test_oembed_rich(self): """Test an oEmbed endpoint which returns HTML content via the 'rich' type.""" @@ -820,10 +888,7 @@ class URLPreviewTests(unittest.HomeserverTestCase): self.assertEqual( body["og:url"], "http://www.twitter.com/matrixdotorg/status/12345" ) - self.assertTrue(body["og:image"].startswith("mxc://")) - self.assertEqual(body["og:image:height"], 1) - self.assertEqual(body["og:image:width"], 1) - self.assertEqual(body["og:image:type"], "image/png") + self._assert_small_png(body) def _download_image(self): """Downloads an image into the URL cache. diff --git a/tests/server.py b/tests/server.py index a0cd14ea45..82990c2eb9 100644 --- a/tests/server.py +++ b/tests/server.py @@ -313,7 +313,7 @@ def make_request( req = request(channel, site) req.content = BytesIO(content) # Twisted expects to be at the end of the content when parsing the request. - req.content.seek(SEEK_END) + req.content.seek(0, SEEK_END) if access_token: req.requestHeaders.addRawHeader( diff --git a/tests/test_preview.py b/tests/test_preview.py deleted file mode 100644 index 46e02f483f..0000000000 --- a/tests/test_preview.py +++ /dev/null @@ -1,449 +0,0 @@ -# Copyright 2014-2016 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from synapse.rest.media.v1.preview_html import ( - _get_html_media_encodings, - decode_body, - parse_html_to_open_graph, - summarize_paragraphs, -) - -from . 
import unittest - -try: - import lxml -except ImportError: - lxml = None - - -class SummarizeTestCase(unittest.TestCase): - if not lxml: - skip = "url preview feature requires lxml" - - def test_long_summarize(self): - example_paras = [ - """Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami: - Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in - Troms county, Norway. The administrative centre of the municipality is - the city of Tromsø. Outside of Norway, Tromso and Tromsö are - alternative spellings of the city.Tromsø is considered the northernmost - city in the world with a population above 50,000. The most populous town - north of it is Alta, Norway, with a population of 14,272 (2013).""", - """Tromsø lies in Northern Norway. The municipality has a population of - (2015) 72,066, but with an annual influx of students it has over 75,000 - most of the year. It is the largest urban area in Northern Norway and the - third largest north of the Arctic Circle (following Murmansk and Norilsk). - Most of Tromsø, including the city centre, is located on the island of - Tromsøya, 350 kilometres (217 mi) north of the Arctic Circle. In 2012, - Tromsøya had a population of 36,088. Substantial parts of the urban area - are also situated on the mainland to the east, and on parts of Kvaløya—a - large island to the west. Tromsøya is connected to the mainland by the Tromsø - Bridge and the Tromsøysund Tunnel, and to the island of Kvaløya by the - Sandnessund Bridge. Tromsø Airport connects the city to many destinations - in Europe. The city is warmer than most other places located on the same - latitude, due to the warming effect of the Gulf Stream.""", - """The city centre of Tromsø contains the highest number of old wooden - houses in Northern Norway, the oldest house dating from 1789. The Arctic - Cathedral, a modern church from 1965, is probably the most famous landmark - in Tromsø. The city is a cultural centre for its region, with several - festivals taking place in the summer. Some of Norway's best-known - musicians, Torbjørn Brundtland and Svein Berge of the electronica duo - Röyksopp and Lene Marlin grew up and started their careers in Tromsø. - Noted electronic musician Geir Jenssen also hails from Tromsø.""", - ] - - desc = summarize_paragraphs(example_paras, min_size=200, max_size=500) - - self.assertEqual( - desc, - "Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:" - " Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in" - " Troms county, Norway. The administrative centre of the municipality is" - " the city of Tromsø. Outside of Norway, Tromso and Tromsö are" - " alternative spellings of the city.Tromsø is considered the northernmost" - " city in the world with a population above 50,000. The most populous town" - " north of it is Alta, Norway, with a population of 14,272 (2013).", - ) - - desc = summarize_paragraphs(example_paras[1:], min_size=200, max_size=500) - - self.assertEqual( - desc, - "Tromsø lies in Northern Norway. The municipality has a population of" - " (2015) 72,066, but with an annual influx of students it has over 75,000" - " most of the year. It is the largest urban area in Northern Norway and the" - " third largest north of the Arctic Circle (following Murmansk and Norilsk)." - " Most of Tromsø, including the city centre, is located on the island of" - " Tromsøya, 350 kilometres (217 mi) north of the Arctic Circle. In 2012," - " Tromsøya had a population of 36,088. 
Substantial parts of the urban…", - ) - - def test_short_summarize(self): - example_paras = [ - "Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:" - " Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in" - " Troms county, Norway.", - "Tromsø lies in Northern Norway. The municipality has a population of" - " (2015) 72,066, but with an annual influx of students it has over 75,000" - " most of the year.", - "The city centre of Tromsø contains the highest number of old wooden" - " houses in Northern Norway, the oldest house dating from 1789. The Arctic" - " Cathedral, a modern church from 1965, is probably the most famous landmark" - " in Tromsø.", - ] - - desc = summarize_paragraphs(example_paras, min_size=200, max_size=500) - - self.assertEqual( - desc, - "Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:" - " Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in" - " Troms county, Norway.\n" - "\n" - "Tromsø lies in Northern Norway. The municipality has a population of" - " (2015) 72,066, but with an annual influx of students it has over 75,000" - " most of the year.", - ) - - def test_small_then_large_summarize(self): - example_paras = [ - "Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:" - " Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in" - " Troms county, Norway.", - "Tromsø lies in Northern Norway. The municipality has a population of" - " (2015) 72,066, but with an annual influx of students it has over 75,000" - " most of the year." - " The city centre of Tromsø contains the highest number of old wooden" - " houses in Northern Norway, the oldest house dating from 1789. The Arctic" - " Cathedral, a modern church from 1965, is probably the most famous landmark" - " in Tromsø.", - ] - - desc = summarize_paragraphs(example_paras, min_size=200, max_size=500) - self.assertEqual( - desc, - "Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:" - " Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in" - " Troms county, Norway.\n" - "\n" - "Tromsø lies in Northern Norway. The municipality has a population of" - " (2015) 72,066, but with an annual influx of students it has over 75,000" - " most of the year. The city centre of Tromsø contains the highest number" - " of old wooden houses in Northern Norway, the oldest house dating from" - " 1789. The Arctic Cathedral, a modern church from…", - ) - - -class CalcOgTestCase(unittest.TestCase): - if not lxml: - skip = "url preview feature requires lxml" - - def test_simple(self): - html = b""" - - Foo - - Some text. - - - """ - - tree = decode_body(html, "http://example.com/test.html") - og = parse_html_to_open_graph(tree, "http://example.com/test.html") - - self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) - - def test_comment(self): - html = b""" - - Foo - - - Some text. - - - """ - - tree = decode_body(html, "http://example.com/test.html") - og = parse_html_to_open_graph(tree, "http://example.com/test.html") - - self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) - - def test_comment2(self): - html = b""" - - Foo - - Some text. - - Some more text. -
<p>Text</p>
- More text - - - """ - - tree = decode_body(html, "http://example.com/test.html") - og = parse_html_to_open_graph(tree, "http://example.com/test.html") - - self.assertEqual( - og, - { - "og:title": "Foo", - "og:description": "Some text.\n\nSome more text.\n\nText\n\nMore text", - }, - ) - - def test_script(self): - html = b""" - - Foo - - - Some text. - - - """ - - tree = decode_body(html, "http://example.com/test.html") - og = parse_html_to_open_graph(tree, "http://example.com/test.html") - - self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) - - def test_missing_title(self): - html = b""" - - - Some text. - - - """ - - tree = decode_body(html, "http://example.com/test.html") - og = parse_html_to_open_graph(tree, "http://example.com/test.html") - - self.assertEqual(og, {"og:title": None, "og:description": "Some text."}) - - def test_h1_as_title(self): - html = b""" - - - -
<h1>Title</h1>
- - - """ - - tree = decode_body(html, "http://example.com/test.html") - og = parse_html_to_open_graph(tree, "http://example.com/test.html") - - self.assertEqual(og, {"og:title": "Title", "og:description": "Some text."}) - - def test_missing_title_and_broken_h1(self): - html = b""" - - -
<h1><a href="foo"/></h1>
- Some text. - - - """ - - tree = decode_body(html, "http://example.com/test.html") - og = parse_html_to_open_graph(tree, "http://example.com/test.html") - - self.assertEqual(og, {"og:title": None, "og:description": "Some text."}) - - def test_empty(self): - """Test a body with no data in it.""" - html = b"" - tree = decode_body(html, "http://example.com/test.html") - self.assertIsNone(tree) - - def test_no_tree(self): - """A valid body with no tree in it.""" - html = b"\x00" - tree = decode_body(html, "http://example.com/test.html") - self.assertIsNone(tree) - - def test_xml(self): - """Test decoding XML and ensure it works properly.""" - # Note that the strip() call is important to ensure the xml tag starts - # at the initial byte. - html = b""" - - - - - FooSome text. - """.strip() - tree = decode_body(html, "http://example.com/test.html") - og = parse_html_to_open_graph(tree, "http://example.com/test.html") - self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) - - def test_invalid_encoding(self): - """An invalid character encoding should be ignored and treated as UTF-8, if possible.""" - html = b""" - - Foo - - Some text. - - - """ - tree = decode_body(html, "http://example.com/test.html", "invalid-encoding") - og = parse_html_to_open_graph(tree, "http://example.com/test.html") - self.assertEqual(og, {"og:title": "Foo", "og:description": "Some text."}) - - def test_invalid_encoding2(self): - """A body which doesn't match the sent character encoding.""" - # Note that this contains an invalid UTF-8 sequence in the title. - html = b""" - - \xff\xff Foo - - Some text. - - - """ - tree = decode_body(html, "http://example.com/test.html") - og = parse_html_to_open_graph(tree, "http://example.com/test.html") - self.assertEqual(og, {"og:title": "ÿÿ Foo", "og:description": "Some text."}) - - def test_windows_1252(self): - """A body which uses cp1252, but doesn't declare that.""" - html = b""" - - \xf3 - - Some text. - - - """ - tree = decode_body(html, "http://example.com/test.html") - og = parse_html_to_open_graph(tree, "http://example.com/test.html") - self.assertEqual(og, {"og:title": "ó", "og:description": "Some text."}) - - -class MediaEncodingTestCase(unittest.TestCase): - def test_meta_charset(self): - """A character encoding is found via the meta tag.""" - encodings = _get_html_media_encodings( - b""" - - - - - """, - "text/html", - ) - self.assertEqual(list(encodings), ["ascii", "utf-8", "cp1252"]) - - # A less well-formed version. - encodings = _get_html_media_encodings( - b""" - - < meta charset = ascii> - - - """, - "text/html", - ) - self.assertEqual(list(encodings), ["ascii", "utf-8", "cp1252"]) - - def test_meta_charset_underscores(self): - """A character encoding contains underscore.""" - encodings = _get_html_media_encodings( - b""" - - - - - """, - "text/html", - ) - self.assertEqual(list(encodings), ["shift_jis", "utf-8", "cp1252"]) - - def test_xml_encoding(self): - """A character encoding is found via the meta tag.""" - encodings = _get_html_media_encodings( - b""" - - - - """, - "text/html", - ) - self.assertEqual(list(encodings), ["ascii", "utf-8", "cp1252"]) - - def test_meta_xml_encoding(self): - """Meta tags take precedence over XML encoding.""" - encodings = _get_html_media_encodings( - b""" - - - - - - """, - "text/html", - ) - self.assertEqual(list(encodings), ["utf-16", "ascii", "utf-8", "cp1252"]) - - def test_content_type(self): - """A character encoding is found via the Content-Type header.""" - # Test a few variations of the header. 
- headers = ( - 'text/html; charset="ascii";', - "text/html;charset=ascii;", - 'text/html; charset="ascii"', - "text/html; charset=ascii", - 'text/html; charset="ascii;', - 'text/html; charset=ascii";', - ) - for header in headers: - encodings = _get_html_media_encodings(b"", header) - self.assertEqual(list(encodings), ["ascii", "utf-8", "cp1252"]) - - def test_fallback(self): - """A character encoding cannot be found in the body or header.""" - encodings = _get_html_media_encodings(b"", "text/html") - self.assertEqual(list(encodings), ["utf-8", "cp1252"]) - - def test_duplicates(self): - """Ensure each encoding is only attempted once.""" - encodings = _get_html_media_encodings( - b""" - - - - - - """, - 'text/html; charset="UTF_8"', - ) - self.assertEqual(list(encodings), ["utf-8", "cp1252"]) - - def test_unknown_invalid(self): - """A character encoding should be ignored if it is unknown or invalid.""" - encodings = _get_html_media_encodings( - b""" - - - - - """, - 'text/html; charset="invalid"', - ) - self.assertEqual(list(encodings), ["utf-8", "cp1252"]) -- cgit 1.4.1 From 02d99f044efbed3c347c19a430d56cfbb41c87a7 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Mon, 24 Jan 2022 09:38:37 -0500 Subject: Apply a timeout to reading the body when fetching a file. (#11784) This prevents the URL preview code from reading a stream forever. --- changelog.d/11784.bugfix | 1 + synapse/http/client.py | 15 ++++++++++++--- 2 files changed, 13 insertions(+), 3 deletions(-) create mode 100644 changelog.d/11784.bugfix (limited to 'synapse') diff --git a/changelog.d/11784.bugfix b/changelog.d/11784.bugfix new file mode 100644 index 0000000000..6569a8c299 --- /dev/null +++ b/changelog.d/11784.bugfix @@ -0,0 +1 @@ +Fix a long-standing bug that media streams could cause long-lived connections when generating URL previews. diff --git a/synapse/http/client.py b/synapse/http/client.py index ca33b45cb2..743a7ffcb1 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -731,15 +731,24 @@ class SimpleHttpClient: # straight back in again try: - length = await make_deferred_yieldable( - read_body_with_max_size(response, output_stream, max_size) - ) + d = read_body_with_max_size(response, output_stream, max_size) + + # Ensure that the body is not read forever. + d = timeout_deferred(d, 30, self.hs.get_reactor()) + + length = await make_deferred_yieldable(d) except BodyExceededMaxSize: raise SynapseError( HTTPStatus.BAD_GATEWAY, "Requested file is too large > %r bytes" % (max_size,), Codes.TOO_LARGE, ) + except defer.TimeoutError: + raise SynapseError( + HTTPStatus.BAD_GATEWAY, + "Requested file took too long to download", + Codes.TOO_LARGE, + ) except Exception as e: raise SynapseError( HTTPStatus.BAD_GATEWAY, ("Failed to download remote body: %s" % e) -- cgit 1.4.1 From 15c2a6a1067f57707688cc59f2efa7ff0000dcd2 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 25 Jan 2022 06:07:10 -0500 Subject: Ignore the jsonschema type. (#11817) --- changelog.d/11817.misc | 1 + synapse/events/validator.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog.d/11817.misc (limited to 'synapse') diff --git a/changelog.d/11817.misc b/changelog.d/11817.misc new file mode 100644 index 0000000000..bd29d8d6eb --- /dev/null +++ b/changelog.d/11817.misc @@ -0,0 +1 @@ +Compatibility with updated type hints for jsonschema 4.4.0. 
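
For context on the hunk that follows: `jsonschema.validators.validator_for` returns a validator *class* chosen from the schema's `$schema` keyword, which the updated type hints in jsonschema 4.4.0 no longer accept as a plain return annotation. A rough sketch of the pattern (the schema here is a stand-in, not Synapse's actual `POWER_LEVELS_SCHEMA`):

```
import jsonschema

# Stand-in schema for illustration only.
schema = {"type": "object", "properties": {"ban": {"type": "integer"}}}

# validator_for() picks a validator class from the schema's $schema keyword;
# absent one, it falls back to the library's latest supported draft.
validator_class = jsonschema.validators.validator_for(schema)
validator_class.check_schema(schema)  # raises SchemaError if the schema is invalid
validator = validator_class(schema)

validator.validate({"ban": 50})  # passes; {"ban": "50"} would raise ValidationError
```
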
diff --git a/synapse/events/validator.py b/synapse/events/validator.py index cf86934968..4245573017 100644 --- a/synapse/events/validator.py +++ b/synapse/events/validator.py @@ -246,7 +246,9 @@ POWER_LEVELS_SCHEMA = { # This could return something newer than Draft 7, but that's the current "latest" # validator. -def _create_power_level_validator() -> jsonschema.Draft7Validator: +# +# See https://github.com/python/typeshed/issues/7028 for the ignored return type. +def _create_power_level_validator() -> jsonschema.Draft7Validator: # type: ignore[valid-type] validator = jsonschema.validators.validator_for(POWER_LEVELS_SCHEMA) # by default jsonschema does not consider a frozendict to be an object so -- cgit 1.4.1 From 0d6cfea9b867a14fa0fa885b04c8cbfdb4a7c4a9 Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Tue, 25 Jan 2022 13:06:29 +0100 Subject: Add admin API to reset connection timeouts for remote server (#11639) * Fix get federation status of destination if no error occured --- changelog.d/11639.feature | 1 + docs/usage/administration/admin_api/federation.md | 40 +++++++++++++++- synapse/federation/transport/server/__init__.py | 16 ++++--- synapse/federation/transport/server/_base.py | 14 +++--- synapse/federation/transport/server/federation.py | 24 +++++++--- .../federation/transport/server/groups_local.py | 8 ++-- .../federation/transport/server/groups_server.py | 8 ++-- synapse/rest/admin/__init__.py | 6 ++- synapse/rest/admin/federation.py | 44 ++++++++++++++++- tests/rest/admin/test_federation.py | 55 ++++++++++++++++++++-- 10 files changed, 183 insertions(+), 33 deletions(-) create mode 100644 changelog.d/11639.feature (limited to 'synapse') diff --git a/changelog.d/11639.feature b/changelog.d/11639.feature new file mode 100644 index 0000000000..e9f6704f7a --- /dev/null +++ b/changelog.d/11639.feature @@ -0,0 +1 @@ +Add admin API to reset connection timeouts for remote server. \ No newline at end of file diff --git a/docs/usage/administration/admin_api/federation.md b/docs/usage/administration/admin_api/federation.md index 8f9535f57b..5e609561a6 100644 --- a/docs/usage/administration/admin_api/federation.md +++ b/docs/usage/administration/admin_api/federation.md @@ -86,7 +86,7 @@ The following fields are returned in the JSON response body: - `next_token`: string representing a positive integer - Indication for pagination. See above. - `total` - integer - Total number of destinations. -# Destination Details API +## Destination Details API This API gets the retry timing info for a specific remote server. @@ -108,7 +108,45 @@ A response body like the following is returned: } ``` +**Parameters** + +The following parameters should be set in the URL: + +- `destination` - Name of the remote server. + **Response** The response fields are the same like in the `destinations` array in [List of destinations](#list-of-destinations) response. + +## Reset connection timeout + +Synapse makes federation requests to other homeservers. If a federation request fails, +Synapse will mark the destination homeserver as offline, preventing any future requests +to that server for a "cooldown" period. This period grows over time if the server +continues to fail its responses +([exponential backoff](https://en.wikipedia.org/wiki/Exponential_backoff)). + +Admins can cancel the cooldown period with this API. + +This API resets the retry timing for a specific remote server and tries to connect to +the remote server again. It does not wait for the next `retry_interval`. 
+The connection must have previously run into an error and `retry_last_ts` +([Destination Details API](#destination-details-api)) must not be equal to `0`. + +The connection attempt is carried out in the background and can take a while +even if the API already returns the http status 200. + +The API is: + +``` +POST /_synapse/admin/v1/federation/destinations//reset_connection + +{} +``` + +**Parameters** + +The following parameters should be set in the URL: + +- `destination` - Name of the remote server. diff --git a/synapse/federation/transport/server/__init__.py b/synapse/federation/transport/server/__init__.py index 77b936361a..db4fe2c798 100644 --- a/synapse/federation/transport/server/__init__.py +++ b/synapse/federation/transport/server/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -from typing import Dict, Iterable, List, Optional, Tuple, Type +from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Tuple, Type from typing_extensions import Literal @@ -36,17 +36,19 @@ from synapse.http.servlet import ( parse_integer_from_args, parse_string_from_args, ) -from synapse.server import HomeServer from synapse.types import JsonDict, ThirdPartyInstanceID from synapse.util.ratelimitutils import FederationRateLimiter +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) class TransportLayerServer(JsonResource): """Handles incoming federation HTTP requests""" - def __init__(self, hs: HomeServer, servlet_groups: Optional[List[str]] = None): + def __init__(self, hs: "HomeServer", servlet_groups: Optional[List[str]] = None): """Initialize the TransportLayerServer Will by default register all servlets. For custom behaviour, pass in @@ -113,7 +115,7 @@ class PublicRoomList(BaseFederationServlet): def __init__( self, - hs: HomeServer, + hs: "HomeServer", authenticator: Authenticator, ratelimiter: FederationRateLimiter, server_name: str, @@ -203,7 +205,7 @@ class FederationGroupsRenewAttestaionServlet(BaseFederationServlet): def __init__( self, - hs: HomeServer, + hs: "HomeServer", authenticator: Authenticator, ratelimiter: FederationRateLimiter, server_name: str, @@ -251,7 +253,7 @@ class OpenIdUserInfo(BaseFederationServlet): def __init__( self, - hs: HomeServer, + hs: "HomeServer", authenticator: Authenticator, ratelimiter: FederationRateLimiter, server_name: str, @@ -297,7 +299,7 @@ DEFAULT_SERVLET_GROUPS: Dict[str, Iterable[Type[BaseFederationServlet]]] = { def register_servlets( - hs: HomeServer, + hs: "HomeServer", resource: HttpServer, authenticator: Authenticator, ratelimiter: FederationRateLimiter, diff --git a/synapse/federation/transport/server/_base.py b/synapse/federation/transport/server/_base.py index da1fbf8b63..2ca7c05835 100644 --- a/synapse/federation/transport/server/_base.py +++ b/synapse/federation/transport/server/_base.py @@ -15,7 +15,7 @@ import functools import logging import re -from typing import Any, Awaitable, Callable, Optional, Tuple, cast +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional, Tuple, cast from synapse.api.errors import Codes, FederationDeniedError, SynapseError from synapse.api.urls import FEDERATION_V1_PREFIX @@ -29,11 +29,13 @@ from synapse.logging.opentracing import ( start_active_span_follows_from, whitelisted_homeserver, ) -from synapse.server import HomeServer from synapse.types import JsonDict from synapse.util.ratelimitutils import FederationRateLimiter from 
synapse.util.stringutils import parse_and_validate_server_name +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) @@ -46,7 +48,7 @@ class NoAuthenticationError(AuthenticationError): class Authenticator: - def __init__(self, hs: HomeServer): + def __init__(self, hs: "HomeServer"): self._clock = hs.get_clock() self.keyring = hs.get_keyring() self.server_name = hs.hostname @@ -114,11 +116,11 @@ class Authenticator: # alive retry_timings = await self.store.get_destination_retry_timings(origin) if retry_timings and retry_timings.retry_last_ts: - run_in_background(self._reset_retry_timings, origin) + run_in_background(self.reset_retry_timings, origin) return origin - async def _reset_retry_timings(self, origin: str) -> None: + async def reset_retry_timings(self, origin: str) -> None: try: logger.info("Marking origin %r as up", origin) await self.store.set_destination_retry_timings(origin, None, 0, 0) @@ -227,7 +229,7 @@ class BaseFederationServlet: def __init__( self, - hs: HomeServer, + hs: "HomeServer", authenticator: Authenticator, ratelimiter: FederationRateLimiter, server_name: str, diff --git a/synapse/federation/transport/server/federation.py b/synapse/federation/transport/server/federation.py index beadfa422b..9c1ad5851f 100644 --- a/synapse/federation/transport/server/federation.py +++ b/synapse/federation/transport/server/federation.py @@ -12,7 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -from typing import Dict, List, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + TYPE_CHECKING, + Dict, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + Union, +) from typing_extensions import Literal @@ -30,11 +40,13 @@ from synapse.http.servlet import ( parse_string_from_args, parse_strings_from_args, ) -from synapse.server import HomeServer from synapse.types import JsonDict from synapse.util.ratelimitutils import FederationRateLimiter from synapse.util.versionstring import get_version_string +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) issue_8631_logger = logging.getLogger("synapse.8631_debug") @@ -47,7 +59,7 @@ class BaseFederationServerServlet(BaseFederationServlet): def __init__( self, - hs: HomeServer, + hs: "HomeServer", authenticator: Authenticator, ratelimiter: FederationRateLimiter, server_name: str, @@ -596,7 +608,7 @@ class FederationSpaceSummaryServlet(BaseFederationServlet): def __init__( self, - hs: HomeServer, + hs: "HomeServer", authenticator: Authenticator, ratelimiter: FederationRateLimiter, server_name: str, @@ -670,7 +682,7 @@ class FederationRoomHierarchyServlet(BaseFederationServlet): def __init__( self, - hs: HomeServer, + hs: "HomeServer", authenticator: Authenticator, ratelimiter: FederationRateLimiter, server_name: str, @@ -706,7 +718,7 @@ class RoomComplexityServlet(BaseFederationServlet): def __init__( self, - hs: HomeServer, + hs: "HomeServer", authenticator: Authenticator, ratelimiter: FederationRateLimiter, server_name: str, diff --git a/synapse/federation/transport/server/groups_local.py b/synapse/federation/transport/server/groups_local.py index a12cd18d58..496472e1dc 100644 --- a/synapse/federation/transport/server/groups_local.py +++ b/synapse/federation/transport/server/groups_local.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, List, Tuple, Type +from typing import TYPE_CHECKING, Dict, List, Tuple, Type from synapse.api.errors import SynapseError from synapse.federation.transport.server._base import ( @@ -19,10 +19,12 @@ from synapse.federation.transport.server._base import ( BaseFederationServlet, ) from synapse.handlers.groups_local import GroupsLocalHandler -from synapse.server import HomeServer from synapse.types import JsonDict, get_domain_from_id from synapse.util.ratelimitutils import FederationRateLimiter +if TYPE_CHECKING: + from synapse.server import HomeServer + class BaseGroupsLocalServlet(BaseFederationServlet): """Abstract base class for federation servlet classes which provides a groups local handler. @@ -32,7 +34,7 @@ class BaseGroupsLocalServlet(BaseFederationServlet): def __init__( self, - hs: HomeServer, + hs: "HomeServer", authenticator: Authenticator, ratelimiter: FederationRateLimiter, server_name: str, diff --git a/synapse/federation/transport/server/groups_server.py b/synapse/federation/transport/server/groups_server.py index b30e92a5eb..851b50152e 100644 --- a/synapse/federation/transport/server/groups_server.py +++ b/synapse/federation/transport/server/groups_server.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, List, Tuple, Type +from typing import TYPE_CHECKING, Dict, List, Tuple, Type from typing_extensions import Literal @@ -22,10 +22,12 @@ from synapse.federation.transport.server._base import ( BaseFederationServlet, ) from synapse.http.servlet import parse_string_from_args -from synapse.server import HomeServer from synapse.types import JsonDict, get_domain_from_id from synapse.util.ratelimitutils import FederationRateLimiter +if TYPE_CHECKING: + from synapse.server import HomeServer + class BaseGroupsServerServlet(BaseFederationServlet): """Abstract base class for federation servlet classes which provides a groups server handler. @@ -35,7 +37,7 @@ class BaseGroupsServerServlet(BaseFederationServlet): def __init__( self, - hs: HomeServer, + hs: "HomeServer", authenticator: Authenticator, ratelimiter: FederationRateLimiter, server_name: str, diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index 465e06772b..b1e49d51b7 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -41,7 +41,8 @@ from synapse.rest.admin.event_reports import ( EventReportsRestServlet, ) from synapse.rest.admin.federation import ( - DestinationsRestServlet, + DestinationResetConnectionRestServlet, + DestinationRestServlet, ListDestinationsRestServlet, ) from synapse.rest.admin.groups import DeleteGroupAdminRestServlet @@ -267,7 +268,8 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: ListRegistrationTokensRestServlet(hs).register(http_server) NewRegistrationTokenRestServlet(hs).register(http_server) RegistrationTokenRestServlet(hs).register(http_server) - DestinationsRestServlet(hs).register(http_server) + DestinationResetConnectionRestServlet(hs).register(http_server) + DestinationRestServlet(hs).register(http_server) ListDestinationsRestServlet(hs).register(http_server) # Some servlets only get registered for the main process. 
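
As a hedged usage sketch of the endpoint this registers (the base URL, access token, and destination name below are placeholders, not values from the patch):

```
import json
from urllib.request import Request, urlopen

def reset_destination_connection(base_url: str, admin_token: str, destination: str) -> dict:
    # POST an empty JSON body to the new reset_connection admin endpoint.
    req = Request(
        f"{base_url}/_synapse/admin/v1/federation/destinations/{destination}/reset_connection",
        data=b"{}",
        headers={"Authorization": f"Bearer {admin_token}"},
        method="POST",
    )
    with urlopen(req) as resp:
        return json.load(resp)  # an empty object on HTTP 200

# e.g. reset_destination_connection("http://localhost:8008", "<admin token>", "remote.example.com")
```
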
diff --git a/synapse/rest/admin/federation.py b/synapse/rest/admin/federation.py
index 8cd3fa189e..0f33f9e4da 100644
--- a/synapse/rest/admin/federation.py
+++ b/synapse/rest/admin/federation.py
@@ -16,6 +16,7 @@ from http import HTTPStatus
 from typing import TYPE_CHECKING, Tuple
 
 from synapse.api.errors import Codes, NotFoundError, SynapseError
+from synapse.federation.transport.server import Authenticator
 from synapse.http.servlet import RestServlet, parse_integer, parse_string
 from synapse.http.site import SynapseRequest
 from synapse.rest.admin._base import admin_patterns, assert_requester_is_admin
@@ -90,7 +91,7 @@ class ListDestinationsRestServlet(RestServlet):
         return HTTPStatus.OK, response
 
 
-class DestinationsRestServlet(RestServlet):
+class DestinationRestServlet(RestServlet):
     """Get details of a destination.
     This needs user to have administrator access in Synapse.
 
@@ -145,3 +146,44 @@ class DestinationsRestServlet(RestServlet):
         }
 
         return HTTPStatus.OK, response
+
+
+class DestinationResetConnectionRestServlet(RestServlet):
+    """Reset a destination's connection timeouts and wake it up.
+    This needs user to have administrator access in Synapse.
+
+    POST /_synapse/admin/v1/federation/destinations/<destination>/reset_connection
+    {}
+
+    returns:
+        200 OK otherwise an error.
+    """
+
+    PATTERNS = admin_patterns(
+        "/federation/destinations/(?P<destination>[^/]+)/reset_connection$"
+    )
+
+    def __init__(self, hs: "HomeServer"):
+        self._auth = hs.get_auth()
+        self._store = hs.get_datastore()
+        self._authenticator = Authenticator(hs)
+
+    async def on_POST(
+        self, request: SynapseRequest, destination: str
+    ) -> Tuple[int, JsonDict]:
+        await assert_requester_is_admin(self._auth, request)
+
+        if not await self._store.is_destination_known(destination):
+            raise NotFoundError("Unknown destination")
+
+        retry_timings = await self._store.get_destination_retry_timings(destination)
+        if not (retry_timings and retry_timings.retry_last_ts):
+            raise SynapseError(
+                HTTPStatus.BAD_REQUEST,
+                "The retry timing does not need to be reset for this destination.",
+            )
+
+        # reset timings and wake up
+        await self._authenticator.reset_retry_timings(destination)
+
+        return HTTPStatus.OK, {}
diff --git a/tests/rest/admin/test_federation.py b/tests/rest/admin/test_federation.py
index b70350b6f1..e2d3cff2a3 100644
--- a/tests/rest/admin/test_federation.py
+++ b/tests/rest/admin/test_federation.py
@@ -43,11 +43,15 @@ class FederationTestCase(unittest.HomeserverTestCase):
 
     @parameterized.expand(
         [
-            ("/_synapse/admin/v1/federation/destinations",),
-            ("/_synapse/admin/v1/federation/destinations/dummy",),
+            ("GET", "/_synapse/admin/v1/federation/destinations"),
+            ("GET", "/_synapse/admin/v1/federation/destinations/dummy"),
+            (
+                "POST",
+                "/_synapse/admin/v1/federation/destinations/dummy/reset_connection",
+            ),
         ]
     )
-    def test_requester_is_no_admin(self, url: str) -> None:
+    def test_requester_is_no_admin(self, method: str, url: str) -> None:
        """
        If the user is not a server admin, an error 403 is returned.
""" @@ -56,7 +60,7 @@ class FederationTestCase(unittest.HomeserverTestCase): other_user_tok = self.login("user", "pass") channel = self.make_request( - "GET", + method, url, content={}, access_token=other_user_tok, @@ -120,6 +124,16 @@ class FederationTestCase(unittest.HomeserverTestCase): self.assertEqual(HTTPStatus.NOT_FOUND, channel.code, msg=channel.json_body) self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"]) + # invalid destination + channel = self.make_request( + "POST", + self.url + "/dummy/reset_connection", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.NOT_FOUND, channel.code, msg=channel.json_body) + self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"]) + def test_limit(self) -> None: """ Testing list of destinations with limit @@ -444,6 +458,39 @@ class FederationTestCase(unittest.HomeserverTestCase): self.assertIsNone(channel.json_body["failure_ts"]) self.assertIsNone(channel.json_body["last_successful_stream_ordering"]) + def test_destination_reset_connection(self) -> None: + """Reset timeouts and wake up destination.""" + self._create_destination("sub0.example.com", 100, 100, 100) + + channel = self.make_request( + "POST", + self.url + "/sub0.example.com/reset_connection", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body) + + retry_timings = self.get_success( + self.store.get_destination_retry_timings("sub0.example.com") + ) + self.assertIsNone(retry_timings) + + def test_destination_reset_connection_not_required(self) -> None: + """Try to reset timeouts of a destination with no timeouts and get an error.""" + self._create_destination("sub0.example.com", None, 0, 0) + + channel = self.make_request( + "POST", + self.url + "/sub0.example.com/reset_connection", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.BAD_REQUEST, channel.code, msg=channel.json_body) + self.assertEqual( + "The retry timing does not need to be reset for this destination.", + channel.json_body["error"], + ) + def _create_destination( self, destination: str, -- cgit 1.4.1 From fc8598bc87d5bcc7e8526492f309e73c8dcff3f6 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 25 Jan 2022 14:11:13 +0000 Subject: Minor updates, and docs, for schema delta files (#11823) * Make functions in python deltas optional It's annoying to always have to write stubs for these. * Documentation for delta files * changelog --- changelog.d/11823.misc | 1 + docs/development/database_schema.md | 54 +++++++++++++++++++++++++++++++++++++ synapse/storage/prepare_database.py | 9 ++++--- 3 files changed, 61 insertions(+), 3 deletions(-) create mode 100644 changelog.d/11823.misc (limited to 'synapse') diff --git a/changelog.d/11823.misc b/changelog.d/11823.misc new file mode 100644 index 0000000000..2d153eae4a --- /dev/null +++ b/changelog.d/11823.misc @@ -0,0 +1 @@ +Minor updates and documentation for database schema delta files. diff --git a/docs/development/database_schema.md b/docs/development/database_schema.md index 256a629210..a767d3af9f 100644 --- a/docs/development/database_schema.md +++ b/docs/development/database_schema.md @@ -96,6 +96,60 @@ Ensure postgres is installed, then run: NB at the time of writing, this script predates the split into separate `state`/`main` databases so will require updates to handle that correctly. +## Delta files + +Delta files define the steps required to upgrade the database from an earlier version. 
+They can be written as either a file containing a series of SQL statements, or a Python
+module.
+
+Synapse remembers which delta files it has applied to a database (they are stored in the
+`applied_schema_deltas` table) and will not re-apply them (even if a given file is
+subsequently updated).
+
+Delta files should be placed in a directory named `synapse/storage/schema/<database>/delta/<version>/`.
+They are applied in alphanumeric order, so by convention the first two characters
+of the filename should be an integer such as `01`, to put the file in the right order.
+
+### SQL delta files
+
+These should be named `*.sql`, or — for changes which should only be applied for a
+given database engine — `*.sql.postgres` or `*.sql.sqlite`. For example, a delta which
+adds a new column to the `foo` table might be called `01add_bar_to_foo.sql`.
+
+Note that our SQL parser is a bit simple - it understands comments (`--` and `/*...*/`),
+but complex statements which require a `;` in the middle of them (such as `CREATE
+TRIGGER`) are beyond it and you'll have to use a Python delta file.
+
+### Python delta files
+
+For more flexibility, a delta file can take the form of a Python module. These should
+be named `*.py`. Note that database-engine-specific modules are not supported here –
+instead you can write `if isinstance(database_engine, PostgresEngine)` or similar.
+
+A Python delta module should define either or both of the following functions:
+
+```python
+import synapse.config.homeserver
+import synapse.storage.engines
+import synapse.storage.types
+
+
+def run_create(
+    cur: synapse.storage.types.Cursor,
+    database_engine: synapse.storage.engines.BaseDatabaseEngine,
+) -> None:
+    """Called whenever an existing or new database is to be upgraded"""
+    ...
+
+def run_upgrade(
+    cur: synapse.storage.types.Cursor,
+    database_engine: synapse.storage.engines.BaseDatabaseEngine,
+    config: synapse.config.homeserver.HomeServerConfig,
+) -> None:
+    """Called whenever an existing database is to be upgraded."""
+    ...
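+
+# Illustrative sketch only (not from the original docs; `foo` and `bar` are
+# hypothetical names): a delta adding a column might implement `run_create` as
+#
+#     def run_create(cur, database_engine):
+#         cur.execute("ALTER TABLE foo ADD COLUMN bar TEXT")
+#
+# `run_create` is called for both new and existing databases, while
+# `run_upgrade` is called only when an existing database is upgraded.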
+``` + ## Boolean columns Boolean columns require special treatment, since SQLite treats booleans the diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index 1823e18720..e3153d1a4a 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -499,9 +499,12 @@ def _upgrade_existing_database( module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) # type: ignore - logger.info("Running script %s", relative_path) - module.run_create(cur, database_engine) # type: ignore - if not is_empty: + if hasattr(module, "run_create"): + logger.info("Running %s:run_create", relative_path) + module.run_create(cur, database_engine) # type: ignore + + if not is_empty and hasattr(module, "run_upgrade"): + logger.info("Running %s:run_upgrade", relative_path) module.run_upgrade(cur, database_engine, config=config) # type: ignore elif ext == ".pyc" or file_name == "__pycache__": # Sometimes .pyc files turn up anyway even though we've -- cgit 1.4.1 From b59d285f7c2ffce56a273686e63bcb34a461317b Mon Sep 17 00:00:00 2001 From: Nick Barrett Date: Tue, 25 Jan 2022 14:14:46 +0000 Subject: Db txn set isolation level (#11799) Co-authored-by: Brendan Abolivier --- changelog.d/11799.misc | 1 + synapse/storage/database.py | 10 ++++++++++ synapse/storage/engines/_base.py | 19 ++++++++++++++++++- synapse/storage/engines/postgres.py | 29 +++++++++++++++++++++++++---- synapse/storage/engines/sqlite.py | 7 +++++++ 5 files changed, 61 insertions(+), 5 deletions(-) create mode 100644 changelog.d/11799.misc (limited to 'synapse') diff --git a/changelog.d/11799.misc b/changelog.d/11799.misc new file mode 100644 index 0000000000..5c3b2bcaf4 --- /dev/null +++ b/changelog.d/11799.misc @@ -0,0 +1 @@ +Preparation for reducing Postgres serialization errors: allow setting transaction isolation level. Contributed by Nick @ Beeper. diff --git a/synapse/storage/database.py b/synapse/storage/database.py index 57cc1d76e0..7455326ed3 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -702,6 +702,7 @@ class DatabasePool: func: Callable[..., R], *args: Any, db_autocommit: bool = False, + isolation_level: Optional[int] = None, **kwargs: Any, ) -> R: """Starts a transaction on the database and runs a given function @@ -724,6 +725,7 @@ class DatabasePool: called multiple times if the transaction is retried, so must correctly handle that case. + isolation_level: Set the server isolation level for this transaction. args: positional args to pass to `func` kwargs: named args to pass to `func` @@ -763,6 +765,7 @@ class DatabasePool: func: Callable[..., R], *args: Any, db_autocommit: bool = False, + isolation_level: Optional[int] = None, **kwargs: Any, ) -> R: """Wraps the .runWithConnection() method on the underlying db_pool. @@ -775,6 +778,7 @@ class DatabasePool: db_autocommit: Whether to run the function in "autocommit" mode, i.e. outside of a transaction. This is useful for transaction that are only a single query. Currently only affects postgres. + isolation_level: Set the server isolation level for this transaction. 
kwargs: named args to pass to `func` Returns: @@ -834,6 +838,10 @@ class DatabasePool: try: if db_autocommit: self.engine.attempt_to_set_autocommit(conn, True) + if isolation_level is not None: + self.engine.attempt_to_set_isolation_level( + conn, isolation_level + ) db_conn = LoggingDatabaseConnection( conn, self.engine, "runWithConnection" @@ -842,6 +850,8 @@ class DatabasePool: finally: if db_autocommit: self.engine.attempt_to_set_autocommit(conn, False) + if isolation_level: + self.engine.attempt_to_set_isolation_level(conn, None) return await make_deferred_yieldable( self._db_pool.runWithConnection(inner_func, *args, **kwargs) diff --git a/synapse/storage/engines/_base.py b/synapse/storage/engines/_base.py index 20cd63c330..143cd98ca2 100644 --- a/synapse/storage/engines/_base.py +++ b/synapse/storage/engines/_base.py @@ -12,11 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. import abc -from typing import Generic, TypeVar +from enum import IntEnum +from typing import Generic, Optional, TypeVar from synapse.storage.types import Connection +class IsolationLevel(IntEnum): + READ_COMMITTED: int = 1 + REPEATABLE_READ: int = 2 + SERIALIZABLE: int = 3 + + class IncorrectDatabaseSetup(RuntimeError): pass @@ -109,3 +116,13 @@ class BaseDatabaseEngine(Generic[ConnectionType], metaclass=abc.ABCMeta): commit/rollback the connections. """ ... + + @abc.abstractmethod + def attempt_to_set_isolation_level( + self, conn: Connection, isolation_level: Optional[int] + ): + """Attempt to set the connections isolation level. + + Note: This has no effect on SQLite3, as transactions are SERIALIZABLE by default. + """ + ... diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py index b3d71f661c..808342fafb 100644 --- a/synapse/storage/engines/postgres.py +++ b/synapse/storage/engines/postgres.py @@ -13,8 +13,13 @@ # limitations under the License. 
import logging +from typing import Mapping, Optional -from synapse.storage.engines._base import BaseDatabaseEngine, IncorrectDatabaseSetup +from synapse.storage.engines._base import ( + BaseDatabaseEngine, + IncorrectDatabaseSetup, + IsolationLevel, +) from synapse.storage.types import Connection logger = logging.getLogger(__name__) @@ -34,6 +39,15 @@ class PostgresEngine(BaseDatabaseEngine): self.synchronous_commit = database_config.get("synchronous_commit", True) self._version = None # unknown as yet + self.isolation_level_map: Mapping[int, int] = { + IsolationLevel.READ_COMMITTED: self.module.extensions.ISOLATION_LEVEL_READ_COMMITTED, + IsolationLevel.REPEATABLE_READ: self.module.extensions.ISOLATION_LEVEL_REPEATABLE_READ, + IsolationLevel.SERIALIZABLE: self.module.extensions.ISOLATION_LEVEL_SERIALIZABLE, + } + self.default_isolation_level = ( + self.module.extensions.ISOLATION_LEVEL_REPEATABLE_READ + ) + @property def single_threaded(self) -> bool: return False @@ -104,9 +118,7 @@ class PostgresEngine(BaseDatabaseEngine): return sql.replace("?", "%s") def on_new_connection(self, db_conn): - db_conn.set_isolation_level( - self.module.extensions.ISOLATION_LEVEL_REPEATABLE_READ - ) + db_conn.set_isolation_level(self.default_isolation_level) # Set the bytea output to escape, vs the default of hex cursor = db_conn.cursor() @@ -175,3 +187,12 @@ class PostgresEngine(BaseDatabaseEngine): def attempt_to_set_autocommit(self, conn: Connection, autocommit: bool): return conn.set_session(autocommit=autocommit) # type: ignore + + def attempt_to_set_isolation_level( + self, conn: Connection, isolation_level: Optional[int] + ): + if isolation_level is None: + isolation_level = self.default_isolation_level + else: + isolation_level = self.isolation_level_map[isolation_level] + return conn.set_isolation_level(isolation_level) # type: ignore diff --git a/synapse/storage/engines/sqlite.py b/synapse/storage/engines/sqlite.py index 70d17d4f2c..6c19e55999 100644 --- a/synapse/storage/engines/sqlite.py +++ b/synapse/storage/engines/sqlite.py @@ -15,6 +15,7 @@ import platform import struct import threading import typing +from typing import Optional from synapse.storage.engines import BaseDatabaseEngine from synapse.storage.types import Connection @@ -122,6 +123,12 @@ class Sqlite3Engine(BaseDatabaseEngine["sqlite3.Connection"]): # set the connection to autocommit mode. pass + def attempt_to_set_isolation_level( + self, conn: Connection, isolation_level: Optional[int] + ): + # All transactions are SERIALIZABLE by default in sqllite + pass + # Following functions taken from: https://github.com/coleifer/peewee -- cgit 1.4.1 From 1d5f7b2cc622a6ed91b9bf5b61c1c243b015c495 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 25 Jan 2022 14:35:35 +0000 Subject: Log modules at startup (#11813) --- changelog.d/11813.misc | 1 + synapse/app/_base.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog.d/11813.misc (limited to 'synapse') diff --git a/changelog.d/11813.misc b/changelog.d/11813.misc new file mode 100644 index 0000000000..f90d183b45 --- /dev/null +++ b/changelog.d/11813.misc @@ -0,0 +1 @@ +Log module names at startup. diff --git a/synapse/app/_base.py b/synapse/app/_base.py index e5ee03b79f..9efdd071cc 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -435,7 +435,8 @@ async def start(hs: "HomeServer") -> None: # before we start the listeners. 
    module_api = hs.get_module_api()
     for module, config in hs.config.modules.loaded_modules:
-        module(config=config, api=module_api)
+        m = module(config=config, api=module_api)
+        logger.info("Loaded module %s", m)
 
     load_legacy_spam_checkers(hs)
     load_legacy_third_party_event_rules(hs)
-- 
cgit 1.4.1


From 6a72c910f180ee8b4bd78223775af48492769472 Mon Sep 17 00:00:00 2001
From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com>
Date: Tue, 25 Jan 2022 17:11:40 +0100
Subject: Add admin API to get a list of federated rooms (#11658)

---
 changelog.d/11658.feature                         |   1 +
 docs/usage/administration/admin_api/federation.md |  60 +++++
 synapse/rest/admin/__init__.py                    |   2 +
 synapse/rest/admin/federation.py                  |  56 ++++
 synapse/storage/databases/main/transactions.py    |  48 ++++
 tests/rest/admin/test_federation.py               | 302 ++++++++++++++++++++--
 6 files changed, 444 insertions(+), 25 deletions(-)
 create mode 100644 changelog.d/11658.feature

(limited to 'synapse')

diff --git a/changelog.d/11658.feature b/changelog.d/11658.feature
new file mode 100644
index 0000000000..2ec9fb5eec
--- /dev/null
+++ b/changelog.d/11658.feature
@@ -0,0 +1 @@
+Add an admin API to get a list of rooms that federate with a given remote homeserver.
\ No newline at end of file
diff --git a/docs/usage/administration/admin_api/federation.md b/docs/usage/administration/admin_api/federation.md
index 5e609561a6..60cbc5265e 100644
--- a/docs/usage/administration/admin_api/federation.md
+++ b/docs/usage/administration/admin_api/federation.md
@@ -119,6 +119,66 @@ The following parameters should be set in the URL:
 The response fields are the same as in the `destinations` array in
 [List of destinations](#list-of-destinations) response.
 
+## Destination rooms
+
+This API gets the rooms that federate with a specific remote server.
+
+The API is:
+
+```
+GET /_synapse/admin/v1/federation/destinations/<destination>/rooms
+```
+
+A response body like the following is returned:
+
+```json
+{
+   "rooms":[
+     {
+       "room_id": "!OGEhHVWSdvArJzumhm:matrix.org",
+       "stream_ordering": 8326
+     },
+     {
+       "room_id": "!xYvNcQPhnkrdUmYczI:matrix.org",
+       "stream_ordering": 93534
+     }
+   ],
+   "total": 2
+}
+```
+
+To paginate, check for `next_token` and if present, call the endpoint again
+with `from` set to the value of `next_token`. This will return a new page.
+
+If the endpoint does not return a `next_token` then there are no more rooms
+to paginate through.
+
+**Parameters**
+
+The following parameters should be set in the URL:
+
+- `destination` - Name of the remote server.
+
+The following query parameters are available:
+
+- `from` - Offset in the returned list. Defaults to `0`.
+- `limit` - Maximum amount of rooms to return. Defaults to `100`.
+- `dir` - Direction of room order by `room_id`. Either `f` for forwards or `b` for
+  backwards. Defaults to `f`.
+
+**Response**
+
+The following fields are returned in the JSON response body:
+
+- `rooms` - An array of objects, each containing information about a room.
+  Room objects contain the following fields:
+  - `room_id` - string - The ID of the room.
+  - `stream_ordering` - integer - The stream ordering of the most recent
+    successfully-sent [PDU](understanding_synapse_through_grafana_graphs.md#federation)
+    to this destination in this room.
+- `next_token`: string representing a positive integer - Indication for pagination. See above.
+- `total` - integer - Total number of rooms.
+
 ## Reset connection timeout
 
 Synapse makes federation requests to other homeservers.
If a federation request fails,
diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py
index b1e49d51b7..9be9e33c8e 100644
--- a/synapse/rest/admin/__init__.py
+++ b/synapse/rest/admin/__init__.py
@@ -41,6 +41,7 @@ from synapse.rest.admin.event_reports import (
     EventReportsRestServlet,
 )
 from synapse.rest.admin.federation import (
+    DestinationMembershipRestServlet,
     DestinationResetConnectionRestServlet,
     DestinationRestServlet,
     ListDestinationsRestServlet,
@@ -268,6 +269,7 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
     ListRegistrationTokensRestServlet(hs).register(http_server)
     NewRegistrationTokenRestServlet(hs).register(http_server)
     RegistrationTokenRestServlet(hs).register(http_server)
+    DestinationMembershipRestServlet(hs).register(http_server)
    DestinationResetConnectionRestServlet(hs).register(http_server)
    DestinationRestServlet(hs).register(http_server)
    ListDestinationsRestServlet(hs).register(http_server)
diff --git a/synapse/rest/admin/federation.py b/synapse/rest/admin/federation.py
index 0f33f9e4da..d162e0081e 100644
--- a/synapse/rest/admin/federation.py
+++ b/synapse/rest/admin/federation.py
@@ -148,6 +148,62 @@ class DestinationRestServlet(RestServlet):
         return HTTPStatus.OK, response
 
 
+class DestinationMembershipRestServlet(RestServlet):
+    """Get list of rooms of a destination.
+    This needs user to have administrator access in Synapse.
+
+    GET /_synapse/admin/v1/federation/destinations/<destination>/rooms?from=0&limit=10
+
+    returns:
+        200 OK with a list of rooms if success otherwise an error.
+
+    The parameters `from` and `limit` are required only for pagination.
+    By default, a `limit` of 100 is used.
+    """
+
+    PATTERNS = admin_patterns("/federation/destinations/(?P<destination>[^/]*)/rooms$")
+
+    def __init__(self, hs: "HomeServer"):
+        self._auth = hs.get_auth()
+        self._store = hs.get_datastore()
+
+    async def on_GET(
+        self, request: SynapseRequest, destination: str
+    ) -> Tuple[int, JsonDict]:
+        await assert_requester_is_admin(self._auth, request)
+
+        if not await self._store.is_destination_known(destination):
+            raise NotFoundError("Unknown destination")
+
+        start = parse_integer(request, "from", default=0)
+        limit = parse_integer(request, "limit", default=100)
+
+        if start < 0:
+            raise SynapseError(
+                HTTPStatus.BAD_REQUEST,
+                "Query parameter from must be a string representing a positive integer.",
+                errcode=Codes.INVALID_PARAM,
+            )
+
+        if limit < 0:
+            raise SynapseError(
+                HTTPStatus.BAD_REQUEST,
+                "Query parameter limit must be a string representing a positive integer.",
+                errcode=Codes.INVALID_PARAM,
+            )
+
+        direction = parse_string(request, "dir", default="f", allowed_values=("f", "b"))
+
+        rooms, total = await self._store.get_destination_rooms_paginate(
+            destination, start, limit, direction
+        )
+        response = {"rooms": rooms, "total": total}
+        if (start + limit) < total:
+            response["next_token"] = str(start + len(rooms))
+
+        return HTTPStatus.OK, response
+
+
 class DestinationResetConnectionRestServlet(RestServlet):
     """Reset a destination's connection timeouts and wake it up.
     This needs user to have administrator access in Synapse.
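(A minimal sketch of paginating the rooms endpoint implemented above, following the `next_token` contract it emits; not part of this patch. The homeserver URL, admin access token and destination name are placeholder assumptions, and `requests` is an assumed HTTP client.)

```python
# Minimal sketch (placeholder homeserver URL, admin token and destination;
# `requests` as the HTTP client). Collects every federated room for one
# destination by following `next_token` until the endpoint stops returning it.
import requests

HOMESERVER = "https://synapse.example.com"  # placeholder
ADMIN_TOKEN = "syt_placeholder_admin_token"  # placeholder
DESTINATION = "remote.example.org"  # placeholder

url = f"{HOMESERVER}/_synapse/admin/v1/federation/destinations/{DESTINATION}/rooms"
headers = {"Authorization": f"Bearer {ADMIN_TOKEN}"}

rooms = []
params = {"limit": 100}
while True:
    body = requests.get(url, params=params, headers=headers).json()
    rooms.extend(body["rooms"])
    if "next_token" not in body:
        break  # no next_token means there are no further pages
    params["from"] = body["next_token"]

print(f"{len(rooms)} of {body['total']} rooms federate with {DESTINATION}")
```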
diff --git a/synapse/storage/databases/main/transactions.py b/synapse/storage/databases/main/transactions.py index 4b78b4d098..ba79e19f7f 100644 --- a/synapse/storage/databases/main/transactions.py +++ b/synapse/storage/databases/main/transactions.py @@ -561,6 +561,54 @@ class TransactionWorkerStore(CacheInvalidationWorkerStore): "get_destinations_paginate_txn", get_destinations_paginate_txn ) + async def get_destination_rooms_paginate( + self, destination: str, start: int, limit: int, direction: str = "f" + ) -> Tuple[List[JsonDict], int]: + """Function to retrieve a paginated list of destination's rooms. + This will return a json list of rooms and the + total number of rooms. + + Args: + destination: the destination to query + start: start number to begin the query from + limit: number of rows to retrieve + direction: sort ascending or descending by room_id + Returns: + A tuple of a dict of rooms and a count of total rooms. + """ + + def get_destination_rooms_paginate_txn( + txn: LoggingTransaction, + ) -> Tuple[List[JsonDict], int]: + + if direction == "b": + order = "DESC" + else: + order = "ASC" + + sql = """ + SELECT COUNT(*) as total_rooms + FROM destination_rooms + WHERE destination = ? + """ + txn.execute(sql, [destination]) + count = cast(Tuple[int], txn.fetchone())[0] + + rooms = self.db_pool.simple_select_list_paginate_txn( + txn=txn, + table="destination_rooms", + orderby="room_id", + start=start, + limit=limit, + retcols=("room_id", "stream_ordering"), + order_direction=order, + ) + return rooms, count + + return await self.db_pool.runInteraction( + "get_destination_rooms_paginate_txn", get_destination_rooms_paginate_txn + ) + async def is_destination_known(self, destination: str) -> bool: """Check if a destination is known to the server.""" result = await self.db_pool.simple_select_one_onecol( diff --git a/tests/rest/admin/test_federation.py b/tests/rest/admin/test_federation.py index e2d3cff2a3..71068d16cd 100644 --- a/tests/rest/admin/test_federation.py +++ b/tests/rest/admin/test_federation.py @@ -20,7 +20,7 @@ from twisted.test.proto_helpers import MemoryReactor import synapse.rest.admin from synapse.api.errors import Codes -from synapse.rest.client import login +from synapse.rest.client import login, room from synapse.server import HomeServer from synapse.types import JsonDict from synapse.util import Clock @@ -52,9 +52,7 @@ class FederationTestCase(unittest.HomeserverTestCase): ] ) def test_requester_is_no_admin(self, method: str, url: str) -> None: - """ - If the user is not a server admin, an error 403 is returned. - """ + """If the user is not a server admin, an error 403 is returned.""" self.register_user("user", "pass", admin=False) other_user_tok = self.login("user", "pass") @@ -70,9 +68,7 @@ class FederationTestCase(unittest.HomeserverTestCase): self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"]) def test_invalid_parameter(self) -> None: - """ - If parameters are invalid, an error is returned. 
- """ + """If parameters are invalid, an error is returned.""" # negative limit channel = self.make_request( @@ -135,9 +131,7 @@ class FederationTestCase(unittest.HomeserverTestCase): self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"]) def test_limit(self) -> None: - """ - Testing list of destinations with limit - """ + """Testing list of destinations with limit""" number_destinations = 20 self._create_destinations(number_destinations) @@ -155,9 +149,7 @@ class FederationTestCase(unittest.HomeserverTestCase): self._check_fields(channel.json_body["destinations"]) def test_from(self) -> None: - """ - Testing list of destinations with a defined starting point (from) - """ + """Testing list of destinations with a defined starting point (from)""" number_destinations = 20 self._create_destinations(number_destinations) @@ -175,9 +167,7 @@ class FederationTestCase(unittest.HomeserverTestCase): self._check_fields(channel.json_body["destinations"]) def test_limit_and_from(self) -> None: - """ - Testing list of destinations with a defined starting point and limit - """ + """Testing list of destinations with a defined starting point and limit""" number_destinations = 20 self._create_destinations(number_destinations) @@ -195,9 +185,7 @@ class FederationTestCase(unittest.HomeserverTestCase): self._check_fields(channel.json_body["destinations"]) def test_next_token(self) -> None: - """ - Testing that `next_token` appears at the right place - """ + """Testing that `next_token` appears at the right place""" number_destinations = 20 self._create_destinations(number_destinations) @@ -256,9 +244,7 @@ class FederationTestCase(unittest.HomeserverTestCase): self.assertNotIn("next_token", channel.json_body) def test_list_all_destinations(self) -> None: - """ - List all destinations. - """ + """List all destinations.""" number_destinations = 5 self._create_destinations(number_destinations) @@ -277,9 +263,7 @@ class FederationTestCase(unittest.HomeserverTestCase): self._check_fields(channel.json_body["destinations"]) def test_order_by(self) -> None: - """ - Testing order list with parameter `order_by` - """ + """Testing order list with parameter `order_by`""" def _order_test( expected_destination_list: List[str], @@ -543,3 +527,271 @@ class FederationTestCase(unittest.HomeserverTestCase): self.assertIn("retry_interval", c) self.assertIn("failure_ts", c) self.assertIn("last_successful_stream_ordering", c) + + +class DestinationMembershipTestCase(unittest.HomeserverTestCase): + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + room.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastore() + self.admin_user = self.register_user("admin", "pass", admin=True) + self.admin_user_tok = self.login("admin", "pass") + + self.dest = "sub0.example.com" + self.url = f"/_synapse/admin/v1/federation/destinations/{self.dest}/rooms" + + # Record that we successfully contacted a destination in the DB. 
+ self.get_success( + self.store.set_destination_retry_timings(self.dest, None, 0, 0) + ) + + def test_requester_is_no_admin(self) -> None: + """If the user is not a server admin, an error 403 is returned.""" + + self.register_user("user", "pass", admin=False) + other_user_tok = self.login("user", "pass") + + channel = self.make_request( + "GET", + self.url, + access_token=other_user_tok, + ) + + self.assertEqual(HTTPStatus.FORBIDDEN, channel.code, msg=channel.json_body) + self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"]) + + def test_invalid_parameter(self) -> None: + """If parameters are invalid, an error is returned.""" + + # negative limit + channel = self.make_request( + "GET", + self.url + "?limit=-5", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.BAD_REQUEST, channel.code, msg=channel.json_body) + self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"]) + + # negative from + channel = self.make_request( + "GET", + self.url + "?from=-5", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.BAD_REQUEST, channel.code, msg=channel.json_body) + self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"]) + + # invalid search order + channel = self.make_request( + "GET", + self.url + "?dir=bar", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.BAD_REQUEST, channel.code, msg=channel.json_body) + self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"]) + + # invalid destination + channel = self.make_request( + "GET", + "/_synapse/admin/v1/federation/destinations/%s/rooms" % ("invalid",), + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.NOT_FOUND, channel.code, msg=channel.json_body) + self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"]) + + def test_limit(self) -> None: + """Testing list of destinations with limit""" + + number_rooms = 5 + self._create_destination_rooms(number_rooms) + + channel = self.make_request( + "GET", + self.url + "?limit=3", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body) + self.assertEqual(channel.json_body["total"], number_rooms) + self.assertEqual(len(channel.json_body["rooms"]), 3) + self.assertEqual(channel.json_body["next_token"], "3") + self._check_fields(channel.json_body["rooms"]) + + def test_from(self) -> None: + """Testing list of rooms with a defined starting point (from)""" + + number_rooms = 10 + self._create_destination_rooms(number_rooms) + + channel = self.make_request( + "GET", + self.url + "?from=5", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body) + self.assertEqual(channel.json_body["total"], number_rooms) + self.assertEqual(len(channel.json_body["rooms"]), 5) + self.assertNotIn("next_token", channel.json_body) + self._check_fields(channel.json_body["rooms"]) + + def test_limit_and_from(self) -> None: + """Testing list of rooms with a defined starting point and limit""" + + number_rooms = 10 + self._create_destination_rooms(number_rooms) + + channel = self.make_request( + "GET", + self.url + "?from=3&limit=5", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body) + self.assertEqual(channel.json_body["total"], number_rooms) + self.assertEqual(channel.json_body["next_token"], "8") + self.assertEqual(len(channel.json_body["rooms"]), 5) + self._check_fields(channel.json_body["rooms"]) + + def test_order_direction(self) -> 
None: + """Testing order list with parameter `dir`""" + number_rooms = 4 + self._create_destination_rooms(number_rooms) + + # get list in forward direction + channel_asc = self.make_request( + "GET", + self.url + "?dir=f", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.OK, channel_asc.code, msg=channel_asc.json_body) + self.assertEqual(channel_asc.json_body["total"], number_rooms) + self.assertEqual(number_rooms, len(channel_asc.json_body["rooms"])) + self._check_fields(channel_asc.json_body["rooms"]) + + # get list in backward direction + channel_desc = self.make_request( + "GET", + self.url + "?dir=b", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.OK, channel_desc.code, msg=channel_desc.json_body) + self.assertEqual(channel_desc.json_body["total"], number_rooms) + self.assertEqual(number_rooms, len(channel_desc.json_body["rooms"])) + self._check_fields(channel_desc.json_body["rooms"]) + + # test that both lists have different directions + for i in range(0, number_rooms): + self.assertEqual( + channel_asc.json_body["rooms"][i]["room_id"], + channel_desc.json_body["rooms"][number_rooms - 1 - i]["room_id"], + ) + + def test_next_token(self) -> None: + """Testing that `next_token` appears at the right place""" + + number_rooms = 5 + self._create_destination_rooms(number_rooms) + + # `next_token` does not appear + # Number of results is the number of entries + channel = self.make_request( + "GET", + self.url + "?limit=5", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body) + self.assertEqual(channel.json_body["total"], number_rooms) + self.assertEqual(len(channel.json_body["rooms"]), number_rooms) + self.assertNotIn("next_token", channel.json_body) + + # `next_token` does not appear + # Number of max results is larger than the number of entries + channel = self.make_request( + "GET", + self.url + "?limit=6", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body) + self.assertEqual(channel.json_body["total"], number_rooms) + self.assertEqual(len(channel.json_body["rooms"]), number_rooms) + self.assertNotIn("next_token", channel.json_body) + + # `next_token` does appear + # Number of max results is smaller than the number of entries + channel = self.make_request( + "GET", + self.url + "?limit=4", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body) + self.assertEqual(channel.json_body["total"], number_rooms) + self.assertEqual(len(channel.json_body["rooms"]), 4) + self.assertEqual(channel.json_body["next_token"], "4") + + # Check + # Set `from` to value of `next_token` for request remaining entries + # `next_token` does not appear + channel = self.make_request( + "GET", + self.url + "?from=4", + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body) + self.assertEqual(channel.json_body["total"], number_rooms) + self.assertEqual(len(channel.json_body["rooms"]), 1) + self.assertNotIn("next_token", channel.json_body) + + def test_destination_rooms(self) -> None: + """Testing that request the list of rooms is successfully.""" + number_rooms = 3 + self._create_destination_rooms(number_rooms) + + channel = self.make_request( + "GET", + self.url, + access_token=self.admin_user_tok, + ) + + self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body) + self.assertEqual(channel.json_body["total"], number_rooms) + 
self.assertEqual(number_rooms, len(channel.json_body["rooms"])) + self._check_fields(channel.json_body["rooms"]) + + def _create_destination_rooms(self, number_rooms: int) -> None: + """Create a number rooms for destination + + Args: + number_rooms: Number of rooms to be created + """ + for _ in range(0, number_rooms): + room_id = self.helper.create_room_as( + self.admin_user, tok=self.admin_user_tok + ) + self.get_success( + self.store.store_destination_rooms_entries((self.dest,), room_id, 1234) + ) + + def _check_fields(self, content: List[JsonDict]) -> None: + """Checks that the expected room attributes are present in content + + Args: + content: List that is checked for content + """ + for c in content: + self.assertIn("room_id", c) + self.assertIn("stream_ordering", c) -- cgit 1.4.1 From b8bf6007002131b8931f4b5f49a77bf5aba85067 Mon Sep 17 00:00:00 2001 From: Shay Date: Tue, 25 Jan 2022 10:35:18 -0800 Subject: Check that `gc` method is available before using in `synapse/app/_base` (#11816) * add check that gc.freeze is available before calling * newsfragment * lint * Update comment Co-authored-by: Dan Callahan Co-authored-by: Dan Callahan --- changelog.d/11816.misc | 1 + synapse/app/_base.py | 14 ++++++++------ 2 files changed, 9 insertions(+), 6 deletions(-) create mode 100644 changelog.d/11816.misc (limited to 'synapse') diff --git a/changelog.d/11816.misc b/changelog.d/11816.misc new file mode 100644 index 0000000000..b1f048f7f5 --- /dev/null +++ b/changelog.d/11816.misc @@ -0,0 +1 @@ +Drop support for Python 3.6, which is EOL. \ No newline at end of file diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 9efdd071cc..bbab8a052a 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -468,12 +468,14 @@ async def start(hs: "HomeServer") -> None: # everything currently allocated are things that will be used for the # rest of time. Doing so means less work each GC (hopefully). # - gc.collect() - gc.freeze() - - # Speed up shutdowns by freezing all allocated objects. This moves everything - # into the permanent generation and excludes them from the final GC. - atexit.register(gc.freeze) + # PyPy does not (yet?) implement gc.freeze() + if hasattr(gc, "freeze"): + gc.collect() + gc.freeze() + + # Speed up shutdowns by freezing all allocated objects. This moves everything + # into the permanent generation and excludes them from the final GC. + atexit.register(gc.freeze) def setup_sentry(hs: "HomeServer") -> None: -- cgit 1.4.1 From 74e4419eb4b18ef7d2a3b4416290a4f103042436 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Tue, 25 Jan 2022 20:29:28 +0000 Subject: Fix another jsonschema typecheck error (#11830) Similar to #11817. In `_create_power_level_validator` we - retrieve `validator`. This is a class implementing the `jsonschema.protocols.Validator` interface. In other words, `validator: Type[jsonschema.protocols.Validator]`. - we then create an second validator class by modifying the original `validator`. We return that class, which is also of type `Type[jsonschema.protocols.Validator]`. So the original annotation was incorrect: it claimed we were returning an instance of jsonSchema.Draft7Validator, not the class (or a subclass) itself. (Strictly speaking this is incorrect, because `POWER_LEVELS_SCHEMA` isn't pinned to a particular version of JSON Schema. But there are other complications with the type stubs if you try to fix this; I felt like the change herein was a decent compromise that better expresses intent). 
(I suspect/hope the typeshed project would welcome an effort to improve the jsonschema stubs. Let's see if I get some spare time.) --- changelog.d/11817.misc | 2 +- changelog.d/11830.misc | 1 + synapse/events/validator.py | 6 ++---- 3 files changed, 4 insertions(+), 5 deletions(-) create mode 100644 changelog.d/11830.misc (limited to 'synapse') diff --git a/changelog.d/11817.misc b/changelog.d/11817.misc index bd29d8d6eb..3d6b2ea4d4 100644 --- a/changelog.d/11817.misc +++ b/changelog.d/11817.misc @@ -1 +1 @@ -Compatibility with updated type hints for jsonschema 4.4.0. +Correct a type annotation in the event validation logic. diff --git a/changelog.d/11830.misc b/changelog.d/11830.misc new file mode 100644 index 0000000000..fe248d00ab --- /dev/null +++ b/changelog.d/11830.misc @@ -0,0 +1 @@ +Correct a type annotation in the event validation logic. \ No newline at end of file diff --git a/synapse/events/validator.py b/synapse/events/validator.py index 4245573017..360d24274a 100644 --- a/synapse/events/validator.py +++ b/synapse/events/validator.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import collections.abc -from typing import Iterable, Union +from typing import Iterable, Type, Union import jsonschema @@ -246,9 +246,7 @@ POWER_LEVELS_SCHEMA = { # This could return something newer than Draft 7, but that's the current "latest" # validator. -# -# See https://github.com/python/typeshed/issues/7028 for the ignored return type. -def _create_power_level_validator() -> jsonschema.Draft7Validator: # type: ignore[valid-type] +def _create_power_level_validator() -> Type[jsonschema.Draft7Validator]: validator = jsonschema.validators.validator_for(POWER_LEVELS_SCHEMA) # by default jsonschema does not consider a frozendict to be an object so -- cgit 1.4.1 From 95b3f952fa43e51feae166fa1678761c5e32d900 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 26 Jan 2022 12:02:54 +0000 Subject: Add a config flag to inhibit `M_USER_IN_USE` during registration (#11743) This is mostly motivated by the tchap use case, where usernames are automatically generated from the user's email address (in a way that allows figuring out the email address from the username). Therefore, it's an issue if we respond to requests on /register and /register/available with M_USER_IN_USE, because it can potentially leak email addresses (which include the user's real name and place of work). This commit adds a flag to inhibit the M_USER_IN_USE errors that are raised both by /register/available, and when providing a username early into the registration process. This error will still be raised if the user completes the registration process but the username conflicts. This is particularly useful when using modules (https://github.com/matrix-org/synapse/pull/11790 adds a module callback to set the username of users at registration) or SSO, since they can ensure the username is unique. 
More context is available in the PR that introduced this behaviour to
synapse-dinsic: matrix-org/synapse-dinsic#48 - as well as the issue in the
matrix-dinsic repo: matrix-org/matrix-dinsic#476
---
 changelog.d/11743.feature          |  1 +
 docs/sample_config.yaml            | 10 ++++++++++
 synapse/config/registration.py     | 12 +++++++++++
 synapse/handlers/register.py       | 26 +++++++++++++-----------
 synapse/rest/client/register.py    | 11 ++++++++++
 tests/rest/client/test_register.py | 41 ++++++++++++++++++++++++++++++++++++++
 6 files changed, 89 insertions(+), 12 deletions(-)
 create mode 100644 changelog.d/11743.feature

(limited to 'synapse')

diff --git a/changelog.d/11743.feature b/changelog.d/11743.feature
new file mode 100644
index 0000000000..9809f48b96
--- /dev/null
+++ b/changelog.d/11743.feature
@@ -0,0 +1 @@
+Add a config flag to inhibit M_USER_IN_USE during registration.
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml
index 1b86d0295d..b38e6d6c88 100644
--- a/docs/sample_config.yaml
+++ b/docs/sample_config.yaml
@@ -1428,6 +1428,16 @@ account_threepid_delegates:
 #
 #auto_join_rooms_for_guests: false
 
+# Whether to inhibit errors raised when registering a new account if the user ID
+# already exists. If turned on, requests to /register/available will always
+# show a user ID as available, and Synapse won't raise an error when starting
+# a registration with a user ID that already exists. However, Synapse will still
+# raise an error if the registration completes and the username conflicts.
+#
+# Defaults to false.
+#
+#inhibit_user_in_use_error: true
+
 
 ## Metrics ###
 
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index 7a059c6dec..ea9b50fe97 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -190,6 +190,8 @@ class RegistrationConfig(Config):
         # The success template used during fallback auth.
         self.fallback_success_template = self.read_template("auth_success.html")
 
+        self.inhibit_user_in_use_error = config.get("inhibit_user_in_use_error", False)
+
     def generate_config_section(self, generate_secrets=False, **kwargs):
         if generate_secrets:
             registration_shared_secret = 'registration_shared_secret: "%s"' % (
@@ -446,6 +448,16 @@ class RegistrationConfig(Config):
         # Defaults to true.
         #
         #auto_join_rooms_for_guests: false
+
+        # Whether to inhibit errors raised when registering a new account if the user ID
+        # already exists. If turned on, requests to /register/available will always
+        # show a user ID as available, and Synapse won't raise an error when starting
+        # a registration with a user ID that already exists. However, Synapse will still
+        # raise an error if the registration completes and the username conflicts.
+        #
+        # Defaults to false.
+ # + #inhibit_user_in_use_error: true """ % locals() ) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index f08a516a75..a719d5eef3 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -132,6 +132,7 @@ class RegistrationHandler: localpart: str, guest_access_token: Optional[str] = None, assigned_user_id: Optional[str] = None, + inhibit_user_in_use_error: bool = False, ) -> None: if types.contains_invalid_mxid_characters(localpart): raise SynapseError( @@ -171,21 +172,22 @@ class RegistrationHandler: users = await self.store.get_users_by_id_case_insensitive(user_id) if users: - if not guest_access_token: + if not inhibit_user_in_use_error and not guest_access_token: raise SynapseError( 400, "User ID already taken.", errcode=Codes.USER_IN_USE ) - user_data = await self.auth.get_user_by_access_token(guest_access_token) - if ( - not user_data.is_guest - or UserID.from_string(user_data.user_id).localpart != localpart - ): - raise AuthError( - 403, - "Cannot register taken user ID without valid guest " - "credentials for that user.", - errcode=Codes.FORBIDDEN, - ) + if guest_access_token: + user_data = await self.auth.get_user_by_access_token(guest_access_token) + if ( + not user_data.is_guest + or UserID.from_string(user_data.user_id).localpart != localpart + ): + raise AuthError( + 403, + "Cannot register taken user ID without valid guest " + "credentials for that user.", + errcode=Codes.FORBIDDEN, + ) if guest_access_token is None: try: diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index 8b56c76aed..c59dae7c03 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -339,12 +339,19 @@ class UsernameAvailabilityRestServlet(RestServlet): ), ) + self.inhibit_user_in_use_error = ( + hs.config.registration.inhibit_user_in_use_error + ) + async def on_GET(self, request: Request) -> Tuple[int, JsonDict]: if not self.hs.config.registration.enable_registration: raise SynapseError( 403, "Registration has been disabled", errcode=Codes.FORBIDDEN ) + if self.inhibit_user_in_use_error: + return 200, {"available": True} + ip = request.getClientIP() with self.ratelimiter.ratelimit(ip) as wait_deferred: await wait_deferred @@ -422,6 +429,9 @@ class RegisterRestServlet(RestServlet): self._refresh_tokens_enabled = ( hs.config.registration.refreshable_access_token_lifetime is not None ) + self._inhibit_user_in_use_error = ( + hs.config.registration.inhibit_user_in_use_error + ) self._registration_flows = _calculate_registration_flows( hs.config, self.auth_handler @@ -564,6 +574,7 @@ class RegisterRestServlet(RestServlet): desired_username, guest_access_token=guest_access_token, assigned_user_id=registered_user_id, + inhibit_user_in_use_error=self._inhibit_user_in_use_error, ) # Check if the user-interactive authentication flows are complete, if diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py index 6e7c0f11df..407dd32a73 100644 --- a/tests/rest/client/test_register.py +++ b/tests/rest/client/test_register.py @@ -726,6 +726,47 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): {"errcode": "M_UNKNOWN", "error": "Unable to parse email address"}, ) + @override_config( + { + "inhibit_user_in_use_error": True, + } + ) + def test_inhibit_user_in_use_error(self): + """Tests that the 'inhibit_user_in_use_error' configuration flag behaves + correctly. 
+ """ + username = "arthur" + + # Manually register the user, so we know the test isn't passing because of a lack + # of clashing. + reg_handler = self.hs.get_registration_handler() + self.get_success(reg_handler.register_user(username)) + + # Check that /available correctly ignores the username provided despite the + # username being already registered. + channel = self.make_request("GET", "register/available?username=" + username) + self.assertEquals(200, channel.code, channel.result) + + # Test that when starting a UIA registration flow the request doesn't fail because + # of a conflicting username + channel = self.make_request( + "POST", + "register", + {"username": username, "type": "m.login.password", "password": "foo"}, + ) + self.assertEqual(channel.code, 401) + self.assertIn("session", channel.json_body) + + # Test that finishing the registration fails because of a conflicting username. + session = channel.json_body["session"] + channel = self.make_request( + "POST", + "register", + {"auth": {"session": session, "type": LoginType.DUMMY}}, + ) + self.assertEqual(channel.code, 400, channel.json_body) + self.assertEqual(channel.json_body["errcode"], Codes.USER_IN_USE) + class AccountValidityTestCase(unittest.HomeserverTestCase): -- cgit 1.4.1 From c5815567a40a3f50de24b73f9cf150c9b5dbfd42 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Wed, 26 Jan 2022 12:06:56 +0000 Subject: Avoid type annotation problems in prom-client (#11834) --- changelog.d/11834.misc | 1 + synapse/python_dependencies.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog.d/11834.misc (limited to 'synapse') diff --git a/changelog.d/11834.misc b/changelog.d/11834.misc new file mode 100644 index 0000000000..29a5635f7a --- /dev/null +++ b/changelog.d/11834.misc @@ -0,0 +1 @@ +Workaround a type annotation problem in `prometheus_client` 0.13.0. \ No newline at end of file diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 22b4606ae0..80786464c2 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -76,7 +76,8 @@ REQUIREMENTS = [ "msgpack>=0.5.2", "phonenumbers>=8.2.0", # we use GaugeHistogramMetric, which was added in prom-client 0.4.0. - "prometheus_client>=0.4.0", + # 0.13.0 has an incorrect type annotation, see #11832. + "prometheus_client>=0.4.0,<0.13.0", # we use `order`, which arrived in attrs 19.2.0. # Note: 21.1.0 broke `/sync`, see #9936 "attrs>=19.2.0,!=21.1.0", -- cgit 1.4.1 From d8df8e6c1432d25ea1c0310a5f2dc48d1688345f Mon Sep 17 00:00:00 2001 From: David Robertson Date: Wed, 26 Jan 2022 12:47:34 +0000 Subject: Don't print HTTPStatus.* in "Processed..." logs (#11827) * Don't print HTTPStatus.* in "Processed..." logs Fixes #11812. See also #7118 and https://github.com/matrix-org/synapse/pull/7188#r401719326 in particular. Co-authored-by: Brendan Abolivier --- changelog.d/11827.bugfix | 1 + synapse/http/site.py | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 changelog.d/11827.bugfix (limited to 'synapse') diff --git a/changelog.d/11827.bugfix b/changelog.d/11827.bugfix new file mode 100644 index 0000000000..30222dfb62 --- /dev/null +++ b/changelog.d/11827.bugfix @@ -0,0 +1 @@ +Fix a bug introduced in Synapse 0.33.3 causing requests to sometimes log strings such as `HTTPStatus.OK` instead of integer status codes. 
\ No newline at end of file diff --git a/synapse/http/site.py b/synapse/http/site.py index c180a1d323..40f6c04894 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -407,7 +407,10 @@ class SynapseRequest(Request): user_agent = get_request_user_agent(self, "-") - code = str(self.code) + # int(self.code) looks redundant, because self.code is already an int. + # But self.code might be an HTTPStatus (which inherits from int)---which has + # a different string representation. So ensure we really have an integer. + code = str(int(self.code)) if not self.finished: # we didn't send the full response before we gave up (presumably because # the connection dropped) -- cgit 1.4.1 From 2897fb6b4fb8bdaea0e919233d5ccaf5dea12742 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 26 Jan 2022 08:27:04 -0500 Subject: Improvements to bundling aggregations. (#11815) This is some odds and ends found during the review of #11791 and while continuing to work in this code: * Return attrs classes instead of dictionaries from some methods to improve type safety. * Call `get_bundled_aggregations` fewer times. * Adds a missing assertion in the tests. * Do not return empty bundled aggregations for an event (preferring to not include the bundle at all, as the docstring states). --- changelog.d/11815.misc | 1 + synapse/events/utils.py | 57 ++++++++++++++------- synapse/handlers/room.py | 77 +++++++++++++++-------------- synapse/handlers/search.py | 45 ++++++++--------- synapse/handlers/sync.py | 3 +- synapse/push/mailer.py | 2 +- synapse/rest/admin/rooms.py | 39 +++++++++------ synapse/rest/client/room.py | 39 +++++++++------ synapse/rest/client/sync.py | 3 +- synapse/storage/databases/main/relations.py | 61 ++++++++++++++--------- synapse/storage/databases/main/stream.py | 22 ++++++--- tests/rest/client/test_relations.py | 2 +- 12 files changed, 212 insertions(+), 139 deletions(-) create mode 100644 changelog.d/11815.misc (limited to 'synapse') diff --git a/changelog.d/11815.misc b/changelog.d/11815.misc new file mode 100644 index 0000000000..83aa6d6eb0 --- /dev/null +++ b/changelog.d/11815.misc @@ -0,0 +1 @@ +Improve type safety of bundled aggregations code. diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 918adeecf8..243696b357 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -14,7 +14,17 @@ # limitations under the License. import collections.abc import re -from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional, Union +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterable, + List, + Mapping, + Optional, + Union, +) from frozendict import frozendict @@ -26,6 +36,10 @@ from synapse.util.frozenutils import unfreeze from . import EventBase +if TYPE_CHECKING: + from synapse.storage.databases.main.relations import BundledAggregations + + # Split strings on "." but not "\." This uses a negative lookbehind assertion for '\' # (? JsonDict: """Serializes a single event. @@ -415,7 +429,7 @@ class EventClientSerializer: self, event: EventBase, time_now: int, - aggregations: JsonDict, + aggregations: "BundledAggregations", serialized_event: JsonDict, ) -> None: """Potentially injects bundled aggregations into the unsigned portion of the serialized event. @@ -427,13 +441,18 @@ class EventClientSerializer: serialized_event: The serialized event which may be modified. """ - # Make a copy in-case the object is cached. 
- aggregations = aggregations.copy() + serialized_aggregations = {} + + if aggregations.annotations: + serialized_aggregations[RelationTypes.ANNOTATION] = aggregations.annotations + + if aggregations.references: + serialized_aggregations[RelationTypes.REFERENCE] = aggregations.references - if RelationTypes.REPLACE in aggregations: + if aggregations.replace: # If there is an edit replace the content, preserving existing # relations. - edit = aggregations[RelationTypes.REPLACE] + edit = aggregations.replace # Ensure we take copies of the edit content, otherwise we risk modifying # the original event. @@ -451,24 +470,28 @@ class EventClientSerializer: else: serialized_event["content"].pop("m.relates_to", None) - aggregations[RelationTypes.REPLACE] = { + serialized_aggregations[RelationTypes.REPLACE] = { "event_id": edit.event_id, "origin_server_ts": edit.origin_server_ts, "sender": edit.sender, } # If this event is the start of a thread, include a summary of the replies. - if RelationTypes.THREAD in aggregations: - # Serialize the latest thread event. - latest_thread_event = aggregations[RelationTypes.THREAD]["latest_event"] - - # Don't bundle aggregations as this could recurse forever. - aggregations[RelationTypes.THREAD]["latest_event"] = self.serialize_event( - latest_thread_event, time_now, bundle_aggregations=None - ) + if aggregations.thread: + serialized_aggregations[RelationTypes.THREAD] = { + # Don't bundle aggregations as this could recurse forever. + "latest_event": self.serialize_event( + aggregations.thread.latest_event, time_now, bundle_aggregations=None + ), + "count": aggregations.thread.count, + "current_user_participated": aggregations.thread.current_user_participated, + } # Include the bundled aggregations in the event. - serialized_event["unsigned"].setdefault("m.relations", {}).update(aggregations) + if serialized_aggregations: + serialized_event["unsigned"].setdefault("m.relations", {}).update( + serialized_aggregations + ) def serialize_events( self, events: Iterable[Union[JsonDict, EventBase]], time_now: int, **kwargs: Any diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index f963078e59..1420d67729 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -30,6 +30,7 @@ from typing import ( Tuple, ) +import attr from typing_extensions import TypedDict from synapse.api.constants import ( @@ -60,6 +61,7 @@ from synapse.events.utils import copy_power_levels_contents from synapse.federation.federation_client import InvalidResponseError from synapse.handlers.federation import get_domains_from_state from synapse.rest.admin._base import assert_user_is_admin +from synapse.storage.databases.main.relations import BundledAggregations from synapse.storage.state import StateFilter from synapse.streams import EventSource from synapse.types import ( @@ -90,6 +92,17 @@ id_server_scheme = "https://" FIVE_MINUTES_IN_MS = 5 * 60 * 1000 +@attr.s(slots=True, frozen=True, auto_attribs=True) +class EventContext: + events_before: List[EventBase] + event: EventBase + events_after: List[EventBase] + state: List[EventBase] + aggregations: Dict[str, BundledAggregations] + start: str + end: str + + class RoomCreationHandler: def __init__(self, hs: "HomeServer"): self.store = hs.get_datastore() @@ -1119,7 +1132,7 @@ class RoomContextHandler: limit: int, event_filter: Optional[Filter], use_admin_priviledge: bool = False, - ) -> Optional[JsonDict]: + ) -> Optional[EventContext]: """Retrieves events, pagination tokens and state around a given event in a room. 
@@ -1167,38 +1180,28 @@ class RoomContextHandler: results = await self.store.get_events_around( room_id, event_id, before_limit, after_limit, event_filter ) + events_before = results.events_before + events_after = results.events_after if event_filter: - results["events_before"] = await event_filter.filter( - results["events_before"] - ) - results["events_after"] = await event_filter.filter(results["events_after"]) + events_before = await event_filter.filter(events_before) + events_after = await event_filter.filter(events_after) - results["events_before"] = await filter_evts(results["events_before"]) - results["events_after"] = await filter_evts(results["events_after"]) + events_before = await filter_evts(events_before) + events_after = await filter_evts(events_after) # filter_evts can return a pruned event in case the user is allowed to see that # there's something there but not see the content, so use the event that's in # `filtered` rather than the event we retrieved from the datastore. - results["event"] = filtered[0] + event = filtered[0] # Fetch the aggregations. aggregations = await self.store.get_bundled_aggregations( - [results["event"]], user.to_string() + itertools.chain(events_before, (event,), events_after), + user.to_string(), ) - aggregations.update( - await self.store.get_bundled_aggregations( - results["events_before"], user.to_string() - ) - ) - aggregations.update( - await self.store.get_bundled_aggregations( - results["events_after"], user.to_string() - ) - ) - results["aggregations"] = aggregations - if results["events_after"]: - last_event_id = results["events_after"][-1].event_id + if events_after: + last_event_id = events_after[-1].event_id else: last_event_id = event_id @@ -1206,9 +1209,9 @@ class RoomContextHandler: state_filter = StateFilter.from_lazy_load_member_list( ev.sender for ev in itertools.chain( - results["events_before"], - (results["event"],), - results["events_after"], + events_before, + (event,), + events_after, ) ) else: @@ -1226,21 +1229,23 @@ class RoomContextHandler: if event_filter: state_events = await event_filter.filter(state_events) - results["state"] = await filter_evts(state_events) - # We use a dummy token here as we only care about the room portion of # the token, which we replace. 
token = StreamToken.START - results["start"] = await token.copy_and_replace( - "room_key", results["start"] - ).to_string(self.store) - - results["end"] = await token.copy_and_replace( - "room_key", results["end"] - ).to_string(self.store) - - return results + return EventContext( + events_before=events_before, + event=event, + events_after=events_after, + state=await filter_evts(state_events), + aggregations=aggregations, + start=await token.copy_and_replace("room_key", results.start).to_string( + self.store + ), + end=await token.copy_and_replace("room_key", results.end).to_string( + self.store + ), + ) class TimestampLookupHandler: diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 0b153a6822..02bb5ae72f 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -361,36 +361,37 @@ class SearchHandler: logger.info( "Context for search returned %d and %d events", - len(res["events_before"]), - len(res["events_after"]), + len(res.events_before), + len(res.events_after), ) - res["events_before"] = await filter_events_for_client( - self.storage, user.to_string(), res["events_before"] + events_before = await filter_events_for_client( + self.storage, user.to_string(), res.events_before ) - res["events_after"] = await filter_events_for_client( - self.storage, user.to_string(), res["events_after"] + events_after = await filter_events_for_client( + self.storage, user.to_string(), res.events_after ) - res["start"] = await now_token.copy_and_replace( - "room_key", res["start"] - ).to_string(self.store) - - res["end"] = await now_token.copy_and_replace( - "room_key", res["end"] - ).to_string(self.store) + context = { + "events_before": events_before, + "events_after": events_after, + "start": await now_token.copy_and_replace( + "room_key", res.start + ).to_string(self.store), + "end": await now_token.copy_and_replace( + "room_key", res.end + ).to_string(self.store), + } if include_profile: senders = { ev.sender - for ev in itertools.chain( - res["events_before"], [event], res["events_after"] - ) + for ev in itertools.chain(events_before, [event], events_after) } - if res["events_after"]: - last_event_id = res["events_after"][-1].event_id + if events_after: + last_event_id = events_after[-1].event_id else: last_event_id = event.event_id @@ -402,7 +403,7 @@ class SearchHandler: last_event_id, state_filter ) - res["profile_info"] = { + context["profile_info"] = { s.state_key: { "displayname": s.content.get("displayname", None), "avatar_url": s.content.get("avatar_url", None), @@ -411,7 +412,7 @@ class SearchHandler: if s.type == EventTypes.Member and s.state_key in senders } - contexts[event.event_id] = res + contexts[event.event_id] = context else: contexts = {} @@ -421,10 +422,10 @@ class SearchHandler: for context in contexts.values(): context["events_before"] = self._event_serializer.serialize_events( - context["events_before"], time_now + context["events_before"], time_now # type: ignore[arg-type] ) context["events_after"] = self._event_serializer.serialize_events( - context["events_after"], time_now + context["events_after"], time_now # type: ignore[arg-type] ) state_results = {} diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 7e2a892b63..c72ed7c290 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -37,6 +37,7 @@ from synapse.logging.context import current_context from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, start_active_span from synapse.push.clientformat import 
format_push_rules_for_user from synapse.storage.databases.main.event_push_actions import NotifCounts +from synapse.storage.databases.main.relations import BundledAggregations from synapse.storage.roommember import MemberSummary from synapse.storage.state import StateFilter from synapse.types import ( @@ -100,7 +101,7 @@ class TimelineBatch: limited: bool # A mapping of event ID to the bundled aggregations for the above events. # This is only calculated if limited is true. - bundled_aggregations: Optional[Dict[str, Dict[str, Any]]] = None + bundled_aggregations: Optional[Dict[str, BundledAggregations]] = None def __bool__(self) -> bool: """Make the result appear empty if there are no updates. This is used diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index dadfc57413..3df8452eec 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -455,7 +455,7 @@ class Mailer: } the_events = await filter_events_for_client( - self.storage, user_id, results["events_before"] + self.storage, user_id, results.events_before ) the_events.append(notif_event) diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py index efe25fe7eb..5b706efbcf 100644 --- a/synapse/rest/admin/rooms.py +++ b/synapse/rest/admin/rooms.py @@ -729,7 +729,7 @@ class RoomEventContextServlet(RestServlet): else: event_filter = None - results = await self.room_context_handler.get_event_context( + event_context = await self.room_context_handler.get_event_context( requester, room_id, event_id, @@ -738,25 +738,34 @@ class RoomEventContextServlet(RestServlet): use_admin_priviledge=True, ) - if not results: + if not event_context: raise SynapseError( HTTPStatus.NOT_FOUND, "Event not found.", errcode=Codes.NOT_FOUND ) time_now = self.clock.time_msec() - aggregations = results.pop("aggregations", None) - results["events_before"] = self._event_serializer.serialize_events( - results["events_before"], time_now, bundle_aggregations=aggregations - ) - results["event"] = self._event_serializer.serialize_event( - results["event"], time_now, bundle_aggregations=aggregations - ) - results["events_after"] = self._event_serializer.serialize_events( - results["events_after"], time_now, bundle_aggregations=aggregations - ) - results["state"] = self._event_serializer.serialize_events( - results["state"], time_now - ) + results = { + "events_before": self._event_serializer.serialize_events( + event_context.events_before, + time_now, + bundle_aggregations=event_context.aggregations, + ), + "event": self._event_serializer.serialize_event( + event_context.event, + time_now, + bundle_aggregations=event_context.aggregations, + ), + "events_after": self._event_serializer.serialize_events( + event_context.events_after, + time_now, + bundle_aggregations=event_context.aggregations, + ), + "state": self._event_serializer.serialize_events( + event_context.state, time_now + ), + "start": event_context.start, + "end": event_context.end, + } return HTTPStatus.OK, results diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index 90bb9142a0..90355e44b2 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -706,27 +706,36 @@ class RoomEventContextServlet(RestServlet): else: event_filter = None - results = await self.room_context_handler.get_event_context( + event_context = await self.room_context_handler.get_event_context( requester, room_id, event_id, limit, event_filter ) - if not results: + if not event_context: raise SynapseError(404, "Event not found.", errcode=Codes.NOT_FOUND) time_now = 
self.clock.time_msec() - aggregations = results.pop("aggregations", None) - results["events_before"] = self._event_serializer.serialize_events( - results["events_before"], time_now, bundle_aggregations=aggregations - ) - results["event"] = self._event_serializer.serialize_event( - results["event"], time_now, bundle_aggregations=aggregations - ) - results["events_after"] = self._event_serializer.serialize_events( - results["events_after"], time_now, bundle_aggregations=aggregations - ) - results["state"] = self._event_serializer.serialize_events( - results["state"], time_now - ) + results = { + "events_before": self._event_serializer.serialize_events( + event_context.events_before, + time_now, + bundle_aggregations=event_context.aggregations, + ), + "event": self._event_serializer.serialize_event( + event_context.event, + time_now, + bundle_aggregations=event_context.aggregations, + ), + "events_after": self._event_serializer.serialize_events( + event_context.events_after, + time_now, + bundle_aggregations=event_context.aggregations, + ), + "state": self._event_serializer.serialize_events( + event_context.state, time_now + ), + "start": event_context.start, + "end": event_context.end, + } return 200, results diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index d20ae1421e..f9615da525 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -48,6 +48,7 @@ from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_boolean, parse_integer, parse_string from synapse.http.site import SynapseRequest from synapse.logging.opentracing import trace +from synapse.storage.databases.main.relations import BundledAggregations from synapse.types import JsonDict, StreamToken from synapse.util import json_decoder @@ -526,7 +527,7 @@ class SyncRestServlet(RestServlet): def serialize( events: Iterable[EventBase], - aggregations: Optional[Dict[str, Dict[str, Any]]] = None, + aggregations: Optional[Dict[str, BundledAggregations]] = None, ) -> List[JsonDict]: return self._event_serializer.serialize_events( events, diff --git a/synapse/storage/databases/main/relations.py b/synapse/storage/databases/main/relations.py index 2cb5d06c13..a9a5dd5f03 100644 --- a/synapse/storage/databases/main/relations.py +++ b/synapse/storage/databases/main/relations.py @@ -13,17 +13,7 @@ # limitations under the License. import logging -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Iterable, - List, - Optional, - Tuple, - Union, - cast, -) +from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Tuple, Union, cast import attr from frozendict import frozendict @@ -43,6 +33,7 @@ from synapse.storage.relations import ( PaginationChunk, RelationPaginationToken, ) +from synapse.types import JsonDict from synapse.util.caches.descriptors import cached if TYPE_CHECKING: @@ -51,6 +42,30 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) +@attr.s(slots=True, frozen=True, auto_attribs=True) +class _ThreadAggregation: + latest_event: EventBase + count: int + current_user_participated: bool + + +@attr.s(slots=True, auto_attribs=True) +class BundledAggregations: + """ + The bundled aggregations for an event. + + Some values require additional processing during serialization. 
+ """ + + annotations: Optional[JsonDict] = None + references: Optional[JsonDict] = None + replace: Optional[EventBase] = None + thread: Optional[_ThreadAggregation] = None + + def __bool__(self) -> bool: + return bool(self.annotations or self.references or self.replace or self.thread) + + class RelationsWorkerStore(SQLBaseStore): def __init__( self, @@ -585,7 +600,7 @@ class RelationsWorkerStore(SQLBaseStore): async def _get_bundled_aggregation_for_event( self, event: EventBase, user_id: str - ) -> Optional[Dict[str, Any]]: + ) -> Optional[BundledAggregations]: """Generate bundled aggregations for an event. Note that this does not use a cache, but depends on cached methods. @@ -616,24 +631,24 @@ class RelationsWorkerStore(SQLBaseStore): # The bundled aggregations to include, a mapping of relation type to a # type-specific value. Some types include the direct return type here # while others need more processing during serialization. - aggregations: Dict[str, Any] = {} + aggregations = BundledAggregations() annotations = await self.get_aggregation_groups_for_event(event_id, room_id) if annotations.chunk: - aggregations[RelationTypes.ANNOTATION] = annotations.to_dict() + aggregations.annotations = annotations.to_dict() references = await self.get_relations_for_event( event_id, room_id, RelationTypes.REFERENCE, direction="f" ) if references.chunk: - aggregations[RelationTypes.REFERENCE] = references.to_dict() + aggregations.references = references.to_dict() edit = None if event.type == EventTypes.Message: edit = await self.get_applicable_edit(event_id, room_id) if edit: - aggregations[RelationTypes.REPLACE] = edit + aggregations.replace = edit # If this event is the start of a thread, include a summary of the replies. if self._msc3440_enabled: @@ -644,11 +659,11 @@ class RelationsWorkerStore(SQLBaseStore): event_id, room_id, user_id ) if latest_thread_event: - aggregations[RelationTypes.THREAD] = { - "latest_event": latest_thread_event, - "count": thread_count, - "current_user_participated": participated, - } + aggregations.thread = _ThreadAggregation( + latest_event=latest_thread_event, + count=thread_count, + current_user_participated=participated, + ) # Store the bundled aggregations in the event metadata for later use. return aggregations @@ -657,7 +672,7 @@ class RelationsWorkerStore(SQLBaseStore): self, events: Iterable[EventBase], user_id: str, - ) -> Dict[str, Dict[str, Any]]: + ) -> Dict[str, BundledAggregations]: """Generate bundled aggregations for events. 
Args: @@ -676,7 +691,7 @@ class RelationsWorkerStore(SQLBaseStore): results = {} for event in events: event_result = await self._get_bundled_aggregation_for_event(event, user_id) - if event_result is not None: + if event_result: results[event.event_id] = event_result return results diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py index 319464b1fa..a898f847e7 100644 --- a/synapse/storage/databases/main/stream.py +++ b/synapse/storage/databases/main/stream.py @@ -81,6 +81,14 @@ class _EventDictReturn: stream_ordering: int +@attr.s(slots=True, frozen=True, auto_attribs=True) +class _EventsAround: + events_before: List[EventBase] + events_after: List[EventBase] + start: RoomStreamToken + end: RoomStreamToken + + def generate_pagination_where_clause( direction: str, column_names: Tuple[str, str], @@ -846,7 +854,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): before_limit: int, after_limit: int, event_filter: Optional[Filter] = None, - ) -> dict: + ) -> _EventsAround: """Retrieve events and pagination tokens around a given event in a room. """ @@ -869,12 +877,12 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): list(results["after"]["event_ids"]), get_prev_content=True ) - return { - "events_before": events_before, - "events_after": events_after, - "start": results["before"]["token"], - "end": results["after"]["token"], - } + return _EventsAround( + events_before=events_before, + events_after=events_after, + start=results["before"]["token"], + end=results["after"]["token"], + ) def _get_events_around_txn( self, diff --git a/tests/rest/client/test_relations.py b/tests/rest/client/test_relations.py index c9b220e73d..96ae7790bb 100644 --- a/tests/rest/client/test_relations.py +++ b/tests/rest/client/test_relations.py @@ -577,7 +577,7 @@ class RelationsTestCase(unittest.HomeserverTestCase): self.assertEquals(200, channel.code, channel.json_body) room_timeline = channel.json_body["rooms"]["join"][self.room]["timeline"] self.assertTrue(room_timeline["limited"]) - self._find_event_in_chunk(room_timeline["events"]) + assert_bundle(self._find_event_in_chunk(room_timeline["events"])) def test_aggregation_get_event_for_annotation(self): """Test that annotations do not get bundled aggregations included -- cgit 1.4.1 From 2d3bd9aa670eedd299cc03093459929adec41918 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 26 Jan 2022 14:21:13 +0000 Subject: Add a module callback to set username at registration (#11790) This is in the context of mainlining the Tchap fork of Synapse. Currently in Tchap, usernames are derived from the user's email address (extracted from the UIA results, more specifically the m.login.email.identity step). This change also exports the check_username method from the registration handler as part of the module API, so that a module can check if the username it's trying to generate is correct and doesn't conflict with an existing one, and fall back gracefully if not.
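Before the diff itself, a minimal sketch of a module using the new callback may help. The module below is hypothetical, but `register_password_auth_provider_callbacks` and `check_username` are the module-API entry points this commit adds or touches, and the `m.login.email.identity` result shape is the one documented in the diff:

```python
# Hypothetical module using the callback added by this commit. Only
# register_password_auth_provider_callbacks and check_username are real
# Synapse module APIs (both appear below); everything else is a sketch.
from typing import Any, Dict, Optional

from synapse.module_api import ModuleApi


class UsernameFromEmail:
    def __init__(self, config: dict, api: ModuleApi):
        self._api = api
        api.register_password_auth_provider_callbacks(
            get_username_for_registration=self.get_username_for_registration,
        )

    async def get_username_for_registration(
        self, uia_results: Dict[str, Any], params: Dict[str, Any]
    ) -> Optional[str]:
        # Only act when the user verified an email address during UIA.
        email_step = uia_results.get("m.login.email.identity")
        if not email_step:
            # Fall through to the next callback, or to params["username"].
            return None
        # "alice@example.com" -> "alice".
        localpart = email_step["address"].split("@", 1)[0]
        # Raises a SynapseError (e.g. M_USER_IN_USE) if the name is invalid
        # or taken, so registration fails loudly rather than silently.
        await self._api.check_username(localpart)
        return localpart
```

A real module would likely catch the `M_USER_IN_USE` error from `check_username` and, for example, append a counter to the localpart before retrying; that fallback logic is elided here.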
Co-authored-by: David Robertson --- changelog.d/11790.feature | 1 + docs/modules/password_auth_provider_callbacks.md | 62 +++++++++++++++++++ synapse/handlers/auth.py | 58 +++++++++++++++++ synapse/module_api/__init__.py | 22 +++++++ synapse/rest/client/register.py | 12 +++- tests/handlers/test_password_providers.py | 79 +++++++++++++++++++++++- 6 files changed, 231 insertions(+), 3 deletions(-) create mode 100644 changelog.d/11790.feature (limited to 'synapse') diff --git a/changelog.d/11790.feature b/changelog.d/11790.feature new file mode 100644 index 0000000000..4a5cc8ec37 --- /dev/null +++ b/changelog.d/11790.feature @@ -0,0 +1 @@ +Add a module callback to set username at registration. diff --git a/docs/modules/password_auth_provider_callbacks.md b/docs/modules/password_auth_provider_callbacks.md index e53abf6409..ec8324d292 100644 --- a/docs/modules/password_auth_provider_callbacks.md +++ b/docs/modules/password_auth_provider_callbacks.md @@ -105,6 +105,68 @@ device ID), and the (now deactivated) access token. If multiple modules implement this callback, Synapse runs them all in order. +### `get_username_for_registration` + +_First introduced in Synapse v1.52.0_ + +```python +async def get_username_for_registration( + uia_results: Dict[str, Any], + params: Dict[str, Any], +) -> Optional[str] +``` + +Called when registering a new user. The module can return a username to set for the user +being registered by returning it as a string, or `None` if it doesn't wish to force a +username for this user. If a username is returned, it will be used as the local part of a +user's full Matrix ID (e.g. it's `alice` in `@alice:example.com`). + +This callback is called once [User-Interactive Authentication](https://spec.matrix.org/latest/client-server-api/#user-interactive-authentication-api) +has been completed by the user. It is not called when registering a user via SSO. It is +passed two dictionaries, which include the information that the user has provided during +the registration process. + +The first dictionary contains the results of the [User-Interactive Authentication](https://spec.matrix.org/latest/client-server-api/#user-interactive-authentication-api) +flow followed by the user. Its keys are the identifiers of every step involved in the flow, +associated with either a boolean value indicating whether the step was correctly completed, +or additional information (e.g. email address, phone number...). A list of most existing +identifiers can be found in the [Matrix specification](https://spec.matrix.org/v1.1/client-server-api/#authentication-types). +Here's an example featuring all currently supported keys: + +```python +{ + "m.login.dummy": True, # Dummy authentication + "m.login.terms": True, # User has accepted the terms of service for the homeserver + "m.login.recaptcha": True, # User has completed the recaptcha challenge + "m.login.email.identity": { # User has provided and verified an email address + "medium": "email", + "address": "alice@example.com", + "validated_at": 1642701357084, + }, + "m.login.msisdn": { # User has provided and verified a phone number + "medium": "msisdn", + "address": "33123456789", + "validated_at": 1642701357084, + }, + "org.matrix.msc3231.login.registration_token": "sometoken", # User has registered through the flow described in MSC3231 +} +``` + +The second dictionary contains the parameters provided by the user's client in the request +to `/_matrix/client/v3/register`. 
See the [Matrix specification](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3register) +for a complete list of these parameters. + +If the module cannot, or does not wish to, generate a username for this user, it must +return `None`. + +If multiple modules implement this callback, they will be considered in order. If a +callback returns `None`, Synapse falls through to the next one. The value of the first +callback that does not return `None` will be used. If this happens, Synapse will not call +any of the subsequent implementations of this callback. If every callback returns `None`, +the username provided by the user is used, if any (otherwise one is automatically +generated). + + ## Example The example module below implements authentication checkers for two different login types: diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index bd1a322563..e32c93e234 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -2060,6 +2060,10 @@ CHECK_AUTH_CALLBACK = Callable[ Optional[Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]] ], ] +GET_USERNAME_FOR_REGISTRATION_CALLBACK = Callable[ + [JsonDict, JsonDict], + Awaitable[Optional[str]], +] class PasswordAuthProvider: @@ -2072,6 +2076,9 @@ class PasswordAuthProvider: # lists of callbacks self.check_3pid_auth_callbacks: List[CHECK_3PID_AUTH_CALLBACK] = [] self.on_logged_out_callbacks: List[ON_LOGGED_OUT_CALLBACK] = [] + self.get_username_for_registration_callbacks: List[ + GET_USERNAME_FOR_REGISTRATION_CALLBACK + ] = [] # Mapping from login type to login parameters self._supported_login_types: Dict[str, Iterable[str]] = {} @@ -2086,6 +2093,9 @@ class PasswordAuthProvider: auth_checkers: Optional[ Dict[Tuple[str, Tuple[str, ...]], CHECK_AUTH_CALLBACK] ] = None, + get_username_for_registration: Optional[ + GET_USERNAME_FOR_REGISTRATION_CALLBACK + ] = None, ) -> None: # Register check_3pid_auth callback if check_3pid_auth is not None: @@ -2130,6 +2140,11 @@ class PasswordAuthProvider: # Add the new method to the list of auth_checker_callbacks for this login type self.auth_checker_callbacks.setdefault(login_type, []).append(callback) + if get_username_for_registration is not None: + self.get_username_for_registration_callbacks.append( + get_username_for_registration, + ) + def get_supported_login_types(self) -> Mapping[str, Iterable[str]]: """Get the login types supported by this password provider @@ -2285,3 +2300,46 @@ class PasswordAuthProvider: except Exception as e: logger.warning("Failed to run module API callback %s: %s", callback, e) continue + + async def get_username_for_registration( + self, + uia_results: JsonDict, + params: JsonDict, + ) -> Optional[str]: + """Defines the username to use when registering the user, using the credentials + and parameters provided during the UIA flow. + + Stops at the first callback that returns a string. + + Args: + uia_results: The credentials provided during the UIA flow. + params: The parameters provided by the registration request. + + Returns: + The localpart to use when registering this user, or None if no module + returned a localpart. + """ + for callback in self.get_username_for_registration_callbacks: + try: + res = await callback(uia_results, params) + + if isinstance(res, str): + return res + elif res is not None: + # mypy complains that this line is unreachable because it assumes the + # data returned by the module fits the expected type. We just want + # to make sure this is the case.
+ logger.warning( # type: ignore[unreachable] + "Ignoring non-string value returned by" + " get_username_for_registration callback %s: %s", + callback, + res, + ) + except Exception as e: + logger.error( + "Module raised an exception in get_username_for_registration: %s", + e, + ) + raise SynapseError(code=500, msg="Internal Server Error") + + return None diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 662e60bc33..788b2e47d5 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -71,6 +71,7 @@ from synapse.handlers.account_validity import ( from synapse.handlers.auth import ( CHECK_3PID_AUTH_CALLBACK, CHECK_AUTH_CALLBACK, + GET_USERNAME_FOR_REGISTRATION_CALLBACK, ON_LOGGED_OUT_CALLBACK, AuthHandler, ) @@ -177,6 +178,7 @@ class ModuleApi: self._presence_stream = hs.get_event_sources().sources.presence self._state = hs.get_state_handler() self._clock: Clock = hs.get_clock() + self._registration_handler = hs.get_registration_handler() self._send_email_handler = hs.get_send_email_handler() self.custom_template_dir = hs.config.server.custom_template_directory @@ -310,6 +312,9 @@ class ModuleApi: auth_checkers: Optional[ Dict[Tuple[str, Tuple[str, ...]], CHECK_AUTH_CALLBACK] ] = None, + get_username_for_registration: Optional[ + GET_USERNAME_FOR_REGISTRATION_CALLBACK + ] = None, ) -> None: """Registers callbacks for password auth provider capabilities. @@ -319,6 +324,7 @@ class ModuleApi: check_3pid_auth=check_3pid_auth, on_logged_out=on_logged_out, auth_checkers=auth_checkers, + get_username_for_registration=get_username_for_registration, ) def register_background_update_controller_callbacks( @@ -1202,6 +1208,22 @@ class ModuleApi: """ return await defer_to_thread(self._hs.get_reactor(), f, *args, **kwargs) + async def check_username(self, username: str) -> None: + """Checks whether the provided username uses the grammar defined in the Matrix + specification, and whether it is already in use by an existing user. + + Added in Synapse v1.52.0. + + Args: + username: The username to check. This is the local part of the user's full + Matrix user ID, i.e. it's "alice" if the full user ID is "@alice:foo.com". + + Raises: + SynapseError with the errcode "M_USER_IN_USE" if the username is already in + use.
+ """ + await self._registration_handler.check_username(username) + class PublicRoomListManager: """Contains methods for adding to, removing from and querying whether a room diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index c59dae7c03..e3492f9f93 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -425,6 +425,7 @@ class RegisterRestServlet(RestServlet): self.ratelimiter = hs.get_registration_ratelimiter() self.password_policy_handler = hs.get_password_policy_handler() self.clock = hs.get_clock() + self.password_auth_provider = hs.get_password_auth_provider() self._registration_enabled = self.hs.config.registration.enable_registration self._refresh_tokens_enabled = ( hs.config.registration.refreshable_access_token_lifetime is not None @@ -638,7 +639,16 @@ class RegisterRestServlet(RestServlet): if not password_hash: raise SynapseError(400, "Missing params: password", Codes.MISSING_PARAM) - desired_username = params.get("username", None) + desired_username = await ( + self.password_auth_provider.get_username_for_registration( + auth_result, + params, + ) + ) + + if desired_username is None: + desired_username = params.get("username", None) + guest_access_token = params.get("guest_access_token", None) if desired_username is not None: diff --git a/tests/handlers/test_password_providers.py b/tests/handlers/test_password_providers.py index 2add72b28a..94809cb8be 100644 --- a/tests/handlers/test_password_providers.py +++ b/tests/handlers/test_password_providers.py @@ -20,10 +20,11 @@ from unittest.mock import Mock from twisted.internet import defer import synapse +from synapse.api.constants import LoginType from synapse.handlers.auth import load_legacy_password_auth_providers from synapse.module_api import ModuleApi -from synapse.rest.client import devices, login, logout -from synapse.types import JsonDict +from synapse.rest.client import devices, login, logout, register +from synapse.types import JsonDict, UserID from tests import unittest from tests.server import FakeChannel @@ -156,6 +157,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): login.register_servlets, devices.register_servlets, logout.register_servlets, + register.register_servlets, ] def setUp(self): @@ -745,6 +747,79 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase): on_logged_out.assert_called_once() self.assertTrue(self.called) + def test_username(self): + """Tests that the get_username_for_registration callback can define the username + of a user when registering. + """ + self._setup_get_username_for_registration() + + username = "rin" + channel = self.make_request( + "POST", + "/register", + { + "username": username, + "password": "bar", + "auth": {"type": LoginType.DUMMY}, + }, + ) + self.assertEqual(channel.code, 200) + + # Our callback takes the username and appends "-foo" to it, check that's what we + # have. + mxid = channel.json_body["user_id"] + self.assertEqual(UserID.from_string(mxid).localpart, username + "-foo") + + def test_username_uia(self): + """Tests that the get_username_for_registration callback is only called at the + end of the UIA flow. + """ + m = self._setup_get_username_for_registration() + + # Initiate the UIA flow. + username = "rin" + channel = self.make_request( + "POST", + "register", + {"username": username, "type": "m.login.password", "password": "bar"}, + ) + self.assertEqual(channel.code, 401) + self.assertIn("session", channel.json_body) + + # Check that the callback hasn't been called yet. 
+ m.assert_not_called() + + # Finish the UIA flow. + session = channel.json_body["session"] + channel = self.make_request( + "POST", + "register", + {"auth": {"session": session, "type": LoginType.DUMMY}}, + ) + self.assertEqual(channel.code, 200, channel.json_body) + mxid = channel.json_body["user_id"] + self.assertEqual(UserID.from_string(mxid).localpart, username + "-foo") + + # Check that the callback has been called. + m.assert_called_once() + + def _setup_get_username_for_registration(self) -> Mock: + """Registers a get_username_for_registration callback that appends "-foo" to the + username the client is trying to register. + """ + + async def get_username_for_registration(uia_results, params): + self.assertIn(LoginType.DUMMY, uia_results) + username = params["username"] + return username + "-foo" + + m = Mock(side_effect=get_username_for_registration) + + password_auth_provider = self.hs.get_password_auth_provider() + password_auth_provider.get_username_for_registration_callbacks.append(m) + + return m + def _get_login_flows(self) -> JsonDict: channel = self.make_request("GET", "/_matrix/client/r0/login") self.assertEqual(channel.code, 200, channel.result) -- cgit 1.4.1 From cef0d5d90a4782096c03ec79f825d114b8d61b6e Mon Sep 17 00:00:00 2001 From: Vaishnav Nair <64798176+totallynotvaishnav@users.noreply.github.com> Date: Wed, 26 Jan 2022 20:18:27 +0530 Subject: Include `prev_content` field in AS events (#11798) * Include 'prev_content' field in AS events Signed-off-by: Vaishnav Nair Co-authored-by: Brendan Abolivier --- changelog.d/11798.bugfix | 1 + synapse/storage/databases/main/appservice.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/11798.bugfix (limited to 'synapse') diff --git a/changelog.d/11798.bugfix b/changelog.d/11798.bugfix new file mode 100644 index 0000000000..2651fd11cf --- /dev/null +++ b/changelog.d/11798.bugfix @@ -0,0 +1 @@ +Include a `prev_content` field in state events sent to Application Services. Contributed by @totallynotvaishnav. \ No newline at end of file diff --git a/synapse/storage/databases/main/appservice.py b/synapse/storage/databases/main/appservice.py index 92c95a41d7..2bb5288431 100644 --- a/synapse/storage/databases/main/appservice.py +++ b/synapse/storage/databases/main/appservice.py @@ -384,7 +384,7 @@ class ApplicationServiceTransactionWorkerStore( "get_new_events_for_appservice", get_new_events_for_appservice_txn ) - events = await self.get_events_as_list(event_ids) + events = await self.get_events_as_list(event_ids, get_prev_content=True) return upper_bound, events -- cgit 1.4.1 From fd65139714433763e8ef5be6017ab6ba5988e766 Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Thu, 27 Jan 2022 11:06:29 +0100 Subject: Fix some indentation inconsistencies in the sample config (modules) (#11838) --- changelog.d/11838.misc | 1 + docs/sample_config.yaml | 10 +++++----- synapse/config/modules.py | 10 +++++----- 3 files changed, 11 insertions(+), 10 deletions(-) create mode 100644 changelog.d/11838.misc (limited to 'synapse') diff --git a/changelog.d/11838.misc b/changelog.d/11838.misc new file mode 100644 index 0000000000..4747bbe88b --- /dev/null +++ b/changelog.d/11838.misc @@ -0,0 +1 @@ +Fix some indentation inconsistencies in the sample config. 
\ No newline at end of file diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index b38e6d6c88..abf28e4490 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -41,11 +41,11 @@ # documentation on how to configure or create custom modules for Synapse. # modules: - # - module: my_super_module.MySuperClass - # config: - # do_thing: true - # - module: my_other_super_module.SomeClass - # config: {} + #- module: my_super_module.MySuperClass + # config: + # do_thing: true + #- module: my_other_super_module.SomeClass + # config: {} ## Server ## diff --git a/synapse/config/modules.py b/synapse/config/modules.py index 85fb05890d..2ef02b8f55 100644 --- a/synapse/config/modules.py +++ b/synapse/config/modules.py @@ -41,9 +41,9 @@ class ModulesConfig(Config): # documentation on how to configure or create custom modules for Synapse. # modules: - # - module: my_super_module.MySuperClass - # config: - # do_thing: true - # - module: my_other_super_module.SomeClass - # config: {} + #- module: my_super_module.MySuperClass + # config: + # do_thing: true + #- module: my_other_super_module.SomeClass + # config: {} """ -- cgit 1.4.1 From 57e4786e907c390502f4ec6fb915e24cf5124351 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 27 Jan 2022 10:54:27 +0000 Subject: Create singletons for `StateFilter.{all,none}()` (#11836) No point recreating these for each call, since they are frozen --- changelog.d/11836.misc | 1 + synapse/storage/state.py | 14 +++++++++----- 2 files changed, 10 insertions(+), 5 deletions(-) create mode 100644 changelog.d/11836.misc (limited to 'synapse') diff --git a/changelog.d/11836.misc b/changelog.d/11836.misc new file mode 100644 index 0000000000..be7e331c63 --- /dev/null +++ b/changelog.d/11836.misc @@ -0,0 +1 @@ +Minor performance improvement in room state lookup. diff --git a/synapse/storage/state.py b/synapse/storage/state.py index df8b2f1088..913448f0f9 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -74,21 +74,21 @@ class StateFilter: @staticmethod def all() -> "StateFilter": - """Creates a filter that fetches everything. + """Returns a filter that fetches everything. Returns: - The new state filter. + The state filter. """ - return StateFilter(types=frozendict(), include_others=True) + return _ALL_STATE_FILTER @staticmethod def none() -> "StateFilter": - """Creates a filter that fetches nothing. + """Returns a filter that fetches nothing. Returns: The new state filter. 
""" - return StateFilter(types=frozendict(), include_others=False) + return _NONE_STATE_FILTER @staticmethod def from_types(types: Iterable[Tuple[str, Optional[str]]]) -> "StateFilter": @@ -527,6 +527,10 @@ class StateFilter: ) +_ALL_STATE_FILTER = StateFilter(types=frozendict(), include_others=True) +_NONE_STATE_FILTER = StateFilter(types=frozendict(), include_others=False) + + class StateGroupStorage: """High level interface to fetching state for event.""" -- cgit 1.4.1 From 6d482ba259a55947678390c06b24a7ba91f0ebab Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 27 Jan 2022 17:45:39 +0000 Subject: Pass `isolation_level` to `runWithConnection` (#11847) This was missed in https://github.com/matrix-org/synapse/pull/11799 --- changelog.d/11847.misc | 1 + synapse/storage/database.py | 1 + 2 files changed, 2 insertions(+) create mode 100644 changelog.d/11847.misc (limited to 'synapse') diff --git a/changelog.d/11847.misc b/changelog.d/11847.misc new file mode 100644 index 0000000000..5c3b2bcaf4 --- /dev/null +++ b/changelog.d/11847.misc @@ -0,0 +1 @@ +Preparation for reducing Postgres serialization errors: allow setting transaction isolation level. Contributed by Nick @ Beeper. diff --git a/synapse/storage/database.py b/synapse/storage/database.py index 7455326ed3..99802228c9 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -748,6 +748,7 @@ class DatabasePool: func, *args, db_autocommit=db_autocommit, + isolation_level=isolation_level, **kwargs, ) -- cgit 1.4.1 From bf60da1a60096fac5fb778b732ff2214862ac808 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 28 Jan 2022 14:41:33 +0000 Subject: Configurable limits on avatars (#11846) Only allow files which file size and content types match configured limits to be set as avatar. Most of the inspiration from the non-test code comes from matrix-org/synapse-dinsic#19 --- changelog.d/11846.feature | 1 + docs/sample_config.yaml | 14 ++++ synapse/config/server.py | 27 +++++++ synapse/handlers/profile.py | 67 ++++++++++++++++ synapse/handlers/room_member.py | 6 ++ tests/handlers/test_profile.py | 94 ++++++++++++++++++++++- tests/rest/client/test_profile.py | 156 ++++++++++++++++++++++++++++++++++++++ 7 files changed, 363 insertions(+), 2 deletions(-) create mode 100644 changelog.d/11846.feature (limited to 'synapse') diff --git a/changelog.d/11846.feature b/changelog.d/11846.feature new file mode 100644 index 0000000000..fcf6affdb5 --- /dev/null +++ b/changelog.d/11846.feature @@ -0,0 +1 @@ +Allow configuring a maximum file size as well as a list of allowed content types for avatars. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index abf28e4490..689b207fc0 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -471,6 +471,20 @@ limit_remote_rooms: # #allow_per_room_profiles: false +# The largest allowed file size for a user avatar. Defaults to no restriction. +# +# Note that user avatar changes will not work if this is set without +# using Synapse's media repository. +# +#max_avatar_size: 10M + +# The MIME types allowed for user avatars. Defaults to no restriction. +# +# Note that user avatar changes will not work if this is set without +# using Synapse's media repository. +# +#allowed_avatar_mimetypes: ["image/png", "image/jpeg", "image/gif"] + # How long to keep redacted events in unredacted form in the database. After # this period redacted events get replaced with their redacted form in the DB. 
# diff --git a/synapse/config/server.py b/synapse/config/server.py index f200d0c1f1..a460cf25b4 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -489,6 +489,19 @@ class ServerConfig(Config): # events with profile information that differ from the target's global profile. self.allow_per_room_profiles = config.get("allow_per_room_profiles", True) + # The maximum size an avatar can have, in bytes. + self.max_avatar_size = config.get("max_avatar_size") + if self.max_avatar_size is not None: + self.max_avatar_size = self.parse_size(self.max_avatar_size) + + # The MIME types allowed for an avatar. + self.allowed_avatar_mimetypes = config.get("allowed_avatar_mimetypes") + if self.allowed_avatar_mimetypes and not isinstance( + self.allowed_avatar_mimetypes, + list, + ): + raise ConfigError("allowed_avatar_mimetypes must be a list") + self.listeners = [parse_listener_def(x) for x in config.get("listeners", [])] # no_tls is not really supported any more, but let's grandfather it in @@ -1168,6 +1181,20 @@ class ServerConfig(Config): # #allow_per_room_profiles: false + # The largest allowed file size for a user avatar. Defaults to no restriction. + # + # Note that user avatar changes will not work if this is set without + # using Synapse's media repository. + # + #max_avatar_size: 10M + + # The MIME types allowed for user avatars. Defaults to no restriction. + # + # Note that user avatar changes will not work if this is set without + # using Synapse's media repository. + # + #allowed_avatar_mimetypes: ["image/png", "image/jpeg", "image/gif"] + # How long to keep redacted events in unredacted form in the database. After # this period redacted events get replaced with their redacted form in the DB. # diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 6b5a6ded8b..36e3ad2ba9 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -31,6 +31,8 @@ from synapse.types import ( create_requester, get_domain_from_id, ) +from synapse.util.caches.descriptors import cached +from synapse.util.stringutils import parse_and_validate_mxc_uri if TYPE_CHECKING: from synapse.server import HomeServer @@ -64,6 +66,11 @@ class ProfileHandler: self.user_directory_handler = hs.get_user_directory_handler() self.request_ratelimiter = hs.get_request_ratelimiter() + self.max_avatar_size = hs.config.server.max_avatar_size + self.allowed_avatar_mimetypes = hs.config.server.allowed_avatar_mimetypes + + self.server_name = hs.config.server.server_name + if hs.config.worker.run_background_tasks: self.clock.looping_call( self._update_remote_profile_cache, self.PROFILE_UPDATE_MS @@ -286,6 +293,9 @@ class ProfileHandler: 400, "Avatar URL is too long (max %i)" % (MAX_AVATAR_URL_LEN,) ) + if not await self.check_avatar_size_and_mime_type(new_avatar_url): + raise SynapseError(403, "This avatar is not allowed", Codes.FORBIDDEN) + avatar_url_to_set: Optional[str] = new_avatar_url if new_avatar_url == "": avatar_url_to_set = None @@ -307,6 +317,63 @@ class ProfileHandler: await self._update_join_states(requester, target_user) + @cached() + async def check_avatar_size_and_mime_type(self, mxc: str) -> bool: + """Check that the size and content type of the avatar at the given MXC URI are + within the configured limits. + + Args: + mxc: The MXC URI at which the avatar can be found. + + Returns: + A boolean indicating whether the file can be allowed to be set as an avatar. 
+ """ + if not self.max_avatar_size and not self.allowed_avatar_mimetypes: + return True + + server_name, _, media_id = parse_and_validate_mxc_uri(mxc) + + if server_name == self.server_name: + media_info = await self.store.get_local_media(media_id) + else: + media_info = await self.store.get_cached_remote_media(server_name, media_id) + + if media_info is None: + # Both configuration options need to access the file's metadata, and + # retrieving remote avatars just for this becomes a bit of a faff, especially + # if e.g. the file is too big. It's also generally safe to assume most files + # used as avatar are uploaded locally, or if the upload didn't happen as part + # of a PUT request on /avatar_url that the file was at least previewed by the + # user locally (and therefore downloaded to the remote media cache). + logger.warning("Forbidding avatar change to %s: avatar not on server", mxc) + return False + + if self.max_avatar_size: + # Ensure avatar does not exceed max allowed avatar size + if media_info["media_length"] > self.max_avatar_size: + logger.warning( + "Forbidding avatar change to %s: %d bytes is above the allowed size " + "limit", + mxc, + media_info["media_length"], + ) + return False + + if self.allowed_avatar_mimetypes: + # Ensure the avatar's file type is allowed + if ( + self.allowed_avatar_mimetypes + and media_info["media_type"] not in self.allowed_avatar_mimetypes + ): + logger.warning( + "Forbidding avatar change to %s: mimetype %s not allowed", + mxc, + media_info["media_type"], + ) + return False + + return True + async def on_profile_query(self, args: JsonDict) -> JsonDict: """Handles federation profile query requests.""" diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 6aa910dd10..3dd5e1b6e4 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -590,6 +590,12 @@ class RoomMemberHandler(metaclass=abc.ABCMeta): errcode=Codes.BAD_JSON, ) + if "avatar_url" in content: + if not await self.profile_handler.check_avatar_size_and_mime_type( + content["avatar_url"], + ): + raise SynapseError(403, "This avatar is not allowed", Codes.FORBIDDEN) + # The event content should *not* include the authorising user as # it won't be properly signed. Strip it out since it might come # back from a client updating a display name / avatar. diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index c153018fd8..60235e5699 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -11,12 +11,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +from typing import Any, Dict from unittest.mock import Mock import synapse.types from synapse.api.errors import AuthError, SynapseError from synapse.rest import admin +from synapse.server import HomeServer from synapse.types import UserID from tests import unittest @@ -46,7 +47,7 @@ class ProfileTestCase(unittest.HomeserverTestCase): ) return hs - def prepare(self, reactor, clock, hs): + def prepare(self, reactor, clock, hs: HomeServer): self.store = hs.get_datastore() self.frank = UserID.from_string("@1234abcd:test") @@ -248,3 +249,92 @@ class ProfileTestCase(unittest.HomeserverTestCase): ), SynapseError, ) + + def test_avatar_constraints_no_config(self): + """Tests that the method to check an avatar against configured constraints skips + all of its check if no constraint is configured. 
+ """ + # The first check that's done by this method is whether the file exists; if we + # don't get an error on a non-existing file then it means all of the checks were + # successfully skipped. + res = self.get_success( + self.handler.check_avatar_size_and_mime_type("mxc://test/unknown_file") + ) + self.assertTrue(res) + + @unittest.override_config({"max_avatar_size": 50}) + def test_avatar_constraints_missing(self): + """Tests that an avatar isn't allowed if the file at the given MXC URI couldn't + be found. + """ + res = self.get_success( + self.handler.check_avatar_size_and_mime_type("mxc://test/unknown_file") + ) + self.assertFalse(res) + + @unittest.override_config({"max_avatar_size": 50}) + def test_avatar_constraints_file_size(self): + """Tests that a file that's above the allowed file size is forbidden but one + that's below it is allowed. + """ + self._setup_local_files( + { + "small": {"size": 40}, + "big": {"size": 60}, + } + ) + + res = self.get_success( + self.handler.check_avatar_size_and_mime_type("mxc://test/small") + ) + self.assertTrue(res) + + res = self.get_success( + self.handler.check_avatar_size_and_mime_type("mxc://test/big") + ) + self.assertFalse(res) + + @unittest.override_config({"allowed_avatar_mimetypes": ["image/png"]}) + def test_avatar_constraint_mime_type(self): + """Tests that a file with an unauthorised MIME type is forbidden but one with + an authorised content type is allowed. + """ + self._setup_local_files( + { + "good": {"mimetype": "image/png"}, + "bad": {"mimetype": "application/octet-stream"}, + } + ) + + res = self.get_success( + self.handler.check_avatar_size_and_mime_type("mxc://test/good") + ) + self.assertTrue(res) + + res = self.get_success( + self.handler.check_avatar_size_and_mime_type("mxc://test/bad") + ) + self.assertFalse(res) + + def _setup_local_files(self, names_and_props: Dict[str, Dict[str, Any]]): + """Stores metadata about files in the database. + + Args: + names_and_props: A dictionary with one entry per file, with the key being the + file's name, and the value being a dictionary of properties. Supported + properties are "mimetype" (for the file's type) and "size" (for the + file's size). + """ + store = self.hs.get_datastore() + + for name, props in names_and_props.items(): + self.get_success( + store.store_local_media( + media_id=name, + media_type=props.get("mimetype", "image/png"), + time_now_ms=self.clock.time_msec(), + upload_name=None, + media_length=props.get("size", 50), + user_id=UserID.from_string("@rin:test"), + ) + ) diff --git a/tests/rest/client/test_profile.py b/tests/rest/client/test_profile.py index 2860579c2e..ead883ded8 100644 --- a/tests/rest/client/test_profile.py +++ b/tests/rest/client/test_profile.py @@ -13,8 +13,12 @@ # limitations under the License. 
"""Tests REST events for /profile paths.""" +from typing import Any, Dict + +from synapse.api.errors import Codes from synapse.rest import admin from synapse.rest.client import login, profile, room +from synapse.types import UserID from tests import unittest @@ -25,6 +29,7 @@ class ProfileTestCase(unittest.HomeserverTestCase): admin.register_servlets_for_client_rest_resource, login.register_servlets, profile.register_servlets, + room.register_servlets, ] def make_homeserver(self, reactor, clock): @@ -150,6 +155,157 @@ class ProfileTestCase(unittest.HomeserverTestCase): self.assertEqual(channel.code, 200, channel.result) return channel.json_body.get("avatar_url") + @unittest.override_config({"max_avatar_size": 50}) + def test_avatar_size_limit_global(self): + """Tests that the maximum size limit for avatars is enforced when updating a + global profile. + """ + self._setup_local_files( + { + "small": {"size": 40}, + "big": {"size": 60}, + } + ) + + channel = self.make_request( + "PUT", + f"/profile/{self.owner}/avatar_url", + content={"avatar_url": "mxc://test/big"}, + access_token=self.owner_tok, + ) + self.assertEqual(channel.code, 403, channel.result) + self.assertEqual( + channel.json_body["errcode"], Codes.FORBIDDEN, channel.json_body + ) + + channel = self.make_request( + "PUT", + f"/profile/{self.owner}/avatar_url", + content={"avatar_url": "mxc://test/small"}, + access_token=self.owner_tok, + ) + self.assertEqual(channel.code, 200, channel.result) + + @unittest.override_config({"max_avatar_size": 50}) + def test_avatar_size_limit_per_room(self): + """Tests that the maximum size limit for avatars is enforced when updating a + per-room profile. + """ + self._setup_local_files( + { + "small": {"size": 40}, + "big": {"size": 60}, + } + ) + + room_id = self.helper.create_room_as(tok=self.owner_tok) + + channel = self.make_request( + "PUT", + f"/rooms/{room_id}/state/m.room.member/{self.owner}", + content={"membership": "join", "avatar_url": "mxc://test/big"}, + access_token=self.owner_tok, + ) + self.assertEqual(channel.code, 403, channel.result) + self.assertEqual( + channel.json_body["errcode"], Codes.FORBIDDEN, channel.json_body + ) + + channel = self.make_request( + "PUT", + f"/rooms/{room_id}/state/m.room.member/{self.owner}", + content={"membership": "join", "avatar_url": "mxc://test/small"}, + access_token=self.owner_tok, + ) + self.assertEqual(channel.code, 200, channel.result) + + @unittest.override_config({"allowed_avatar_mimetypes": ["image/png"]}) + def test_avatar_allowed_mime_type_global(self): + """Tests that the MIME type whitelist for avatars is enforced when updating a + global profile. + """ + self._setup_local_files( + { + "good": {"mimetype": "image/png"}, + "bad": {"mimetype": "application/octet-stream"}, + } + ) + + channel = self.make_request( + "PUT", + f"/profile/{self.owner}/avatar_url", + content={"avatar_url": "mxc://test/bad"}, + access_token=self.owner_tok, + ) + self.assertEqual(channel.code, 403, channel.result) + self.assertEqual( + channel.json_body["errcode"], Codes.FORBIDDEN, channel.json_body + ) + + channel = self.make_request( + "PUT", + f"/profile/{self.owner}/avatar_url", + content={"avatar_url": "mxc://test/good"}, + access_token=self.owner_tok, + ) + self.assertEqual(channel.code, 200, channel.result) + + @unittest.override_config({"allowed_avatar_mimetypes": ["image/png"]}) + def test_avatar_allowed_mime_type_per_room(self): + """Tests that the MIME type whitelist for avatars is enforced when updating a + per-room profile. 
+ """ + self._setup_local_files( + { + "good": {"mimetype": "image/png"}, + "bad": {"mimetype": "application/octet-stream"}, + } + ) + + room_id = self.helper.create_room_as(tok=self.owner_tok) + + channel = self.make_request( + "PUT", + f"/rooms/{room_id}/state/m.room.member/{self.owner}", + content={"membership": "join", "avatar_url": "mxc://test/bad"}, + access_token=self.owner_tok, + ) + self.assertEqual(channel.code, 403, channel.result) + self.assertEqual( + channel.json_body["errcode"], Codes.FORBIDDEN, channel.json_body + ) + + channel = self.make_request( + "PUT", + f"/rooms/{room_id}/state/m.room.member/{self.owner}", + content={"membership": "join", "avatar_url": "mxc://test/good"}, + access_token=self.owner_tok, + ) + self.assertEqual(channel.code, 200, channel.result) + + def _setup_local_files(self, names_and_props: Dict[str, Dict[str, Any]]): + """Stores metadata about files in the database. + + Args: + names_and_props: A dictionary with one entry per file, with the key being the + file's name, and the value being a dictionary of properties. Supported + properties are "mimetype" (for the file's type) and "size" (for the + file's size). + """ + store = self.hs.get_datastore() + + for name, props in names_and_props.items(): + self.get_success( + store.store_local_media( + media_id=name, + media_type=props.get("mimetype", "image/png"), + time_now_ms=self.clock.time_msec(), + upload_name=None, + media_length=props.get("size", 50), + user_id=UserID.from_string("@rin:test"), + ) + ) + class ProfilesRestrictedTestCase(unittest.HomeserverTestCase): -- cgit 1.4.1 From 02755c31882b75ae6e71d9033ed1e72d0fb7c00b Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Mon, 31 Jan 2022 10:13:32 -0500 Subject: Remove the obsolete MSC1849 configuration flag. (#11843) MSC1849 was replaced by MSC2675, which was merged. The configuration flag, which defaulted to true, is no longer useful. --- changelog.d/11843.removal | 1 + synapse/config/experimental.py | 2 -- synapse/storage/databases/main/relations.py | 4 ---- 3 files changed, 1 insertion(+), 6 deletions(-) create mode 100644 changelog.d/11843.removal (limited to 'synapse') diff --git a/changelog.d/11843.removal b/changelog.d/11843.removal new file mode 100644 index 0000000000..d6d49b4ad5 --- /dev/null +++ b/changelog.d/11843.removal @@ -0,0 +1 @@ +Remove the `experimental_msc1849_support_enabled` flag as the features are now stable. 
diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index dbaeb10918..65c807a19a 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -24,8 +24,6 @@ class ExperimentalConfig(Config): def read_config(self, config: JsonDict, **kwargs): experimental = config.get("experimental_features") or {} - # Whether to enable experimental MSC1849 (aka relations) support - self.msc1849_enabled = config.get("experimental_msc1849_support_enabled", True) # MSC3440 (thread relation) self.msc3440_enabled: bool = experimental.get("msc3440_enabled", False) diff --git a/synapse/storage/databases/main/relations.py b/synapse/storage/databases/main/relations.py index a9a5dd5f03..37468a5183 100644 --- a/synapse/storage/databases/main/relations.py +++ b/synapse/storage/databases/main/relations.py @@ -75,7 +75,6 @@ class RelationsWorkerStore(SQLBaseStore): ): super().__init__(database, db_conn, hs) - self._msc1849_enabled = hs.config.experimental.msc1849_enabled self._msc3440_enabled = hs.config.experimental.msc3440_enabled @cached(tree=True) @@ -683,9 +682,6 @@ class RelationsWorkerStore(SQLBaseStore): A map of event ID to the bundled aggregation for the event. Not all events may have bundled aggregations in the results. """ - # If bundled aggregations are disabled, nothing to do. - if not self._msc1849_enabled: - return {} # TODO Parallelize. results = {} -- cgit 1.4.1 From a35e9db9be8666d9ce1164bec3f30b7623ecdfb5 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Tue, 1 Feb 2022 11:04:17 +0000 Subject: 1.52.0rc1 --- CHANGES.md | 61 +++++++++++++++++++++++++++++++++++++++++++++++ changelog.d/11612.bugfix | 1 - changelog.d/11621.feature | 1 - changelog.d/11639.feature | 1 - changelog.d/11658.feature | 1 - changelog.d/11683.removal | 1 - changelog.d/11743.feature | 1 - changelog.d/11767.bugfix | 1 - changelog.d/11784.bugfix | 1 - changelog.d/11788.feature | 1 - changelog.d/11789.feature | 1 - changelog.d/11790.feature | 1 - changelog.d/11792.misc | 1 - changelog.d/11793.misc | 1 - changelog.d/11794.misc | 1 - changelog.d/11795.misc | 1 - changelog.d/11798.bugfix | 1 - changelog.d/11799.misc | 1 - changelog.d/11810.misc | 1 - changelog.d/11811.misc | 1 - changelog.d/11813.misc | 1 - changelog.d/11815.misc | 1 - changelog.d/11816.misc | 1 - changelog.d/11817.misc | 1 - changelog.d/11820.doc | 1 - changelog.d/11821.doc | 1 - changelog.d/11823.misc | 1 - changelog.d/11827.bugfix | 1 - changelog.d/11830.misc | 1 - changelog.d/11834.misc | 1 - changelog.d/11836.misc | 1 - changelog.d/11838.misc | 1 - changelog.d/11843.removal | 1 - changelog.d/11846.feature | 1 - changelog.d/11847.misc | 1 - changelog.d/11851.misc | 1 - changelog.d/11860.doc | 1 - changelog.d/11861.doc | 1 - debian/changelog | 6 +++++ synapse/__init__.py | 2 +- 40 files changed, 68 insertions(+), 38 deletions(-) delete mode 100644 changelog.d/11612.bugfix delete mode 100644 changelog.d/11621.feature delete mode 100644 changelog.d/11639.feature delete mode 100644 changelog.d/11658.feature delete mode 100644 changelog.d/11683.removal delete mode 100644 changelog.d/11743.feature delete mode 100644 changelog.d/11767.bugfix delete mode 100644 changelog.d/11784.bugfix delete mode 100644 changelog.d/11788.feature delete mode 100644 changelog.d/11789.feature delete mode 100644 changelog.d/11790.feature delete mode 100644 changelog.d/11792.misc delete mode 100644 changelog.d/11793.misc delete mode 100644 changelog.d/11794.misc delete mode 100644 changelog.d/11795.misc delete mode 100644 
changelog.d/11798.bugfix delete mode 100644 changelog.d/11799.misc delete mode 100644 changelog.d/11810.misc delete mode 100644 changelog.d/11811.misc delete mode 100644 changelog.d/11813.misc delete mode 100644 changelog.d/11815.misc delete mode 100644 changelog.d/11816.misc delete mode 100644 changelog.d/11817.misc delete mode 100644 changelog.d/11820.doc delete mode 100644 changelog.d/11821.doc delete mode 100644 changelog.d/11823.misc delete mode 100644 changelog.d/11827.bugfix delete mode 100644 changelog.d/11830.misc delete mode 100644 changelog.d/11834.misc delete mode 100644 changelog.d/11836.misc delete mode 100644 changelog.d/11838.misc delete mode 100644 changelog.d/11843.removal delete mode 100644 changelog.d/11846.feature delete mode 100644 changelog.d/11847.misc delete mode 100644 changelog.d/11851.misc delete mode 100644 changelog.d/11860.doc delete mode 100644 changelog.d/11861.doc (limited to 'synapse') diff --git a/CHANGES.md b/CHANGES.md index 37b9e6bb96..6a8ce170dc 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,64 @@ +Synapse 1.52.0rc1 (2022-02-01) +============================== + +Features +-------- + +- Remove account data (including client config, push rules and ignored users) upon user deactivation. ([\#11621](https://github.com/matrix-org/synapse/issues/11621), [\#11788](https://github.com/matrix-org/synapse/issues/11788), [\#11789](https://github.com/matrix-org/synapse/issues/11789)) +- Add admin API to reset connection timeouts for remote server. ([\#11639](https://github.com/matrix-org/synapse/issues/11639)) +- Add an admin API to get a list of rooms that federate with a given remote homeserver. ([\#11658](https://github.com/matrix-org/synapse/issues/11658)) +- Add a config flag to inhibit M_USER_IN_USE during registration. ([\#11743](https://github.com/matrix-org/synapse/issues/11743)) +- Add a module callback to set username at registration. ([\#11790](https://github.com/matrix-org/synapse/issues/11790)) +- Allow configuring a maximum file size as well as a list of allowed content types for avatars. ([\#11846](https://github.com/matrix-org/synapse/issues/11846)) + + +Bugfixes +-------- + +- Include the bundled aggregations in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). ([\#11612](https://github.com/matrix-org/synapse/issues/11612)) +- Fix a long-standing bug when previewing Reddit URLs which do not contain an image. ([\#11767](https://github.com/matrix-org/synapse/issues/11767)) +- Fix a long-standing bug that media streams could cause long-lived connections when generating URL previews. ([\#11784](https://github.com/matrix-org/synapse/issues/11784)) +- Include a `prev_content` field in state events sent to Application Services. Contributed by @totallynotvaishnav. ([\#11798](https://github.com/matrix-org/synapse/issues/11798)) +- Fix a bug introduced in Synapse 0.33.3 causing requests to sometimes log strings such as `HTTPStatus.OK` instead of integer status codes. ([\#11827](https://github.com/matrix-org/synapse/issues/11827)) + + +Improved Documentation +---------------------- + +- Update pypi installation docs to indicate that we now support Python 3.10. ([\#11820](https://github.com/matrix-org/synapse/issues/11820)) +- Add missing steps to the contribution submission process in the documentation. Contributed by @sequentialread. ([\#11821](https://github.com/matrix-org/synapse/issues/11821)) +- Remove not needed old table of contents in documentation. 
([\#11860](https://github.com/matrix-org/synapse/issues/11860)) +- Consolidate the `access_token` information at the top of each relevant page in the Admin API documentation. ([\#11861](https://github.com/matrix-org/synapse/issues/11861)) + + +Deprecations and Removals +------------------------- + +- Drop support for Python 3.6, which is EOL. ([\#11683](https://github.com/matrix-org/synapse/issues/11683)) +- Remove the `experimental_msc1849_support_enabled` flag as the features are now stable. ([\#11843](https://github.com/matrix-org/synapse/issues/11843)) + + +Internal Changes +---------------- + +- Preparation for database schema simplifications: add `state_key` and `rejection_reason` columns to `events` table. ([\#11792](https://github.com/matrix-org/synapse/issues/11792)) +- Add `FrozenEvent.get_state_key` and use it in a couple of places. ([\#11793](https://github.com/matrix-org/synapse/issues/11793)) +- Preparation for database schema simplifications: stop reading from `event_reference_hashes`. ([\#11794](https://github.com/matrix-org/synapse/issues/11794)) +- Drop unused table `public_room_list_stream`. ([\#11795](https://github.com/matrix-org/synapse/issues/11795)) +- Preparation for reducing Postgres serialization errors: allow setting transaction isolation level. Contributed by Nick @ Beeper. ([\#11799](https://github.com/matrix-org/synapse/issues/11799), [\#11847](https://github.com/matrix-org/synapse/issues/11847)) +- Docker: skip the initial amd64-only build and go straight to multiarch. ([\#11810](https://github.com/matrix-org/synapse/issues/11810)) +- Run Complement on the Github Actions VM and not inside a Docker container. ([\#11811](https://github.com/matrix-org/synapse/issues/11811)) +- Log module names at startup. ([\#11813](https://github.com/matrix-org/synapse/issues/11813)) +- Improve type safety of bundled aggregations code. ([\#11815](https://github.com/matrix-org/synapse/issues/11815)) +- Drop support for Python 3.6, which is EOL. ([\#11816](https://github.com/matrix-org/synapse/issues/11816)) +- Correct a type annotation in the event validation logic. ([\#11817](https://github.com/matrix-org/synapse/issues/11817), [\#11830](https://github.com/matrix-org/synapse/issues/11830)) +- Minor updates and documentation for database schema delta files. ([\#11823](https://github.com/matrix-org/synapse/issues/11823)) +- Workaround a type annotation problem in `prometheus_client` 0.13.0. ([\#11834](https://github.com/matrix-org/synapse/issues/11834)) +- Minor performance improvement in room state lookup. ([\#11836](https://github.com/matrix-org/synapse/issues/11836)) +- Fix some indentation inconsistencies in the sample config. ([\#11838](https://github.com/matrix-org/synapse/issues/11838)) +- Add type hints to `tests/rest/admin`. ([\#11851](https://github.com/matrix-org/synapse/issues/11851)) + + Synapse 1.51.0 (2022-01-25) =========================== diff --git a/changelog.d/11612.bugfix b/changelog.d/11612.bugfix deleted file mode 100644 index 842f6892fd..0000000000 --- a/changelog.d/11612.bugfix +++ /dev/null @@ -1 +0,0 @@ -Include the bundled aggregations in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). diff --git a/changelog.d/11621.feature b/changelog.d/11621.feature deleted file mode 100644 index dc426fb658..0000000000 --- a/changelog.d/11621.feature +++ /dev/null @@ -1 +0,0 @@ -Remove account data (including client config, push rules and ignored users) upon user deactivation. 
\ No newline at end of file diff --git a/changelog.d/11639.feature b/changelog.d/11639.feature deleted file mode 100644 index e9f6704f7a..0000000000 --- a/changelog.d/11639.feature +++ /dev/null @@ -1 +0,0 @@ -Add admin API to reset connection timeouts for remote server. \ No newline at end of file diff --git a/changelog.d/11658.feature b/changelog.d/11658.feature deleted file mode 100644 index 2ec9fb5eec..0000000000 --- a/changelog.d/11658.feature +++ /dev/null @@ -1 +0,0 @@ -Add an admin API to get a list of rooms that federate with a given remote homeserver. \ No newline at end of file diff --git a/changelog.d/11683.removal b/changelog.d/11683.removal deleted file mode 100644 index b1f048f7f5..0000000000 --- a/changelog.d/11683.removal +++ /dev/null @@ -1 +0,0 @@ -Drop support for Python 3.6, which is EOL. \ No newline at end of file diff --git a/changelog.d/11743.feature b/changelog.d/11743.feature deleted file mode 100644 index 9809f48b96..0000000000 --- a/changelog.d/11743.feature +++ /dev/null @@ -1 +0,0 @@ -Add a config flag to inhibit M_USER_IN_USE during registration. diff --git a/changelog.d/11767.bugfix b/changelog.d/11767.bugfix deleted file mode 100644 index 3e344747f4..0000000000 --- a/changelog.d/11767.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a long-standing bug when previewing Reddit URLs which do not contain an image. diff --git a/changelog.d/11784.bugfix b/changelog.d/11784.bugfix deleted file mode 100644 index 6569a8c299..0000000000 --- a/changelog.d/11784.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a long-standing bug that media streams could cause long-lived connections when generating URL previews. diff --git a/changelog.d/11788.feature b/changelog.d/11788.feature deleted file mode 100644 index dc426fb658..0000000000 --- a/changelog.d/11788.feature +++ /dev/null @@ -1 +0,0 @@ -Remove account data (including client config, push rules and ignored users) upon user deactivation. \ No newline at end of file diff --git a/changelog.d/11789.feature b/changelog.d/11789.feature deleted file mode 100644 index dc426fb658..0000000000 --- a/changelog.d/11789.feature +++ /dev/null @@ -1 +0,0 @@ -Remove account data (including client config, push rules and ignored users) upon user deactivation. \ No newline at end of file diff --git a/changelog.d/11790.feature b/changelog.d/11790.feature deleted file mode 100644 index 4a5cc8ec37..0000000000 --- a/changelog.d/11790.feature +++ /dev/null @@ -1 +0,0 @@ -Add a module callback to set username at registration. diff --git a/changelog.d/11792.misc b/changelog.d/11792.misc deleted file mode 100644 index 6aa1cd61c3..0000000000 --- a/changelog.d/11792.misc +++ /dev/null @@ -1 +0,0 @@ -Preparation for database schema simplifications: add `state_key` and `rejection_reason` columns to `events` table. diff --git a/changelog.d/11793.misc b/changelog.d/11793.misc deleted file mode 100644 index fc0530bf2c..0000000000 --- a/changelog.d/11793.misc +++ /dev/null @@ -1 +0,0 @@ -Add `FrozenEvent.get_state_key` and use it in a couple of places. diff --git a/changelog.d/11794.misc b/changelog.d/11794.misc deleted file mode 100644 index 29826bc0e5..0000000000 --- a/changelog.d/11794.misc +++ /dev/null @@ -1 +0,0 @@ -Preparation for database schema simplifications: stop reading from `event_reference_hashes`. diff --git a/changelog.d/11795.misc b/changelog.d/11795.misc deleted file mode 100644 index aeba317670..0000000000 --- a/changelog.d/11795.misc +++ /dev/null @@ -1 +0,0 @@ -Drop unused table `public_room_list_stream`. 
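The release commit above is mechanical: every changelog.d/ newsfragment, named <issue>.<type>, is folded into the matching section of CHANGES.md and the fragment file is deleted. Synapse drives this with the towncrier tool; the toy collation below only sketches the idea. The section mapping and output formatting are assumptions, issue links are omitted, and real towncrier also de-duplicates identical entries (such as the three copies of the account-data line from #11621/#11788/#11789).

    import os
    from collections import defaultdict

    SECTIONS = {
        "feature": "Features",
        "bugfix": "Bugfixes",
        "doc": "Improved Documentation",
        "removal": "Deprecations and Removals",
        "misc": "Internal Changes",
    }

    def collate_newsfragments(changelog_dir: str = "changelog.d") -> str:
        entries = defaultdict(list)
        for name in sorted(os.listdir(changelog_dir)):
            issue, _, kind = name.partition(".")  # e.g. "11788.feature"
            if kind not in SECTIONS:
                continue
            with open(os.path.join(changelog_dir, name)) as f:
                text = f.read().strip()
            # Real output also links the issue number to its GitHub URL.
            entries[kind].append(f"- {text} (#{issue})")
        sections = []
        for kind, title in SECTIONS.items():
            if entries[kind]:
                body = "\n".join(entries[kind])
                sections.append(f"{title}\n{'-' * len(title)}\n\n{body}\n")
        return "\n".join(sections)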
diff --git a/changelog.d/11798.bugfix b/changelog.d/11798.bugfix deleted file mode 100644 index 2651fd11cf..0000000000 --- a/changelog.d/11798.bugfix +++ /dev/null @@ -1 +0,0 @@ -Include a `prev_content` field in state events sent to Application Services. Contributed by @totallynotvaishnav. \ No newline at end of file diff --git a/changelog.d/11799.misc b/changelog.d/11799.misc deleted file mode 100644 index 5c3b2bcaf4..0000000000 --- a/changelog.d/11799.misc +++ /dev/null @@ -1 +0,0 @@ -Preparation for reducing Postgres serialization errors: allow setting transaction isolation level. Contributed by Nick @ Beeper. diff --git a/changelog.d/11810.misc b/changelog.d/11810.misc deleted file mode 100644 index 5579b85979..0000000000 --- a/changelog.d/11810.misc +++ /dev/null @@ -1 +0,0 @@ -Docker: skip the initial amd64-only build and go straight to multiarch. diff --git a/changelog.d/11811.misc b/changelog.d/11811.misc deleted file mode 100644 index b911a2d042..0000000000 --- a/changelog.d/11811.misc +++ /dev/null @@ -1 +0,0 @@ -Run Complement on the Github Actions VM and not inside a Docker container. \ No newline at end of file diff --git a/changelog.d/11813.misc b/changelog.d/11813.misc deleted file mode 100644 index f90d183b45..0000000000 --- a/changelog.d/11813.misc +++ /dev/null @@ -1 +0,0 @@ -Log module names at startup. diff --git a/changelog.d/11815.misc b/changelog.d/11815.misc deleted file mode 100644 index 83aa6d6eb0..0000000000 --- a/changelog.d/11815.misc +++ /dev/null @@ -1 +0,0 @@ -Improve type safety of bundled aggregations code. diff --git a/changelog.d/11816.misc b/changelog.d/11816.misc deleted file mode 100644 index b1f048f7f5..0000000000 --- a/changelog.d/11816.misc +++ /dev/null @@ -1 +0,0 @@ -Drop support for Python 3.6, which is EOL. \ No newline at end of file diff --git a/changelog.d/11817.misc b/changelog.d/11817.misc deleted file mode 100644 index 3d6b2ea4d4..0000000000 --- a/changelog.d/11817.misc +++ /dev/null @@ -1 +0,0 @@ -Correct a type annotation in the event validation logic. diff --git a/changelog.d/11820.doc b/changelog.d/11820.doc deleted file mode 100644 index 4f563b9b56..0000000000 --- a/changelog.d/11820.doc +++ /dev/null @@ -1 +0,0 @@ -Update pypi installation docs to indicate that we now support Python 3.10. diff --git a/changelog.d/11821.doc b/changelog.d/11821.doc deleted file mode 100644 index a16a6ef956..0000000000 --- a/changelog.d/11821.doc +++ /dev/null @@ -1 +0,0 @@ -Add missing steps to the contribution submission process in the documentation. Contributed by @sequentialread. diff --git a/changelog.d/11823.misc b/changelog.d/11823.misc deleted file mode 100644 index 2d153eae4a..0000000000 --- a/changelog.d/11823.misc +++ /dev/null @@ -1 +0,0 @@ -Minor updates and documentation for database schema delta files. diff --git a/changelog.d/11827.bugfix b/changelog.d/11827.bugfix deleted file mode 100644 index 30222dfb62..0000000000 --- a/changelog.d/11827.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug introduced in Synapse 0.33.3 causing requests to sometimes log strings such as `HTTPStatus.OK` instead of integer status codes. \ No newline at end of file diff --git a/changelog.d/11830.misc b/changelog.d/11830.misc deleted file mode 100644 index fe248d00ab..0000000000 --- a/changelog.d/11830.misc +++ /dev/null @@ -1 +0,0 @@ -Correct a type annotation in the event validation logic. 
\ No newline at end of file diff --git a/changelog.d/11834.misc b/changelog.d/11834.misc deleted file mode 100644 index 29a5635f7a..0000000000 --- a/changelog.d/11834.misc +++ /dev/null @@ -1 +0,0 @@ -Workaround a type annotation problem in `prometheus_client` 0.13.0. \ No newline at end of file diff --git a/changelog.d/11836.misc b/changelog.d/11836.misc deleted file mode 100644 index be7e331c63..0000000000 --- a/changelog.d/11836.misc +++ /dev/null @@ -1 +0,0 @@ -Minor performance improvement in room state lookup. diff --git a/changelog.d/11838.misc b/changelog.d/11838.misc deleted file mode 100644 index 4747bbe88b..0000000000 --- a/changelog.d/11838.misc +++ /dev/null @@ -1 +0,0 @@ -Fix some indentation inconsistencies in the sample config. \ No newline at end of file diff --git a/changelog.d/11843.removal b/changelog.d/11843.removal deleted file mode 100644 index d6d49b4ad5..0000000000 --- a/changelog.d/11843.removal +++ /dev/null @@ -1 +0,0 @@ -Remove the `experimental_msc1849_support_enabled` flag as the features are now stable. diff --git a/changelog.d/11846.feature b/changelog.d/11846.feature deleted file mode 100644 index fcf6affdb5..0000000000 --- a/changelog.d/11846.feature +++ /dev/null @@ -1 +0,0 @@ -Allow configuring a maximum file size as well as a list of allowed content types for avatars. diff --git a/changelog.d/11847.misc b/changelog.d/11847.misc deleted file mode 100644 index 5c3b2bcaf4..0000000000 --- a/changelog.d/11847.misc +++ /dev/null @@ -1 +0,0 @@ -Preparation for reducing Postgres serialization errors: allow setting transaction isolation level. Contributed by Nick @ Beeper. diff --git a/changelog.d/11851.misc b/changelog.d/11851.misc deleted file mode 100644 index ccc3ec3482..0000000000 --- a/changelog.d/11851.misc +++ /dev/null @@ -1 +0,0 @@ -Add type hints to `tests/rest/admin`. diff --git a/changelog.d/11860.doc b/changelog.d/11860.doc deleted file mode 100644 index 04b88c5f2c..0000000000 --- a/changelog.d/11860.doc +++ /dev/null @@ -1 +0,0 @@ -Remove not needed old table of contents in documentation. diff --git a/changelog.d/11861.doc b/changelog.d/11861.doc deleted file mode 100644 index 53c75a0883..0000000000 --- a/changelog.d/11861.doc +++ /dev/null @@ -1 +0,0 @@ -Consolidate the `access_token` information at the top of each relevant page in the Admin API documentation. diff --git a/debian/changelog b/debian/changelog index 3a598c4148..a458885655 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.52.0~rc1) stable; urgency=medium + + * New synapse release 1.52.0~rc1. + + -- Synapse Packaging team Tue, 01 Feb 2022 11:04:09 +0000 + matrix-synapse-py3 (1.51.0) stable; urgency=medium * New synapse release 1.51.0. 
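Each release commit bumps the version in two places, debian/changelog and synapse/__init__.py, and the two must stay in sync. A quick consistency check along these lines is possible; the function and regexes are illustrative assumptions, not part of Synapse's release tooling.

    import re

    def versions_agree(debian_changelog: str, synapse_init: str) -> bool:
        deb = re.search(r"matrix-synapse-py3 \(([^)]+)\)", debian_changelog)
        py = re.search(r'__version__ = "([^"]+)"', synapse_init)
        if deb is None or py is None:
            return False
        # Debian spells release candidates with a tilde ("1.52.0~rc1") so they
        # sort before the final release; PEP 440 spells them "1.52.0rc1".
        return deb.group(1).replace("~", "") == py.group(1)

    assert versions_agree(
        "matrix-synapse-py3 (1.52.0~rc1) stable; urgency=medium",
        '__version__ = "1.52.0rc1"',
    )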
diff --git a/synapse/__init__.py b/synapse/__init__.py index 603dbb27e1..5e65033061 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -47,7 +47,7 @@ try: except ImportError: pass -__version__ = "1.51.0" +__version__ = "1.52.0rc1" if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)): # We import here so that we don't have to install a bunch of deps when -- cgit 1.4.1 From 7d56b6c083d7d2eb683795d482453923c3e8be15 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 8 Feb 2022 11:35:05 +0000 Subject: 1.52.0 --- CHANGES.md | 6 ++++++ debian/changelog | 6 ++++++ synapse/__init__.py | 2 +- 3 files changed, 13 insertions(+), 1 deletion(-) (limited to 'synapse') diff --git a/CHANGES.md b/CHANGES.md index 36707db03b..cee0549036 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,9 @@ +Synapse 1.52.0 (2022-02-08) +=========================== + +No significant changes. + + Synapse 1.52.0rc1 (2022-02-01) ============================== diff --git a/debian/changelog b/debian/changelog index a458885655..64ea103f3e 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.52.0) stable; urgency=medium + + * New synapse release 1.52.0. + + -- Synapse Packaging team Tue, 08 Feb 2022 11:34:54 +0000 + matrix-synapse-py3 (1.52.0~rc1) stable; urgency=medium * New synapse release 1.52.0~rc1. diff --git a/synapse/__init__.py b/synapse/__init__.py index 5e65033061..a23563937a 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -47,7 +47,7 @@ try: except ImportError: pass -__version__ = "1.52.0rc1" +__version__ = "1.52.0" if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)): # We import here so that we don't have to install a bunch of deps when -- cgit 1.4.1
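One last note on the two version spellings in the release commits: Debian's tilde ("1.52.0~rc1") and PEP 440's bare suffix ("1.52.0rc1") both guarantee that the release candidate orders before the final release. On the Python side that ordering can be verified with the third-party packaging library (assumed installed):

    from packaging.version import Version

    # Per PEP 440, a pre-release sorts after the previous final release
    # and before the final release it precedes.
    assert Version("1.51.0") < Version("1.52.0rc1") < Version("1.52.0")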