From a8eceb01e59fcbddcea7d19031ed2392772e6d66 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 11 Dec 2020 16:33:31 +0000 Subject: Honour AS ratelimit settings for /login requests (#8920) Fixes #8846. --- synapse/api/auth.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'synapse/api') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index bfcaf68b2a..1951f6e178 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -31,7 +31,9 @@ from synapse.api.errors import ( MissingClientTokenError, ) from synapse.api.room_versions import KNOWN_ROOM_VERSIONS +from synapse.appservice import ApplicationService from synapse.events import EventBase +from synapse.http.site import SynapseRequest from synapse.logging import opentracing as opentracing from synapse.storage.databases.main.registration import TokenLookupResult from synapse.types import StateMap, UserID @@ -474,7 +476,7 @@ class Auth: now = self.hs.get_clock().time_msec() return now < expiry - def get_appservice_by_req(self, request): + def get_appservice_by_req(self, request: SynapseRequest) -> ApplicationService: token = self.get_access_token_from_request(request) service = self.store.get_app_service_by_token(token) if not service: -- cgit 1.5.1 From be2db93b3c14396d53d30f8d5f92db014453487b Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 16 Dec 2020 08:46:37 -0500 Subject: Do not assume that the contents dictionary includes history_visibility. (#8945) --- changelog.d/8945.bugfix | 1 + synapse/api/auth.py | 5 +-- synapse/api/constants.py | 7 ++++ synapse/handlers/room.py | 7 ++-- synapse/handlers/room_list.py | 7 ++-- synapse/handlers/user_directory.py | 4 +-- synapse/notifier.py | 6 ++-- synapse/storage/databases/main/user_directory.py | 7 ++-- synapse/visibility.py | 42 ++++++++++++++++-------- 9 files changed, 58 insertions(+), 28 deletions(-) create mode 100644 changelog.d/8945.bugfix (limited to 'synapse/api') diff --git a/changelog.d/8945.bugfix b/changelog.d/8945.bugfix new file mode 100644 index 0000000000..f9e6dbba56 --- /dev/null +++ b/changelog.d/8945.bugfix @@ -0,0 +1 @@ +Fix a bug where 500 errors would be returned if the `m.room_history_visibility` event had invalid content. 
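
The core of the fix that follows is defensive dictionary access: the old check indexed `visibility.content["history_visibility"]` directly, so an `m.room.history_visibility` event whose (client-supplied) content lacked that key raised `KeyError` and surfaced as a 500. A minimal sketch of the pattern the patch adopts, illustrative only; `is_world_readable` and the sample dicts are stand-ins, not Synapse code:

    WORLD_READABLE = "world_readable"  # mirrors the new HistoryVisibility.WORLD_READABLE constant

    def is_world_readable(content: dict) -> bool:
        # Before: content["history_visibility"] == "world_readable" raises
        # KeyError when the key is absent, which bubbled up as a 500 error.
        # After: a missing or malformed value simply compares unequal.
        return content.get("history_visibility") == WORLD_READABLE

    assert is_world_readable({"history_visibility": "world_readable"})
    assert not is_world_readable({})                        # no longer raises
    assert not is_world_readable({"history_visibility": 42})
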
diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 1951f6e178..48c4d7b0be 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -23,7 +23,7 @@ from twisted.web.server import Request import synapse.types from synapse import event_auth from synapse.api.auth_blocking import AuthBlocking -from synapse.api.constants import EventTypes, Membership +from synapse.api.constants import EventTypes, HistoryVisibility, Membership from synapse.api.errors import ( AuthError, Codes, @@ -648,7 +648,8 @@ class Auth: ) if ( visibility - and visibility.content["history_visibility"] == "world_readable" + and visibility.content.get("history_visibility") + == HistoryVisibility.WORLD_READABLE ): return Membership.JOIN, None raise AuthError( diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 592abd844b..1932df83b4 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -160,3 +160,10 @@ class RoomEncryptionAlgorithms: class AccountDataTypes: DIRECT = "m.direct" IGNORED_USER_LIST = "m.ignored_user_list" + + +class HistoryVisibility: + INVITED = "invited" + JOINED = "joined" + SHARED = "shared" + WORLD_READABLE = "world_readable" diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 7583418946..1f809fa161 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -27,6 +27,7 @@ from typing import TYPE_CHECKING, Any, Awaitable, Dict, List, Optional, Tuple from synapse.api.constants import ( EventTypes, + HistoryVisibility, JoinRules, Membership, RoomCreationPreset, @@ -81,21 +82,21 @@ class RoomCreationHandler(BaseHandler): self._presets_dict = { RoomCreationPreset.PRIVATE_CHAT: { "join_rules": JoinRules.INVITE, - "history_visibility": "shared", + "history_visibility": HistoryVisibility.SHARED, "original_invitees_have_ops": False, "guest_can_join": True, "power_level_content_override": {"invite": 0}, }, RoomCreationPreset.TRUSTED_PRIVATE_CHAT: { "join_rules": JoinRules.INVITE, - "history_visibility": "shared", + "history_visibility": HistoryVisibility.SHARED, "original_invitees_have_ops": True, "guest_can_join": True, "power_level_content_override": {"invite": 0}, }, RoomCreationPreset.PUBLIC_CHAT: { "join_rules": JoinRules.PUBLIC, - "history_visibility": "shared", + "history_visibility": HistoryVisibility.SHARED, "original_invitees_have_ops": False, "guest_can_join": False, "power_level_content_override": {}, diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 4a13c8e912..bf58d302b0 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -20,7 +20,7 @@ from typing import Any, Dict, Optional import msgpack from unpaddedbase64 import decode_base64, encode_base64 -from synapse.api.constants import EventTypes, JoinRules +from synapse.api.constants import EventTypes, HistoryVisibility, JoinRules from synapse.api.errors import Codes, HttpResponseException from synapse.types import ThirdPartyInstanceID from synapse.util.caches.descriptors import cached @@ -159,7 +159,8 @@ class RoomListHandler(BaseHandler): "canonical_alias": room["canonical_alias"], "num_joined_members": room["joined_members"], "avatar_url": room["avatar"], - "world_readable": room["history_visibility"] == "world_readable", + "world_readable": room["history_visibility"] + == HistoryVisibility.WORLD_READABLE, "guest_can_join": room["guest_access"] == "can_join", } @@ -317,7 +318,7 @@ class RoomListHandler(BaseHandler): visibility = None if visibility_event: visibility = visibility_event.content.get("history_visibility", 
None) - result["world_readable"] = visibility == "world_readable" + result["world_readable"] = visibility == HistoryVisibility.WORLD_READABLE guest_event = current_state.get((EventTypes.GuestAccess, "")) guest = None diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index f263a638f8..3d80371f06 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -16,7 +16,7 @@ import logging import synapse.metrics -from synapse.api.constants import EventTypes, JoinRules, Membership +from synapse.api.constants import EventTypes, HistoryVisibility, JoinRules, Membership from synapse.handlers.state_deltas import StateDeltasHandler from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage.roommember import ProfileInfo @@ -250,7 +250,7 @@ class UserDirectoryHandler(StateDeltasHandler): prev_event_id, event_id, key_name="history_visibility", - public_value="world_readable", + public_value=HistoryVisibility.WORLD_READABLE, ) elif typ == EventTypes.JoinRules: change = await self._get_key_change( diff --git a/synapse/notifier.py b/synapse/notifier.py index a17352ef46..c4c8bb271d 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -34,7 +34,7 @@ from prometheus_client import Counter from twisted.internet import defer import synapse.server -from synapse.api.constants import EventTypes, Membership +from synapse.api.constants import EventTypes, HistoryVisibility, Membership from synapse.api.errors import AuthError from synapse.events import EventBase from synapse.handlers.presence import format_user_presence_state @@ -611,7 +611,9 @@ class Notifier: room_id, EventTypes.RoomHistoryVisibility, "" ) if state and "history_visibility" in state.content: - return state.content["history_visibility"] == "world_readable" + return ( + state.content["history_visibility"] == HistoryVisibility.WORLD_READABLE + ) else: return False diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py index d87ceec6da..fc8caf46a0 100644 --- a/synapse/storage/databases/main/user_directory.py +++ b/synapse/storage/databases/main/user_directory.py @@ -17,7 +17,7 @@ import logging import re from typing import Any, Dict, Iterable, Optional, Set, Tuple -from synapse.api.constants import EventTypes, JoinRules +from synapse.api.constants import EventTypes, HistoryVisibility, JoinRules from synapse.storage.database import DatabasePool from synapse.storage.databases.main.state import StateFilter from synapse.storage.databases.main.state_deltas import StateDeltasStore @@ -360,7 +360,10 @@ class UserDirectoryBackgroundUpdateStore(StateDeltasStore): if hist_vis_id: hist_vis_ev = await self.get_event(hist_vis_id, allow_none=True) if hist_vis_ev: - if hist_vis_ev.content.get("history_visibility") == "world_readable": + if ( + hist_vis_ev.content.get("history_visibility") + == HistoryVisibility.WORLD_READABLE + ): return True return False diff --git a/synapse/visibility.py b/synapse/visibility.py index 527365498e..f2836ba9f0 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -12,11 +12,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- import logging import operator -from synapse.api.constants import AccountDataTypes, EventTypes, Membership +from synapse.api.constants import ( + AccountDataTypes, + EventTypes, + HistoryVisibility, + Membership, +) from synapse.events.utils import prune_event from synapse.storage import Storage from synapse.storage.state import StateFilter @@ -25,7 +29,12 @@ from synapse.types import get_domain_from_id logger = logging.getLogger(__name__) -VISIBILITY_PRIORITY = ("world_readable", "shared", "invited", "joined") +VISIBILITY_PRIORITY = ( + HistoryVisibility.WORLD_READABLE, + HistoryVisibility.SHARED, + HistoryVisibility.INVITED, + HistoryVisibility.JOINED, +) MEMBERSHIP_PRIORITY = ( @@ -150,12 +159,14 @@ async def filter_events_for_client( # get the room_visibility at the time of the event. visibility_event = state.get((EventTypes.RoomHistoryVisibility, ""), None) if visibility_event: - visibility = visibility_event.content.get("history_visibility", "shared") + visibility = visibility_event.content.get( + "history_visibility", HistoryVisibility.SHARED + ) else: - visibility = "shared" + visibility = HistoryVisibility.SHARED if visibility not in VISIBILITY_PRIORITY: - visibility = "shared" + visibility = HistoryVisibility.SHARED # Always allow history visibility events on boundaries. This is done # by setting the effective visibility to the least restrictive @@ -165,7 +176,7 @@ async def filter_events_for_client( prev_visibility = prev_content.get("history_visibility", None) if prev_visibility not in VISIBILITY_PRIORITY: - prev_visibility = "shared" + prev_visibility = HistoryVisibility.SHARED new_priority = VISIBILITY_PRIORITY.index(visibility) old_priority = VISIBILITY_PRIORITY.index(prev_visibility) @@ -210,17 +221,17 @@ async def filter_events_for_client( # otherwise, it depends on the room visibility. - if visibility == "joined": + if visibility == HistoryVisibility.JOINED: # we weren't a member at the time of the event, so we can't # see this event. return None - elif visibility == "invited": + elif visibility == HistoryVisibility.INVITED: # user can also see the event if they were *invited* at the time # of the event. 
return event if membership == Membership.INVITE else None - elif visibility == "shared" and is_peeking: + elif visibility == HistoryVisibility.SHARED and is_peeking: # if the visibility is shared, users cannot see the event unless # they have *subequently* joined the room (or were members at the # time, of course) @@ -284,8 +295,10 @@ async def filter_events_for_server( def check_event_is_visible(event, state): history = state.get((EventTypes.RoomHistoryVisibility, ""), None) if history: - visibility = history.content.get("history_visibility", "shared") - if visibility in ["invited", "joined"]: + visibility = history.content.get( + "history_visibility", HistoryVisibility.SHARED + ) + if visibility in [HistoryVisibility.INVITED, HistoryVisibility.JOINED]: # We now loop through all state events looking for # membership states for the requesting server to determine # if the server is either in the room or has been invited @@ -305,7 +318,7 @@ async def filter_events_for_server( if memtype == Membership.JOIN: return True elif memtype == Membership.INVITE: - if visibility == "invited": + if visibility == HistoryVisibility.INVITED: return True else: # server has no users in the room: redact @@ -336,7 +349,8 @@ async def filter_events_for_server( else: event_map = await storage.main.get_events(visibility_ids) all_open = all( - e.content.get("history_visibility") in (None, "shared", "world_readable") + e.content.get("history_visibility") + in (None, HistoryVisibility.SHARED, HistoryVisibility.WORLD_READABLE) for e in event_map.values() ) -- cgit 1.5.1 From 7932d4e9f7e140ef05a3099e18120101019ec3e1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 17 Dec 2020 12:04:14 +0000 Subject: Don't MAU limit AS ghost users --- synapse/api/auth_blocking.py | 7 +++++++ 1 file changed, 7 insertions(+) (limited to 'synapse/api') diff --git a/synapse/api/auth_blocking.py b/synapse/api/auth_blocking.py index 9c227218e0..d8088f524a 100644 --- a/synapse/api/auth_blocking.py +++ b/synapse/api/auth_blocking.py @@ -36,6 +36,7 @@ class AuthBlocking: self._limit_usage_by_mau = hs.config.limit_usage_by_mau self._mau_limits_reserved_threepids = hs.config.mau_limits_reserved_threepids self._server_name = hs.hostname + self._track_appservice_user_ips = hs.config.appservice.track_appservice_user_ips async def check_auth_blocking( self, @@ -76,6 +77,12 @@ class AuthBlocking: # We never block the server from doing actions on behalf of # users. return + elif requester.app_service and not self._track_appservice_user_ips: + # If we're authenticated as an appservice then we only block + # auth if `track_appservice_user_ips` is set, as that option + # implicitly means that application services are part of MAU + # limits. + return # Never fail an auth check for the server notices users or support user # This can be a problem where event creation is prohibited due to blocking -- cgit 1.5.1 From 70586aa63eaf129505324976fb092cb3ad327590 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 18 Dec 2020 09:49:18 +0000 Subject: Try and drop stale extremities. (#8929) If we see stale extremities while persisting events, and notice that they don't change the result of state resolution, we drop them. 
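
The core idea in the persistence changes below: after resolving state across the old and new forward extremities, any extremity whose state group already matches the resolved state group is kept, and the rest become candidates for dropping. A rough sketch of that filtering step with simplified types (illustrative only; the real `_prune_extremities` added here additionally refuses to drop events that are currently being persisted, that are non-dummy local events, or that are too recent/shallow):

    from typing import Dict, Set

    def split_extremities(
        new_latest_event_ids: Set[str],
        event_id_to_state_group: Dict[str, int],
        resolved_state_group: int,
    ) -> Set[str]:
        """Keep extremities whose state group matches the resolved state.

        Pruning is all-or-nothing: dropping only some of the stale
        extremities could change the outcome of a later state resolution.
        """
        kept = {
            event_id
            for event_id in new_latest_event_ids
            if event_id_to_state_group[event_id] == resolved_state_group
        }
        dropped = new_latest_event_ids - kept
        # Only prune when something matches the resolved state *and*
        # something else is actually stale.
        return kept if kept and dropped else new_latest_event_ids

In the actual change this filter only runs when the resolved state group is itself one of the new extremities' state groups, which is what makes the comparison meaningful.
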
--- changelog.d/8929.misc | 1 + synapse/api/constants.py | 2 + synapse/handlers/message.py | 2 +- synapse/storage/persist_events.py | 200 +++++++++++++++++++++-- synapse/visibility.py | 2 +- tests/storage/test_events.py | 334 ++++++++++++++++++++++++++++++++++++++ 6 files changed, 523 insertions(+), 18 deletions(-) create mode 100644 changelog.d/8929.misc create mode 100644 tests/storage/test_events.py (limited to 'synapse/api') diff --git a/changelog.d/8929.misc b/changelog.d/8929.misc new file mode 100644 index 0000000000..157018b6a6 --- /dev/null +++ b/changelog.d/8929.misc @@ -0,0 +1 @@ +Automatically drop stale forward-extremities under some specific conditions. diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 1932df83b4..565a8cd76a 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -95,6 +95,8 @@ class EventTypes: Presence = "m.presence" + Dummy = "org.matrix.dummy_event" + class RejectedReason: AUTH_ERROR = "auth_error" diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 2b8aa9443d..9dfeab09cd 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -1261,7 +1261,7 @@ class EventCreationHandler: event, context = await self.create_event( requester, { - "type": "org.matrix.dummy_event", + "type": EventTypes.Dummy, "content": {}, "room_id": room_id, "sender": user_id, diff --git a/synapse/storage/persist_events.py b/synapse/storage/persist_events.py index 70e636b0ba..61fc49c69c 100644 --- a/synapse/storage/persist_events.py +++ b/synapse/storage/persist_events.py @@ -31,7 +31,14 @@ from synapse.logging.context import PreserveLoggingContext, make_deferred_yielda from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage.databases import Databases from synapse.storage.databases.main.events import DeltaState -from synapse.types import Collection, PersistedEventPosition, RoomStreamToken, StateMap +from synapse.storage.databases.main.events_worker import EventRedactBehaviour +from synapse.types import ( + Collection, + PersistedEventPosition, + RoomStreamToken, + StateMap, + get_domain_from_id, +) from synapse.util.async_helpers import ObservableDeferred from synapse.util.metrics import Measure @@ -68,6 +75,21 @@ stale_forward_extremities_counter = Histogram( buckets=(0, 1, 2, 3, 5, 7, 10, 15, 20, 50, 100, 200, 500, "+Inf"), ) +state_resolutions_during_persistence = Counter( + "synapse_storage_events_state_resolutions_during_persistence", + "Number of times we had to do state res to calculate new current state", +) + +potential_times_prune_extremities = Counter( + "synapse_storage_events_potential_times_prune_extremities", + "Number of times we might be able to prune extremities", +) + +times_pruned_extremities = Counter( + "synapse_storage_events_times_pruned_extremities", + "Number of times we were actually be able to prune extremities", +) + class _EventPeristenceQueue: """Queues up events so that they can be persisted in bulk with only one @@ -454,7 +476,15 @@ class EventsPersistenceStorage: latest_event_ids, new_latest_event_ids, ) - current_state, delta_ids = res + current_state, delta_ids, new_latest_event_ids = res + + # there should always be at least one forward extremity. + # (except during the initial persistence of the send_join + # results, in which case there will be no existing + # extremities, so we'll `continue` above and skip this bit.) + assert new_latest_event_ids, "No forward extremities left!" 
+ + new_forward_extremeties[room_id] = new_latest_event_ids # If either are not None then there has been a change, # and we need to work out the delta (or use that @@ -573,29 +603,35 @@ class EventsPersistenceStorage: self, room_id: str, events_context: List[Tuple[EventBase, EventContext]], - old_latest_event_ids: Iterable[str], - new_latest_event_ids: Iterable[str], - ) -> Tuple[Optional[StateMap[str]], Optional[StateMap[str]]]: + old_latest_event_ids: Set[str], + new_latest_event_ids: Set[str], + ) -> Tuple[Optional[StateMap[str]], Optional[StateMap[str]], Set[str]]: """Calculate the current state dict after adding some new events to a room Args: - room_id (str): + room_id: room to which the events are being added. Used for logging etc - events_context (list[(EventBase, EventContext)]): + events_context: events and contexts which are being added to the room - old_latest_event_ids (iterable[str]): + old_latest_event_ids: the old forward extremities for the room. - new_latest_event_ids (iterable[str]): + new_latest_event_ids : the new forward extremities for the room. Returns: - Returns a tuple of two state maps, the first being the full new current - state and the second being the delta to the existing current state. - If both are None then there has been no change. + Returns a tuple of two state maps and a set of new forward + extremities. + + The first state map is the full new current state and the second + is the delta to the existing current state. If both are None then + there has been no change. + + The function may prune some old entries from the set of new + forward extremities if it's safe to do so. If there has been a change then we only return the delta if its already been calculated. Conversely if we do know the delta then @@ -672,7 +708,7 @@ class EventsPersistenceStorage: # If they old and new groups are the same then we don't need to do # anything. if old_state_groups == new_state_groups: - return None, None + return None, None, new_latest_event_ids if len(new_state_groups) == 1 and len(old_state_groups) == 1: # If we're going from one state group to another, lets check if @@ -689,7 +725,7 @@ class EventsPersistenceStorage: # the current state in memory then lets also return that, # but it doesn't matter if we don't. new_state = state_groups_map.get(new_state_group) - return new_state, delta_ids + return new_state, delta_ids, new_latest_event_ids # Now that we have calculated new_state_groups we need to get # their state IDs so we can resolve to a single state set. @@ -701,7 +737,7 @@ class EventsPersistenceStorage: if len(new_state_groups) == 1: # If there is only one state group, then we know what the current # state is. - return state_groups_map[new_state_groups.pop()], None + return state_groups_map[new_state_groups.pop()], None, new_latest_event_ids # Ok, we need to defer to the state handler to resolve our state sets. @@ -734,7 +770,139 @@ class EventsPersistenceStorage: state_res_store=StateResolutionStore(self.main_store), ) - return res.state, None + state_resolutions_during_persistence.inc() + + # If the returned state matches the state group of one of the new + # forward extremities then we check if we are able to prune some state + # extremities. 
+ if res.state_group and res.state_group in new_state_groups: + new_latest_event_ids = await self._prune_extremities( + room_id, + new_latest_event_ids, + res.state_group, + event_id_to_state_group, + events_context, + ) + + return res.state, None, new_latest_event_ids + + async def _prune_extremities( + self, + room_id: str, + new_latest_event_ids: Set[str], + resolved_state_group: int, + event_id_to_state_group: Dict[str, int], + events_context: List[Tuple[EventBase, EventContext]], + ) -> Set[str]: + """See if we can prune any of the extremities after calculating the + resolved state. + """ + potential_times_prune_extremities.inc() + + # We keep all the extremities that have the same state group, and + # see if we can drop the others. + new_new_extrems = { + e + for e in new_latest_event_ids + if event_id_to_state_group[e] == resolved_state_group + } + + dropped_extrems = set(new_latest_event_ids) - new_new_extrems + + logger.debug("Might drop extremities: %s", dropped_extrems) + + # We only drop events from the extremities list if: + # 1. we're not currently persisting them; + # 2. they're not our own events (or are dummy events); and + # 3. they're either: + # 1. over N hours old and more than N events ago (we use depth to + # calculate); or + # 2. we are persisting an event from the same domain and more than + # M events ago. + # + # The idea is that we don't want to drop events that are "legitimate" + # extremities (that we would want to include as prev events), only + # "stuck" extremities that are e.g. due to a gap in the graph. + # + # Note that we either drop all of them or none of them. If we only drop + # some of the events we don't know if state res would come to the same + # conclusion. + + for ev, _ in events_context: + if ev.event_id in dropped_extrems: + logger.debug( + "Not dropping extremities: %s is being persisted", ev.event_id + ) + return new_latest_event_ids + + dropped_events = await self.main_store.get_events( + dropped_extrems, + allow_rejected=True, + redact_behaviour=EventRedactBehaviour.AS_IS, + ) + + new_senders = {get_domain_from_id(e.sender) for e, _ in events_context} + + one_day_ago = self._clock.time_msec() - 24 * 60 * 60 * 1000 + current_depth = max(e.depth for e, _ in events_context) + for event in dropped_events.values(): + # If the event is a local dummy event then we should check it + # doesn't reference any local events, as we want to reference those + # if we send any new events. + # + # Note we do this recursively to handle the case where a dummy event + # references a dummy event that only references remote events. + # + # Ideally we'd figure out a way of still being able to drop old + # dummy events that reference local events, but this is good enough + # as a first cut. 
+ events_to_check = [event] + while events_to_check: + new_events = set() + for event_to_check in events_to_check: + if self.is_mine_id(event_to_check.sender): + if event_to_check.type != EventTypes.Dummy: + logger.debug("Not dropping own event") + return new_latest_event_ids + new_events.update(event_to_check.prev_event_ids()) + + prev_events = await self.main_store.get_events( + new_events, + allow_rejected=True, + redact_behaviour=EventRedactBehaviour.AS_IS, + ) + events_to_check = prev_events.values() + + if ( + event.origin_server_ts < one_day_ago + and event.depth < current_depth - 100 + ): + continue + + # We can be less conservative about dropping extremities from the + # same domain, though we do want to wait a little bit (otherwise + # we'll immediately remove all extremities from a given server). + if ( + get_domain_from_id(event.sender) in new_senders + and event.depth < current_depth - 20 + ): + continue + + logger.debug( + "Not dropping as too new and not in new_senders: %s", new_senders, + ) + + return new_latest_event_ids + + times_pruned_extremities.inc() + + logger.info( + "Pruning forward extremities in room %s: from %s -> %s", + room_id, + new_latest_event_ids, + new_new_extrems, + ) + return new_new_extrems async def _calculate_state_delta( self, room_id: str, current_state: StateMap[str] diff --git a/synapse/visibility.py b/synapse/visibility.py index f2836ba9f0..ec50e7e977 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -125,7 +125,7 @@ async def filter_events_for_client( # see events in the room at that point in the DAG, and that shouldn't be decided # on those checks. if filter_send_to_client: - if event.type == "org.matrix.dummy_event": + if event.type == EventTypes.Dummy: return None if not event.is_state() and event.sender in ignore_list: diff --git a/tests/storage/test_events.py b/tests/storage/test_events.py new file mode 100644 index 0000000000..71210ce606 --- /dev/null +++ b/tests/storage/test_events.py @@ -0,0 +1,334 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from synapse.api.constants import EventTypes, Membership +from synapse.api.room_versions import RoomVersions +from synapse.federation.federation_base import event_from_pdu_json +from synapse.rest import admin +from synapse.rest.client.v1 import login, room + +from tests.unittest import HomeserverTestCase + + +class ExtremPruneTestCase(HomeserverTestCase): + servlets = [ + admin.register_servlets, + room.register_servlets, + login.register_servlets, + ] + + def prepare(self, reactor, clock, homeserver): + self.state = self.hs.get_state_handler() + self.persistence = self.hs.get_storage().persistence + self.store = self.hs.get_datastore() + + self.register_user("user", "pass") + self.token = self.login("user", "pass") + + self.room_id = self.helper.create_room_as( + "user", room_version=RoomVersions.V6.identifier, tok=self.token + ) + + body = self.helper.send(self.room_id, body="Test", tok=self.token) + local_message_event_id = body["event_id"] + + # Fudge a remote event and persist it. This will be the extremity before + # the gap. + self.remote_event_1 = event_from_pdu_json( + { + "type": EventTypes.Message, + "state_key": "@user:other", + "content": {}, + "room_id": self.room_id, + "sender": "@user:other", + "depth": 5, + "prev_events": [local_message_event_id], + "auth_events": [], + "origin_server_ts": self.clock.time_msec(), + }, + RoomVersions.V6, + ) + + self.persist_event(self.remote_event_1) + + # Check that the current extremities is the remote event. + self.assert_extremities([self.remote_event_1.event_id]) + + def persist_event(self, event, state=None): + """Persist the event, with optional state + """ + context = self.get_success( + self.state.compute_event_context(event, old_state=state) + ) + self.get_success(self.persistence.persist_event(event, context)) + + def assert_extremities(self, expected_extremities): + """Assert the current extremities for the room + """ + extremities = self.get_success( + self.store.get_prev_events_for_room(self.room_id) + ) + self.assertCountEqual(extremities, expected_extremities) + + def test_prune_gap(self): + """Test that we drop extremities after a gap when we see an event from + the same domain. + """ + + # Fudge a second event which points to an event we don't have. This is a + # state event so that the state changes (otherwise we won't prune the + # extremity as they'll have the same state group). + remote_event_2 = event_from_pdu_json( + { + "type": EventTypes.Member, + "state_key": "@user:other", + "content": {"membership": Membership.JOIN}, + "room_id": self.room_id, + "sender": "@user:other", + "depth": 50, + "prev_events": ["$some_unknown_message"], + "auth_events": [], + "origin_server_ts": self.clock.time_msec(), + }, + RoomVersions.V6, + ) + + state_before_gap = self.get_success(self.state.get_current_state(self.room_id)) + + self.persist_event(remote_event_2, state=state_before_gap.values()) + + # Check the new extremity is just the new remote event. + self.assert_extremities([remote_event_2.event_id]) + + def test_do_not_prune_gap_if_state_different(self): + """Test that we don't prune extremities after a gap if the resolved + state is different. + """ + + # Fudge a second event which points to an event we don't have. 
+ remote_event_2 = event_from_pdu_json( + { + "type": EventTypes.Message, + "state_key": "@user:other", + "content": {}, + "room_id": self.room_id, + "sender": "@user:other", + "depth": 10, + "prev_events": ["$some_unknown_message"], + "auth_events": [], + "origin_server_ts": self.clock.time_msec(), + }, + RoomVersions.V6, + ) + + # Now we persist it with state with a dropped history visibility + # setting. The state resolution across the old and new event will then + # include it, and so the resolved state won't match the new state. + state_before_gap = dict( + self.get_success(self.state.get_current_state(self.room_id)) + ) + state_before_gap.pop(("m.room.history_visibility", "")) + + context = self.get_success( + self.state.compute_event_context( + remote_event_2, old_state=state_before_gap.values() + ) + ) + + self.get_success(self.persistence.persist_event(remote_event_2, context)) + + # Check that we haven't dropped the old extremity. + self.assert_extremities([self.remote_event_1.event_id, remote_event_2.event_id]) + + def test_prune_gap_if_old(self): + """Test that we drop extremities after a gap when the previous extremity + is "old" + """ + + # Advance the clock for many days to make the old extremity "old". We + # also set the depth to "lots". + self.reactor.advance(7 * 24 * 60 * 60) + + # Fudge a second event which points to an event we don't have. This is a + # state event so that the state changes (otherwise we won't prune the + # extremity as they'll have the same state group). + remote_event_2 = event_from_pdu_json( + { + "type": EventTypes.Member, + "state_key": "@user:other2", + "content": {"membership": Membership.JOIN}, + "room_id": self.room_id, + "sender": "@user:other2", + "depth": 10000, + "prev_events": ["$some_unknown_message"], + "auth_events": [], + "origin_server_ts": self.clock.time_msec(), + }, + RoomVersions.V6, + ) + + state_before_gap = self.get_success(self.state.get_current_state(self.room_id)) + + self.persist_event(remote_event_2, state=state_before_gap.values()) + + # Check the new extremity is just the new remote event. + self.assert_extremities([remote_event_2.event_id]) + + def test_do_not_prune_gap_if_other_server(self): + """Test that we do not drop extremities after a gap when we see an event + from a different domain. + """ + + # Fudge a second event which points to an event we don't have. This is a + # state event so that the state changes (otherwise we won't prune the + # extremity as they'll have the same state group). + remote_event_2 = event_from_pdu_json( + { + "type": EventTypes.Member, + "state_key": "@user:other2", + "content": {"membership": Membership.JOIN}, + "room_id": self.room_id, + "sender": "@user:other2", + "depth": 10, + "prev_events": ["$some_unknown_message"], + "auth_events": [], + "origin_server_ts": self.clock.time_msec(), + }, + RoomVersions.V6, + ) + + state_before_gap = self.get_success(self.state.get_current_state(self.room_id)) + + self.persist_event(remote_event_2, state=state_before_gap.values()) + + # Check the new extremity is just the new remote event. + self.assert_extremities([self.remote_event_1.event_id, remote_event_2.event_id]) + + def test_prune_gap_if_dummy_remote(self): + """Test that we drop extremities after a gap when the previous extremity + is a local dummy event and only points to remote events. 
+ """ + + body = self.helper.send_event( + self.room_id, type=EventTypes.Dummy, content={}, tok=self.token + ) + local_message_event_id = body["event_id"] + self.assert_extremities([local_message_event_id]) + + # Advance the clock for many days to make the old extremity "old". We + # also set the depth to "lots". + self.reactor.advance(7 * 24 * 60 * 60) + + # Fudge a second event which points to an event we don't have. This is a + # state event so that the state changes (otherwise we won't prune the + # extremity as they'll have the same state group). + remote_event_2 = event_from_pdu_json( + { + "type": EventTypes.Member, + "state_key": "@user:other2", + "content": {"membership": Membership.JOIN}, + "room_id": self.room_id, + "sender": "@user:other2", + "depth": 10000, + "prev_events": ["$some_unknown_message"], + "auth_events": [], + "origin_server_ts": self.clock.time_msec(), + }, + RoomVersions.V6, + ) + + state_before_gap = self.get_success(self.state.get_current_state(self.room_id)) + + self.persist_event(remote_event_2, state=state_before_gap.values()) + + # Check the new extremity is just the new remote event. + self.assert_extremities([remote_event_2.event_id]) + + def test_prune_gap_if_dummy_local(self): + """Test that we don't drop extremities after a gap when the previous + extremity is a local dummy event and points to local events. + """ + + body = self.helper.send(self.room_id, body="Test", tok=self.token) + + body = self.helper.send_event( + self.room_id, type=EventTypes.Dummy, content={}, tok=self.token + ) + local_message_event_id = body["event_id"] + self.assert_extremities([local_message_event_id]) + + # Advance the clock for many days to make the old extremity "old". We + # also set the depth to "lots". + self.reactor.advance(7 * 24 * 60 * 60) + + # Fudge a second event which points to an event we don't have. This is a + # state event so that the state changes (otherwise we won't prune the + # extremity as they'll have the same state group). + remote_event_2 = event_from_pdu_json( + { + "type": EventTypes.Member, + "state_key": "@user:other2", + "content": {"membership": Membership.JOIN}, + "room_id": self.room_id, + "sender": "@user:other2", + "depth": 10000, + "prev_events": ["$some_unknown_message"], + "auth_events": [], + "origin_server_ts": self.clock.time_msec(), + }, + RoomVersions.V6, + ) + + state_before_gap = self.get_success(self.state.get_current_state(self.room_id)) + + self.persist_event(remote_event_2, state=state_before_gap.values()) + + # Check the new extremity is just the new remote event. + self.assert_extremities([remote_event_2.event_id, local_message_event_id]) + + def test_do_not_prune_gap_if_not_dummy(self): + """Test that we do not drop extremities after a gap when the previous extremity + is not a dummy event. + """ + + body = self.helper.send(self.room_id, body="test", tok=self.token) + local_message_event_id = body["event_id"] + self.assert_extremities([local_message_event_id]) + + # Fudge a second event which points to an event we don't have. This is a + # state event so that the state changes (otherwise we won't prune the + # extremity as they'll have the same state group). 
+ remote_event_2 = event_from_pdu_json( + { + "type": EventTypes.Member, + "state_key": "@user:other2", + "content": {"membership": Membership.JOIN}, + "room_id": self.room_id, + "sender": "@user:other2", + "depth": 10000, + "prev_events": ["$some_unknown_message"], + "auth_events": [], + "origin_server_ts": self.clock.time_msec(), + }, + RoomVersions.V6, + ) + + state_before_gap = self.get_success(self.state.get_current_state(self.room_id)) + + self.persist_event(remote_event_2, state=state_before_gap.values()) + + # Check the new extremity is just the new remote event. + self.assert_extremities([local_message_event_id, remote_event_2.event_id]) -- cgit 1.5.1 From 9dde9c9f01ff8ed4c60314f10d97261739ea0547 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 5 Jan 2021 07:41:48 -0500 Subject: Implement MSC2176: Updated redaction rules (#8984) An experimental room version ("org.matrix.msc2176") contains the new redaction rules for testing. --- changelog.d/8984.feature | 1 + synapse/api/room_versions.py | 32 ++++++-- synapse/events/utils.py | 16 +++- synapse/handlers/room.py | 2 +- tests/events/test_utils.py | 185 ++++++++++++++++++++++++++++++++++++++----- 5 files changed, 206 insertions(+), 30 deletions(-) create mode 100644 changelog.d/8984.feature (limited to 'synapse/api') diff --git a/changelog.d/8984.feature b/changelog.d/8984.feature new file mode 100644 index 0000000000..4db629746e --- /dev/null +++ b/changelog.d/8984.feature @@ -0,0 +1 @@ +Implement [MSC2176](https://github.com/matrix-org/matrix-doc/pull/2176) in an experimental room version. diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index f3ecbf36b6..de2cc15d33 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -51,11 +51,11 @@ class RoomDisposition: class RoomVersion: """An object which describes the unique attributes of a room version.""" - identifier = attr.ib() # str; the identifier for this version - disposition = attr.ib() # str; one of the RoomDispositions - event_format = attr.ib() # int; one of the EventFormatVersions - state_res = attr.ib() # int; one of the StateResolutionVersions - enforce_key_validity = attr.ib() # bool + identifier = attr.ib(type=str) # the identifier for this version + disposition = attr.ib(type=str) # one of the RoomDispositions + event_format = attr.ib(type=int) # one of the EventFormatVersions + state_res = attr.ib(type=int) # one of the StateResolutionVersions + enforce_key_validity = attr.ib(type=bool) # bool: before MSC2261/MSC2432, m.room.aliases had special auth rules and redaction rules special_case_aliases_auth = attr.ib(type=bool) @@ -64,9 +64,11 @@ class RoomVersion: # * Floats # * NaN, Infinity, -Infinity strict_canonicaljson = attr.ib(type=bool) - # bool: MSC2209: Check 'notifications' key while verifying + # MSC2209: Check 'notifications' key while verifying # m.room.power_levels auth rules. limit_notifications_power_levels = attr.ib(type=bool) + # MSC2174/MSC2176: Apply updated redaction rules algorithm. 
+ msc2176_redaction_rules = attr.ib(type=bool) class RoomVersions: @@ -79,6 +81,7 @@ class RoomVersions: special_case_aliases_auth=True, strict_canonicaljson=False, limit_notifications_power_levels=False, + msc2176_redaction_rules=False, ) V2 = RoomVersion( "2", @@ -89,6 +92,7 @@ class RoomVersions: special_case_aliases_auth=True, strict_canonicaljson=False, limit_notifications_power_levels=False, + msc2176_redaction_rules=False, ) V3 = RoomVersion( "3", @@ -99,6 +103,7 @@ class RoomVersions: special_case_aliases_auth=True, strict_canonicaljson=False, limit_notifications_power_levels=False, + msc2176_redaction_rules=False, ) V4 = RoomVersion( "4", @@ -109,6 +114,7 @@ class RoomVersions: special_case_aliases_auth=True, strict_canonicaljson=False, limit_notifications_power_levels=False, + msc2176_redaction_rules=False, ) V5 = RoomVersion( "5", @@ -119,6 +125,7 @@ class RoomVersions: special_case_aliases_auth=True, strict_canonicaljson=False, limit_notifications_power_levels=False, + msc2176_redaction_rules=False, ) V6 = RoomVersion( "6", @@ -129,6 +136,18 @@ class RoomVersions: special_case_aliases_auth=False, strict_canonicaljson=True, limit_notifications_power_levels=True, + msc2176_redaction_rules=False, + ) + MSC2176 = RoomVersion( + "org.matrix.msc2176", + RoomDisposition.UNSTABLE, + EventFormatVersions.V3, + StateResolutionVersions.V2, + enforce_key_validity=True, + special_case_aliases_auth=False, + strict_canonicaljson=True, + limit_notifications_power_levels=True, + msc2176_redaction_rules=True, ) @@ -141,5 +160,6 @@ KNOWN_ROOM_VERSIONS = { RoomVersions.V4, RoomVersions.V5, RoomVersions.V6, + RoomVersions.MSC2176, ) } # type: Dict[str, RoomVersion] diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 14f7f1156f..9c22e33813 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -79,13 +79,15 @@ def prune_event_dict(room_version: RoomVersion, event_dict: dict) -> dict: "state_key", "depth", "prev_events", - "prev_state", "auth_events", "origin", "origin_server_ts", - "membership", ] + # Room versions from before MSC2176 had additional allowed keys. + if not room_version.msc2176_redaction_rules: + allowed_keys.extend(["prev_state", "membership"]) + event_type = event_dict["type"] new_content = {} @@ -98,6 +100,10 @@ def prune_event_dict(room_version: RoomVersion, event_dict: dict) -> dict: if event_type == EventTypes.Member: add_fields("membership") elif event_type == EventTypes.Create: + # MSC2176 rules state that create events cannot be redacted. 
+ if room_version.msc2176_redaction_rules: + return event_dict + add_fields("creator") elif event_type == EventTypes.JoinRules: add_fields("join_rule") @@ -112,10 +118,16 @@ def prune_event_dict(room_version: RoomVersion, event_dict: dict) -> dict: "kick", "redact", ) + + if room_version.msc2176_redaction_rules: + add_fields("invite") + elif event_type == EventTypes.Aliases and room_version.special_case_aliases_auth: add_fields("aliases") elif event_type == EventTypes.RoomHistoryVisibility: add_fields("history_visibility") + elif event_type == EventTypes.Redaction and room_version.msc2176_redaction_rules: + add_fields("redacts") allowed_fields = {k: v for k, v in event_dict.items() if k in allowed_keys} diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 1f809fa161..3bece6d668 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -365,7 +365,7 @@ class RoomCreationHandler(BaseHandler): creation_content = { "room_version": new_room_version.identifier, "predecessor": {"room_id": old_room_id, "event_id": tombstone_event_id}, - } + } # type: JsonDict # Check if old room was non-federatable diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py index c1274c14af..8ba36c6074 100644 --- a/tests/events/test_utils.py +++ b/tests/events/test_utils.py @@ -34,11 +34,17 @@ def MockEvent(**kwargs): class PruneEventTestCase(unittest.TestCase): - """ Asserts that a new event constructed with `evdict` will look like - `matchdict` when it is redacted. """ - def run_test(self, evdict, matchdict, **kwargs): - self.assertEquals( + """ + Asserts that a new event constructed with `evdict` will look like + `matchdict` when it is redacted. + + Args: + evdict: The dictionary to build the event from. + matchdict: The expected resulting dictionary. + kwargs: Additional keyword arguments used to create the event. + """ + self.assertEqual( prune_event(make_event_from_dict(evdict, **kwargs)).get_dict(), matchdict ) @@ -55,54 +61,80 @@ class PruneEventTestCase(unittest.TestCase): ) def test_basic_keys(self): + """Ensure that the keys that should be untouched are kept.""" + # Note that some of the values below don't really make sense, but the + # pruning of events doesn't worry about the values of any fields (with + # the exception of the content field). self.run_test( { + "event_id": "$3:domain", "type": "A", "room_id": "!1:domain", "sender": "@2:domain", - "event_id": "$3:domain", + "state_key": "B", + "content": {"other_key": "foo"}, + "hashes": "hashes", + "signatures": {"domain": {"algo:1": "sigs"}}, + "depth": 4, + "prev_events": "prev_events", + "prev_state": "prev_state", + "auth_events": "auth_events", "origin": "domain", + "origin_server_ts": 1234, + "membership": "join", + # Also include a key that should be removed. + "other_key": "foo", }, { + "event_id": "$3:domain", "type": "A", "room_id": "!1:domain", "sender": "@2:domain", - "event_id": "$3:domain", + "state_key": "B", + "hashes": "hashes", + "depth": 4, + "prev_events": "prev_events", + "prev_state": "prev_state", + "auth_events": "auth_events", "origin": "domain", + "origin_server_ts": 1234, + "membership": "join", "content": {}, - "signatures": {}, + "signatures": {"domain": {"algo:1": "sigs"}}, "unsigned": {}, }, ) - def test_unsigned_age_ts(self): + # As of MSC2176 we now redact the membership and prev_states keys. 
self.run_test( - {"type": "B", "event_id": "$test:domain", "unsigned": {"age_ts": 20}}, - { - "type": "B", - "event_id": "$test:domain", - "content": {}, - "signatures": {}, - "unsigned": {"age_ts": 20}, - }, + {"type": "A", "prev_state": "prev_state", "membership": "join"}, + {"type": "A", "content": {}, "signatures": {}, "unsigned": {}}, + room_version=RoomVersions.MSC2176, ) + def test_unsigned(self): + """Ensure that unsigned properties get stripped (except age_ts and replaces_state).""" self.run_test( { "type": "B", "event_id": "$test:domain", - "unsigned": {"other_key": "here"}, + "unsigned": { + "age_ts": 20, + "replaces_state": "$test2:domain", + "other_key": "foo", + }, }, { "type": "B", "event_id": "$test:domain", "content": {}, "signatures": {}, - "unsigned": {}, + "unsigned": {"age_ts": 20, "replaces_state": "$test2:domain"}, }, ) def test_content(self): + """The content dictionary should be stripped in most cases.""" self.run_test( {"type": "C", "event_id": "$test:domain", "content": {"things": "here"}}, { @@ -114,11 +146,35 @@ class PruneEventTestCase(unittest.TestCase): }, ) + # Some events keep a single content key/value. + EVENT_KEEP_CONTENT_KEYS = [ + ("member", "membership", "join"), + ("join_rules", "join_rule", "invite"), + ("history_visibility", "history_visibility", "shared"), + ] + for event_type, key, value in EVENT_KEEP_CONTENT_KEYS: + self.run_test( + { + "type": "m.room." + event_type, + "event_id": "$test:domain", + "content": {key: value, "other_key": "foo"}, + }, + { + "type": "m.room." + event_type, + "event_id": "$test:domain", + "content": {key: value}, + "signatures": {}, + "unsigned": {}, + }, + ) + + def test_create(self): + """Create events are partially redacted until MSC2176.""" self.run_test( { "type": "m.room.create", "event_id": "$test:domain", - "content": {"creator": "@2:domain", "other_field": "here"}, + "content": {"creator": "@2:domain", "other_key": "foo"}, }, { "type": "m.room.create", @@ -129,6 +185,68 @@ class PruneEventTestCase(unittest.TestCase): }, ) + # After MSC2176, create events get nothing redacted. + self.run_test( + {"type": "m.room.create", "content": {"not_a_real_key": True}}, + { + "type": "m.room.create", + "content": {"not_a_real_key": True}, + "signatures": {}, + "unsigned": {}, + }, + room_version=RoomVersions.MSC2176, + ) + + def test_power_levels(self): + """Power level events keep a variety of content keys.""" + self.run_test( + { + "type": "m.room.power_levels", + "event_id": "$test:domain", + "content": { + "ban": 1, + "events": {"m.room.name": 100}, + "events_default": 2, + "invite": 3, + "kick": 4, + "redact": 5, + "state_default": 6, + "users": {"@admin:domain": 100}, + "users_default": 7, + "other_key": 8, + }, + }, + { + "type": "m.room.power_levels", + "event_id": "$test:domain", + "content": { + "ban": 1, + "events": {"m.room.name": 100}, + "events_default": 2, + # Note that invite is not here. + "kick": 4, + "redact": 5, + "state_default": 6, + "users": {"@admin:domain": 100}, + "users_default": 7, + }, + "signatures": {}, + "unsigned": {}, + }, + ) + + # After MSC2176, power levels events keep the invite key. 
+ self.run_test( + {"type": "m.room.power_levels", "content": {"invite": 75}}, + { + "type": "m.room.power_levels", + "content": {"invite": 75}, + "signatures": {}, + "unsigned": {}, + }, + room_version=RoomVersions.MSC2176, + ) + def test_alias_event(self): """Alias events have special behavior up through room version 6.""" self.run_test( @@ -146,8 +264,7 @@ class PruneEventTestCase(unittest.TestCase): }, ) - def test_msc2432_alias_event(self): - """After MSC2432, alias events have no special behavior.""" + # After MSC2432, alias events have no special behavior. self.run_test( {"type": "m.room.aliases", "content": {"aliases": ["test"]}}, { @@ -159,6 +276,32 @@ class PruneEventTestCase(unittest.TestCase): room_version=RoomVersions.V6, ) + def test_redacts(self): + """Redaction events have no special behaviour until MSC2174/MSC2176.""" + + self.run_test( + {"type": "m.room.redaction", "content": {"redacts": "$test2:domain"}}, + { + "type": "m.room.redaction", + "content": {}, + "signatures": {}, + "unsigned": {}, + }, + room_version=RoomVersions.V6, + ) + + # After MSC2174, redaction events keep the redacts content key. + self.run_test( + {"type": "m.room.redaction", "content": {"redacts": "$test2:domain"}}, + { + "type": "m.room.redaction", + "content": {"redacts": "$test2:domain"}, + "signatures": {}, + "unsigned": {}, + }, + room_version=RoomVersions.MSC2176, + ) + class SerializeEventTestCase(unittest.TestCase): def serialize(self, ev, fields): -- cgit 1.5.1 From 2ec8ca5e6046b207eabe008101631b978758ac6d Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 12 Jan 2021 12:34:16 +0000 Subject: Remove SynapseRequest.get_user_agent (#9069) SynapseRequest is in danger of becoming a bit of a dumping-ground for "useful stuff relating to Requests", which isn't really its intention (its purpose is to override render, finished and connectionLost to set up the LoggingContext and write the right entries to the request log). Putting utility functions inside SynapseRequest means that lots of our code ends up requiring a SynapseRequest when there is nothing synapse-specific about the Request at all, and any old twisted.web.iweb.IRequest will do. This increases code coupling and makes testing more difficult. In short: move get_user_agent out to a utility function. --- changelog.d/9069.misc | 1 + synapse/api/auth.py | 3 ++- synapse/handlers/auth.py | 6 +++--- synapse/handlers/sso.py | 5 +++-- synapse/http/__init__.py | 15 +++++++++++++++ synapse/http/site.py | 18 ++---------------- tests/handlers/test_cas.py | 2 +- tests/handlers/test_oidc.py | 3 +-- tests/handlers/test_saml.py | 2 +- 9 files changed, 29 insertions(+), 26 deletions(-) create mode 100644 changelog.d/9069.misc (limited to 'synapse/api') diff --git a/changelog.d/9069.misc b/changelog.d/9069.misc new file mode 100644 index 0000000000..5e9e62d252 --- /dev/null +++ b/changelog.d/9069.misc @@ -0,0 +1 @@ +Remove `SynapseRequest.get_user_agent`. 
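
The replacement utility lands in `synapse/http/__init__.py` below and needs nothing beyond Twisted's `IRequest.getHeader`, which is why the test mocks switch from `spec=["getClientIP", "get_user_agent"]` to `spec=["getClientIP", "getHeader"]`. A rough usage sketch, assuming a Synapse checkout at this revision is importable; the mock request is a stand-in, not part of the change:

    from unittest.mock import Mock

    from synapse.http import get_request_user_agent

    # Anything exposing IRequest.getHeader will do - no SynapseRequest needed.
    request = Mock(spec=["getClientIP", "getHeader"])
    request.getHeader.return_value = b"SynapseTestClient/1.0"
    assert get_request_user_agent(request) == "SynapseTestClient/1.0"

    # A missing header falls back to the supplied default ("" if omitted).
    request.getHeader.return_value = None
    assert get_request_user_agent(request, "-") == "-"
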
diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 48c4d7b0be..6d6703250b 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -33,6 +33,7 @@ from synapse.api.errors import ( from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.appservice import ApplicationService from synapse.events import EventBase +from synapse.http import get_request_user_agent from synapse.http.site import SynapseRequest from synapse.logging import opentracing as opentracing from synapse.storage.databases.main.registration import TokenLookupResult @@ -187,7 +188,7 @@ class Auth: """ try: ip_addr = self.hs.get_ip_from_request(request) - user_agent = request.get_user_agent("") + user_agent = get_request_user_agent(request) access_token = self.get_access_token_from_request(request) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index f4434673dc..5b86ee85c7 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -49,8 +49,10 @@ from synapse.api.errors import ( UserDeactivatedError, ) from synapse.api.ratelimiting import Ratelimiter +from synapse.handlers._base import BaseHandler from synapse.handlers.ui_auth import INTERACTIVE_AUTH_CHECKERS from synapse.handlers.ui_auth.checkers import UserInteractiveAuthChecker +from synapse.http import get_request_user_agent from synapse.http.server import finish_request, respond_with_html from synapse.http.site import SynapseRequest from synapse.logging.context import defer_to_thread @@ -62,8 +64,6 @@ from synapse.util.async_helpers import maybe_awaitable from synapse.util.msisdn import phone_number_to_msisdn from synapse.util.threepids import canonicalise_email -from ._base import BaseHandler - if TYPE_CHECKING: from synapse.app.homeserver import HomeServer @@ -539,7 +539,7 @@ class AuthHandler(BaseHandler): # authentication flow. 
await self.store.set_ui_auth_clientdict(sid, clientdict) - user_agent = request.get_user_agent("") + user_agent = get_request_user_agent(request) await self.store.add_user_agent_ip_to_ui_auth_session( session.session_id, user_agent, clientip diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py index 2da1ea2223..740df7e4a0 100644 --- a/synapse/handlers/sso.py +++ b/synapse/handlers/sso.py @@ -23,6 +23,7 @@ from typing_extensions import NoReturn, Protocol from twisted.web.http import Request from synapse.api.errors import Codes, RedirectException, SynapseError +from synapse.http import get_request_user_agent from synapse.http.server import respond_with_html from synapse.http.site import SynapseRequest from synapse.types import JsonDict, UserID, contains_invalid_mxid_characters @@ -362,7 +363,7 @@ class SsoHandler: attributes, auth_provider_id, remote_user_id, - request.get_user_agent(""), + get_request_user_agent(request), request.getClientIP(), ) @@ -628,7 +629,7 @@ class SsoHandler: attributes, session.auth_provider_id, session.remote_user_id, - request.get_user_agent(""), + get_request_user_agent(request), request.getClientIP(), ) diff --git a/synapse/http/__init__.py b/synapse/http/__init__.py index 59b01b812c..4bc3cb53f0 100644 --- a/synapse/http/__init__.py +++ b/synapse/http/__init__.py @@ -17,6 +17,7 @@ import re from twisted.internet import task from twisted.web.client import FileBodyProducer +from twisted.web.iweb import IRequest from synapse.api.errors import SynapseError @@ -50,3 +51,17 @@ class QuieterFileBodyProducer(FileBodyProducer): FileBodyProducer.stopProducing(self) except task.TaskStopped: pass + + +def get_request_user_agent(request: IRequest, default: str = "") -> str: + """Return the last User-Agent header, or the given default. + """ + # There could be raw utf-8 bytes in the User-Agent header. + + # N.B. if you don't do this, the logger explodes cryptically + # with maximum recursion trying to log errors about + # the charset problem. + # c.f. https://github.com/matrix-org/synapse/issues/3471 + + h = request.getHeader(b"User-Agent") + return h.decode("ascii", "replace") if h else default diff --git a/synapse/http/site.py b/synapse/http/site.py index 5a5790831b..12ec3f851f 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -20,7 +20,7 @@ from twisted.python.failure import Failure from twisted.web.server import Request, Site from synapse.config.server import ListenerConfig -from synapse.http import redact_uri +from synapse.http import get_request_user_agent, redact_uri from synapse.http.request_metrics import RequestMetrics, requests_counter from synapse.logging.context import LoggingContext, PreserveLoggingContext from synapse.types import Requester @@ -113,15 +113,6 @@ class SynapseRequest(Request): method = self.method.decode("ascii") return method - def get_user_agent(self, default: str) -> str: - """Return the last User-Agent header, or the given default. - """ - user_agent = self.requestHeaders.getRawHeaders(b"User-Agent", [None])[-1] - if user_agent is None: - return default - - return user_agent.decode("ascii", "replace") - def render(self, resrc): # this is called once a Resource has been found to serve the request; in our # case the Resource in question will normally be a JsonResource. @@ -292,12 +283,7 @@ class SynapseRequest(Request): # and can see that we're doing something wrong. authenticated_entity = repr(self.requester) # type: ignore[unreachable] - # ...or could be raw utf-8 bytes in the User-Agent header. - # N.B. 
if you don't do this, the logger explodes cryptically - # with maximum recursion trying to log errors about - # the charset problem. - # c.f. https://github.com/matrix-org/synapse/issues/3471 - user_agent = self.get_user_agent("-") + user_agent = get_request_user_agent(self, "-") code = str(self.code) if not self.finished: diff --git a/tests/handlers/test_cas.py b/tests/handlers/test_cas.py index bd7a1b6891..c37bb6440e 100644 --- a/tests/handlers/test_cas.py +++ b/tests/handlers/test_cas.py @@ -118,4 +118,4 @@ class CasHandlerTestCase(HomeserverTestCase): def _mock_request(): """Returns a mock which will stand in as a SynapseRequest""" - return Mock(spec=["getClientIP", "get_user_agent"]) + return Mock(spec=["getClientIP", "getHeader"]) diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py index f5df657814..4ce0f74f22 100644 --- a/tests/handlers/test_oidc.py +++ b/tests/handlers/test_oidc.py @@ -1011,7 +1011,7 @@ def _build_callback_request( "addCookie", "requestHeaders", "getClientIP", - "get_user_agent", + "getHeader", ] ) @@ -1020,5 +1020,4 @@ def _build_callback_request( request.args[b"code"] = [code.encode("utf-8")] request.args[b"state"] = [state.encode("utf-8")] request.getClientIP.return_value = ip_address - request.get_user_agent.return_value = user_agent return request diff --git a/tests/handlers/test_saml.py b/tests/handlers/test_saml.py index 548038214b..261c7083d1 100644 --- a/tests/handlers/test_saml.py +++ b/tests/handlers/test_saml.py @@ -262,4 +262,4 @@ class SamlHandlerTestCase(HomeserverTestCase): def _mock_request(): """Returns a mock which will stand in as a SynapseRequest""" - return Mock(spec=["getClientIP", "get_user_agent"]) + return Mock(spec=["getClientIP", "getHeader"]) -- cgit 1.5.1 From 0f8945e166b5f1965e69943e16c8220da74211bd Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 12 Jan 2021 12:48:12 +0000 Subject: Kill off `HomeServer.get_ip_from_request()` (#9080) Homeserver.get_ip_from_request() used to be a bit more complicated, but now it is totally redundant. Let's get rid of it. --- changelog.d/9080.misc | 1 + synapse/api/auth.py | 4 ++-- synapse/handlers/auth.py | 9 ++------- synapse/rest/client/v2_alpha/account.py | 19 +++---------------- synapse/rest/client/v2_alpha/auth.py | 4 ++-- synapse/rest/client/v2_alpha/devices.py | 12 ++---------- synapse/rest/client/v2_alpha/keys.py | 6 +----- synapse/rest/client/v2_alpha/register.py | 8 ++------ synapse/server.py | 4 ---- 9 files changed, 15 insertions(+), 52 deletions(-) create mode 100644 changelog.d/9080.misc (limited to 'synapse/api') diff --git a/changelog.d/9080.misc b/changelog.d/9080.misc new file mode 100644 index 0000000000..3da8171f5f --- /dev/null +++ b/changelog.d/9080.misc @@ -0,0 +1 @@ +Remove redundant `Homeserver.get_ip_from_request` method. 
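
The substitution below is mechanical: `Request.getClientIP()` is already part of Twisted's request API, so the `HomeServer` indirection goes away, along with the explicit client-IP arguments that were threaded through the UI-auth helpers. A minimal sketch of the pattern, using a stand-in mock rather than a real request:

    from unittest.mock import Mock

    # Stand-in for an incoming twisted.web.server.Request.
    request = Mock(spec=["getClientIP", "getHeader"])
    request.getClientIP.return_value = "10.1.2.3"

    # Before: ip_addr = hs.get_ip_from_request(request)
    # After:  ask the request object directly.
    ip_addr = request.getClientIP()
    assert ip_addr == "10.1.2.3"
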
diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 6d6703250b..67ecbd32ff 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -187,7 +187,7 @@ class Auth: AuthError if access is denied for the user in the access token """ try: - ip_addr = self.hs.get_ip_from_request(request) + ip_addr = request.getClientIP() user_agent = get_request_user_agent(request) access_token = self.get_access_token_from_request(request) @@ -276,7 +276,7 @@ class Auth: return None, None if app_service.ip_range_whitelist: - ip_address = IPAddress(self.hs.get_ip_from_request(request)) + ip_address = IPAddress(request.getClientIP()) if ip_address not in app_service.ip_range_whitelist: return None, None diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 5b86ee85c7..2f5b2b61aa 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -284,7 +284,6 @@ class AuthHandler(BaseHandler): requester: Requester, request: SynapseRequest, request_body: Dict[str, Any], - clientip: str, description: str, ) -> Tuple[dict, Optional[str]]: """ @@ -301,8 +300,6 @@ class AuthHandler(BaseHandler): request_body: The body of the request sent by the client - clientip: The IP address of the client. - description: A human readable string to be displayed to the user that describes the operation happening on their account. @@ -351,7 +348,7 @@ class AuthHandler(BaseHandler): try: result, params, session_id = await self.check_ui_auth( - flows, request, request_body, clientip, description + flows, request, request_body, description ) except LoginError: # Update the ratelimiter to say we failed (`can_do_action` doesn't raise). @@ -426,7 +423,6 @@ class AuthHandler(BaseHandler): flows: List[List[str]], request: SynapseRequest, clientdict: Dict[str, Any], - clientip: str, description: str, ) -> Tuple[dict, dict, str]: """ @@ -448,8 +444,6 @@ class AuthHandler(BaseHandler): clientdict: The dictionary from the client root level, not the 'auth' key: this method prompts for auth if none is sent. - clientip: The IP address of the client. - description: A human readable string to be displayed to the user that describes the operation happening on their account. 
@@ -540,6 +534,7 @@ class AuthHandler(BaseHandler): await self.store.set_ui_auth_clientdict(sid, clientdict) user_agent = get_request_user_agent(request) + clientip = request.getClientIP() await self.store.add_user_agent_ip_to_ui_auth_session( session.session_id, user_agent, clientip diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index d837bde1d6..7f3445fe5d 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -189,11 +189,7 @@ class PasswordRestServlet(RestServlet): requester = await self.auth.get_user_by_req(request) try: params, session_id = await self.auth_handler.validate_user_via_ui_auth( - requester, - request, - body, - self.hs.get_ip_from_request(request), - "modify your account password", + requester, request, body, "modify your account password", ) except InteractiveAuthIncompleteError as e: # The user needs to provide more steps to complete auth, but @@ -215,7 +211,6 @@ class PasswordRestServlet(RestServlet): [[LoginType.EMAIL_IDENTITY]], request, body, - self.hs.get_ip_from_request(request), "modify your account password", ) except InteractiveAuthIncompleteError as e: @@ -309,11 +304,7 @@ class DeactivateAccountRestServlet(RestServlet): return 200, {} await self.auth_handler.validate_user_via_ui_auth( - requester, - request, - body, - self.hs.get_ip_from_request(request), - "deactivate your account", + requester, request, body, "deactivate your account", ) result = await self._deactivate_account_handler.deactivate_account( requester.user.to_string(), erase, id_server=body.get("id_server") @@ -695,11 +686,7 @@ class ThreepidAddRestServlet(RestServlet): assert_valid_client_secret(client_secret) await self.auth_handler.validate_user_via_ui_auth( - requester, - request, - body, - self.hs.get_ip_from_request(request), - "add a third-party identifier to your account", + requester, request, body, "add a third-party identifier to your account", ) validation_session = await self.identity_handler.validate_threepid_session( diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py index 9b9514632f..149697fc23 100644 --- a/synapse/rest/client/v2_alpha/auth.py +++ b/synapse/rest/client/v2_alpha/auth.py @@ -128,7 +128,7 @@ class AuthRestServlet(RestServlet): authdict = {"response": response, "session": session} success = await self.auth_handler.add_oob_auth( - LoginType.RECAPTCHA, authdict, self.hs.get_ip_from_request(request) + LoginType.RECAPTCHA, authdict, request.getClientIP() ) if success: @@ -144,7 +144,7 @@ class AuthRestServlet(RestServlet): authdict = {"session": session} success = await self.auth_handler.add_oob_auth( - LoginType.TERMS, authdict, self.hs.get_ip_from_request(request) + LoginType.TERMS, authdict, request.getClientIP() ) if success: diff --git a/synapse/rest/client/v2_alpha/devices.py b/synapse/rest/client/v2_alpha/devices.py index af117cb27c..314e01dfe4 100644 --- a/synapse/rest/client/v2_alpha/devices.py +++ b/synapse/rest/client/v2_alpha/devices.py @@ -83,11 +83,7 @@ class DeleteDevicesRestServlet(RestServlet): assert_params_in_dict(body, ["devices"]) await self.auth_handler.validate_user_via_ui_auth( - requester, - request, - body, - self.hs.get_ip_from_request(request), - "remove device(s) from your account", + requester, request, body, "remove device(s) from your account", ) await self.device_handler.delete_devices( @@ -133,11 +129,7 @@ class DeviceRestServlet(RestServlet): raise await self.auth_handler.validate_user_via_ui_auth( - 
requester, - request, - body, - self.hs.get_ip_from_request(request), - "remove a device from your account", + requester, request, body, "remove a device from your account", ) await self.device_handler.delete_device(requester.user.to_string(), device_id) diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index b91996c738..a6134ead8a 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -271,11 +271,7 @@ class SigningKeyUploadServlet(RestServlet): body = parse_json_object_from_request(request) await self.auth_handler.validate_user_via_ui_auth( - requester, - request, - body, - self.hs.get_ip_from_request(request), - "add a device signing key to your account", + requester, request, body, "add a device signing key to your account", ) result = await self.e2e_keys_handler.upload_signing_keys_for_user(user_id, body) diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 6b5a1b7109..35e646390e 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -353,7 +353,7 @@ class UsernameAvailabilityRestServlet(RestServlet): 403, "Registration has been disabled", errcode=Codes.FORBIDDEN ) - ip = self.hs.get_ip_from_request(request) + ip = request.getClientIP() with self.ratelimiter.ratelimit(ip) as wait_deferred: await wait_deferred @@ -513,11 +513,7 @@ class RegisterRestServlet(RestServlet): # not this will raise a user-interactive auth error. try: auth_result, params, session_id = await self.auth_handler.check_ui_auth( - self._registration_flows, - request, - body, - self.hs.get_ip_from_request(request), - "register a new account", + self._registration_flows, request, body, "register a new account", ) except InteractiveAuthIncompleteError as e: # The user needs to provide more steps to complete auth. diff --git a/synapse/server.py b/synapse/server.py index a198b0eb46..12da92b63c 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -283,10 +283,6 @@ class HomeServer(metaclass=abc.ABCMeta): """ return self._reactor - def get_ip_from_request(self, request) -> str: - # X-Forwarded-For is handled by our custom request type. 
- return request.getClientIP() - def is_mine(self, domain_specific_string: DomainSpecificString) -> bool: return domain_specific_string.domain == self.hostname -- cgit 1.5.1 From fa50e4bf4ddcb8e98d44700513a28c490f80f02b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 20 Jan 2021 12:30:41 +0000 Subject: Give `public_baseurl` a default value (#9159) --- changelog.d/9159.feature | 1 + docs/sample_config.yaml | 31 +++++++++++++++++-------------- synapse/api/urls.py | 2 -- synapse/config/_base.py | 11 ++++++----- synapse/config/emailconfig.py | 8 -------- synapse/config/oidc_config.py | 2 -- synapse/config/registration.py | 21 ++++----------------- synapse/config/saml2_config.py | 2 -- synapse/config/server.py | 24 +++++++++++++++--------- synapse/config/sso.py | 13 +++++-------- synapse/handlers/identity.py | 2 -- synapse/rest/well_known.py | 4 ---- tests/rest/test_well_known.py | 9 --------- tests/utils.py | 1 - 14 files changed, 48 insertions(+), 83 deletions(-) create mode 100644 changelog.d/9159.feature (limited to 'synapse/api') diff --git a/changelog.d/9159.feature b/changelog.d/9159.feature new file mode 100644 index 0000000000..b7748757de --- /dev/null +++ b/changelog.d/9159.feature @@ -0,0 +1 @@ +Give the `public_baseurl` a default value, if it is not explicitly set in the configuration file. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index ae995efe9b..7fdd798d70 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -67,11 +67,16 @@ pid_file: DATADIR/homeserver.pid # #web_client_location: https://riot.example.com/ -# The public-facing base URL that clients use to access this HS -# (not including _matrix/...). This is the same URL a user would -# enter into the 'custom HS URL' field on their client. If you -# use synapse with a reverse proxy, this should be the URL to reach -# synapse via the proxy. +# The public-facing base URL that clients use to access this Homeserver (not +# including _matrix/...). This is the same URL a user might enter into the +# 'Custom Homeserver URL' field on their client. If you use Synapse with a +# reverse proxy, this should be the URL to reach Synapse via the proxy. +# Otherwise, it should be the URL to reach Synapse's client HTTP listener (see +# 'listeners' below). +# +# If this is left unset, it defaults to 'https:///'. (Note that +# that will not work unless you configure Synapse or a reverse-proxy to listen +# on port 443.) # #public_baseurl: https://example.com/ @@ -1150,8 +1155,9 @@ account_validity: # send an email to the account's email address with a renewal link. By # default, no such emails are sent. # - # If you enable this setting, you will also need to fill out the 'email' and - # 'public_baseurl' configuration sections. + # If you enable this setting, you will also need to fill out the 'email' + # configuration section. You should also check that 'public_baseurl' is set + # correctly. # #renew_at: 1w @@ -1242,8 +1248,7 @@ account_validity: # The identity server which we suggest that clients should use when users log # in on this server. # -# (By default, no suggestion is made, so it is left up to the client. -# This setting is ignored unless public_baseurl is also set.) +# (By default, no suggestion is made, so it is left up to the client.) 
# #default_identity_server: https://matrix.org @@ -1268,8 +1273,6 @@ account_validity: # by the Matrix Identity Service API specification: # https://matrix.org/docs/spec/identity_service/latest # -# If a delegate is specified, the config option public_baseurl must also be filled out. -# account_threepid_delegates: #email: https://example.com # Delegate email sending to example.com #msisdn: http://localhost:8090 # Delegate SMS sending to this local process @@ -1901,9 +1904,9 @@ sso: # phishing attacks from evil.site. To avoid this, include a slash after the # hostname: "https://my.client/". # - # If public_baseurl is set, then the login fallback page (used by clients - # that don't natively support the required login flows) is whitelisted in - # addition to any URLs in this list. + # The login fallback page (used by clients that don't natively support the + # required login flows) is automatically whitelisted in addition to any URLs + # in this list. # # By default, this list is empty. # diff --git a/synapse/api/urls.py b/synapse/api/urls.py index 6379c86dde..e36aeef31f 100644 --- a/synapse/api/urls.py +++ b/synapse/api/urls.py @@ -42,8 +42,6 @@ class ConsentURIBuilder: """ if hs_config.form_secret is None: raise ConfigError("form_secret not set in config") - if hs_config.public_baseurl is None: - raise ConfigError("public_baseurl not set in config") self._hmac_secret = hs_config.form_secret.encode("utf-8") self._public_baseurl = hs_config.public_baseurl diff --git a/synapse/config/_base.py b/synapse/config/_base.py index 2931a88207..94144efc87 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -252,11 +252,12 @@ class Config: env = jinja2.Environment(loader=loader, autoescape=autoescape) # Update the environment with our custom filters - env.filters.update({"format_ts": _format_ts_filter}) - if self.public_baseurl: - env.filters.update( - {"mxc_to_http": _create_mxc_to_http_filter(self.public_baseurl)} - ) + env.filters.update( + { + "format_ts": _format_ts_filter, + "mxc_to_http": _create_mxc_to_http_filter(self.public_baseurl), + } + ) for filename in filenames: # Load the template diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py index d4328c46b9..6a487afd34 100644 --- a/synapse/config/emailconfig.py +++ b/synapse/config/emailconfig.py @@ -166,11 +166,6 @@ class EmailConfig(Config): if not self.email_notif_from: missing.append("email.notif_from") - # public_baseurl is required to build password reset and validation links that - # will be emailed to users - if config.get("public_baseurl") is None: - missing.append("public_baseurl") - if missing: raise ConfigError( MISSING_PASSWORD_RESET_CONFIG_ERROR % (", ".join(missing),) @@ -269,9 +264,6 @@ class EmailConfig(Config): if not self.email_notif_from: missing.append("email.notif_from") - if config.get("public_baseurl") is None: - missing.append("public_baseurl") - if missing: raise ConfigError( "email.enable_notifs is True but required keys are missing: %s" diff --git a/synapse/config/oidc_config.py b/synapse/config/oidc_config.py index 80a24cfbc9..df55367434 100644 --- a/synapse/config/oidc_config.py +++ b/synapse/config/oidc_config.py @@ -43,8 +43,6 @@ class OIDCConfig(Config): raise ConfigError(e.message) from e public_baseurl = self.public_baseurl - if public_baseurl is None: - raise ConfigError("oidc_config requires a public_baseurl to be set") self.oidc_callback_url = public_baseurl + "_synapse/oidc/callback" @property diff --git a/synapse/config/registration.py 
b/synapse/config/registration.py index 740c3fc1b1..4bfc69cb7a 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -49,10 +49,6 @@ class AccountValidityConfig(Config): self.startup_job_max_delta = self.period * 10.0 / 100.0 - if self.renew_by_email_enabled: - if "public_baseurl" not in synapse_config: - raise ConfigError("Can't send renewal emails without 'public_baseurl'") - template_dir = config.get("template_dir") if not template_dir: @@ -109,13 +105,6 @@ class RegistrationConfig(Config): account_threepid_delegates = config.get("account_threepid_delegates") or {} self.account_threepid_delegate_email = account_threepid_delegates.get("email") self.account_threepid_delegate_msisdn = account_threepid_delegates.get("msisdn") - if self.account_threepid_delegate_msisdn and not self.public_baseurl: - raise ConfigError( - "The configuration option `public_baseurl` is required if " - "`account_threepid_delegate.msisdn` is set, such that " - "clients know where to submit validation tokens to. Please " - "configure `public_baseurl`." - ) self.default_identity_server = config.get("default_identity_server") self.allow_guest_access = config.get("allow_guest_access", False) @@ -240,8 +229,9 @@ class RegistrationConfig(Config): # send an email to the account's email address with a renewal link. By # default, no such emails are sent. # - # If you enable this setting, you will also need to fill out the 'email' and - # 'public_baseurl' configuration sections. + # If you enable this setting, you will also need to fill out the 'email' + # configuration section. You should also check that 'public_baseurl' is set + # correctly. # #renew_at: 1w @@ -332,8 +322,7 @@ class RegistrationConfig(Config): # The identity server which we suggest that clients should use when users log # in on this server. # - # (By default, no suggestion is made, so it is left up to the client. - # This setting is ignored unless public_baseurl is also set.) + # (By default, no suggestion is made, so it is left up to the client.) # #default_identity_server: https://matrix.org @@ -358,8 +347,6 @@ class RegistrationConfig(Config): # by the Matrix Identity Service API specification: # https://matrix.org/docs/spec/identity_service/latest # - # If a delegate is specified, the config option public_baseurl must also be filled out. 
- # account_threepid_delegates: #email: https://example.com # Delegate email sending to example.com #msisdn: http://localhost:8090 # Delegate SMS sending to this local process diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py index 7b97d4f114..f33dfa0d6a 100644 --- a/synapse/config/saml2_config.py +++ b/synapse/config/saml2_config.py @@ -189,8 +189,6 @@ class SAML2Config(Config): import saml2 public_baseurl = self.public_baseurl - if public_baseurl is None: - raise ConfigError("saml2_config requires a public_baseurl to be set") if self.saml2_grandfathered_mxid_source_attribute: optional_attributes.add(self.saml2_grandfathered_mxid_source_attribute) diff --git a/synapse/config/server.py b/synapse/config/server.py index 7242a4aa8e..75ba161f35 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -161,7 +161,11 @@ class ServerConfig(Config): self.print_pidfile = config.get("print_pidfile") self.user_agent_suffix = config.get("user_agent_suffix") self.use_frozen_dicts = config.get("use_frozen_dicts", False) - self.public_baseurl = config.get("public_baseurl") + self.public_baseurl = config.get("public_baseurl") or "https://%s/" % ( + self.server_name, + ) + if self.public_baseurl[-1] != "/": + self.public_baseurl += "/" # Whether to enable user presence. self.use_presence = config.get("use_presence", True) @@ -317,9 +321,6 @@ class ServerConfig(Config): # Always blacklist 0.0.0.0, :: self.federation_ip_range_blacklist.update(["0.0.0.0", "::"]) - if self.public_baseurl is not None: - if self.public_baseurl[-1] != "/": - self.public_baseurl += "/" self.start_pushers = config.get("start_pushers", True) # (undocumented) option for torturing the worker-mode replication a bit, @@ -740,11 +741,16 @@ class ServerConfig(Config): # #web_client_location: https://riot.example.com/ - # The public-facing base URL that clients use to access this HS - # (not including _matrix/...). This is the same URL a user would - # enter into the 'custom HS URL' field on their client. If you - # use synapse with a reverse proxy, this should be the URL to reach - # synapse via the proxy. + # The public-facing base URL that clients use to access this Homeserver (not + # including _matrix/...). This is the same URL a user might enter into the + # 'Custom Homeserver URL' field on their client. If you use Synapse with a + # reverse proxy, this should be the URL to reach Synapse via the proxy. + # Otherwise, it should be the URL to reach Synapse's client HTTP listener (see + # 'listeners' below). + # + # If this is left unset, it defaults to 'https:///'. (Note that + # that will not work unless you configure Synapse or a reverse-proxy to listen + # on port 443.) # #public_baseurl: https://example.com/ diff --git a/synapse/config/sso.py b/synapse/config/sso.py index 366f0d4698..59be825532 100644 --- a/synapse/config/sso.py +++ b/synapse/config/sso.py @@ -64,11 +64,8 @@ class SSOConfig(Config): # gracefully to the client). This would make it pointless to ask the user for # confirmation, since the URL the confirmation page would be showing wouldn't be # the client's. - # public_baseurl is an optional setting, so we only add the fallback's URL to the - # list if it's provided (because we can't figure out what that URL is otherwise). 
- if self.public_baseurl: - login_fallback_url = self.public_baseurl + "_matrix/static/client/login" - self.sso_client_whitelist.append(login_fallback_url) + login_fallback_url = self.public_baseurl + "_matrix/static/client/login" + self.sso_client_whitelist.append(login_fallback_url) def generate_config_section(self, **kwargs): return """\ @@ -86,9 +83,9 @@ class SSOConfig(Config): # phishing attacks from evil.site. To avoid this, include a slash after the # hostname: "https://my.client/". # - # If public_baseurl is set, then the login fallback page (used by clients - # that don't natively support the required login flows) is whitelisted in - # addition to any URLs in this list. + # The login fallback page (used by clients that don't natively support the + # required login flows) is automatically whitelisted in addition to any URLs + # in this list. # # By default, this list is empty. # diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index c05036ad1f..f61844d688 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -476,8 +476,6 @@ class IdentityHandler(BaseHandler): except RequestTimedOutError: raise SynapseError(500, "Timed out contacting identity server") - assert self.hs.config.public_baseurl - # we need to tell the client to send the token back to us, since it doesn't # otherwise know where to send it, so add submit_url response parameter # (see also MSC2078) diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py index f591cc6c5c..241fe746d9 100644 --- a/synapse/rest/well_known.py +++ b/synapse/rest/well_known.py @@ -34,10 +34,6 @@ class WellKnownBuilder: self._config = hs.config def get_well_known(self): - # if we don't have a public_baseurl, we can't help much here. - if self._config.public_baseurl is None: - return None - result = {"m.homeserver": {"base_url": self._config.public_baseurl}} if self._config.default_identity_server: diff --git a/tests/rest/test_well_known.py b/tests/rest/test_well_known.py index 14de0921be..c5e44af9f7 100644 --- a/tests/rest/test_well_known.py +++ b/tests/rest/test_well_known.py @@ -40,12 +40,3 @@ class WellKnownTests(unittest.HomeserverTestCase): "m.identity_server": {"base_url": "https://testis"}, }, ) - - def test_well_known_no_public_baseurl(self): - self.hs.config.public_baseurl = None - - channel = self.make_request( - "GET", "/.well-known/matrix/client", shorthand=False - ) - - self.assertEqual(channel.code, 404) diff --git a/tests/utils.py b/tests/utils.py index 977eeaf6ee..09614093bc 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -159,7 +159,6 @@ def default_config(name, parse=False): "remote": {"per_second": 10000, "burst_count": 10000}, }, "saml2_enabled": False, - "public_baseurl": None, "default_identity_server": None, "key_refresh_interval": 24 * 60 * 60 * 1000, "old_signing_keys": {}, -- cgit 1.5.1 From e40d88cff3cca3d5186d5f623ad1107bc403d69b Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Thu, 11 Feb 2021 11:16:54 -0500 Subject: Backout changes for automatically calculating the public baseurl. (#9313) This breaks some people's configurations (if their Client-Server API is not accessed via port 443). 
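For context, the fallback that #9159 introduced was derived purely from the server name, so it always implied port 443. A small illustrative sketch of why that default can be wrong (the values are hypothetical, not taken from any real deployment):

    # What #9159 computed when public_baseurl was left unset:
    server_name = "matrix.example.com"   # hypothetical server_name
    configured = None                    # public_baseurl not set in the config
    derived = configured or ("https://%s/" % (server_name,))
    assert derived == "https://matrix.example.com/"   # implicitly port 443

    # A deployment that serves the Client-Server API somewhere else has to say
    # so explicitly, which is why the automatic default is backed out below:
    explicit = "https://matrix.example.com:8448/"
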
--- changelog.d/9313.bugfix | 1 + docs/sample_config.yaml | 20 +++++++++----------- synapse/api/urls.py | 2 ++ synapse/config/cas.py | 16 +++++++++------- synapse/config/emailconfig.py | 8 ++++++++ synapse/config/oidc_config.py | 5 ++++- synapse/config/registration.py | 21 +++++++++++++++++---- synapse/config/saml2_config.py | 2 ++ synapse/config/server.py | 13 ++++--------- synapse/config/sso.py | 13 ++++++++----- synapse/handlers/identity.py | 4 ++++ synapse/rest/well_known.py | 4 ++++ synapse/util/templates.py | 15 ++++++++++++--- tests/rest/client/v1/test_login.py | 4 +++- tests/rest/test_well_known.py | 9 +++++++++ tests/utils.py | 1 + 16 files changed, 97 insertions(+), 41 deletions(-) create mode 100644 changelog.d/9313.bugfix (limited to 'synapse/api') diff --git a/changelog.d/9313.bugfix b/changelog.d/9313.bugfix new file mode 100644 index 0000000000..f578fd13dd --- /dev/null +++ b/changelog.d/9313.bugfix @@ -0,0 +1 @@ +Do not automatically calculate `public_baseurl` since it can be wrong in some situations. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 236abd9a3f..d395da11b4 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -74,10 +74,6 @@ pid_file: DATADIR/homeserver.pid # Otherwise, it should be the URL to reach Synapse's client HTTP listener (see # 'listeners' below). # -# If this is left unset, it defaults to 'https:///'. (Note that -# that will not work unless you configure Synapse or a reverse-proxy to listen -# on port 443.) -# #public_baseurl: https://example.com/ # Set the soft limit on the number of file descriptors synapse can use @@ -1169,9 +1165,8 @@ account_validity: # send an email to the account's email address with a renewal link. By # default, no such emails are sent. # - # If you enable this setting, you will also need to fill out the 'email' - # configuration section. You should also check that 'public_baseurl' is set - # correctly. + # If you enable this setting, you will also need to fill out the 'email' and + # 'public_baseurl' configuration sections. # #renew_at: 1w @@ -1262,7 +1257,8 @@ account_validity: # The identity server which we suggest that clients should use when users log # in on this server. # -# (By default, no suggestion is made, so it is left up to the client.) +# (By default, no suggestion is made, so it is left up to the client. +# This setting is ignored unless public_baseurl is also set.) # #default_identity_server: https://matrix.org @@ -1287,6 +1283,8 @@ account_validity: # by the Matrix Identity Service API specification: # https://matrix.org/docs/spec/identity_service/latest # +# If a delegate is specified, the config option public_baseurl must also be filled out. +# account_threepid_delegates: #email: https://example.com # Delegate email sending to example.com #msisdn: http://localhost:8090 # Delegate SMS sending to this local process @@ -1938,9 +1936,9 @@ sso: # phishing attacks from evil.site. To avoid this, include a slash after the # hostname: "https://my.client/". # - # The login fallback page (used by clients that don't natively support the - # required login flows) is automatically whitelisted in addition to any URLs - # in this list. + # If public_baseurl is set, then the login fallback page (used by clients + # that don't natively support the required login flows) is whitelisted in + # addition to any URLs in this list. # # By default, this list is empty. 
# diff --git a/synapse/api/urls.py b/synapse/api/urls.py index e36aeef31f..6379c86dde 100644 --- a/synapse/api/urls.py +++ b/synapse/api/urls.py @@ -42,6 +42,8 @@ class ConsentURIBuilder: """ if hs_config.form_secret is None: raise ConfigError("form_secret not set in config") + if hs_config.public_baseurl is None: + raise ConfigError("public_baseurl not set in config") self._hmac_secret = hs_config.form_secret.encode("utf-8") self._public_baseurl = hs_config.public_baseurl diff --git a/synapse/config/cas.py b/synapse/config/cas.py index b226890c2a..aaa7eba110 100644 --- a/synapse/config/cas.py +++ b/synapse/config/cas.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ._base import Config +from ._base import Config, ConfigError class CasConfig(Config): @@ -30,13 +30,15 @@ class CasConfig(Config): if self.cas_enabled: self.cas_server_url = cas_config["server_url"] - public_base_url = cas_config.get("service_url") or self.public_baseurl - if public_base_url[-1] != "/": - public_base_url += "/" + + # The public baseurl is required because it is used by the redirect + # template. + public_baseurl = self.public_baseurl + if not public_baseurl: + raise ConfigError("cas_config requires a public_baseurl to be set") + # TODO Update this to a _synapse URL. - self.cas_service_url = ( - public_base_url + "_matrix/client/r0/login/cas/ticket" - ) + self.cas_service_url = public_baseurl + "_matrix/client/r0/login/cas/ticket" self.cas_displayname_attribute = cas_config.get("displayname_attribute") self.cas_required_attributes = cas_config.get("required_attributes") or {} else: diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py index 6a487afd34..d4328c46b9 100644 --- a/synapse/config/emailconfig.py +++ b/synapse/config/emailconfig.py @@ -166,6 +166,11 @@ class EmailConfig(Config): if not self.email_notif_from: missing.append("email.notif_from") + # public_baseurl is required to build password reset and validation links that + # will be emailed to users + if config.get("public_baseurl") is None: + missing.append("public_baseurl") + if missing: raise ConfigError( MISSING_PASSWORD_RESET_CONFIG_ERROR % (", ".join(missing),) @@ -264,6 +269,9 @@ class EmailConfig(Config): if not self.email_notif_from: missing.append("email.notif_from") + if config.get("public_baseurl") is None: + missing.append("public_baseurl") + if missing: raise ConfigError( "email.enable_notifs is True but required keys are missing: %s" diff --git a/synapse/config/oidc_config.py b/synapse/config/oidc_config.py index 4c24c50629..4d0f24a9d5 100644 --- a/synapse/config/oidc_config.py +++ b/synapse/config/oidc_config.py @@ -53,7 +53,10 @@ class OIDCConfig(Config): "Multiple OIDC providers have the idp_id %r." 
% idp_id ) - self.oidc_callback_url = self.public_baseurl + "_synapse/client/oidc/callback" + public_baseurl = self.public_baseurl + if public_baseurl is None: + raise ConfigError("oidc_config requires a public_baseurl to be set") + self.oidc_callback_url = public_baseurl + "_synapse/client/oidc/callback" @property def oidc_enabled(self) -> bool: diff --git a/synapse/config/registration.py b/synapse/config/registration.py index ac48913a0b..eb650af7fb 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -49,6 +49,10 @@ class AccountValidityConfig(Config): self.startup_job_max_delta = self.period * 10.0 / 100.0 + if self.renew_by_email_enabled: + if "public_baseurl" not in synapse_config: + raise ConfigError("Can't send renewal emails without 'public_baseurl'") + template_dir = config.get("template_dir") if not template_dir: @@ -105,6 +109,13 @@ class RegistrationConfig(Config): account_threepid_delegates = config.get("account_threepid_delegates") or {} self.account_threepid_delegate_email = account_threepid_delegates.get("email") self.account_threepid_delegate_msisdn = account_threepid_delegates.get("msisdn") + if self.account_threepid_delegate_msisdn and not self.public_baseurl: + raise ConfigError( + "The configuration option `public_baseurl` is required if " + "`account_threepid_delegate.msisdn` is set, such that " + "clients know where to submit validation tokens to. Please " + "configure `public_baseurl`." + ) self.default_identity_server = config.get("default_identity_server") self.allow_guest_access = config.get("allow_guest_access", False) @@ -227,9 +238,8 @@ class RegistrationConfig(Config): # send an email to the account's email address with a renewal link. By # default, no such emails are sent. # - # If you enable this setting, you will also need to fill out the 'email' - # configuration section. You should also check that 'public_baseurl' is set - # correctly. + # If you enable this setting, you will also need to fill out the 'email' and + # 'public_baseurl' configuration sections. # #renew_at: 1w @@ -320,7 +330,8 @@ class RegistrationConfig(Config): # The identity server which we suggest that clients should use when users log # in on this server. # - # (By default, no suggestion is made, so it is left up to the client.) + # (By default, no suggestion is made, so it is left up to the client. + # This setting is ignored unless public_baseurl is also set.) # #default_identity_server: https://matrix.org @@ -345,6 +356,8 @@ class RegistrationConfig(Config): # by the Matrix Identity Service API specification: # https://matrix.org/docs/spec/identity_service/latest # + # If a delegate is specified, the config option public_baseurl must also be filled out. 
+ # account_threepid_delegates: #email: https://example.com # Delegate email sending to example.com #msisdn: http://localhost:8090 # Delegate SMS sending to this local process diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py index ad865a667f..7226abd829 100644 --- a/synapse/config/saml2_config.py +++ b/synapse/config/saml2_config.py @@ -189,6 +189,8 @@ class SAML2Config(Config): import saml2 public_baseurl = self.public_baseurl + if public_baseurl is None: + raise ConfigError("saml2_config requires a public_baseurl to be set") if self.saml2_grandfathered_mxid_source_attribute: optional_attributes.add(self.saml2_grandfathered_mxid_source_attribute) diff --git a/synapse/config/server.py b/synapse/config/server.py index 47a0370173..5d72cf2d82 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -161,11 +161,7 @@ class ServerConfig(Config): self.print_pidfile = config.get("print_pidfile") self.user_agent_suffix = config.get("user_agent_suffix") self.use_frozen_dicts = config.get("use_frozen_dicts", False) - self.public_baseurl = config.get("public_baseurl") or "https://%s/" % ( - self.server_name, - ) - if self.public_baseurl[-1] != "/": - self.public_baseurl += "/" + self.public_baseurl = config.get("public_baseurl") # Whether to enable user presence. self.use_presence = config.get("use_presence", True) @@ -321,6 +317,9 @@ class ServerConfig(Config): # Always blacklist 0.0.0.0, :: self.federation_ip_range_blacklist.update(["0.0.0.0", "::"]) + if self.public_baseurl is not None: + if self.public_baseurl[-1] != "/": + self.public_baseurl += "/" self.start_pushers = config.get("start_pushers", True) # (undocumented) option for torturing the worker-mode replication a bit, @@ -748,10 +747,6 @@ class ServerConfig(Config): # Otherwise, it should be the URL to reach Synapse's client HTTP listener (see # 'listeners' below). # - # If this is left unset, it defaults to 'https:///'. (Note that - # that will not work unless you configure Synapse or a reverse-proxy to listen - # on port 443.) - # #public_baseurl: https://example.com/ # Set the soft limit on the number of file descriptors synapse can use diff --git a/synapse/config/sso.py b/synapse/config/sso.py index 6c60c6fea4..19bdfd462b 100644 --- a/synapse/config/sso.py +++ b/synapse/config/sso.py @@ -64,8 +64,11 @@ class SSOConfig(Config): # gracefully to the client). This would make it pointless to ask the user for # confirmation, since the URL the confirmation page would be showing wouldn't be # the client's. - login_fallback_url = self.public_baseurl + "_matrix/static/client/login" - self.sso_client_whitelist.append(login_fallback_url) + # public_baseurl is an optional setting, so we only add the fallback's URL to the + # list if it's provided (because we can't figure out what that URL is otherwise). + if self.public_baseurl: + login_fallback_url = self.public_baseurl + "_matrix/static/client/login" + self.sso_client_whitelist.append(login_fallback_url) def generate_config_section(self, **kwargs): return """\ @@ -83,9 +86,9 @@ class SSOConfig(Config): # phishing attacks from evil.site. To avoid this, include a slash after the # hostname: "https://my.client/". # - # The login fallback page (used by clients that don't natively support the - # required login flows) is automatically whitelisted in addition to any URLs - # in this list. 
+ # If public_baseurl is set, then the login fallback page (used by clients + # that don't natively support the required login flows) is whitelisted in + # addition to any URLs in this list. # # By default, this list is empty. # diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 4f7137539b..8fc1e8b91c 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -504,6 +504,10 @@ class IdentityHandler(BaseHandler): except RequestTimedOutError: raise SynapseError(500, "Timed out contacting identity server") + # It is already checked that public_baseurl is configured since this code + # should only be used if account_threepid_delegate_msisdn is true. + assert self.hs.config.public_baseurl + # we need to tell the client to send the token back to us, since it doesn't # otherwise know where to send it, so add submit_url response parameter # (see also MSC2078) diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py index 241fe746d9..f591cc6c5c 100644 --- a/synapse/rest/well_known.py +++ b/synapse/rest/well_known.py @@ -34,6 +34,10 @@ class WellKnownBuilder: self._config = hs.config def get_well_known(self): + # if we don't have a public_baseurl, we can't help much here. + if self._config.public_baseurl is None: + return None + result = {"m.homeserver": {"base_url": self._config.public_baseurl}} if self._config.default_identity_server: diff --git a/synapse/util/templates.py b/synapse/util/templates.py index 7e5109d206..392dae4a40 100644 --- a/synapse/util/templates.py +++ b/synapse/util/templates.py @@ -17,7 +17,7 @@ import time import urllib.parse -from typing import TYPE_CHECKING, Callable, Iterable, Union +from typing import TYPE_CHECKING, Callable, Iterable, Optional, Union import jinja2 @@ -74,14 +74,23 @@ def build_jinja_env( return env -def _create_mxc_to_http_filter(public_baseurl: str) -> Callable: +def _create_mxc_to_http_filter( + public_baseurl: Optional[str], +) -> Callable[[str, int, int, str], str]: """Create and return a jinja2 filter that converts MXC urls to HTTP Args: public_baseurl: The public, accessible base URL of the homeserver """ - def mxc_to_http_filter(value, width, height, resize_method="crop"): + def mxc_to_http_filter( + value: str, width: int, height: int, resize_method: str = "crop" + ) -> str: + if not public_baseurl: + raise RuntimeError( + "public_baseurl must be set in the homeserver config to convert MXC URLs to HTTP URLs." 
+ ) + if value[0:6] != "mxc://": return "" diff --git a/tests/rest/client/v1/test_login.py b/tests/rest/client/v1/test_login.py index 66dfdaffbc..bfcb786af8 100644 --- a/tests/rest/client/v1/test_login.py +++ b/tests/rest/client/v1/test_login.py @@ -672,10 +672,12 @@ class CASTestCase(unittest.HomeserverTestCase): self.redirect_path = "_synapse/client/login/sso/redirect/confirm" config = self.default_config() + config["public_baseurl"] = ( + config.get("public_baseurl") or "https://matrix.goodserver.com:8448" + ) config["cas_config"] = { "enabled": True, "server_url": CAS_SERVER, - "service_url": "https://matrix.goodserver.com:8448", } cas_user_id = "username" diff --git a/tests/rest/test_well_known.py b/tests/rest/test_well_known.py index c5e44af9f7..14de0921be 100644 --- a/tests/rest/test_well_known.py +++ b/tests/rest/test_well_known.py @@ -40,3 +40,12 @@ class WellKnownTests(unittest.HomeserverTestCase): "m.identity_server": {"base_url": "https://testis"}, }, ) + + def test_well_known_no_public_baseurl(self): + self.hs.config.public_baseurl = None + + channel = self.make_request( + "GET", "/.well-known/matrix/client", shorthand=False + ) + + self.assertEqual(channel.code, 404) diff --git a/tests/utils.py b/tests/utils.py index 68033d7535..840b657f82 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -159,6 +159,7 @@ def default_config(name, parse=False): }, "rc_3pid_validation": {"per_second": 10000, "burst_count": 10000}, "saml2_enabled": False, + "public_baseurl": None, "default_identity_server": None, "key_refresh_interval": 24 * 60 * 60 * 1000, "old_signing_keys": {}, -- cgit 1.5.1
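Taken together, the backout makes public_baseurl optional again, so every consumer has to tolerate it being unset, as the sso.py, well_known.py and templates.py hunks above do. A rough sketch of that guard pattern (the helper name and shape are illustrative only, not Synapse's actual API):

    from typing import List, Optional


    def login_fallback_whitelist(public_baseurl: Optional[str]) -> List[str]:
        """Return the extra SSO whitelist entries derivable from public_baseurl."""
        if not public_baseurl:
            # Without a public_baseurl we cannot know the fallback page's URL.
            return []
        if not public_baseurl.endswith("/"):
            public_baseurl += "/"
        return [public_baseurl + "_matrix/static/client/login"]


    assert login_fallback_whitelist(None) == []
    assert login_fallback_whitelist("https://example.com") == [
        "https://example.com/_matrix/static/client/login"
    ]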