path: root/synapse/api
Diffstat (limited to '')
-rw-r--r--  synapse/api/auth/__init__.py              4
-rw-r--r--  synapse/api/auth/msc3861_delegated.py   291
-rw-r--r--  synapse/api/auth_blocking.py             20
-rw-r--r--  synapse/api/constants.py                 36
-rw-r--r--  synapse/api/errors.py                    27
-rw-r--r--  synapse/api/ratelimiting.py              49
-rw-r--r--  synapse/api/room_versions.py             95
-rw-r--r--  synapse/api/urls.py                      45
8 files changed, 413 insertions, 154 deletions
diff --git a/synapse/api/auth/__init__.py b/synapse/api/auth/__init__.py

index d5241afe73..1b801d3ad3 100644
--- a/synapse/api/auth/__init__.py
+++ b/synapse/api/auth/__init__.py
@@ -18,9 +18,7 @@
 # [This file includes modifications made by New Vector Limited]
 #
 #
-from typing import TYPE_CHECKING, Optional, Tuple
-
-from typing_extensions import Protocol
+from typing import TYPE_CHECKING, Optional, Protocol, Tuple
 
 from twisted.web.server import Request
 
diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py
index 7361666c77..e500a06afe 100644
--- a/synapse/api/auth/msc3861_delegated.py
+++ b/synapse/api/auth/msc3861_delegated.py
@@ -19,7 +19,8 @@
 #
 #
 import logging
-from typing import TYPE_CHECKING, Any, Dict, List, Optional
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional
 from urllib.parse import urlencode
 
 from authlib.oauth2 import ClientAuth
@@ -38,15 +39,16 @@ from synapse.api.errors import (
     HttpResponseException,
     InvalidClientTokenError,
     OAuthInsufficientScopeError,
-    StoreError,
     SynapseError,
     UnrecognizedRequestError,
 )
 from synapse.http.site import SynapseRequest
 from synapse.logging.context import make_deferred_yieldable
+from synapse.logging.opentracing import active_span, force_tracing, start_active_span
 from synapse.types import Requester, UserID, create_requester
 from synapse.util import json_decoder
 from synapse.util.caches.cached_call import RetryOnExceptionCachedCall
+from synapse.util.caches.response_cache import ResponseCache, ResponseCacheContext
 
 if TYPE_CHECKING:
     from synapse.rest.admin.experimental_features import ExperimentalFeature
@@ -76,6 +78,61 @@ def scope_to_list(scope: str) -> List[str]:
     return scope.strip().split(" ")
 
 
+@dataclass
+class IntrospectionResult:
+    _inner: IntrospectionToken
+
+    # when we retrieved this token,
+    # in milliseconds since the Unix epoch
+    retrieved_at_ms: int
+
+    def is_active(self, now_ms: int) -> bool:
+        if not self._inner.get("active"):
+            return False
+
+        expires_in = self._inner.get("expires_in")
+        if expires_in is None:
+            return True
+        if not isinstance(expires_in, int):
+            raise InvalidClientTokenError("token `expires_in` is not an int")
+
+        absolute_expiry_ms = expires_in * 1000 + self.retrieved_at_ms
+        return now_ms < absolute_expiry_ms
+
+    def get_scope_list(self) -> List[str]:
+        value = self._inner.get("scope")
+        if not isinstance(value, str):
+            return []
+        return scope_to_list(value)
+
+    def get_sub(self) -> Optional[str]:
+        value = self._inner.get("sub")
+        if not isinstance(value, str):
+            return None
+        return value
+
+    def get_username(self) -> Optional[str]:
+        value = self._inner.get("username")
+        if not isinstance(value, str):
+            return None
+        return value
+
+    def get_name(self) -> Optional[str]:
+        value = self._inner.get("name")
+        if not isinstance(value, str):
+            return None
+        return value
+
+    def get_device_id(self) -> Optional[str]:
+        value = self._inner.get("device_id")
+        if value is not None and not isinstance(value, str):
+            raise AuthError(
+                500,
+                "Invalid device ID in introspection result",
+            )
+        return value
+
+
 class PrivateKeyJWTWithKid(PrivateKeyJWT):  # type: ignore[misc]
     """An implementation of the private_key_jwt client auth method that includes a kid header.
 
@@ -119,9 +176,39 @@ class MSC3861DelegatedAuth(BaseAuth):
         self._clock = hs.get_clock()
         self._http_client = hs.get_proxied_http_client()
         self._hostname = hs.hostname
-        self._admin_token = self._config.admin_token
+        self._admin_token: Callable[[], Optional[str]] = self._config.admin_token
+        self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users
+
+        # # Token Introspection Cache
+        # This remembers what users/devices are represented by which access tokens,
+        # in order to reduce overall system load:
+        # - on Synapse (as requests are relatively expensive)
+        # - on the network
+        # - on MAS
+        #
+        # Since there is no invalidation mechanism currently,
+        # the entries expire after 2 minutes.
+        # This does mean tokens can be treated as valid by Synapse
+        # for longer than reality.
+        #
+        # Ideally, tokens should logically be invalidated in the following circumstances:
+        # - If a session logout happens.
+        #   In this case, MAS will delete the device within Synapse
+        #   anyway and this is good enough as an invalidation.
+        # - If the client refreshes their token in MAS.
+        #   In this case, the device still exists and it's not the end of the world for
+        #   the old access token to continue working for a short time.
+        self._introspection_cache: ResponseCache[str] = ResponseCache(
+            self._clock,
+            "token_introspection",
+            timeout_ms=120_000,
+            # don't log because the keys are access tokens
+            enable_logging=False,
+        )
 
-        self._issuer_metadata = RetryOnExceptionCachedCall(self._load_metadata)
+        self._issuer_metadata = RetryOnExceptionCachedCall[OpenIDProviderMetadata](
+            self._load_metadata
+        )
 
         if isinstance(auth_method, PrivateKeyJWTWithKid):
             # Use the JWK as the client secret when using the private_key_jwt method
@@ -131,9 +218,10 @@ class MSC3861DelegatedAuth(BaseAuth):
             )
         else:
             # Else use the client secret
-            assert self._config.client_secret, "No client_secret provided"
+            client_secret = self._config.client_secret()
+            assert client_secret, "No client_secret provided"
             self._client_auth = ClientAuth(
-                self._config.client_id, self._config.client_secret, auth_method
+                self._config.client_id, client_secret, auth_method
             )
 
     async def _load_metadata(self) -> OpenIDProviderMetadata:
@@ -145,6 +233,39 @@ class MSC3861DelegatedAuth(BaseAuth):
         # metadata.validate_introspection_endpoint()
         return metadata
 
+    async def issuer(self) -> str:
+        """
+        Get the configured issuer
+
+        This will use the issuer value set in the metadata,
+        falling back to the one set in the config if not set in the metadata
+        """
+        metadata = await self._issuer_metadata.get()
+        return metadata.issuer or self._config.issuer
+
+    async def account_management_url(self) -> Optional[str]:
+        """
+        Get the configured account management URL
+
+        This will discover the account management URL from the issuer if it's not set in the config
+        """
+        if self._config.account_management_url is not None:
+            return self._config.account_management_url
+
+        try:
+            metadata = await self._issuer_metadata.get()
+            return metadata.get("account_management_uri", None)
+        # We don't want to raise here if we can't load the metadata
+        except Exception:
+            logger.warning("Failed to load metadata:", exc_info=True)
+            return None
+
+    async def auth_metadata(self) -> Dict[str, Any]:
+        """
+        Returns the auth metadata dict
+        """
+        return await self._issuer_metadata.get()
+
     async def _introspection_endpoint(self) -> str:
         """
         Returns the introspection endpoint of the issuer
@@ -154,10 +275,12 @@ class MSC3861DelegatedAuth(BaseAuth):
         if self._config.introspection_endpoint is not None:
             return self._config.introspection_endpoint
 
-        metadata = await self._load_metadata()
+        metadata = await self._issuer_metadata.get()
         return metadata.get("introspection_endpoint")
 
-    async def _introspect_token(self, token: str) -> IntrospectionToken:
+    async def _introspect_token(
+        self, token: str, cache_context: ResponseCacheContext[str]
+    ) -> IntrospectionResult:
         """
         Send a token to the introspection endpoint and returns the introspection response
 
@@ -173,11 +296,16 @@ class MSC3861DelegatedAuth(BaseAuth):
         Returns:
             The introspection response
         """
+        # By default, we shouldn't cache the result unless we know it's valid
+        cache_context.should_cache = False
         introspection_endpoint = await self._introspection_endpoint()
         raw_headers: Dict[str, str] = {
             "Content-Type": "application/x-www-form-urlencoded",
             "User-Agent": str(self._http_client.user_agent, "utf-8"),
             "Accept": "application/json",
+            # Tell MAS that we support reading the device ID as an explicit
+            # value, not encoded in the scope. This is supported by MAS 0.15+
+            "X-MAS-Supports-Device-Id": "1",
         }
 
         args = {"token": token, "token_type_hint": "access_token"}
@@ -227,7 +355,11 @@ class MSC3861DelegatedAuth(BaseAuth):
                 "The introspection endpoint returned an invalid JSON response."
             )
 
-        return IntrospectionToken(**resp)
+        # We had a valid response, so we can cache it
+        cache_context.should_cache = True
+        return IntrospectionResult(
+            IntrospectionToken(**resp), retrieved_at_ms=self._clock.time_msec()
+        )
 
     async def is_server_admin(self, requester: Requester) -> bool:
         return "urn:synapse:admin:*" in requester.scope
@@ -239,6 +371,55 @@ class MSC3861DelegatedAuth(BaseAuth):
         allow_expired: bool = False,
         allow_locked: bool = False,
     ) -> Requester:
+        """Get a registered user's ID.
+
+        Args:
+            request: An HTTP request with an access_token query parameter.
+            allow_guest: If False, will raise an AuthError if the user making the
+                request is a guest.
+            allow_expired: If True, allow the request through even if the account
+                is expired, or session token lifetime has ended. Note that
+                /login will deliver access tokens regardless of expiration.
+
+        Returns:
+            Resolves to the requester
+        Raises:
+            InvalidClientCredentialsError if no user by that token exists or the token
+                is invalid.
+            AuthError if access is denied for the user in the access token
+        """
+        parent_span = active_span()
+        with start_active_span("get_user_by_req"):
+            requester = await self._wrapped_get_user_by_req(
+                request, allow_guest, allow_expired, allow_locked
+            )
+
+            if parent_span:
+                if requester.authenticated_entity in self._force_tracing_for_users:
+                    # request tracing is enabled for this user, so we need to force it
+                    # tracing on for the parent span (which will be the servlet span).
+                    #
+                    # It's too late for the get_user_by_req span to inherit the setting,
+                    # so we also force it on for that.
+                    force_tracing()
+                    force_tracing(parent_span)
+                parent_span.set_tag(
+                    "authenticated_entity", requester.authenticated_entity
+                )
+                parent_span.set_tag("user_id", requester.user.to_string())
+                if requester.device_id is not None:
+                    parent_span.set_tag("device_id", requester.device_id)
+                if requester.app_service is not None:
+                    parent_span.set_tag("appservice_id", requester.app_service.id)
+            return requester
+
+    async def _wrapped_get_user_by_req(
+        self,
+        request: SynapseRequest,
+        allow_guest: bool = False,
+        allow_expired: bool = False,
+        allow_locked: bool = False,
+    ) -> Requester:
         access_token = self.get_access_token_from_request(request)
 
         requester = await self.get_appservice_user(request, access_token)
@@ -248,7 +429,7 @@ class MSC3861DelegatedAuth(BaseAuth):
             requester = await self.get_user_by_access_token(access_token, allow_expired)
 
         # Do not record requests from MAS using the virtual `__oidc_admin` user.
-        if access_token != self._admin_token:
+        if access_token != self._admin_token():
             await self._record_request(request, requester)
 
         if not allow_guest and requester.is_guest:
@@ -289,7 +470,8 @@ class MSC3861DelegatedAuth(BaseAuth):
         token: str,
         allow_expired: bool = False,
     ) -> Requester:
-        if self._admin_token is not None and token == self._admin_token:
+        admin_token = self._admin_token()
+        if admin_token is not None and token == admin_token:
             # XXX: This is a temporary solution so that the admin API can be called by
             # the OIDC provider. This will be removed once we have OIDC client
             # credentials grant support in matrix-authentication-service.
@@ -304,20 +486,22 @@ class MSC3861DelegatedAuth(BaseAuth):
             )
 
         try:
-            introspection_result = await self._introspect_token(token)
+            introspection_result = await self._introspection_cache.wrap(
+                token, self._introspect_token, token, cache_context=True
+            )
         except Exception:
             logger.exception("Failed to introspect token")
             raise SynapseError(503, "Unable to introspect the access token")
 
-        logger.info(f"Introspection result: {introspection_result!r}")
+        logger.debug("Introspection result: %r", introspection_result)
 
         # TODO: introspection verification should be more extensive, especially:
         #   - verify the audience
-        if not introspection_result.get("active"):
+        if not introspection_result.is_active(self._clock.time_msec()):
             raise InvalidClientTokenError("Token is not active")
 
         # Let's look at the scope
-        scope: List[str] = scope_to_list(introspection_result.get("scope", ""))
+        scope: List[str] = introspection_result.get_scope_list()
 
         # Determine type of user based on presence of particular scopes
         has_user_scope = SCOPE_MATRIX_API in scope
@@ -327,7 +511,7 @@ class MSC3861DelegatedAuth(BaseAuth):
             raise InvalidClientTokenError("No scope in token granting user rights")
 
         # Match via the sub claim
-        sub: Optional[str] = introspection_result.get("sub")
+        sub = introspection_result.get_sub()
         if sub is None:
             raise InvalidClientTokenError(
                 "Invalid sub claim in the introspection result"
@@ -340,29 +524,20 @@ class MSC3861DelegatedAuth(BaseAuth):
             )
 
             # If we could not find a user via the external_id, it either does not exist,
             # or the external_id was never recorded
-            # TODO: claim mapping should be configurable
-            username: Optional[str] = introspection_result.get("username")
-            if username is None or not isinstance(username, str):
+            username = introspection_result.get_username()
+            if username is None:
                 raise AuthError(
                     500,
                     "Invalid username claim in the introspection result",
                 )
             user_id = UserID(username, self._hostname)
 
-            # First try to find a user from the username claim
+            # Try to find a user from the username claim
            user_info = await self.store.get_user_by_id(user_id=user_id.to_string())
             if user_info is None:
-                # If the user does not exist, we should create it on the fly
-                # TODO: we could use SCIM to provision users ahead of time and listen
-                # for SCIM SET events if those ever become standard:
-                # https://datatracker.ietf.org/doc/html/draft-hunt-scim-notify-00
-
-                # TODO: claim mapping should be configurable
-                # If present, use the name claim as the displayname
-                name: Optional[str] = introspection_result.get("name")
-
-                await self.store.register_user(
-                    user_id=user_id.to_string(), create_profile_with_displayname=name
+                raise AuthError(
+                    500,
+                    "User not found",
                 )
 
             # And record the sub as external_id
@@ -372,42 +547,40 @@ class MSC3861DelegatedAuth(BaseAuth):
         else:
             user_id = UserID.from_string(user_id_str)
 
-        # Find device_ids in scope
-        # We only allow a single device_id in the scope, so we find them all in the
-        # scope list, and raise if there are more than one. The OIDC server should be
-        # the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
-        device_ids = [
-            tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :]
-            for tok in scope
-            if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX)
-        ]
-
-        if len(device_ids) > 1:
-            raise AuthError(
-                500,
-                "Multiple device IDs in scope",
-            )
+        # MAS 0.15+ will give us the device ID as an explicit value for compatibility sessions
+        # If present, we get it from here, if not we get it in the scope
+        device_id = introspection_result.get_device_id()
+        if device_id is None:
+            # Find device_ids in scope
+            # We only allow a single device_id in the scope, so we find them all in the
+            # scope list, and raise if there are more than one. The OIDC server should be
+            # the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
+            device_ids = [
+                tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :]
+                for tok in scope
+                if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX)
+            ]
+
+            if len(device_ids) > 1:
+                raise AuthError(
+                    500,
+                    "Multiple device IDs in scope",
+                )
+
+            device_id = device_ids[0] if device_ids else None
 
-        device_id = device_ids[0] if device_ids else None
         if device_id is not None:
             # Sanity check the device_id
             if len(device_id) > 255 or len(device_id) < 1:
                 raise AuthError(
                     500,
-                    "Invalid device ID in scope",
+                    "Invalid device ID in introspection result",
                 )
 
-            # Create the device on the fly if it does not exist
-            try:
-                await self.store.get_device(
-                    user_id=user_id.to_string(), device_id=device_id
-                )
-            except StoreError:
-                await self.store.store_device(
-                    user_id=user_id.to_string(),
-                    device_id=device_id,
-                    initial_device_display_name="OIDC-native client",
-                )
+            # Make sure the device exists
+            await self.store.get_device(
+                user_id=user_id.to_string(), device_id=device_id
+            )
 
         # TODO: there are a few things missing in the requester here, which still need
         # to be figured out, like:
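The introspection-caching change is easier to see outside of Synapse's plumbing. Below is a minimal, self-contained sketch (assumed names throughout, not Synapse's actual classes) of caching a token-introspection response for a short TTL while still honouring the token's own `expires_in`, which is what the new `IntrospectionResult.is_active()` plus the two-minute `ResponseCache` achieve:

# Standalone sketch (assumed names, not Synapse code): cache introspection
# responses for a short TTL and re-check the token's own expiry on every use.
import time
from typing import Callable, Dict, Optional

CACHE_TTL_MS = 120_000  # mirrors the 2-minute introspection cache above


class CachedIntrospection:
    def __init__(self, raw: Dict[str, object], retrieved_at_ms: int) -> None:
        self.raw = raw
        self.retrieved_at_ms = retrieved_at_ms

    def is_active(self, now_ms: int) -> bool:
        if not self.raw.get("active"):
            return False
        expires_in = self.raw.get("expires_in")
        if expires_in is None:
            return True  # no expiry advertised: trust the cached "active" flag
        if not isinstance(expires_in, int):
            return False  # malformed response: refuse rather than guess
        return now_ms < self.retrieved_at_ms + expires_in * 1000


_cache: Dict[str, CachedIntrospection] = {}


def introspect(token: str, fetch: Callable[[str], Dict[str, object]]) -> CachedIntrospection:
    """`fetch` stands in for the HTTP POST to the introspection endpoint."""
    now_ms = int(time.time() * 1000)
    entry = _cache.get(token)
    if entry is None or now_ms - entry.retrieved_at_ms > CACHE_TTL_MS:
        entry = CachedIntrospection(fetch(token), now_ms)
        _cache[token] = entry  # only successful fetches reach this point
    return entry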
diff --git a/synapse/api/auth_blocking.py b/synapse/api/auth_blocking.py
index 303c9ba03e..a56ffd58e4 100644
--- a/synapse/api/auth_blocking.py
+++ b/synapse/api/auth_blocking.py
@@ -24,7 +24,6 @@ from typing import TYPE_CHECKING, Optional
 
 from synapse.api.constants import LimitBlockingTypes, UserTypes
 from synapse.api.errors import Codes, ResourceLimitError
-from synapse.config.server import is_threepid_reserved
 from synapse.types import Requester
 
 if TYPE_CHECKING:
@@ -43,16 +42,13 @@ class AuthBlocking:
         self._admin_contact = hs.config.server.admin_contact
         self._max_mau_value = hs.config.server.max_mau_value
         self._limit_usage_by_mau = hs.config.server.limit_usage_by_mau
-        self._mau_limits_reserved_threepids = (
-            hs.config.server.mau_limits_reserved_threepids
-        )
         self._is_mine_server_name = hs.is_mine_server_name
         self._track_appservice_user_ips = hs.config.appservice.track_appservice_user_ips
 
     async def check_auth_blocking(
         self,
         user_id: Optional[str] = None,
-        threepid: Optional[dict] = None,
+        threepid: Optional[str] = None,  # Not used in this method, but kept for compatibility
         user_type: Optional[str] = None,
         requester: Optional[Requester] = None,
     ) -> None:
@@ -63,12 +59,6 @@ class AuthBlocking:
             user_id: If present, checks for presence against existing
                 MAU cohort
 
-            threepid: If present, checks for presence against configured
-                reserved threepid. Used in cases where the user is trying register
-                with a MAU blocked server, normally they would be rejected but their
-                threepid is on the reserved list. user_id and
-                threepid should never be set at the same time.
-
             user_type: If present, is used to decide whether to check against
                 certain blocking reasons like MAU.
@@ -111,9 +101,8 @@ class AuthBlocking:
                 admin_contact=self._admin_contact,
                 limit_type=LimitBlockingTypes.HS_DISABLED,
             )
-        if self._limit_usage_by_mau is True:
-            assert not (user_id and threepid)
 
+        if self._limit_usage_by_mau is True:
             # If the user is already part of the MAU cohort or a trial user
             if user_id:
                 timestamp = await self.store.user_last_seen_monthly_active(user_id)
@@ -123,11 +112,6 @@ class AuthBlocking:
                 is_trial = await self.store.is_trial_user(user_id)
                 if is_trial:
                     return
-            elif threepid:
-                # If the user does not exist yet, but is signing up with a
-                # reserved threepid then pass auth check
-                if is_threepid_reserved(self._mau_limits_reserved_threepids, threepid):
-                    return
             elif user_type == UserTypes.SUPPORT:
                 # If the user does not exist yet and is of type "support",
                 # allow registration. Support users are excluded from MAU checks.
diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index 7dcb1e01fd..cd2ebf2cc3 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -29,8 +29,13 @@ from typing import Final
 # the max size of a (canonical-json-encoded) event
 MAX_PDU_SIZE = 65536
 
-# the "depth" field on events is limited to 2**63 - 1
-MAX_DEPTH = 2**63 - 1
+# Max/min size of ints in canonical JSON
+CANONICALJSON_MAX_INT = (2**53) - 1
+CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT
+
+# the "depth" field on events is limited to the same as what
+# canonicaljson accepts
+MAX_DEPTH = CANONICALJSON_MAX_INT
 
 # the maximum length for a room alias is 255 characters
 MAX_ALIAS_LENGTH = 255
@@ -81,8 +86,6 @@ class RestrictedJoinRuleTypes:
 
 class LoginType:
     PASSWORD: Final = "m.login.password"
-    EMAIL_IDENTITY: Final = "m.login.email.identity"
-    MSISDN: Final = "m.login.msisdn"
     RECAPTCHA: Final = "m.login.recaptcha"
     TERMS: Final = "m.login.terms"
     SSO: Final = "m.login.sso"
@@ -180,12 +183,18 @@ ServerNoticeLimitReached: Final = "m.server_notice.usage_limit_reached"
 
 class UserTypes:
     """Allows for user type specific behaviour. With the benefit of hindsight
-    'admin' and 'guest' users should also be UserTypes. Normal users are type None
+    'admin' and 'guest' users should also be UserTypes. Extra user types can be
+    added in the configuration. Normal users are type None or one of the extra
+    user types (if configured).
     """
 
     SUPPORT: Final = "support"
     BOT: Final = "bot"
-    ALL_USER_TYPES: Final = (SUPPORT, BOT)
+    ALL_BUILTIN_USER_TYPES: Final = (SUPPORT, BOT)
+    """
+    The user types that are built-in to Synapse. Extra user types can be
+    added in the configuration.
+    """
 
 
 class RelationTypes:
@@ -230,6 +239,10 @@ class EventContentFields:
 
     ROOM_NAME: Final = "name"
 
+    MEMBERSHIP: Final = "membership"
+    MEMBERSHIP_DISPLAYNAME: Final = "displayname"
+    MEMBERSHIP_AVATAR_URL: Final = "avatar_url"
+
     # Used in m.room.guest_access events.
     GUEST_ACCESS: Final = "guest_access"
 
@@ -245,6 +258,8 @@ class EventContentFields:
     # `m.room.encryption`` algorithm field
     ENCRYPTION_ALGORITHM: Final = "algorithm"
 
+    TOMBSTONE_SUCCESSOR_ROOM: Final = "replacement_room"
+
 
 class EventUnsignedContentFields:
     """Fields found inside the 'unsigned' data on events"""
@@ -269,6 +284,10 @@ class AccountDataTypes:
     IGNORED_USER_LIST: Final = "m.ignored_user_list"
     TAG: Final = "m.tag"
     PUSH_RULES: Final = "m.push_rules"
+    # MSC4155: Invite filtering
+    MSC4155_INVITE_PERMISSION_CONFIG: Final = (
+        "org.matrix.msc4155.invite_permission_config"
+    )
 
 
 class HistoryVisibility:
@@ -314,3 +333,8 @@ class ApprovalNoticeMedium:
 class Direction(enum.Enum):
     BACKWARDS = "b"
     FORWARDS = "f"
+
+
+class ProfileFields:
+    DISPLAYNAME: Final = "displayname"
+    AVATAR_URL: Final = "avatar_url"
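The `MAX_DEPTH` change is about representability rather than policy: canonical JSON can only carry integers that fit exactly in an IEEE-754 double. A small sketch, with a hypothetical `check_canonicaljson_int` helper that is not part of the diff:

# Sketch with a hypothetical helper (not part of the diff): depths must now fit
# the canonical-JSON integer range, i.e. what an IEEE-754 double stores exactly.
CANONICALJSON_MAX_INT = (2**53) - 1
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT
MAX_DEPTH = CANONICALJSON_MAX_INT


def check_canonicaljson_int(value: int) -> None:
    """Reject integers that a canonical-JSON encoder cannot carry faithfully."""
    if not (CANONICALJSON_MIN_INT <= value <= CANONICALJSON_MAX_INT):
        raise ValueError(f"{value} is outside the canonical JSON integer range")


check_canonicaljson_int(MAX_DEPTH)  # fine: 2**53 - 1 is representable
# check_canonicaljson_int(2**63 - 1) would raise: the old MAX_DEPTH does not fit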
diff --git a/synapse/api/errors.py b/synapse/api/errors.py
index e6efa7a424..a095fb195b 100644
--- a/synapse/api/errors.py
+++ b/synapse/api/errors.py
@@ -65,11 +65,8 @@ class Codes(str, Enum):
     INVALID_PARAM = "M_INVALID_PARAM"
     TOO_LARGE = "M_TOO_LARGE"
     EXCLUSIVE = "M_EXCLUSIVE"
-    THREEPID_AUTH_FAILED = "M_THREEPID_AUTH_FAILED"
-    THREEPID_IN_USE = "M_THREEPID_IN_USE"
-    THREEPID_NOT_FOUND = "M_THREEPID_NOT_FOUND"
-    THREEPID_DENIED = "M_THREEPID_DENIED"
     INVALID_USERNAME = "M_INVALID_USERNAME"
+    THREEPID_MEDIUM_NOT_SUPPORTED = "M_THREEPID_MEDIUM_NOT_SUPPORTED"  # Kept around for throwing when 3PID is attempted
     SERVER_NOT_TRUSTED = "M_SERVER_NOT_TRUSTED"
     CONSENT_NOT_GIVEN = "M_CONSENT_NOT_GIVEN"
     CANNOT_LEAVE_SERVER_NOTICE_ROOM = "M_CANNOT_LEAVE_SERVER_NOTICE_ROOM"
@@ -87,8 +84,7 @@ class Codes(str, Enum):
     WEAK_PASSWORD = "M_WEAK_PASSWORD"
     INVALID_SIGNATURE = "M_INVALID_SIGNATURE"
     USER_DEACTIVATED = "M_USER_DEACTIVATED"
-    # USER_LOCKED = "M_USER_LOCKED"
-    USER_LOCKED = "ORG_MATRIX_MSC3939_USER_LOCKED"
+    USER_LOCKED = "M_USER_LOCKED"
     NOT_YET_UPLOADED = "M_NOT_YET_UPLOADED"
     CANNOT_OVERWRITE_MEDIA = "M_CANNOT_OVERWRITE_MEDIA"
 
@@ -101,8 +97,9 @@ class Codes(str, Enum):
     # The account has been suspended on the server.
     # By opposition to `USER_DEACTIVATED`, this is a reversible measure
     # that can possibly be appealed and reverted.
-    # Part of MSC3823.
-    USER_ACCOUNT_SUSPENDED = "ORG.MATRIX.MSC3823.USER_ACCOUNT_SUSPENDED"
+    # Introduced by MSC3823
+    # https://github.com/matrix-org/matrix-spec-proposals/pull/3823
+    USER_ACCOUNT_SUSPENDED = "M_USER_SUSPENDED"
     BAD_ALIAS = "M_BAD_ALIAS"
     # For restricted join rules.
@@ -132,6 +129,13 @@ class Codes(str, Enum):
     # connection.
     UNKNOWN_POS = "M_UNKNOWN_POS"
 
+    # Part of MSC4133
+    PROFILE_TOO_LARGE = "M_PROFILE_TOO_LARGE"
+    KEY_TOO_LARGE = "M_KEY_TOO_LARGE"
+
+    # Part of MSC4155
+    INVITE_BLOCKED = "ORG.MATRIX.MSC4155.M_INVITE_BLOCKED"
+
 
 class CodeMessageException(RuntimeError):
     """An exception with integer code, a message string attributes and optional headers.
@@ -573,13 +577,6 @@ class UnsupportedRoomVersionError(SynapseError):
         )
 
 
-class ThreepidValidationError(SynapseError):
-    """An error raised when there was a problem authorising an event."""
-
-    def __init__(self, msg: str, errcode: str = Codes.FORBIDDEN):
-        super().__init__(400, msg, errcode)
-
-
 class IncompatibleRoomVersionError(SynapseError):
     """A server is trying to join a room whose version it does not support.
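Because `Codes` subclasses both `str` and `Enum`, the stabilised identifiers above serialise directly into Matrix error bodies. A standalone illustration (not Synapse code):

# Standalone illustration (not Synapse code): a str-valued Enum member encodes
# as its wire identifier, so the stabilised codes appear as-is in error bodies.
import json
from enum import Enum


class Codes(str, Enum):
    USER_LOCKED = "M_USER_LOCKED"
    USER_ACCOUNT_SUSPENDED = "M_USER_SUSPENDED"


body = json.dumps({"errcode": Codes.USER_LOCKED, "error": "This account has been locked"})
print(body)  # {"errcode": "M_USER_LOCKED", "error": "This account has been locked"}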
diff --git a/synapse/api/ratelimiting.py b/synapse/api/ratelimiting.py
index b80630c5d3..4f3bf8f770 100644
--- a/synapse/api/ratelimiting.py
+++ b/synapse/api/ratelimiting.py
@@ -20,8 +20,7 @@
 #
 #
 
-from collections import OrderedDict
-from typing import Hashable, Optional, Tuple
+from typing import TYPE_CHECKING, Dict, Hashable, Optional, Tuple
 
 from synapse.api.errors import LimitExceededError
 from synapse.config.ratelimiting import RatelimitSettings
@@ -29,6 +28,12 @@ from synapse.storage.databases.main import DataStore
 from synapse.types import Requester
 from synapse.util import Clock
 
+if TYPE_CHECKING:
+    # To avoid circular imports:
+    from synapse.module_api.callbacks.ratelimit_callbacks import (
+        RatelimitModuleApiCallbacks,
+    )
+
 
 class Ratelimiter:
     """
@@ -73,19 +78,23 @@ class Ratelimiter:
         store: DataStore,
         clock: Clock,
         cfg: RatelimitSettings,
+        ratelimit_callbacks: Optional["RatelimitModuleApiCallbacks"] = None,
     ):
         self.clock = clock
         self.rate_hz = cfg.per_second
         self.burst_count = cfg.burst_count
         self.store = store
         self._limiter_name = cfg.key
+        self._ratelimit_callbacks = ratelimit_callbacks
 
-        # An ordered dictionary representing the token buckets tracked by this rate
+        # A dictionary representing the token buckets tracked by this rate
         # limiter. Each entry maps a key of arbitrary type to a tuple representing:
         #   * The number of tokens currently in the bucket,
         #   * The time point when the bucket was last completely empty, and
         #   * The rate_hz (leak rate) of this particular bucket.
-        self.actions: OrderedDict[Hashable, Tuple[float, float, float]] = OrderedDict()
+        self.actions: Dict[Hashable, Tuple[float, float, float]] = {}
+
+        self.clock.looping_call(self._prune_message_counts, 60 * 1000)
 
     def _get_key(
         self, requester: Optional[Requester], key: Optional[Hashable]
@@ -164,14 +173,25 @@ class Ratelimiter:
         if override and not override.messages_per_second:
             return True, -1.0
 
+        if requester and self._ratelimit_callbacks:
+            # Check if the user has a custom rate limit for this specific limiter
+            # as returned by the module API.
+            module_override = (
+                await self._ratelimit_callbacks.get_ratelimit_override_for_user(
+                    requester.user.to_string(),
+                    self._limiter_name,
+                )
+            )
+
+            if module_override:
+                rate_hz = module_override.per_second
+                burst_count = module_override.burst_count
+
         # Override default values if set
         time_now_s = _time_now_s if _time_now_s is not None else self.clock.time()
         rate_hz = rate_hz if rate_hz is not None else self.rate_hz
         burst_count = burst_count if burst_count is not None else self.burst_count
 
-        # Remove any expired entries
-        self._prune_message_counts(time_now_s)
-
         # Check if there is an existing count entry for this key
         action_count, time_start, _ = self._get_action_counts(key, time_now_s)
 
@@ -246,13 +266,12 @@ class Ratelimiter:
         action_count, time_start, rate_hz = self._get_action_counts(key, time_now_s)
         self.actions[key] = (action_count + n_actions, time_start, rate_hz)
 
-    def _prune_message_counts(self, time_now_s: float) -> None:
+    def _prune_message_counts(self) -> None:
         """Remove message count entries that have not exceeded their defined
         rate_hz limit
-
-        Args:
-            time_now_s: The current time
         """
+        time_now_s = self.clock.time()
+
         # We create a copy of the key list here as the dictionary is modified during
         # the loop
         for key in list(self.actions.keys()):
@@ -275,6 +294,7 @@ class Ratelimiter:
         update: bool = True,
         n_actions: int = 1,
         _time_now_s: Optional[float] = None,
+        pause: Optional[float] = 0.5,
     ) -> None:
         """Checks if an action can be performed. If not, raises a LimitExceededError
 
@@ -298,6 +318,8 @@ class Ratelimiter:
                 at all.
             _time_now_s: The current time. Optional, defaults to the current time
                 according to self.clock. Only used by tests.
+            pause: Time in seconds to pause when an action is being limited. Defaults to 0.5
+                to stop clients from "tight-looping" on retrying their request.
 
         Raises:
             LimitExceededError: If an action could not be performed, along with the time in
@@ -316,9 +338,8 @@ class Ratelimiter:
         )
 
         if not allowed:
-            # We pause for a bit here to stop clients from "tight-looping" on
-            # retrying their request.
-            await self.clock.sleep(0.5)
+            if pause:
+                await self.clock.sleep(pause)
 
             raise LimitExceededError(
                 limiter_name=self._limiter_name,
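The `pause` parameter only changes what happens once an action has been refused. A toy leaky-bucket limiter, using assumed names rather than Synapse's `Ratelimiter`, showing the burst check and the optional sleep before raising:

# Toy leaky-bucket limiter (assumed names, not Synapse's Ratelimiter): allow
# bursts up to burst_count, leak at rate_hz, and optionally pause before raising.
import asyncio
import time
from typing import Dict, Hashable, Tuple


class TinyRatelimiter:
    def __init__(self, rate_hz: float, burst_count: int) -> None:
        self.rate_hz = rate_hz
        self.burst_count = burst_count
        # key -> (actions counted so far, time of the last accepted action)
        self.actions: Dict[Hashable, Tuple[float, float]] = {}

    async def ratelimit(self, key: Hashable, pause: float = 0.5) -> None:
        now = time.monotonic()
        count, last = self.actions.get(key, (0.0, now))
        # leak: forget rate_hz actions per second since the last accepted action
        count = max(0.0, count - (now - last) * self.rate_hz)
        if count + 1 > self.burst_count:
            if pause:
                # slow down clients that would otherwise tight-loop on retries
                await asyncio.sleep(pause)
            raise RuntimeError("rate limit exceeded")
        self.actions[key] = (count + 1, now)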
diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py
index fbc1d58ecb..697acc25ba 100644
--- a/synapse/api/room_versions.py
+++ b/synapse/api/room_versions.py
@@ -107,6 +107,8 @@ class RoomVersion:
     # support the flag. Unknown flags are ignored by the evaluator, making conditions
     # fail if used.
     msc3931_push_features: Tuple[str, ...]  # values from PushRuleRoomFlag
+    # MSC3757: Restricting who can overwrite a state event
+    msc3757_enabled: bool
 
 
 class RoomVersions:
@@ -128,6 +130,7 @@
         knock_restricted_join_rule=False,
         enforce_int_power_levels=False,
         msc3931_push_features=(),
+        msc3757_enabled=False,
     )
     V2 = RoomVersion(
         "2",
@@ -147,6 +150,7 @@
         knock_restricted_join_rule=False,
         enforce_int_power_levels=False,
         msc3931_push_features=(),
+        msc3757_enabled=False,
     )
     V3 = RoomVersion(
         "3",
@@ -166,6 +170,7 @@
         knock_restricted_join_rule=False,
         enforce_int_power_levels=False,
         msc3931_push_features=(),
+        msc3757_enabled=False,
     )
     V4 = RoomVersion(
         "4",
@@ -185,6 +190,7 @@
         knock_restricted_join_rule=False,
         enforce_int_power_levels=False,
         msc3931_push_features=(),
+        msc3757_enabled=False,
     )
     V5 = RoomVersion(
         "5",
@@ -204,6 +210,7 @@
         knock_restricted_join_rule=False,
         enforce_int_power_levels=False,
         msc3931_push_features=(),
+        msc3757_enabled=False,
     )
     V6 = RoomVersion(
         "6",
@@ -223,6 +230,7 @@
         knock_restricted_join_rule=False,
         enforce_int_power_levels=False,
         msc3931_push_features=(),
+        msc3757_enabled=False,
     )
     V7 = RoomVersion(
         "7",
@@ -242,6 +250,7 @@
         knock_restricted_join_rule=False,
         enforce_int_power_levels=False,
         msc3931_push_features=(),
+        msc3757_enabled=False,
     )
     V8 = RoomVersion(
         "8",
@@ -261,6 +270,7 @@
         knock_restricted_join_rule=False,
         enforce_int_power_levels=False,
         msc3931_push_features=(),
+        msc3757_enabled=False,
     )
     V9 = RoomVersion(
         "9",
@@ -280,6 +290,7 @@
         knock_restricted_join_rule=False,
         enforce_int_power_levels=False,
         msc3931_push_features=(),
+        msc3757_enabled=False,
     )
     V10 = RoomVersion(
         "10",
@@ -299,6 +310,7 @@
         knock_restricted_join_rule=True,
         enforce_int_power_levels=True,
         msc3931_push_features=(),
+        msc3757_enabled=False,
     )
     MSC1767v10 = RoomVersion(
         # MSC1767 (Extensible Events) based on room version "10"
@@ -319,6 +331,28 @@
         knock_restricted_join_rule=True,
         enforce_int_power_levels=True,
         msc3931_push_features=(PushRuleRoomFlag.EXTENSIBLE_EVENTS,),
+        msc3757_enabled=False,
+    )
+    MSC3757v10 = RoomVersion(
+        # MSC3757 (Restricting who can overwrite a state event) based on room version "10"
+        "org.matrix.msc3757.10",
+        RoomDisposition.UNSTABLE,
+        EventFormatVersions.ROOM_V4_PLUS,
+        StateResolutionVersions.V2,
+        enforce_key_validity=True,
+        special_case_aliases_auth=False,
+        strict_canonicaljson=True,
+        limit_notifications_power_levels=True,
+        implicit_room_creator=False,
+        updated_redaction_rules=False,
+        restricted_join_rule=True,
+        restricted_join_rule_fix=True,
+        knock_join_rule=True,
+        msc3389_relation_redactions=False,
+        knock_restricted_join_rule=True,
+        enforce_int_power_levels=True,
+        msc3931_push_features=(),
+        msc3757_enabled=True,
     )
     V11 = RoomVersion(
         "11",
@@ -338,6 +372,28 @@
         knock_restricted_join_rule=True,
         enforce_int_power_levels=True,
         msc3931_push_features=(),
+        msc3757_enabled=False,
+    )
+    MSC3757v11 = RoomVersion(
+        # MSC3757 (Restricting who can overwrite a state event) based on room version "11"
+        "org.matrix.msc3757.11",
+        RoomDisposition.UNSTABLE,
+        EventFormatVersions.ROOM_V4_PLUS,
+        StateResolutionVersions.V2,
+        enforce_key_validity=True,
+        special_case_aliases_auth=False,
+        strict_canonicaljson=True,
+        limit_notifications_power_levels=True,
+        implicit_room_creator=True,  # Used by MSC3820
+        updated_redaction_rules=True,  # Used by MSC3820
+        restricted_join_rule=True,
+        restricted_join_rule_fix=True,
+        knock_join_rule=True,
+        msc3389_relation_redactions=False,
+        knock_restricted_join_rule=True,
+        enforce_int_power_levels=True,
+        msc3931_push_features=(),
+        msc3757_enabled=True,
     )
 
 
@@ -355,42 +411,7 @@ KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
         RoomVersions.V9,
         RoomVersions.V10,
         RoomVersions.V11,
-    )
-}
-
-
-@attr.s(slots=True, frozen=True, auto_attribs=True)
-class RoomVersionCapability:
-    """An object which describes the unique attributes of a room version."""
-
-    identifier: str  # the identifier for this capability
-    preferred_version: Optional[RoomVersion]
-    support_check_lambda: Callable[[RoomVersion], bool]
-
-
-MSC3244_CAPABILITIES = {
-    cap.identifier: {
-        "preferred": (
-            cap.preferred_version.identifier
-            if cap.preferred_version is not None
-            else None
-        ),
-        "support": [
-            v.identifier
-            for v in KNOWN_ROOM_VERSIONS.values()
-            if cap.support_check_lambda(v)
-        ],
-    }
-    for cap in (
-        RoomVersionCapability(
-            "knock",
-            RoomVersions.V7,
-            lambda room_version: room_version.knock_join_rule,
-        ),
-        RoomVersionCapability(
-            "restricted",
-            RoomVersions.V9,
-            lambda room_version: room_version.restricted_join_rule,
-        ),
+        RoomVersions.MSC3757v10,
+        RoomVersions.MSC3757v11,
     )
 }
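New room versions only take effect once they are listed in `KNOWN_ROOM_VERSIONS`. A simplified sketch (assumed, trimmed-down types rather than the full `RoomVersion` attrs class) of how a per-version flag such as `msc3757_enabled` is then consulted:

# Simplified sketch (assumed names and types): looking up a room-version flag.
from dataclasses import dataclass
from typing import Dict


@dataclass(frozen=True)
class RoomVersion:
    identifier: str
    msc3757_enabled: bool


KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
    v.identifier: v
    for v in (
        RoomVersion("10", msc3757_enabled=False),
        RoomVersion("org.matrix.msc3757.10", msc3757_enabled=True),
    )
}


def may_restrict_state_overwrites(room_version_id: str) -> bool:
    """Unknown room versions are rejected rather than guessed at."""
    room_version = KNOWN_ROOM_VERSIONS.get(room_version_id)
    if room_version is None:
        raise ValueError(f"Unsupported room version: {room_version_id}")
    return room_version.msc3757_enabled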
diff --git a/synapse/api/urls.py b/synapse/api/urls.py
index d077a2c613..655b5edd7a 100644
--- a/synapse/api/urls.py
+++ b/synapse/api/urls.py
@@ -19,10 +19,12 @@
 #
 #
 
-"""Contains the URL paths to prefix various aspects of the server with. """
+"""Contains the URL paths to prefix various aspects of the server with."""
+
 import hmac
 from hashlib import sha256
-from urllib.parse import urlencode
+from typing import Optional
+from urllib.parse import urlencode, urljoin
 
 from synapse.config import ConfigError
 from synapse.config.homeserver import HomeServerConfig
@@ -65,3 +67,42 @@ class ConsentURIBuilder:
             urlencode({"u": user_id, "h": mac}),
         )
         return consent_uri
+
+
+class LoginSSORedirectURIBuilder:
+    def __init__(self, hs_config: HomeServerConfig):
+        self._public_baseurl = hs_config.server.public_baseurl
+
+    def build_login_sso_redirect_uri(
+        self, *, idp_id: Optional[str], client_redirect_url: str
+    ) -> str:
+        """Build a `/login/sso/redirect` URI for the given identity provider.
+
+        Builds `/_matrix/client/v3/login/sso/redirect/{idpId}?redirectUrl=xxx` when `idp_id` is specified.
+        Otherwise, builds `/_matrix/client/v3/login/sso/redirect?redirectUrl=xxx` when `idp_id` is `None`.
+
+        Args:
+            idp_id: Optional ID of the identity provider
+            client_redirect_url: URL to redirect the user to after login
+
+        Returns:
+            The URI to follow when choosing a specific identity provider.
+        """
+        base_url = urljoin(
+            self._public_baseurl,
+            f"{CLIENT_API_PREFIX}/v3/login/sso/redirect",
+        )
+
+        serialized_query_parameters = urlencode({"redirectUrl": client_redirect_url})
+
+        if idp_id:
+            resultant_url = urljoin(
+                # We have to add a trailing slash to the base URL to ensure that the
+                # last path segment is not stripped away when joining with another path.
+                f"{base_url}/",
+                f"{idp_id}?{serialized_query_parameters}",
+            )
+        else:
+            resultant_url = f"{base_url}?{serialized_query_parameters}"
+
+        return resultant_url
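The trailing-slash comment in `build_login_sso_redirect_uri` reflects standard `urljoin` behaviour, shown here with a hypothetical base URL (`https://example.com/...` is only for illustration):

# Standard-library behaviour (not Synapse code): why the trailing slash matters
# when joining the idp_id onto the redirect base URL above.
from urllib.parse import urljoin

base = "https://example.com/_matrix/client/v3/login/sso/redirect"

# Without the trailing slash, urljoin replaces the last path segment:
print(urljoin(base, "oidc"))        # https://example.com/_matrix/client/v3/login/sso/oidc

# With it, the idp_id is appended as a new segment, as intended:
print(urljoin(base + "/", "oidc"))  # https://example.com/_matrix/client/v3/login/sso/redirect/oidc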