Diffstat (limited to 'synapse/handlers')
-rw-r--r-- | synapse/handlers/auth.py               |    8
-rw-r--r-- | synapse/handlers/federation.py         |   32
-rw-r--r-- | synapse/handlers/message.py            |    2
-rw-r--r-- | synapse/handlers/oidc_handler.py       | 1036
-rw-r--r-- | synapse/handlers/presence.py           |    6
-rw-r--r-- | synapse/handlers/room.py               |   42
-rw-r--r-- | synapse/handlers/room_member.py        |  360
-rw-r--r-- | synapse/handlers/room_member_worker.py |   39
-rw-r--r-- | synapse/handlers/saml_handler.py       |   28
-rw-r--r-- | synapse/handlers/search.py             |   44
10 files changed, 1321 insertions, 276 deletions
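
Most of this changeset is the new synapse/handlers/oidc_handler.py, which drives the OpenID Connect login flow: it redirects the browser to the provider's authorization endpoint, stores the state, nonce and client redirect URL in a signed macaroon cookie, and verifies that cookie when the provider calls back before exchanging the authorization code for a token. The sketch below restates the cookie signing and verification with pymacaroons, as the diff does; the secret key, server name and helper names are illustrative, not Synapse's actual configuration or API, and the expiry caveat is accepted here without checking the timestamp (Synapse's _verify_expiry also enforces it).

import time

import pymacaroons

# Illustrative values; Synapse takes these from its config and generates a
# fresh random state/nonce pair per login attempt.
SECRET_KEY = "macaroon-secret-key"
SERVER_NAME = "example.com"


def make_session_macaroon(state: str, nonce: str, client_redirect_url: str) -> str:
    # Mirrors OidcHandler._generate_oidc_session_token: the state, nonce and
    # client_redirect_url are baked in as first-party caveats and signed.
    macaroon = pymacaroons.Macaroon(
        location=SERVER_NAME, identifier="key", key=SECRET_KEY
    )
    macaroon.add_first_party_caveat("gen = 1")
    macaroon.add_first_party_caveat("type = session")
    macaroon.add_first_party_caveat("state = %s" % (state,))
    macaroon.add_first_party_caveat("nonce = %s" % (nonce,))
    macaroon.add_first_party_caveat("client_redirect_url = %s" % (client_redirect_url,))
    expiry_ms = int(time.time() * 1000) + 60 * 60 * 1000  # one hour, like the handler
    macaroon.add_first_party_caveat("time < %d" % (expiry_ms,))
    return macaroon.serialize()


def verify_session_macaroon(serialized: str, state: str) -> None:
    # Mirrors OidcHandler._verify_oidc_session_token: the state echoed back by
    # the provider must match the caveat stored in the cookie.
    macaroon = pymacaroons.Macaroon.deserialize(serialized)
    v = pymacaroons.Verifier()
    v.satisfy_exact("gen = 1")
    v.satisfy_exact("type = session")
    v.satisfy_exact("state = %s" % (state,))
    v.satisfy_general(lambda c: c.startswith("nonce = "))
    v.satisfy_general(lambda c: c.startswith("client_redirect_url = "))
    v.satisfy_general(lambda c: c.startswith("time < "))
    v.verify(macaroon, SECRET_KEY)


cookie = make_session_macaroon("some-state", "some-nonce", "https://client.example/done")
verify_session_macaroon(cookie, "some-state")  # raises if the state does not match
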
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 5c20e29171..75b39e878c 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -80,7 +80,9 @@ class AuthHandler(BaseHandler): self.hs = hs # FIXME better possibility to access registrationHandler later? self.macaroon_gen = hs.get_macaroon_generator() self._password_enabled = hs.config.password_enabled - self._sso_enabled = hs.config.saml2_enabled or hs.config.cas_enabled + self._sso_enabled = ( + hs.config.cas_enabled or hs.config.saml2_enabled or hs.config.oidc_enabled + ) # we keep this as a list despite the O(N^2) implication so that we can # keep PASSWORD first and avoid confusing clients which pick the first @@ -126,13 +128,13 @@ class AuthHandler(BaseHandler): # It notifies the user they are about to give access to their matrix account # to the client. self._sso_redirect_confirm_template = load_jinja2_templates( - hs.config.sso_redirect_confirm_template_dir, ["sso_redirect_confirm.html"], + hs.config.sso_template_dir, ["sso_redirect_confirm.html"], )[0] # The following template is shown during user interactive authentication # in the fallback auth scenario. It notifies the user that they are # authenticating for an operation to occur on their account. self._sso_auth_confirm_template = load_jinja2_templates( - hs.config.sso_redirect_confirm_template_dir, ["sso_auth_confirm.html"], + hs.config.sso_template_dir, ["sso_auth_confirm.html"], )[0] # The following template is shown after a successful user interactive # authentication session. It tells the user they can close the window. diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 4e5c645525..81d859f807 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -2681,8 +2681,7 @@ class FederationHandler(BaseHandler): member_handler = self.hs.get_room_member_handler() await member_handler.send_membership_event(None, event, context) - @defer.inlineCallbacks - def add_display_name_to_third_party_invite( + async def add_display_name_to_third_party_invite( self, room_version, event_dict, event, context ): key = ( @@ -2690,10 +2689,10 @@ class FederationHandler(BaseHandler): event.content["third_party_invite"]["signed"]["token"], ) original_invite = None - prev_state_ids = yield context.get_prev_state_ids() + prev_state_ids = await context.get_prev_state_ids() original_invite_id = prev_state_ids.get(key) if original_invite_id: - original_invite = yield self.store.get_event( + original_invite = await self.store.get_event( original_invite_id, allow_none=True ) if original_invite: @@ -2714,14 +2713,13 @@ class FederationHandler(BaseHandler): builder = self.event_builder_factory.new(room_version, event_dict) EventValidator().validate_builder(builder) - event, context = yield self.event_creation_handler.create_new_client_event( + event, context = await self.event_creation_handler.create_new_client_event( builder=builder ) EventValidator().validate_new(event, self.config) return (event, context) - @defer.inlineCallbacks - def _check_signature(self, event, context): + async def _check_signature(self, event, context): """ Checks that the signature in the event is consistent with its invite. 
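
The federation.py hunks above and below follow one mechanical pattern: @defer.inlineCallbacks generators become native coroutines, yield becomes await, and defer.returnValue(x) becomes return x. A schematic before/after based on get_room_complexity, with a hypothetical client argument standing in for the handler's federation_client attribute:

from twisted.internet import defer

# Old style: a generator decorated with inlineCallbacks, yielding Deferreds.
@defer.inlineCallbacks
def get_room_complexity_old(client, hosts, room_id):
    for host in hosts:
        res = yield client.get_room_complexity(host, room_id)
        if res:
            defer.returnValue(res)
    defer.returnValue(None)

# New style: a native coroutine; Twisted Deferreds can be awaited directly.
async def get_room_complexity_new(client, hosts, room_id):
    for host in hosts:
        res = await client.get_room_complexity(host, room_id)
        if res:
            return res
    return None
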
@@ -2738,12 +2736,12 @@ class FederationHandler(BaseHandler): signed = event.content["third_party_invite"]["signed"] token = signed["token"] - prev_state_ids = yield context.get_prev_state_ids() + prev_state_ids = await context.get_prev_state_ids() invite_event_id = prev_state_ids.get((EventTypes.ThirdPartyInvite, token)) invite_event = None if invite_event_id: - invite_event = yield self.store.get_event(invite_event_id, allow_none=True) + invite_event = await self.store.get_event(invite_event_id, allow_none=True) if not invite_event: raise AuthError(403, "Could not find invite") @@ -2792,7 +2790,7 @@ class FederationHandler(BaseHandler): raise try: if "key_validity_url" in public_key_object: - yield self._check_key_revocation( + await self._check_key_revocation( public_key, public_key_object["key_validity_url"] ) except Exception: @@ -2806,8 +2804,7 @@ class FederationHandler(BaseHandler): last_exception = e raise last_exception - @defer.inlineCallbacks - def _check_key_revocation(self, public_key, url): + async def _check_key_revocation(self, public_key, url): """ Checks whether public_key has been revoked. @@ -2821,7 +2818,7 @@ class FederationHandler(BaseHandler): for revocation. """ try: - response = yield self.http_client.get_json(url, {"public_key": public_key}) + response = await self.http_client.get_json(url, {"public_key": public_key}) except Exception: raise SynapseError(502, "Third party certificate could not be checked") if "valid" not in response or not response["valid"]: @@ -2916,8 +2913,7 @@ class FederationHandler(BaseHandler): else: user_joined_room(self.distributor, user, room_id) - @defer.inlineCallbacks - def get_room_complexity(self, remote_room_hosts, room_id): + async def get_room_complexity(self, remote_room_hosts, room_id): """ Fetch the complexity of a remote room over federation. @@ -2931,12 +2927,12 @@ class FederationHandler(BaseHandler): """ for host in remote_room_hosts: - res = yield self.federation_client.get_room_complexity(host, room_id) + res = await self.federation_client.get_room_complexity(host, room_id) # We got a result, return it. if res: - defer.returnValue(res) + return res # We fell off the bottom, couldn't get the complexity from anyone. Oh # well. - defer.returnValue(None) + return None diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index a622a600b4..0242521cc6 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -72,7 +72,6 @@ class MessageHandler(object): self.state_store = self.storage.state self._event_serializer = hs.get_event_client_serializer() self._ephemeral_events_enabled = hs.config.enable_ephemeral_messages - self._is_worker_app = bool(hs.config.worker_app) # The scheduled call to self._expire_event. None if no call is currently # scheduled. @@ -260,7 +259,6 @@ class MessageHandler(object): Args: event (EventBase): The event to schedule the expiry of. """ - assert not self._is_worker_app expiry_ts = event.content.get(EventContentFields.SELF_DESTRUCT_AFTER) if not isinstance(expiry_ts, int) or event.is_state(): diff --git a/synapse/handlers/oidc_handler.py b/synapse/handlers/oidc_handler.py new file mode 100644 index 0000000000..4ba8c7fda5 --- /dev/null +++ b/synapse/handlers/oidc_handler.py @@ -0,0 +1,1036 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Quentin Gliech +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import json +import logging +from typing import Dict, Generic, List, Optional, Tuple, TypeVar +from urllib.parse import urlencode + +import attr +import pymacaroons +from authlib.common.security import generate_token +from authlib.jose import JsonWebToken +from authlib.oauth2.auth import ClientAuth +from authlib.oauth2.rfc6749.parameters import prepare_grant_uri +from authlib.oidc.core import CodeIDToken, ImplicitIDToken, UserInfo +from authlib.oidc.discovery import OpenIDProviderMetadata, get_well_known_url +from jinja2 import Environment, Template +from pymacaroons.exceptions import ( + MacaroonDeserializationException, + MacaroonInvalidSignatureException, +) +from typing_extensions import TypedDict + +from twisted.web.client import readBody + +from synapse.config import ConfigError +from synapse.http.server import finish_request +from synapse.http.site import SynapseRequest +from synapse.push.mailer import load_jinja2_templates +from synapse.server import HomeServer +from synapse.types import UserID, map_username_to_mxid_localpart + +logger = logging.getLogger(__name__) + +SESSION_COOKIE_NAME = b"oidc_session" + +#: A token exchanged from the token endpoint, as per RFC6749 sec 5.1. and +#: OpenID.Core sec 3.1.3.3. +Token = TypedDict( + "Token", + { + "access_token": str, + "token_type": str, + "id_token": Optional[str], + "refresh_token": Optional[str], + "expires_in": int, + "scope": Optional[str], + }, +) + +#: A JWK, as per RFC7517 sec 4. The type could be more precise than that, but +#: there is no real point of doing this in our case. +JWK = Dict[str, str] + +#: A JWK Set, as per RFC7517 sec 5. +JWKS = TypedDict("JWKS", {"keys": List[JWK]}) + + +class OidcError(Exception): + """Used to catch errors when calling the token_endpoint + """ + + def __init__(self, error, error_description=None): + self.error = error + self.error_description = error_description + + def __str__(self): + if self.error_description: + return "{}: {}".format(self.error, self.error_description) + return self.error + + +class MappingException(Exception): + """Used to catch errors when mapping the UserInfo object + """ + + +class OidcHandler: + """Handles requests related to the OpenID Connect login flow. 
+ """ + + def __init__(self, hs: HomeServer): + self._callback_url = hs.config.oidc_callback_url # type: str + self._scopes = hs.config.oidc_scopes # type: List[str] + self._client_auth = ClientAuth( + hs.config.oidc_client_id, + hs.config.oidc_client_secret, + hs.config.oidc_client_auth_method, + ) # type: ClientAuth + self._client_auth_method = hs.config.oidc_client_auth_method # type: str + self._subject_claim = hs.config.oidc_subject_claim + self._provider_metadata = OpenIDProviderMetadata( + issuer=hs.config.oidc_issuer, + authorization_endpoint=hs.config.oidc_authorization_endpoint, + token_endpoint=hs.config.oidc_token_endpoint, + userinfo_endpoint=hs.config.oidc_userinfo_endpoint, + jwks_uri=hs.config.oidc_jwks_uri, + ) # type: OpenIDProviderMetadata + self._provider_needs_discovery = hs.config.oidc_discover # type: bool + self._user_mapping_provider = hs.config.oidc_user_mapping_provider_class( + hs.config.oidc_user_mapping_provider_config + ) # type: OidcMappingProvider + self._skip_verification = hs.config.oidc_skip_verification # type: bool + + self._http_client = hs.get_proxied_http_client() + self._auth_handler = hs.get_auth_handler() + self._registration_handler = hs.get_registration_handler() + self._datastore = hs.get_datastore() + self._clock = hs.get_clock() + self._hostname = hs.hostname # type: str + self._server_name = hs.config.server_name # type: str + self._macaroon_secret_key = hs.config.macaroon_secret_key + self._error_template = load_jinja2_templates( + hs.config.sso_template_dir, ["sso_error.html"] + )[0] + + # identifier for the external_ids table + self._auth_provider_id = "oidc" + + def _render_error( + self, request, error: str, error_description: Optional[str] = None + ) -> None: + """Renders the error template and respond with it. + + This is used to show errors to the user. The template of this page can + be found under ``synapse/res/templates/sso_error.html``. + + Args: + request: The incoming request from the browser. + We'll respond with an HTML page describing the error. + error: A technical identifier for this error. Those include + well-known OAuth2/OIDC error types like invalid_request or + access_denied. + error_description: A human-readable description of the error. + """ + html_bytes = self._error_template.render( + error=error, error_description=error_description + ).encode("utf-8") + + request.setResponseCode(400) + request.setHeader(b"Content-Type", b"text/html; charset=utf-8") + request.setHeader(b"Content-Length", b"%i" % len(html_bytes)) + request.write(html_bytes) + finish_request(request) + + def _validate_metadata(self): + """Verifies the provider metadata. + + This checks the validity of the currently loaded provider. Not + everything is checked, only: + + - ``issuer`` + - ``authorization_endpoint`` + - ``token_endpoint`` + - ``response_types_supported`` (checks if "code" is in it) + - ``jwks_uri`` + + Raises: + ValueError: if something in the provider is not valid + """ + # Skip verification to allow non-compliant providers (e.g. 
issuers not running on a secure origin) + if self._skip_verification is True: + return + + m = self._provider_metadata + m.validate_issuer() + m.validate_authorization_endpoint() + m.validate_token_endpoint() + + if m.get("token_endpoint_auth_methods_supported") is not None: + m.validate_token_endpoint_auth_methods_supported() + if ( + self._client_auth_method + not in m["token_endpoint_auth_methods_supported"] + ): + raise ValueError( + '"{auth_method}" not in "token_endpoint_auth_methods_supported" ({supported!r})'.format( + auth_method=self._client_auth_method, + supported=m["token_endpoint_auth_methods_supported"], + ) + ) + + if m.get("response_types_supported") is not None: + m.validate_response_types_supported() + + if "code" not in m["response_types_supported"]: + raise ValueError( + '"code" not in "response_types_supported" (%r)' + % (m["response_types_supported"],) + ) + + # If the openid scope was not requested, we need a userinfo endpoint to fetch user infos + if self._uses_userinfo: + if m.get("userinfo_endpoint") is None: + raise ValueError( + 'provider has no "userinfo_endpoint", even though it is required because the "openid" scope is not requested' + ) + else: + # If we're not using userinfo, we need a valid jwks to validate the ID token + if m.get("jwks") is None: + if m.get("jwks_uri") is not None: + m.validate_jwks_uri() + else: + raise ValueError('"jwks_uri" must be set') + + @property + def _uses_userinfo(self) -> bool: + """Returns True if the ``userinfo_endpoint`` should be used. + + This is based on the requested scopes: if the scopes include + ``openid``, the provider should give use an ID token containing the + user informations. If not, we should fetch them using the + ``access_token`` with the ``userinfo_endpoint``. + """ + + # Maybe that should be user-configurable and not inferred? + return "openid" not in self._scopes + + async def load_metadata(self) -> OpenIDProviderMetadata: + """Load and validate the provider metadata. + + The values metadatas are discovered if ``oidc_config.discovery`` is + ``True`` and then cached. + + Raises: + ValueError: if something in the provider is not valid + + Returns: + The provider's metadata. + """ + # If we are using the OpenID Discovery documents, it needs to be loaded once + # FIXME: should there be a lock here? + if self._provider_needs_discovery: + url = get_well_known_url(self._provider_metadata["issuer"], external=True) + metadata_response = await self._http_client.get_json(url) + # TODO: maybe update the other way around to let user override some values? + self._provider_metadata.update(metadata_response) + self._provider_needs_discovery = False + + self._validate_metadata() + + return self._provider_metadata + + async def load_jwks(self, force: bool = False) -> JWKS: + """Load the JSON Web Key Set used to sign ID tokens. + + If we're not using the ``userinfo_endpoint``, user infos are extracted + from the ID token, which is a JWT signed by keys given by the provider. + The keys are then cached. + + Args: + force: Force reloading the keys. + + Returns: + The key set + + Looks like this:: + + { + 'keys': [ + { + 'kid': 'abcdef', + 'kty': 'RSA', + 'alg': 'RS256', + 'use': 'sig', + 'e': 'XXXX', + 'n': 'XXXX', + } + ] + } + """ + if self._uses_userinfo: + # We're not using jwt signing, return an empty jwk set + return {"keys": []} + + # First check if the JWKS are loaded in the provider metadata. 
+ # It can happen either if the provider gives its JWKS in the discovery + # document directly or if it was already loaded once. + metadata = await self.load_metadata() + jwk_set = metadata.get("jwks") + if jwk_set is not None and not force: + return jwk_set + + # Loading the JWKS using the `jwks_uri` metadata + uri = metadata.get("jwks_uri") + if not uri: + raise RuntimeError('Missing "jwks_uri" in metadata') + + jwk_set = await self._http_client.get_json(uri) + + # Caching the JWKS in the provider's metadata + self._provider_metadata["jwks"] = jwk_set + return jwk_set + + async def _exchange_code(self, code: str) -> Token: + """Exchange an authorization code for a token. + + This calls the ``token_endpoint`` with the authorization code we + received in the callback to exchange it for a token. The call uses the + ``ClientAuth`` to authenticate with the client with its ID and secret. + + Args: + code: The authorization code we got from the callback. + + Returns: + A dict containing various tokens. + + May look like this:: + + { + 'token_type': 'bearer', + 'access_token': 'abcdef', + 'expires_in': 3599, + 'id_token': 'ghijkl', + 'refresh_token': 'mnopqr', + } + + Raises: + OidcError: when the ``token_endpoint`` returned an error. + """ + metadata = await self.load_metadata() + token_endpoint = metadata.get("token_endpoint") + headers = { + "Content-Type": "application/x-www-form-urlencoded", + "User-Agent": self._http_client.user_agent, + "Accept": "application/json", + } + + args = { + "grant_type": "authorization_code", + "code": code, + "redirect_uri": self._callback_url, + } + body = urlencode(args, True) + + # Fill the body/headers with credentials + uri, headers, body = self._client_auth.prepare( + method="POST", uri=token_endpoint, headers=headers, body=body + ) + headers = {k: [v] for (k, v) in headers.items()} + + # Do the actual request + # We're not using the SimpleHttpClient util methods as we don't want to + # check the HTTP status code and we do the body encoding ourself. + response = await self._http_client.request( + method="POST", uri=uri, data=body.encode("utf-8"), headers=headers, + ) + + # This is used in multiple error messages below + status = "{code} {phrase}".format( + code=response.code, phrase=response.phrase.decode("utf-8") + ) + + resp_body = await readBody(response) + + if response.code >= 500: + # In case of a server error, we should first try to decode the body + # and check for an error field. If not, we respond with a generic + # error message. + try: + resp = json.loads(resp_body.decode("utf-8")) + error = resp["error"] + description = resp.get("error_description", error) + except (ValueError, KeyError): + # Catch ValueError for the JSON decoding and KeyError for the "error" field + error = "server_error" + description = ( + ( + 'Authorization server responded with a "{status}" error ' + "while exchanging the authorization code." + ).format(status=status), + ) + + raise OidcError(error, description) + + # Since it is a not a 5xx code, body should be a valid JSON. It will + # raise if not. + resp = json.loads(resp_body.decode("utf-8")) + + if "error" in resp: + error = resp["error"] + # In case the authorization server responded with an error field, + # it should be a 4xx code. If not, warn about it but don't do + # anything special and report the original error message. 
+ if response.code < 400: + logger.debug( + "Invalid response from the authorization server: " + 'responded with a "{status}" ' + "but body has an error field: {error!r}".format( + status=status, error=resp["error"] + ) + ) + + description = resp.get("error_description", error) + raise OidcError(error, description) + + # Now, this should not be an error. According to RFC6749 sec 5.1, it + # should be a 200 code. We're a bit more flexible than that, and will + # only throw on a 4xx code. + if response.code >= 400: + description = ( + 'Authorization server responded with a "{status}" error ' + 'but did not include an "error" field in its response.'.format( + status=status + ) + ) + logger.warning(description) + # Body was still valid JSON. Might be useful to log it for debugging. + logger.warning("Code exchange response: {resp!r}".format(resp=resp)) + raise OidcError("server_error", description) + + return resp + + async def _fetch_userinfo(self, token: Token) -> UserInfo: + """Fetch user informations from the ``userinfo_endpoint``. + + Args: + token: the token given by the ``token_endpoint``. + Must include an ``access_token`` field. + + Returns: + UserInfo: an object representing the user. + """ + metadata = await self.load_metadata() + + resp = await self._http_client.get_json( + metadata["userinfo_endpoint"], + headers={"Authorization": ["Bearer {}".format(token["access_token"])]}, + ) + + return UserInfo(resp) + + async def _parse_id_token(self, token: Token, nonce: str) -> UserInfo: + """Return an instance of UserInfo from token's ``id_token``. + + Args: + token: the token given by the ``token_endpoint``. + Must include an ``id_token`` field. + nonce: the nonce value originally sent in the initial authorization + request. This value should match the one inside the token. + + Returns: + An object representing the user. + """ + metadata = await self.load_metadata() + claims_params = { + "nonce": nonce, + "client_id": self._client_auth.client_id, + } + if "access_token" in token: + # If we got an `access_token`, there should be an `at_hash` claim + # in the `id_token` that we can check against. 
+ claims_params["access_token"] = token["access_token"] + claims_cls = CodeIDToken + else: + claims_cls = ImplicitIDToken + + alg_values = metadata.get("id_token_signing_alg_values_supported", ["RS256"]) + + jwt = JsonWebToken(alg_values) + + claim_options = {"iss": {"values": [metadata["issuer"]]}} + + # Try to decode the keys in cache first, then retry by forcing the keys + # to be reloaded + jwk_set = await self.load_jwks() + try: + claims = jwt.decode( + token["id_token"], + key=jwk_set, + claims_cls=claims_cls, + claims_options=claim_options, + claims_params=claims_params, + ) + except ValueError: + jwk_set = await self.load_jwks(force=True) # try reloading the jwks + claims = jwt.decode( + token["id_token"], + key=jwk_set, + claims_cls=claims_cls, + claims_options=claim_options, + claims_params=claims_params, + ) + + claims.validate(leeway=120) # allows 2 min of clock skew + return UserInfo(claims) + + async def handle_redirect_request( + self, + request: SynapseRequest, + client_redirect_url: bytes, + ui_auth_session_id: Optional[str] = None, + ) -> str: + """Handle an incoming request to /login/sso/redirect + + It returns a redirect to the authorization endpoint with a few + parameters: + + - ``client_id``: the client ID set in ``oidc_config.client_id`` + - ``response_type``: ``code`` + - ``redirect_uri``: the callback URL ; ``{base url}/_synapse/oidc/callback`` + - ``scope``: the list of scopes set in ``oidc_config.scopes`` + - ``state``: a random string + - ``nonce``: a random string + + In addition generating a redirect URL, we are setting a cookie with + a signed macaroon token containing the state, the nonce and the + client_redirect_url params. Those are then checked when the client + comes back from the provider. + + Args: + request: the incoming request from the browser. + We'll respond to it with a redirect and a cookie. + client_redirect_url: the URL that we should redirect the client to + when everything is done + ui_auth_session_id: The session ID of the ongoing UI Auth (or + None if this is a login). + + Returns: + The redirect URL to the authorization endpoint. + + """ + + state = generate_token() + nonce = generate_token() + + cookie = self._generate_oidc_session_token( + state=state, + nonce=nonce, + client_redirect_url=client_redirect_url.decode(), + ui_auth_session_id=ui_auth_session_id, + ) + request.addCookie( + SESSION_COOKIE_NAME, + cookie, + path="/_synapse/oidc", + max_age="3600", + httpOnly=True, + sameSite="lax", + ) + + metadata = await self.load_metadata() + authorization_endpoint = metadata.get("authorization_endpoint") + return prepare_grant_uri( + authorization_endpoint, + client_id=self._client_auth.client_id, + response_type="code", + redirect_uri=self._callback_url, + scope=self._scopes, + state=state, + nonce=nonce, + ) + + async def handle_oidc_callback(self, request: SynapseRequest) -> None: + """Handle an incoming request to /_synapse/oidc/callback + + Since we might want to display OIDC-related errors in a user-friendly + way, we don't raise SynapseError from here. Instead, we call + ``self._render_error`` which displays an HTML page for the error. 
+ + Most of the OpenID Connect logic happens here: + + - first, we check if there was any error returned by the provider and + display it + - then we fetch the session cookie, decode and verify it + - the ``state`` query parameter should match with the one stored in the + session cookie + - once we known this session is legit, exchange the code with the + provider using the ``token_endpoint`` (see ``_exchange_code``) + - once we have the token, use it to either extract the UserInfo from + the ``id_token`` (``_parse_id_token``), or use the ``access_token`` + to fetch UserInfo from the ``userinfo_endpoint`` + (``_fetch_userinfo``) + - map those UserInfo to a Matrix user (``_map_userinfo_to_user``) and + finish the login + + Args: + request: the incoming request from the browser. + """ + + # The provider might redirect with an error. + # In that case, just display it as-is. + if b"error" in request.args: + error = request.args[b"error"][0].decode() + description = request.args.get(b"error_description", [b""])[0].decode() + + # Most of the errors returned by the provider could be due by + # either the provider misbehaving or Synapse being misconfigured. + # The only exception of that is "access_denied", where the user + # probably cancelled the login flow. In other cases, log those errors. + if error != "access_denied": + logger.error("Error from the OIDC provider: %s %s", error, description) + + self._render_error(request, error, description) + return + + # Fetch the session cookie + session = request.getCookie(SESSION_COOKIE_NAME) + if session is None: + logger.info("No session cookie found") + self._render_error(request, "missing_session", "No session cookie found") + return + + # Remove the cookie. There is a good chance that if the callback failed + # once, it will fail next time and the code will already be exchanged. + # Removing it early avoids spamming the provider with token requests. + request.addCookie( + SESSION_COOKIE_NAME, + b"", + path="/_synapse/oidc", + expires="Thu, Jan 01 1970 00:00:00 UTC", + httpOnly=True, + sameSite="lax", + ) + + # Check for the state query parameter + if b"state" not in request.args: + logger.info("State parameter is missing") + self._render_error(request, "invalid_request", "State parameter is missing") + return + + state = request.args[b"state"][0].decode() + + # Deserialize the session token and verify it. + try: + ( + nonce, + client_redirect_url, + ui_auth_session_id, + ) = self._verify_oidc_session_token(session, state) + except MacaroonDeserializationException as e: + logger.exception("Invalid session") + self._render_error(request, "invalid_session", str(e)) + return + except MacaroonInvalidSignatureException as e: + logger.exception("Could not verify session") + self._render_error(request, "mismatching_session", str(e)) + return + + # Exchange the code with the provider + if b"code" not in request.args: + logger.info("Code parameter is missing") + self._render_error(request, "invalid_request", "Code parameter is missing") + return + + logger.info("Exchanging code") + code = request.args[b"code"][0].decode() + try: + token = await self._exchange_code(code) + except OidcError as e: + logger.exception("Could not exchange code") + self._render_error(request, e.error, e.error_description) + return + + # Now that we have a token, get the userinfo, either by decoding the + # `id_token` or by fetching the `userinfo_endpoint`. 
+ if self._uses_userinfo: + logger.info("Fetching userinfo") + try: + userinfo = await self._fetch_userinfo(token) + except Exception as e: + logger.exception("Could not fetch userinfo") + self._render_error(request, "fetch_error", str(e)) + return + else: + logger.info("Extracting userinfo from id_token") + try: + userinfo = await self._parse_id_token(token, nonce=nonce) + except Exception as e: + logger.exception("Invalid id_token") + self._render_error(request, "invalid_token", str(e)) + return + + # Call the mapper to register/login the user + try: + user_id = await self._map_userinfo_to_user(userinfo, token) + except MappingException as e: + logger.exception("Could not map user") + self._render_error(request, "mapping_error", str(e)) + return + + # and finally complete the login + if ui_auth_session_id: + await self._auth_handler.complete_sso_ui_auth( + user_id, ui_auth_session_id, request + ) + else: + await self._auth_handler.complete_sso_login( + user_id, request, client_redirect_url + ) + + def _generate_oidc_session_token( + self, + state: str, + nonce: str, + client_redirect_url: str, + ui_auth_session_id: Optional[str], + duration_in_ms: int = (60 * 60 * 1000), + ) -> str: + """Generates a signed token storing data about an OIDC session. + + When Synapse initiates an authorization flow, it creates a random state + and a random nonce. Those parameters are given to the provider and + should be verified when the client comes back from the provider. + It is also used to store the client_redirect_url, which is used to + complete the SSO login flow. + + Args: + state: The ``state`` parameter passed to the OIDC provider. + nonce: The ``nonce`` parameter passed to the OIDC provider. + client_redirect_url: The URL the client gave when it initiated the + flow. + ui_auth_session_id: The session ID of the ongoing UI Auth (or + None if this is a login). + duration_in_ms: An optional duration for the token in milliseconds. + Defaults to an hour. + + Returns: + A signed macaroon token with the session informations. + """ + macaroon = pymacaroons.Macaroon( + location=self._server_name, identifier="key", key=self._macaroon_secret_key, + ) + macaroon.add_first_party_caveat("gen = 1") + macaroon.add_first_party_caveat("type = session") + macaroon.add_first_party_caveat("state = %s" % (state,)) + macaroon.add_first_party_caveat("nonce = %s" % (nonce,)) + macaroon.add_first_party_caveat( + "client_redirect_url = %s" % (client_redirect_url,) + ) + if ui_auth_session_id: + macaroon.add_first_party_caveat( + "ui_auth_session_id = %s" % (ui_auth_session_id,) + ) + now = self._clock.time_msec() + expiry = now + duration_in_ms + macaroon.add_first_party_caveat("time < %d" % (expiry,)) + + return macaroon.serialize() + + def _verify_oidc_session_token( + self, session: str, state: str + ) -> Tuple[str, str, Optional[str]]: + """Verifies and extract an OIDC session token. + + This verifies that a given session token was issued by this homeserver + and extract the nonce and client_redirect_url caveats. 
+ + Args: + session: The session token to verify + state: The state the OIDC provider gave back + + Returns: + The nonce, client_redirect_url, and ui_auth_session_id for this session + """ + macaroon = pymacaroons.Macaroon.deserialize(session) + + v = pymacaroons.Verifier() + v.satisfy_exact("gen = 1") + v.satisfy_exact("type = session") + v.satisfy_exact("state = %s" % (state,)) + v.satisfy_general(lambda c: c.startswith("nonce = ")) + v.satisfy_general(lambda c: c.startswith("client_redirect_url = ")) + # Sometimes there's a UI auth session ID, it seems to be OK to attempt + # to always satisfy this. + v.satisfy_general(lambda c: c.startswith("ui_auth_session_id = ")) + v.satisfy_general(self._verify_expiry) + + v.verify(macaroon, self._macaroon_secret_key) + + # Extract the `nonce`, `client_redirect_url`, and maybe the + # `ui_auth_session_id` from the token. + nonce = self._get_value_from_macaroon(macaroon, "nonce") + client_redirect_url = self._get_value_from_macaroon( + macaroon, "client_redirect_url" + ) + try: + ui_auth_session_id = self._get_value_from_macaroon( + macaroon, "ui_auth_session_id" + ) # type: Optional[str] + except ValueError: + ui_auth_session_id = None + + return nonce, client_redirect_url, ui_auth_session_id + + def _get_value_from_macaroon(self, macaroon: pymacaroons.Macaroon, key: str) -> str: + """Extracts a caveat value from a macaroon token. + + Args: + macaroon: the token + key: the key of the caveat to extract + + Returns: + The extracted value + + Raises: + Exception: if the caveat was not in the macaroon + """ + prefix = key + " = " + for caveat in macaroon.caveats: + if caveat.caveat_id.startswith(prefix): + return caveat.caveat_id[len(prefix) :] + raise ValueError("No %s caveat in macaroon" % (key,)) + + def _verify_expiry(self, caveat: str) -> bool: + prefix = "time < " + if not caveat.startswith(prefix): + return False + expiry = int(caveat[len(prefix) :]) + now = self._clock.time_msec() + return now < expiry + + async def _map_userinfo_to_user(self, userinfo: UserInfo, token: Token) -> str: + """Maps a UserInfo object to a mxid. + + UserInfo should have a claim that uniquely identifies users. This claim + is usually `sub`, but can be configured with `oidc_config.subject_claim`. + It is then used as an `external_id`. + + If we don't find the user that way, we should register the user, + mapping the localpart and the display name from the UserInfo. + + If a user already exists with the mxid we've mapped, raise an exception. 
+ + Args: + userinfo: an object representing the user + token: a dict with the tokens obtained from the provider + + Raises: + MappingException: if there was an error while mapping some properties + + Returns: + The mxid of the user + """ + try: + remote_user_id = self._user_mapping_provider.get_remote_user_id(userinfo) + except Exception as e: + raise MappingException( + "Failed to extract subject from OIDC response: %s" % (e,) + ) + + logger.info( + "Looking for existing mapping for user %s:%s", + self._auth_provider_id, + remote_user_id, + ) + + registered_user_id = await self._datastore.get_user_by_external_id( + self._auth_provider_id, remote_user_id, + ) + + if registered_user_id is not None: + logger.info("Found existing mapping %s", registered_user_id) + return registered_user_id + + try: + attributes = await self._user_mapping_provider.map_user_attributes( + userinfo, token + ) + except Exception as e: + raise MappingException( + "Could not extract user attributes from OIDC response: " + str(e) + ) + + logger.debug( + "Retrieved user attributes from user mapping provider: %r", attributes + ) + + if not attributes["localpart"]: + raise MappingException("localpart is empty") + + localpart = map_username_to_mxid_localpart(attributes["localpart"]) + + user_id = UserID(localpart, self._hostname) + if await self._datastore.get_users_by_id_case_insensitive(user_id.to_string()): + # This mxid is taken + raise MappingException( + "mxid '{}' is already taken".format(user_id.to_string()) + ) + + # It's the first time this user is logging in and the mapped mxid was + # not taken, register the user + registered_user_id = await self._registration_handler.register_user( + localpart=localpart, default_display_name=attributes["display_name"], + ) + + await self._datastore.record_user_external_id( + self._auth_provider_id, remote_user_id, registered_user_id, + ) + return registered_user_id + + +UserAttribute = TypedDict( + "UserAttribute", {"localpart": str, "display_name": Optional[str]} +) +C = TypeVar("C") + + +class OidcMappingProvider(Generic[C]): + """A mapping provider maps a UserInfo object to user attributes. + + It should provide the API described by this class. + """ + + def __init__(self, config: C): + """ + Args: + config: A custom config object from this module, parsed by ``parse_config()`` + """ + + @staticmethod + def parse_config(config: dict) -> C: + """Parse the dict provided by the homeserver's config + + Args: + config: A dictionary containing configuration options for this provider + + Returns: + A custom config object for this module + """ + raise NotImplementedError() + + def get_remote_user_id(self, userinfo: UserInfo) -> str: + """Get a unique user ID for this user. + + Usually, in an OIDC-compliant scenario, it should be the ``sub`` claim from the UserInfo object. + + Args: + userinfo: An object representing the user given by the OIDC provider + + Returns: + A unique user ID + """ + raise NotImplementedError() + + async def map_user_attributes( + self, userinfo: UserInfo, token: Token + ) -> UserAttribute: + """Map a ``UserInfo`` objects into user attributes. 
+ + Args: + userinfo: An object representing the user given by the OIDC provider + token: A dict with the tokens returned by the provider + + Returns: + A dict containing the ``localpart`` and (optionally) the ``display_name`` + """ + raise NotImplementedError() + + +# Used to clear out "None" values in templates +def jinja_finalize(thing): + return thing if thing is not None else "" + + +env = Environment(finalize=jinja_finalize) + + +@attr.s +class JinjaOidcMappingConfig: + subject_claim = attr.ib() # type: str + localpart_template = attr.ib() # type: Template + display_name_template = attr.ib() # type: Optional[Template] + + +class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]): + """An implementation of a mapping provider based on Jinja templates. + + This is the default mapping provider. + """ + + def __init__(self, config: JinjaOidcMappingConfig): + self._config = config + + @staticmethod + def parse_config(config: dict) -> JinjaOidcMappingConfig: + subject_claim = config.get("subject_claim", "sub") + + if "localpart_template" not in config: + raise ConfigError( + "missing key: oidc_config.user_mapping_provider.config.localpart_template" + ) + + try: + localpart_template = env.from_string(config["localpart_template"]) + except Exception as e: + raise ConfigError( + "invalid jinja template for oidc_config.user_mapping_provider.config.localpart_template: %r" + % (e,) + ) + + display_name_template = None # type: Optional[Template] + if "display_name_template" in config: + try: + display_name_template = env.from_string(config["display_name_template"]) + except Exception as e: + raise ConfigError( + "invalid jinja template for oidc_config.user_mapping_provider.config.display_name_template: %r" + % (e,) + ) + + return JinjaOidcMappingConfig( + subject_claim=subject_claim, + localpart_template=localpart_template, + display_name_template=display_name_template, + ) + + def get_remote_user_id(self, userinfo: UserInfo) -> str: + return userinfo[self._config.subject_claim] + + async def map_user_attributes( + self, userinfo: UserInfo, token: Token + ) -> UserAttribute: + localpart = self._config.localpart_template.render(user=userinfo).strip() + + display_name = None # type: Optional[str] + if self._config.display_name_template is not None: + display_name = self._config.display_name_template.render( + user=userinfo + ).strip() + + if display_name == "": + display_name = None + + return UserAttribute(localpart=localpart, display_name=display_name) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 5cbefae177..9ea11c0754 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -204,6 +204,7 @@ class PresenceHandler(BasePresenceHandler): self.notifier = hs.get_notifier() self.federation = hs.get_federation_sender() self.state = hs.get_state_handler() + self._presence_enabled = hs.config.use_presence federation_registry = hs.get_federation_registry() @@ -676,13 +677,14 @@ class PresenceHandler(BasePresenceHandler): async def incoming_presence(self, origin, content): """Called when we receive a `m.presence` EDU from a remote server. """ + if not self._presence_enabled: + return + now = self.clock.time_msec() updates = [] for push in content.get("push", []): # A "push" contains a list of presence that we are probably interested # in. - # TODO: Actually check if we're interested, rather than blindly - # accepting presence updates. 
user_id = push.get("user_id", None) if not user_id: logger.info( diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index da12df7f53..73f9eeb399 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -25,8 +25,6 @@ from collections import OrderedDict from six import iteritems, string_types -from twisted.internet import defer - from synapse.api.constants import EventTypes, JoinRules, RoomCreationPreset from synapse.api.errors import AuthError, Codes, NotFoundError, StoreError, SynapseError from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion @@ -103,8 +101,7 @@ class RoomCreationHandler(BaseHandler): self.third_party_event_rules = hs.get_third_party_event_rules() - @defer.inlineCallbacks - def upgrade_room( + async def upgrade_room( self, requester: Requester, old_room_id: str, new_version: RoomVersion ): """Replace a room with a new room with a different version @@ -117,7 +114,7 @@ class RoomCreationHandler(BaseHandler): Returns: Deferred[unicode]: the new room id """ - yield self.ratelimit(requester) + await self.ratelimit(requester) user_id = requester.user.to_string() @@ -138,7 +135,7 @@ class RoomCreationHandler(BaseHandler): # If this user has sent multiple upgrade requests for the same room # and one of them is not complete yet, cache the response and # return it to all subsequent requests - ret = yield self._upgrade_response_cache.wrap( + ret = await self._upgrade_response_cache.wrap( (old_room_id, user_id), self._upgrade_room, requester, @@ -856,8 +853,7 @@ class RoomCreationHandler(BaseHandler): for (etype, state_key), content in initial_state.items(): await send(etype=etype, state_key=state_key, content=content) - @defer.inlineCallbacks - def _generate_room_id( + async def _generate_room_id( self, creator_id: str, is_public: str, room_version: RoomVersion, ): # autogen room IDs and try to create it. We may clash, so just @@ -869,7 +865,7 @@ class RoomCreationHandler(BaseHandler): gen_room_id = RoomID(random_string, self.hs.hostname).to_string() if isinstance(gen_room_id, bytes): gen_room_id = gen_room_id.decode("utf-8") - yield self.store.store_room( + await self.store.store_room( room_id=gen_room_id, room_creator_user_id=creator_id, is_public=is_public, @@ -888,8 +884,7 @@ class RoomContextHandler(object): self.storage = hs.get_storage() self.state_store = self.storage.state - @defer.inlineCallbacks - def get_event_context(self, user, room_id, event_id, limit, event_filter): + async def get_event_context(self, user, room_id, event_id, limit, event_filter): """Retrieves events, pagination tokens and state around a given event in a room. 
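
In the get_event_context hunk that follows, the requested limit is split into a window of events before and after the anchor event. Restated as a standalone helper (the function name is illustrative):

import math

def split_context_limit(limit: int):
    # Half the budget (rounded down) goes to events before the anchor event,
    # the remainder to events after it.
    before_limit = math.floor(limit / 2.0)
    after_limit = limit - before_limit
    return before_limit, after_limit

assert split_context_limit(10) == (5, 5)
assert split_context_limit(7) == (3, 4)
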
@@ -908,7 +903,7 @@ class RoomContextHandler(object): before_limit = math.floor(limit / 2.0) after_limit = limit - before_limit - users = yield self.store.get_users_in_room(room_id) + users = await self.store.get_users_in_room(room_id) is_peeking = user.to_string() not in users def filter_evts(events): @@ -916,17 +911,17 @@ class RoomContextHandler(object): self.storage, user.to_string(), events, is_peeking=is_peeking ) - event = yield self.store.get_event( + event = await self.store.get_event( event_id, get_prev_content=True, allow_none=True ) if not event: return None - filtered = yield (filter_evts([event])) + filtered = await filter_evts([event]) if not filtered: raise AuthError(403, "You don't have permission to access that event.") - results = yield self.store.get_events_around( + results = await self.store.get_events_around( room_id, event_id, before_limit, after_limit, event_filter ) @@ -934,8 +929,8 @@ class RoomContextHandler(object): results["events_before"] = event_filter.filter(results["events_before"]) results["events_after"] = event_filter.filter(results["events_after"]) - results["events_before"] = yield filter_evts(results["events_before"]) - results["events_after"] = yield filter_evts(results["events_after"]) + results["events_before"] = await filter_evts(results["events_before"]) + results["events_after"] = await filter_evts(results["events_after"]) # filter_evts can return a pruned event in case the user is allowed to see that # there's something there but not see the content, so use the event that's in # `filtered` rather than the event we retrieved from the datastore. @@ -962,7 +957,7 @@ class RoomContextHandler(object): # first? Shouldn't we be consistent with /sync? # https://github.com/matrix-org/matrix-doc/issues/687 - state = yield self.state_store.get_state_for_events( + state = await self.state_store.get_state_for_events( [last_event_id], state_filter=state_filter ) @@ -970,7 +965,7 @@ class RoomContextHandler(object): if event_filter: state_events = event_filter.filter(state_events) - results["state"] = yield filter_evts(state_events) + results["state"] = await filter_evts(state_events) # We use a dummy token here as we only care about the room portion of # the token, which we replace. @@ -989,13 +984,12 @@ class RoomEventSource(object): def __init__(self, hs): self.store = hs.get_datastore() - @defer.inlineCallbacks - def get_new_events( + async def get_new_events( self, user, from_key, limit, room_ids, is_guest, explicit_room_id=None ): # We just ignore the key for now. 
- to_key = yield self.get_current_key() + to_key = await self.get_current_key() from_token = RoomStreamToken.parse(from_key) if from_token.topological: @@ -1008,11 +1002,11 @@ class RoomEventSource(object): # See https://github.com/matrix-org/matrix-doc/issues/1144 raise NotImplementedError() else: - room_events = yield self.store.get_membership_changes_for_user( + room_events = await self.store.get_membership_changes_for_user( user.to_string(), from_key, to_key ) - room_to_events = yield self.store.get_room_events_stream_for_rooms( + room_to_events = await self.store.get_room_events_stream_for_rooms( room_ids=room_ids, from_key=from_key, to_key=to_key, diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 53b49bc15f..e51e1c32fe 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -17,15 +17,16 @@ import abc import logging +from typing import Dict, Iterable, List, Optional, Tuple, Union from six.moves import http_client -from twisted.internet import defer - from synapse import types from synapse.api.constants import EventTypes, Membership from synapse.api.errors import AuthError, Codes, SynapseError -from synapse.types import Collection, RoomID, UserID +from synapse.events import EventBase +from synapse.events.snapshot import EventContext +from synapse.types import Collection, Requester, RoomAlias, RoomID, UserID from synapse.util.async_helpers import Linearizer from synapse.util.distributor import user_joined_room, user_left_room @@ -76,84 +77,84 @@ class RoomMemberHandler(object): self.base_handler = BaseHandler(hs) @abc.abstractmethod - def _remote_join(self, requester, remote_room_hosts, room_id, user, content): + async def _remote_join( + self, + requester: Requester, + remote_room_hosts: List[str], + room_id: str, + user: UserID, + content: dict, + ) -> Optional[dict]: """Try and join a room that this server is not in Args: - requester (Requester) - remote_room_hosts (list[str]): List of servers that can be used - to join via. - room_id (str): Room that we are trying to join - user (UserID): User who is trying to join - content (dict): A dict that should be used as the content of the - join event. - - Returns: - Deferred + requester + remote_room_hosts: List of servers that can be used to join via. + room_id: Room that we are trying to join + user: User who is trying to join + content: A dict that should be used as the content of the join event. """ raise NotImplementedError() @abc.abstractmethod - def _remote_reject_invite( - self, requester, remote_room_hosts, room_id, target, content - ): + async def _remote_reject_invite( + self, + requester: Requester, + remote_room_hosts: List[str], + room_id: str, + target: UserID, + content: dict, + ) -> dict: """Attempt to reject an invite for a room this server is not in. If we fail to do so we locally mark the invite as rejected. 
Args: - requester (Requester) - remote_room_hosts (list[str]): List of servers to use to try and - reject invite - room_id (str) - target (UserID): The user rejecting the invite - content (dict): The content for the rejection event + requester + remote_room_hosts: List of servers to use to try and reject invite + room_id + target: The user rejecting the invite + content: The content for the rejection event Returns: - Deferred[dict]: A dictionary to be returned to the client, may + A dictionary to be returned to the client, may include event_id etc, or nothing if we locally rejected """ raise NotImplementedError() @abc.abstractmethod - def _user_joined_room(self, target, room_id): + async def _user_joined_room(self, target: UserID, room_id: str) -> None: """Notifies distributor on master process that the user has joined the room. Args: - target (UserID) - room_id (str) - - Returns: - Deferred|None + target + room_id """ raise NotImplementedError() @abc.abstractmethod - def _user_left_room(self, target, room_id): + async def _user_left_room(self, target: UserID, room_id: str) -> None: """Notifies distributor on master process that the user has left the room. Args: - target (UserID) - room_id (str) - - Returns: - Deferred|None + target + room_id """ raise NotImplementedError() async def _local_membership_update( self, - requester, - target, - room_id, - membership, + requester: Requester, + target: UserID, + room_id: str, + membership: str, prev_event_ids: Collection[str], - txn_id=None, - ratelimit=True, - content=None, - require_consent=True, - ): + txn_id: Optional[str] = None, + ratelimit: bool = True, + content: Optional[dict] = None, + require_consent: bool = True, + ) -> EventBase: user_id = target.to_string() if content is None: @@ -214,20 +215,18 @@ class RoomMemberHandler(object): return event - @defer.inlineCallbacks - def copy_room_tags_and_direct_to_room(self, old_room_id, new_room_id, user_id): + async def copy_room_tags_and_direct_to_room( + self, old_room_id, new_room_id, user_id + ) -> None: """Copies the tags and direct room state from one room to another. Args: - old_room_id (str) - new_room_id (str) - user_id (str) - - Returns: - Deferred[None] + old_room_id: The room ID of the old room. + new_room_id: The room ID of the new room. + user_id: The user's ID. 
""" # Retrieve user account data for predecessor room - user_account_data, _ = yield self.store.get_account_data_for_user(user_id) + user_account_data, _ = await self.store.get_account_data_for_user(user_id) # Copy direct message state if applicable direct_rooms = user_account_data.get("m.direct", {}) @@ -240,31 +239,31 @@ class RoomMemberHandler(object): direct_rooms[key].append(new_room_id) # Save back to user's m.direct account data - yield self.store.add_account_data_for_user( + await self.store.add_account_data_for_user( user_id, "m.direct", direct_rooms ) break # Copy room tags if applicable - room_tags = yield self.store.get_tags_for_room(user_id, old_room_id) + room_tags = await self.store.get_tags_for_room(user_id, old_room_id) # Copy each room tag to the new room for tag, tag_content in room_tags.items(): - yield self.store.add_tag_to_room(user_id, new_room_id, tag, tag_content) + await self.store.add_tag_to_room(user_id, new_room_id, tag, tag_content) async def update_membership( self, - requester, - target, - room_id, - action, - txn_id=None, - remote_room_hosts=None, - third_party_signed=None, - ratelimit=True, - content=None, - require_consent=True, - ): + requester: Requester, + target: UserID, + room_id: str, + action: str, + txn_id: Optional[str] = None, + remote_room_hosts: Optional[List[str]] = None, + third_party_signed: Optional[dict] = None, + ratelimit: bool = True, + content: Optional[dict] = None, + require_consent: bool = True, + ) -> Union[EventBase, Optional[dict]]: key = (room_id,) with (await self.member_linearizer.queue(key)): @@ -285,17 +284,17 @@ class RoomMemberHandler(object): async def _update_membership( self, - requester, - target, - room_id, - action, - txn_id=None, - remote_room_hosts=None, - third_party_signed=None, - ratelimit=True, - content=None, - require_consent=True, - ): + requester: Requester, + target: UserID, + room_id: str, + action: str, + txn_id: Optional[str] = None, + remote_room_hosts: Optional[List[str]] = None, + third_party_signed: Optional[dict] = None, + ratelimit: bool = True, + content: Optional[dict] = None, + require_consent: bool = True, + ) -> Union[EventBase, Optional[dict]]: content_specified = bool(content) if content is None: content = {} @@ -469,12 +468,11 @@ class RoomMemberHandler(object): else: # send the rejection to the inviter's HS. remote_room_hosts = remote_room_hosts + [inviter.domain] - res = await self._remote_reject_invite( + return await self._remote_reject_invite( requester, remote_room_hosts, room_id, target, content, ) - return res - res = await self._local_membership_update( + return await self._local_membership_update( requester=requester, target=target, room_id=room_id, @@ -485,10 +483,10 @@ class RoomMemberHandler(object): content=content, require_consent=require_consent, ) - return res - @defer.inlineCallbacks - def transfer_room_state_on_room_upgrade(self, old_room_id, room_id): + async def transfer_room_state_on_room_upgrade( + self, old_room_id: str, room_id: str + ) -> None: """Upon our server becoming aware of an upgraded room, either by upgrading a room ourselves or joining one, we can transfer over information from the previous room. @@ -496,50 +494,44 @@ class RoomMemberHandler(object): well as migrating the room directory state. 
Args: - old_room_id (str): The ID of the old room - - room_id (str): The ID of the new room - - Returns: - Deferred + old_room_id: The ID of the old room + room_id: The ID of the new room """ logger.info("Transferring room state from %s to %s", old_room_id, room_id) # Find all local users that were in the old room and copy over each user's state - users = yield self.store.get_users_in_room(old_room_id) - yield self.copy_user_state_on_room_upgrade(old_room_id, room_id, users) + users = await self.store.get_users_in_room(old_room_id) + await self.copy_user_state_on_room_upgrade(old_room_id, room_id, users) # Add new room to the room directory if the old room was there # Remove old room from the room directory - old_room = yield self.store.get_room(old_room_id) + old_room = await self.store.get_room(old_room_id) if old_room and old_room["is_public"]: - yield self.store.set_room_is_public(old_room_id, False) - yield self.store.set_room_is_public(room_id, True) + await self.store.set_room_is_public(old_room_id, False) + await self.store.set_room_is_public(room_id, True) # Transfer alias mappings in the room directory - yield self.store.update_aliases_for_room(old_room_id, room_id) + await self.store.update_aliases_for_room(old_room_id, room_id) # Check if any groups we own contain the predecessor room - local_group_ids = yield self.store.get_local_groups_for_room(old_room_id) + local_group_ids = await self.store.get_local_groups_for_room(old_room_id) for group_id in local_group_ids: # Add new the new room to those groups - yield self.store.add_room_to_group(group_id, room_id, old_room["is_public"]) + await self.store.add_room_to_group(group_id, room_id, old_room["is_public"]) # Remove the old room from those groups - yield self.store.remove_room_from_group(group_id, old_room_id) + await self.store.remove_room_from_group(group_id, old_room_id) - @defer.inlineCallbacks - def copy_user_state_on_room_upgrade(self, old_room_id, new_room_id, user_ids): + async def copy_user_state_on_room_upgrade( + self, old_room_id: str, new_room_id: str, user_ids: Iterable[str] + ) -> None: """Copy user-specific information when they join a new room when that new room is the result of a room upgrade Args: - old_room_id (str): The ID of upgraded room - new_room_id (str): The ID of the new room - user_ids (Iterable[str]): User IDs to copy state for - - Returns: - Deferred + old_room_id: The ID of upgraded room + new_room_id: The ID of the new room + user_ids: User IDs to copy state for """ logger.debug( @@ -552,11 +544,11 @@ class RoomMemberHandler(object): for user_id in user_ids: try: # It is an upgraded room. Copy over old tags - yield self.copy_room_tags_and_direct_to_room( + await self.copy_room_tags_and_direct_to_room( old_room_id, new_room_id, user_id ) # Copy over push rules - yield self.store.copy_push_rules_from_room_to_room_for_user( + await self.store.copy_push_rules_from_room_to_room_for_user( old_room_id, new_room_id, user_id ) except Exception: @@ -569,17 +561,23 @@ class RoomMemberHandler(object): ) continue - async def send_membership_event(self, requester, event, context, ratelimit=True): + async def send_membership_event( + self, + requester: Requester, + event: EventBase, + context: EventContext, + ratelimit: bool = True, + ): """ Change the membership status of a user in a room. Args: - requester (Requester): The local user who requested the membership + requester: The local user who requested the membership event. 
If None, certain checks, like whether this homeserver can act as the sender, will be skipped. - event (SynapseEvent): The membership event. + event: The membership event. context: The context of the event. - ratelimit (bool): Whether to rate limit this request. + ratelimit: Whether to rate limit this request. Raises: SynapseError if there was a problem changing the membership. """ @@ -639,8 +637,9 @@ class RoomMemberHandler(object): if prev_member_event.membership == Membership.JOIN: await self._user_left_room(target_user, room_id) - @defer.inlineCallbacks - def _can_guest_join(self, current_state_ids): + async def _can_guest_join( + self, current_state_ids: Dict[Tuple[str, str], str] + ) -> bool: """ Returns whether a guest can join a room based on its current state. """ @@ -648,7 +647,7 @@ class RoomMemberHandler(object): if not guest_access_id: return False - guest_access = yield self.store.get_event(guest_access_id) + guest_access = await self.store.get_event(guest_access_id) return ( guest_access @@ -657,13 +656,14 @@ class RoomMemberHandler(object): and guest_access.content["guest_access"] == "can_join" ) - @defer.inlineCallbacks - def lookup_room_alias(self, room_alias): + async def lookup_room_alias( + self, room_alias: RoomAlias + ) -> Tuple[RoomID, List[str]]: """ Get the room ID associated with a room alias. Args: - room_alias (RoomAlias): The alias to look up. + room_alias: The alias to look up. Returns: A tuple of: The room ID as a RoomID object. @@ -672,7 +672,7 @@ class RoomMemberHandler(object): SynapseError if room alias could not be found. """ directory_handler = self.directory_handler - mapping = yield directory_handler.get_association(room_alias) + mapping = await directory_handler.get_association(room_alias) if not mapping: raise SynapseError(404, "No such room alias") @@ -687,25 +687,25 @@ class RoomMemberHandler(object): return RoomID.from_string(room_id), servers - @defer.inlineCallbacks - def _get_inviter(self, user_id, room_id): - invite = yield self.store.get_invite_for_local_user_in_room( + async def _get_inviter(self, user_id: str, room_id: str) -> Optional[UserID]: + invite = await self.store.get_invite_for_local_user_in_room( user_id=user_id, room_id=room_id ) if invite: return UserID.from_string(invite.sender) + return None async def do_3pid_invite( self, - room_id, - inviter, - medium, - address, - id_server, - requester, - txn_id, - id_access_token=None, - ): + room_id: str, + inviter: UserID, + medium: str, + address: str, + id_server: str, + requester: Requester, + txn_id: Optional[str], + id_access_token: Optional[str] = None, + ) -> None: if self.config.block_non_admin_invites: is_requester_admin = await self.auth.is_server_admin(requester.user) if not is_requester_admin: @@ -754,15 +754,15 @@ class RoomMemberHandler(object): async def _make_and_store_3pid_invite( self, - requester, - id_server, - medium, - address, - room_id, - user, - txn_id, - id_access_token=None, - ): + requester: Requester, + id_server: str, + medium: str, + address: str, + room_id: str, + user: UserID, + txn_id: Optional[str], + id_access_token: Optional[str] = None, + ) -> None: room_state = await self.state_handler.get_current_state(room_id) inviter_display_name = "" @@ -836,8 +836,9 @@ class RoomMemberHandler(object): txn_id=txn_id, ) - @defer.inlineCallbacks - def _is_host_in_room(self, current_state_ids): + async def _is_host_in_room( + self, current_state_ids: Dict[Tuple[str, str], str] + ) -> bool: # Have we just created the room, and is this about to be the very # first 
member event? create_event_id = current_state_ids.get(("m.room.create", "")) @@ -850,7 +851,7 @@ class RoomMemberHandler(object): continue event_id = current_state_ids[(etype, state_key)] - event = yield self.store.get_event(event_id, allow_none=True) + event = await self.store.get_event(event_id, allow_none=True) if not event: continue @@ -859,11 +860,10 @@ class RoomMemberHandler(object): return False - @defer.inlineCallbacks - def _is_server_notice_room(self, room_id): + async def _is_server_notice_room(self, room_id: str) -> bool: if self._server_notices_mxid is None: return False - user_ids = yield self.store.get_users_in_room(room_id) + user_ids = await self.store.get_users_in_room(room_id) return self._server_notices_mxid in user_ids @@ -875,20 +875,21 @@ class RoomMemberMasterHandler(RoomMemberHandler): self.distributor.declare("user_joined_room") self.distributor.declare("user_left_room") - @defer.inlineCallbacks - def _is_remote_room_too_complex(self, room_id, remote_room_hosts): + async def _is_remote_room_too_complex( + self, room_id: str, remote_room_hosts: List[str] + ) -> Optional[bool]: """ Check if complexity of a remote room is too great. Args: - room_id (str) - remote_room_hosts (list[str]) + room_id + remote_room_hosts Returns: bool of whether the complexity is too great, or None if unable to be fetched """ max_complexity = self.hs.config.limit_remote_rooms.complexity - complexity = yield self.federation_handler.get_room_complexity( + complexity = await self.federation_handler.get_room_complexity( remote_room_hosts, room_id ) @@ -896,22 +897,26 @@ class RoomMemberMasterHandler(RoomMemberHandler): return complexity["v1"] > max_complexity return None - @defer.inlineCallbacks - def _is_local_room_too_complex(self, room_id): + async def _is_local_room_too_complex(self, room_id: str) -> bool: """ Check if the complexity of a local room is too great. Args: - room_id (str) - - Returns: bool + room_id: The room ID to check for complexity. 
""" max_complexity = self.hs.config.limit_remote_rooms.complexity - complexity = yield self.store.get_room_complexity(room_id) + complexity = await self.store.get_room_complexity(room_id) return complexity["v1"] > max_complexity - async def _remote_join(self, requester, remote_room_hosts, room_id, user, content): + async def _remote_join( + self, + requester: Requester, + remote_room_hosts: List[str], + room_id: str, + user: UserID, + content: dict, + ) -> None: """Implements RoomMemberHandler._remote_join """ # filter ourselves out of remote_room_hosts: do_invite_join ignores it @@ -970,18 +975,20 @@ class RoomMemberMasterHandler(RoomMemberHandler): errcode=Codes.RESOURCE_LIMIT_EXCEEDED, ) - @defer.inlineCallbacks - def _remote_reject_invite( - self, requester, remote_room_hosts, room_id, target, content - ): + async def _remote_reject_invite( + self, + requester: Requester, + remote_room_hosts: List[str], + room_id: str, + target: UserID, + content: dict, + ) -> dict: """Implements RoomMemberHandler._remote_reject_invite """ fed_handler = self.federation_handler try: - ret = yield defer.ensureDeferred( - fed_handler.do_remotely_reject_invite( - remote_room_hosts, room_id, target.to_string(), content=content, - ) + ret = await fed_handler.do_remotely_reject_invite( + remote_room_hosts, room_id, target.to_string(), content=content, ) return ret except Exception as e: @@ -993,24 +1000,23 @@ class RoomMemberMasterHandler(RoomMemberHandler): # logger.warning("Failed to reject invite: %s", e) - yield self.store.locally_reject_invite(target.to_string(), room_id) + await self.store.locally_reject_invite(target.to_string(), room_id) return {} - def _user_joined_room(self, target, room_id): + async def _user_joined_room(self, target: UserID, room_id: str) -> None: """Implements RoomMemberHandler._user_joined_room """ - return defer.succeed(user_joined_room(self.distributor, target, room_id)) + user_joined_room(self.distributor, target, room_id) - def _user_left_room(self, target, room_id): + async def _user_left_room(self, target: UserID, room_id: str) -> None: """Implements RoomMemberHandler._user_left_room """ - return defer.succeed(user_left_room(self.distributor, target, room_id)) + user_left_room(self.distributor, target, room_id) - @defer.inlineCallbacks - def forget(self, user, room_id): + async def forget(self, user: UserID, room_id: str) -> None: user_id = user.to_string() - member = yield self.state_handler.get_current_state( + member = await self.state_handler.get_current_state( room_id=room_id, event_type=EventTypes.Member, state_key=user_id ) membership = member.membership if member else None @@ -1022,4 +1028,4 @@ class RoomMemberMasterHandler(RoomMemberHandler): raise SynapseError(400, "User %s in room %s" % (user_id, room_id)) if membership: - yield self.store.forget(user_id, room_id) + await self.store.forget(user_id, room_id) diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py index 69be86893b..5c776cc0be 100644 --- a/synapse/handlers/room_member_worker.py +++ b/synapse/handlers/room_member_worker.py @@ -14,8 +14,7 @@ # limitations under the License. 
import logging - -from twisted.internet import defer +from typing import List, Optional from synapse.api.errors import SynapseError from synapse.handlers.room_member import RoomMemberHandler @@ -24,6 +23,7 @@ from synapse.replication.http.membership import ( ReplicationRemoteRejectInviteRestServlet as ReplRejectInvite, ReplicationUserJoinedLeftRoomRestServlet as ReplJoinedLeft, ) +from synapse.types import Requester, UserID logger = logging.getLogger(__name__) @@ -36,14 +36,20 @@ class RoomMemberWorkerHandler(RoomMemberHandler): self._remote_reject_client = ReplRejectInvite.make_client(hs) self._notify_change_client = ReplJoinedLeft.make_client(hs) - @defer.inlineCallbacks - def _remote_join(self, requester, remote_room_hosts, room_id, user, content): + async def _remote_join( + self, + requester: Requester, + remote_room_hosts: List[str], + room_id: str, + user: UserID, + content: dict, + ) -> Optional[dict]: """Implements RoomMemberHandler._remote_join """ if len(remote_room_hosts) == 0: raise SynapseError(404, "No known servers") - ret = yield self._remote_join_client( + ret = await self._remote_join_client( requester=requester, remote_room_hosts=remote_room_hosts, room_id=room_id, @@ -51,16 +57,21 @@ class RoomMemberWorkerHandler(RoomMemberHandler): content=content, ) - yield self._user_joined_room(user, room_id) + await self._user_joined_room(user, room_id) return ret - def _remote_reject_invite( - self, requester, remote_room_hosts, room_id, target, content - ): + async def _remote_reject_invite( + self, + requester: Requester, + remote_room_hosts: List[str], + room_id: str, + target: UserID, + content: dict, + ) -> dict: """Implements RoomMemberHandler._remote_reject_invite """ - return self._remote_reject_client( + return await self._remote_reject_client( requester=requester, remote_room_hosts=remote_room_hosts, room_id=room_id, @@ -68,16 +79,16 @@ class RoomMemberWorkerHandler(RoomMemberHandler): content=content, ) - def _user_joined_room(self, target, room_id): + async def _user_joined_room(self, target: UserID, room_id: str) -> None: """Implements RoomMemberHandler._user_joined_room """ - return self._notify_change_client( + await self._notify_change_client( user_id=target.to_string(), room_id=room_id, change="joined" ) - def _user_left_room(self, target, room_id): + async def _user_left_room(self, target: UserID, room_id: str) -> None: """Implements RoomMemberHandler._user_left_room """ - return self._notify_change_client( + await self._notify_change_client( user_id=target.to_string(), room_id=room_id, change="left" ) diff --git a/synapse/handlers/saml_handler.py b/synapse/handlers/saml_handler.py index 96f2dd36ad..e7015c704f 100644 --- a/synapse/handlers/saml_handler.py +++ b/synapse/handlers/saml_handler.py @@ -14,7 +14,7 @@ # limitations under the License. 
import logging import re -from typing import Optional, Tuple +from typing import Callable, Dict, Optional, Set, Tuple import attr import saml2 @@ -25,6 +25,7 @@ from synapse.api.errors import SynapseError from synapse.config import ConfigError from synapse.http.server import finish_request from synapse.http.servlet import parse_string +from synapse.http.site import SynapseRequest from synapse.module_api import ModuleApi from synapse.module_api.errors import RedirectException from synapse.types import ( @@ -81,17 +82,19 @@ class SamlHandler: self._error_html_content = hs.config.saml2_error_html_content - def handle_redirect_request(self, client_redirect_url, ui_auth_session_id=None): + def handle_redirect_request( + self, client_redirect_url: bytes, ui_auth_session_id: Optional[str] = None + ) -> bytes: """Handle an incoming request to /login/sso/redirect Args: - client_redirect_url (bytes): the URL that we should redirect the + client_redirect_url: the URL that we should redirect the client to when everything is done - ui_auth_session_id (Optional[str]): The session ID of the ongoing UI Auth (or + ui_auth_session_id: The session ID of the ongoing UI Auth (or None if this is a login). Returns: - bytes: URL to redirect to + URL to redirect to """ reqid, info = self._saml_client.prepare_for_authenticate( relay_state=client_redirect_url @@ -109,15 +112,15 @@ class SamlHandler: # this shouldn't happen! raise Exception("prepare_for_authenticate didn't return a Location header") - async def handle_saml_response(self, request): + async def handle_saml_response(self, request: SynapseRequest) -> None: """Handle an incoming request to /_matrix/saml2/authn_response Args: - request (SynapseRequest): the incoming request from the browser. We'll + request: the incoming request from the browser. We'll respond to it with a redirect. Returns: - Deferred[none]: Completes once we have handled the request. + Completes once we have handled the request. 
""" resp_bytes = parse_string(request, "SAMLResponse", required=True) relay_state = parse_string(request, "RelayState", required=True) @@ -310,6 +313,7 @@ DOT_REPLACE_PATTERN = re.compile( def dot_replace_for_mxid(username: str) -> str: + """Replace any characters which are not allowed in Matrix IDs with a dot.""" username = username.lower() username = DOT_REPLACE_PATTERN.sub(".", username) @@ -321,7 +325,7 @@ def dot_replace_for_mxid(username: str) -> str: MXID_MAPPER_MAP = { "hexencode": map_username_to_mxid_localpart, "dotreplace": dot_replace_for_mxid, -} +} # type: Dict[str, Callable[[str], str]] @attr.s @@ -349,7 +353,7 @@ class DefaultSamlMappingProvider(object): def get_remote_user_id( self, saml_response: saml2.response.AuthnResponse, client_redirect_url: str - ): + ) -> str: """Extracts the remote user id from the SAML response""" try: return saml_response.ava["uid"][0] @@ -428,14 +432,14 @@ class DefaultSamlMappingProvider(object): return SamlConfig(mxid_source_attribute, mxid_mapper) @staticmethod - def get_saml_attributes(config: SamlConfig) -> Tuple[set, set]: + def get_saml_attributes(config: SamlConfig) -> Tuple[Set[str], Set[str]]: """Returns the required attributes of a SAML Args: config: A SamlConfig object containing configuration params for this provider Returns: - tuple[set,set]: The first set equates to the saml auth response + The first set equates to the saml auth response attributes that are required for the module to function, whereas the second set consists of those attributes which can be used if available, but are not necessary diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index ec1542d416..4d40d3ac9c 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -18,8 +18,6 @@ import logging from unpaddedbase64 import decode_base64, encode_base64 -from twisted.internet import defer - from synapse.api.constants import EventTypes, Membership from synapse.api.errors import NotFoundError, SynapseError from synapse.api.filtering import Filter @@ -39,8 +37,7 @@ class SearchHandler(BaseHandler): self.state_store = self.storage.state self.auth = hs.get_auth() - @defer.inlineCallbacks - def get_old_rooms_from_upgraded_room(self, room_id): + async def get_old_rooms_from_upgraded_room(self, room_id): """Retrieves room IDs of old rooms in the history of an upgraded room. We do so by checking the m.room.create event of the room for a @@ -60,7 +57,7 @@ class SearchHandler(BaseHandler): historical_room_ids = [] # The initial room must have been known for us to get this far - predecessor = yield self.store.get_room_predecessor(room_id) + predecessor = await self.store.get_room_predecessor(room_id) while True: if not predecessor: @@ -75,7 +72,7 @@ class SearchHandler(BaseHandler): # Don't add it to the list until we have checked that we are in the room try: - next_predecessor_room = yield self.store.get_room_predecessor( + next_predecessor_room = await self.store.get_room_predecessor( predecessor_room_id ) except NotFoundError: @@ -89,8 +86,7 @@ class SearchHandler(BaseHandler): return historical_room_ids - @defer.inlineCallbacks - def search(self, user, content, batch=None): + async def search(self, user, content, batch=None): """Performs a full text search for a user. 
Args: @@ -179,7 +175,7 @@ class SearchHandler(BaseHandler): search_filter = Filter(filter_dict) # TODO: Search through left rooms too - rooms = yield self.store.get_rooms_for_local_user_where_membership_is( + rooms = await self.store.get_rooms_for_local_user_where_membership_is( user.to_string(), membership_list=[Membership.JOIN], # membership_list=[Membership.JOIN, Membership.LEAVE, Membership.Ban], @@ -192,7 +188,7 @@ class SearchHandler(BaseHandler): historical_room_ids = [] for room_id in search_filter.rooms: # Add any previous rooms to the search if they exist - ids = yield self.get_old_rooms_from_upgraded_room(room_id) + ids = await self.get_old_rooms_from_upgraded_room(room_id) historical_room_ids += ids # Prevent any historical events from being filtered @@ -223,7 +219,7 @@ class SearchHandler(BaseHandler): count = None if order_by == "rank": - search_result = yield self.store.search_msgs(room_ids, search_term, keys) + search_result = await self.store.search_msgs(room_ids, search_term, keys) count = search_result["count"] @@ -238,7 +234,7 @@ class SearchHandler(BaseHandler): filtered_events = search_filter.filter([r["event"] for r in results]) - events = yield filter_events_for_client( + events = await filter_events_for_client( self.storage, user.to_string(), filtered_events ) @@ -267,7 +263,7 @@ class SearchHandler(BaseHandler): # But only go around 5 times since otherwise synapse will be sad. while len(room_events) < search_filter.limit() and i < 5: i += 1 - search_result = yield self.store.search_rooms( + search_result = await self.store.search_rooms( room_ids, search_term, keys, @@ -288,7 +284,7 @@ class SearchHandler(BaseHandler): filtered_events = search_filter.filter([r["event"] for r in results]) - events = yield filter_events_for_client( + events = await filter_events_for_client( self.storage, user.to_string(), filtered_events ) @@ -343,11 +339,11 @@ class SearchHandler(BaseHandler): # If client has asked for "context" for each event (i.e. 
some surrounding # events and state), fetch that if event_context is not None: - now_token = yield self.hs.get_event_sources().get_current_token() + now_token = await self.hs.get_event_sources().get_current_token() contexts = {} for event in allowed_events: - res = yield self.store.get_events_around( + res = await self.store.get_events_around( event.room_id, event.event_id, before_limit, after_limit ) @@ -357,11 +353,11 @@ class SearchHandler(BaseHandler): len(res["events_after"]), ) - res["events_before"] = yield filter_events_for_client( + res["events_before"] = await filter_events_for_client( self.storage, user.to_string(), res["events_before"] ) - res["events_after"] = yield filter_events_for_client( + res["events_after"] = await filter_events_for_client( self.storage, user.to_string(), res["events_after"] ) @@ -390,7 +386,7 @@ class SearchHandler(BaseHandler): [(EventTypes.Member, sender) for sender in senders] ) - state = yield self.state_store.get_state_for_event( + state = await self.state_store.get_state_for_event( last_event_id, state_filter ) @@ -412,10 +408,10 @@ class SearchHandler(BaseHandler): time_now = self.clock.time_msec() for context in contexts.values(): - context["events_before"] = yield self._event_serializer.serialize_events( + context["events_before"] = await self._event_serializer.serialize_events( context["events_before"], time_now ) - context["events_after"] = yield self._event_serializer.serialize_events( + context["events_after"] = await self._event_serializer.serialize_events( context["events_after"], time_now ) @@ -423,7 +419,7 @@ class SearchHandler(BaseHandler): if include_state: rooms = {e.room_id for e in allowed_events} for room_id in rooms: - state = yield self.state_handler.get_current_state(room_id) + state = await self.state_handler.get_current_state(room_id) state_results[room_id] = list(state.values()) state_results.values() @@ -437,7 +433,7 @@ class SearchHandler(BaseHandler): { "rank": rank_map[e.event_id], "result": ( - yield self._event_serializer.serialize_event(e, time_now) + await self._event_serializer.serialize_event(e, time_now) ), "context": contexts.get(e.event_id, {}), } @@ -452,7 +448,7 @@ class SearchHandler(BaseHandler): if state_results: s = {} for room_id, state in state_results.items(): - s[room_id] = yield self._event_serializer.serialize_events( + s[room_id] = await self._event_serializer.serialize_events( state, time_now ) |
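
The room_member.py hunks above all follow the same mechanical recipe: drop @defer.inlineCallbacks, change def to async def, replace each yield with await, and add type hints while the signature is being touched. Below is a minimal before/after sketch of that shape, assuming a toy handler and a FakeStore stub rather than Synapse's real datastore; awaiting the store call works because modern Twisted Deferreds implement __await__.

    from typing import Optional

    from twisted.internet import defer, task


    class ExampleHandler:
        """Toy handler; ``store`` is a stand-in for Synapse's datastore."""

        def __init__(self, store):
            self.store = store

        # Before: a generator-based coroutine driven by @defer.inlineCallbacks.
        @defer.inlineCallbacks
        def get_inviter_old(self, user_id, room_id):
            invite = yield self.store.get_invite_for_local_user_in_room(
                user_id=user_id, room_id=room_id
            )
            return invite.sender if invite else None

        # After: a native coroutine with explicit annotations. The awaited
        # call site is unchanged apart from the keyword.
        async def get_inviter_new(self, user_id: str, room_id: str) -> Optional[str]:
            invite = await self.store.get_invite_for_local_user_in_room(
                user_id=user_id, room_id=room_id
            )
            return invite.sender if invite else None


    class FakeStore:
        """Stub store returning an already-fired Deferred, as cached getters often do."""

        def get_invite_for_local_user_in_room(self, user_id, room_id):
            return defer.succeed(None)


    def main():
        handler = ExampleHandler(FakeStore())

        async def demo():
            print(await handler.get_inviter_new("@alice:example.org", "!room:example.org"))

        # Native coroutines still need to be driven by Twisted, hence ensureDeferred.
        task.react(lambda _reactor: defer.ensureDeferred(demo()))


    if __name__ == "__main__":
        main()

Running the module prints None, since the fake store reports no pending invite; the point is only that the conversion is local to the function body and does not change callers.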
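
The room_member_worker.py hunk keeps the same async interface but, instead of firing the in-process distributor, delegates to a replication client that calls back to the main process. A rough sketch of that master/worker split under assumed stand-ins (the distributor's fire() method and the notify_change_client callable are illustrative, not Synapse's exact APIs):

    import abc


    class BaseRoomMemberHandler(abc.ABC):
        """Shared membership logic would live here; per-process hooks are abstract."""

        @abc.abstractmethod
        async def _user_joined_room(self, user_id: str, room_id: str) -> None:
            ...


    class MasterHandler(BaseRoomMemberHandler):
        """Main process: notify in-process observers directly."""

        def __init__(self, distributor):
            self.distributor = distributor

        async def _user_joined_room(self, user_id: str, room_id: str) -> None:
            # Synchronous fan-out to locally registered observers; nothing to await.
            self.distributor.fire("user_joined_room", user_id, room_id)


    class WorkerHandler(BaseRoomMemberHandler):
        """Worker process: ask the main process to fire the notification."""

        def __init__(self, notify_change_client):
            self._notify_change_client = notify_change_client

        async def _user_joined_room(self, user_id: str, room_id: str) -> None:
            # The replication client makes an HTTP call to the main process.
            await self._notify_change_client(
                user_id=user_id, room_id=room_id, change="joined"
            )


    if __name__ == "__main__":
        import asyncio

        class FakeDistributor:
            def fire(self, name, *args):
                print("fired", name, args)

        async def fake_client(**kwargs):
            print("replicated", kwargs)

        asyncio.run(MasterHandler(FakeDistributor())._user_joined_room("@a:x", "!r:x"))
        asyncio.run(WorkerHandler(fake_client)._user_joined_room("@a:x", "!r:x"))

Making both hooks coroutines, even though the master-side one awaits nothing, is what lets the shared handler code await them unconditionally.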
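
In saml_handler.py, MXID_MAPPER_MAP gains a type comment (Dict[str, Callable[[str], str]]) and dot_replace_for_mxid gains a docstring. The regex itself is elided in the diff above, so the sketch below substitutes an assumed character class based on the Matrix localpart grammar; treat both the pattern and the hexencode stand-in as illustrative, not Synapse's exact implementations.

    import re
    from typing import Callable, Dict

    # Assumed approximation of the characters allowed in a Matrix localpart
    # (a-z, 0-9, ., _, =, -, /); the real DOT_REPLACE_PATTERN may differ.
    DOT_REPLACE_PATTERN = re.compile(r"[^a-z0-9._=\-/]")


    def dot_replace_for_mxid(username: str) -> str:
        """Replace any characters which are not allowed in Matrix IDs with a dot."""
        username = username.lower()
        return DOT_REPLACE_PATTERN.sub(".", username)


    def hexencode_for_mxid(username: str) -> str:
        """Toy stand-in for map_username_to_mxid_localpart; not Synapse's scheme."""
        return username.encode("utf-8").hex()


    MXID_MAPPER_MAP = {
        "hexencode": hexencode_for_mxid,
        "dotreplace": dot_replace_for_mxid,
    }  # type: Dict[str, Callable[[str], str]]


    if __name__ == "__main__":
        print(dot_replace_for_mxid("Jane Doe"))  # -> jane.doe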
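
The search.py change converts get_old_rooms_from_upgraded_room, which walks the chain of m.room.predecessor pointers so that a search can also cover rooms from before an upgrade. A self-contained sketch of that traversal against an assumed store whose get_room_predecessor either returns a dict with a room_id key, returns None, or raises NotFoundError:

    from typing import Dict, List, Optional


    class NotFoundError(Exception):
        """Stand-in for synapse.api.errors.NotFoundError."""


    class FakeStore:
        """Maps each room ID to its m.room.predecessor content (or None)."""

        def __init__(self, predecessors: Dict[str, Optional[dict]]):
            self._predecessors = predecessors

        async def get_room_predecessor(self, room_id: str) -> Optional[dict]:
            if room_id not in self._predecessors:
                raise NotFoundError("Unknown room %s" % (room_id,))
            return self._predecessors[room_id]


    async def get_old_rooms_from_upgraded_room(store: FakeStore, room_id: str) -> List[str]:
        historical_room_ids = []
        # The starting room is assumed to be known, as in the handler above.
        predecessor = await store.get_room_predecessor(room_id)
        while predecessor:
            predecessor_room_id = predecessor["room_id"]
            historical_room_ids.append(predecessor_room_id)
            try:
                predecessor = await store.get_room_predecessor(predecessor_room_id)
            except NotFoundError:
                # We don't know about the next room in the chain; stop walking.
                break
        return historical_room_ids


    if __name__ == "__main__":
        import asyncio

        store = FakeStore(
            {"!v3:x": {"room_id": "!v2:x"}, "!v2:x": {"room_id": "!v1:x"}, "!v1:x": None}
        )
        print(asyncio.run(get_old_rooms_from_upgraded_room(store, "!v3:x")))
        # -> ['!v2:x', '!v1:x']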