From ea0a3aaf0a8b1d05a6acfd94fad6effe2deb55de Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Thu, 4 Mar 2021 10:29:43 +0000
Subject: Fix changelog

---
 CHANGES.md | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/CHANGES.md b/CHANGES.md
index cd830e5178..15d2894bc6 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,6 +1,9 @@
 Synapse 1.29.0rc1 (2021-03-04)
 ==============================
 
+Note that synapse now expects an `X-Forwarded-Proto` header when used with a reverse proxy. Please see [UPGRADE.rst](UPGRADE.rst#upgrading-to-v1290) for more details on this change.
+
+
 Features
 --------
 
@@ -52,12 +55,6 @@ Internal Changes
 - Bump the versions of mypy and mypy-zope used for static type checking. ([\#9529](https://github.com/matrix-org/synapse/issues/9529))
 
-Synapse 1.xx.0
-==============
-
-Note that synapse now expects an `X-Forwarded-Proto` header when used with a reverse proxy. Please see [UPGRADE.rst](UPGRADE.rst#upgrading-to-v1290) for more details on this change.
-
-
 Synapse 1.28.0 (2021-02-25)
 ===========================
-- 
cgit 1.5.1


From a6333b8d42462150eac51d33460375945b4586af Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Thu, 4 Mar 2021 10:32:44 +0000
Subject: Fix link in UPGRADES

---
 UPGRADE.rst | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/UPGRADE.rst b/UPGRADE.rst
index e852b806c2..031e02bda9 100644
--- a/UPGRADE.rst
+++ b/UPGRADE.rst
@@ -98,9 +98,9 @@ will log a warning on each received request.
 
 To avoid the warning, administrators using a reverse proxy should ensure that
 the reverse proxy sets `X-Forwarded-Proto` header to `https` or `http` to
-indicate the protocol used by the client. See the [reverse proxy
-documentation](docs/reverse_proxy.md), where the example configurations have
-been updated to show how to set this header.
+indicate the protocol used by the client. See the `reverse proxy documentation
+<docs/reverse_proxy.md>`_, where the example configurations have been updated to
+show how to set this header.
 
 (Users of `Caddy <https://caddyserver.com/>`_ are unaffected, since we believe it
 sets `X-Forwarded-Proto` by default.)
-- 
cgit 1.5.1


From 7eb6e39a8fe9d42a411cefd905cf2caa29896923 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com>
Date: Thu, 4 Mar 2021 14:44:22 +0000
Subject: Record the SSO Auth Provider in the login token (#9510)

This great big stack of commits is a whole load of hoop-jumping to make it
easier to store additional values in login tokens, and then to actually store
the SSO Identity Provider in the login token.

(Making use of that data will follow in a subsequent PR.)
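As background for the diff below: a minimal sketch of the caveat layout this
change produces, using pymacaroons directly rather than Synapse's actual
MacaroonGenerator. The location, secret key, user id and expiry here are
placeholder values, not anything from the real implementation.

```
import pymacaroons

# Sketch only: Synapse mints these via MacaroonGenerator (see the diff below).
macaroon = pymacaroons.Macaroon(
    location="example.com",       # placeholder server_name
    identifier="key",
    key="macaroon_secret_key",    # placeholder shared secret
)
macaroon.add_first_party_caveat("gen = 1")
macaroon.add_first_party_caveat("user_id = @alice:example.com")
macaroon.add_first_party_caveat("type = login")
macaroon.add_first_party_caveat("time < 1614854400000")  # expiry, unix ms
# the new caveat introduced by this change:
macaroon.add_first_party_caveat("auth_provider_id = oidc")
login_token = macaroon.serialize()
```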
--- changelog.d/9510.feature | 1 + mypy.ini | 1 + synapse/api/auth.py | 41 ++++-------------- synapse/handlers/auth.py | 68 +++++++++++++++++++++++++----- synapse/handlers/oidc_handler.py | 65 +++++++---------------------- synapse/handlers/sso.py | 2 + synapse/module_api/__init__.py | 31 ++++++++++++-- synapse/rest/client/v1/login.py | 6 +-- synapse/util/macaroons.py | 89 ++++++++++++++++++++++++++++++++++++++++ tests/handlers/test_auth.py | 49 +++++++++++++--------- tests/handlers/test_cas.py | 10 ++--- tests/handlers/test_oidc.py | 36 ++++++++-------- tests/handlers/test_saml.py | 10 ++--- 13 files changed, 258 insertions(+), 151 deletions(-) create mode 100644 changelog.d/9510.feature create mode 100644 synapse/util/macaroons.py diff --git a/changelog.d/9510.feature b/changelog.d/9510.feature new file mode 100644 index 0000000000..5214b50d41 --- /dev/null +++ b/changelog.d/9510.feature @@ -0,0 +1 @@ +Add prometheus metrics for number of users successfully registering and logging in. diff --git a/mypy.ini b/mypy.ini index 64ed45dac2..f31cd432e6 100644 --- a/mypy.ini +++ b/mypy.ini @@ -69,6 +69,7 @@ files = synapse/util/async_helpers.py, synapse/util/caches, synapse/util/metrics.py, + synapse/util/macaroons.py, synapse/util/stringutils.py, tests/replication, tests/test_utils, diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 89e62b0e36..968cf6f174 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -39,6 +39,7 @@ from synapse.logging import opentracing as opentracing from synapse.storage.databases.main.registration import TokenLookupResult from synapse.types import StateMap, UserID from synapse.util.caches.lrucache import LruCache +from synapse.util.macaroons import get_value_from_macaroon, satisfy_expiry from synapse.util.metrics import Measure logger = logging.getLogger(__name__) @@ -408,7 +409,7 @@ class Auth: raise _InvalidMacaroonException() try: - user_id = self.get_user_id_from_macaroon(macaroon) + user_id = get_value_from_macaroon(macaroon, "user_id") guest = False for caveat in macaroon.caveats: @@ -416,7 +417,12 @@ class Auth: guest = True self.validate_macaroon(macaroon, rights, user_id=user_id) - except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError): + except ( + pymacaroons.exceptions.MacaroonException, + KeyError, + TypeError, + ValueError, + ): raise InvalidClientTokenError("Invalid macaroon passed.") if rights == "access": @@ -424,27 +430,6 @@ class Auth: return user_id, guest - def get_user_id_from_macaroon(self, macaroon): - """Retrieve the user_id given by the caveats on the macaroon. - - Does *not* validate the macaroon. - - Args: - macaroon (pymacaroons.Macaroon): The macaroon to validate - - Returns: - (str) user id - - Raises: - InvalidClientCredentialsError if there is no user_id caveat in the - macaroon - """ - user_prefix = "user_id = " - for caveat in macaroon.caveats: - if caveat.caveat_id.startswith(user_prefix): - return caveat.caveat_id[len(user_prefix) :] - raise InvalidClientTokenError("No user caveat in macaroon") - def validate_macaroon(self, macaroon, type_string, user_id): """ validate that a Macaroon is understood by and was signed by this server. 
@@ -465,21 +450,13 @@ class Auth: v.satisfy_exact("type = " + type_string) v.satisfy_exact("user_id = %s" % user_id) v.satisfy_exact("guest = true") - v.satisfy_general(self._verify_expiry) + satisfy_expiry(v, self.clock.time_msec) # access_tokens include a nonce for uniqueness: any value is acceptable v.satisfy_general(lambda c: c.startswith("nonce = ")) v.verify(macaroon, self._macaroon_secret_key) - def _verify_expiry(self, caveat): - prefix = "time < " - if not caveat.startswith(prefix): - return False - expiry = int(caveat[len(prefix) :]) - now = self.hs.get_clock().time_msec() - return now < expiry - def get_appservice_by_req(self, request: SynapseRequest) -> ApplicationService: token = self.get_access_token_from_request(request) service = self.store.get_app_service_by_token(token) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 3978e41518..bec0c615d4 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -65,6 +65,7 @@ from synapse.storage.roommember import ProfileInfo from synapse.types import JsonDict, Requester, UserID from synapse.util import stringutils as stringutils from synapse.util.async_helpers import maybe_awaitable +from synapse.util.macaroons import get_value_from_macaroon, satisfy_expiry from synapse.util.msisdn import phone_number_to_msisdn from synapse.util.threepids import canonicalise_email @@ -170,6 +171,16 @@ class SsoLoginExtraAttributes: extra_attributes = attr.ib(type=JsonDict) +@attr.s(slots=True, frozen=True) +class LoginTokenAttributes: + """Data we store in a short-term login token""" + + user_id = attr.ib(type=str) + + # the SSO Identity Provider that the user authenticated with, to get this token + auth_provider_id = attr.ib(type=str) + + class AuthHandler(BaseHandler): SESSION_EXPIRE_MS = 48 * 60 * 60 * 1000 @@ -1164,18 +1175,16 @@ class AuthHandler(BaseHandler): return None return user_id - async def validate_short_term_login_token_and_get_user_id(self, login_token: str): - auth_api = self.hs.get_auth() - user_id = None + async def validate_short_term_login_token( + self, login_token: str + ) -> LoginTokenAttributes: try: - macaroon = pymacaroons.Macaroon.deserialize(login_token) - user_id = auth_api.get_user_id_from_macaroon(macaroon) - auth_api.validate_macaroon(macaroon, "login", user_id) + res = self.macaroon_gen.verify_short_term_login_token(login_token) except Exception: raise AuthError(403, "Invalid token", errcode=Codes.FORBIDDEN) - await self.auth.check_auth_blocking(user_id) - return user_id + await self.auth.check_auth_blocking(res.user_id) + return res async def delete_access_token(self, access_token: str): """Invalidate a single access token @@ -1397,6 +1406,7 @@ class AuthHandler(BaseHandler): async def complete_sso_login( self, registered_user_id: str, + auth_provider_id: str, request: Request, client_redirect_url: str, extra_attributes: Optional[JsonDict] = None, @@ -1406,6 +1416,9 @@ class AuthHandler(BaseHandler): Args: registered_user_id: The registered user ID to complete SSO login for. + auth_provider_id: The id of the SSO Identity provider that was used for + login. This will be stored in the login token for future tracking in + prometheus metrics. request: The request to complete. client_redirect_url: The URL to which to redirect the user at the end of the process. 
@@ -1427,6 +1440,7 @@ class AuthHandler(BaseHandler): self._complete_sso_login( registered_user_id, + auth_provider_id, request, client_redirect_url, extra_attributes, @@ -1437,6 +1451,7 @@ class AuthHandler(BaseHandler): def _complete_sso_login( self, registered_user_id: str, + auth_provider_id: str, request: Request, client_redirect_url: str, extra_attributes: Optional[JsonDict] = None, @@ -1463,7 +1478,7 @@ class AuthHandler(BaseHandler): # Create a login token login_token = self.macaroon_gen.generate_short_term_login_token( - registered_user_id + registered_user_id, auth_provider_id=auth_provider_id ) # Append the login token to the original redirect URL (i.e. with its query @@ -1569,15 +1584,48 @@ class MacaroonGenerator: return macaroon.serialize() def generate_short_term_login_token( - self, user_id: str, duration_in_ms: int = (2 * 60 * 1000) + self, + user_id: str, + auth_provider_id: str, + duration_in_ms: int = (2 * 60 * 1000), ) -> str: macaroon = self._generate_base_macaroon(user_id) macaroon.add_first_party_caveat("type = login") now = self.hs.get_clock().time_msec() expiry = now + duration_in_ms macaroon.add_first_party_caveat("time < %d" % (expiry,)) + macaroon.add_first_party_caveat("auth_provider_id = %s" % (auth_provider_id,)) return macaroon.serialize() + def verify_short_term_login_token(self, token: str) -> LoginTokenAttributes: + """Verify a short-term-login macaroon + + Checks that the given token is a valid, unexpired short-term-login token + minted by this server. + + Args: + token: the login token to verify + + Returns: + the user_id that this token is valid for + + Raises: + MacaroonVerificationFailedException if the verification failed + """ + macaroon = pymacaroons.Macaroon.deserialize(token) + user_id = get_value_from_macaroon(macaroon, "user_id") + auth_provider_id = get_value_from_macaroon(macaroon, "auth_provider_id") + + v = pymacaroons.Verifier() + v.satisfy_exact("gen = 1") + v.satisfy_exact("type = login") + v.satisfy_general(lambda c: c.startswith("user_id = ")) + v.satisfy_general(lambda c: c.startswith("auth_provider_id = ")) + satisfy_expiry(v, self.hs.get_clock().time_msec) + v.verify(macaroon, self.hs.config.key.macaroon_secret_key) + + return LoginTokenAttributes(user_id=user_id, auth_provider_id=auth_provider_id) + def generate_delete_pusher_token(self, user_id: str) -> str: macaroon = self._generate_base_macaroon(user_id) macaroon.add_first_party_caveat("type = delete_pusher") diff --git a/synapse/handlers/oidc_handler.py b/synapse/handlers/oidc_handler.py index 07db1e31e4..b4a74390cc 100644 --- a/synapse/handlers/oidc_handler.py +++ b/synapse/handlers/oidc_handler.py @@ -42,6 +42,7 @@ from synapse.logging.context import make_deferred_yieldable from synapse.types import JsonDict, UserID, map_username_to_mxid_localpart from synapse.util import json_decoder from synapse.util.caches.cached_call import RetryOnExceptionCachedCall +from synapse.util.macaroons import get_value_from_macaroon, satisfy_expiry if TYPE_CHECKING: from synapse.server import HomeServer @@ -211,7 +212,7 @@ class OidcHandler: session_data = self._token_generator.verify_oidc_session_token( session, state ) - except (MacaroonDeserializationException, ValueError) as e: + except (MacaroonDeserializationException, KeyError) as e: logger.exception("Invalid session for OIDC callback") self._sso_handler.render_error(request, "invalid_session", str(e)) return @@ -745,7 +746,7 @@ class OidcProvider: idp_id=self.idp_id, nonce=nonce, client_redirect_url=client_redirect_url.decode(), - 
ui_auth_session_id=ui_auth_session_id, + ui_auth_session_id=ui_auth_session_id or "", ), ) @@ -1020,10 +1021,9 @@ class OidcSessionTokenGenerator: macaroon.add_first_party_caveat( "client_redirect_url = %s" % (session_data.client_redirect_url,) ) - if session_data.ui_auth_session_id: - macaroon.add_first_party_caveat( - "ui_auth_session_id = %s" % (session_data.ui_auth_session_id,) - ) + macaroon.add_first_party_caveat( + "ui_auth_session_id = %s" % (session_data.ui_auth_session_id,) + ) now = self._clock.time_msec() expiry = now + duration_in_ms macaroon.add_first_party_caveat("time < %d" % (expiry,)) @@ -1046,7 +1046,7 @@ class OidcSessionTokenGenerator: The data extracted from the session cookie Raises: - ValueError if an expected caveat is missing from the macaroon. + KeyError if an expected caveat is missing from the macaroon. """ macaroon = pymacaroons.Macaroon.deserialize(session) @@ -1057,26 +1057,16 @@ class OidcSessionTokenGenerator: v.satisfy_general(lambda c: c.startswith("nonce = ")) v.satisfy_general(lambda c: c.startswith("idp_id = ")) v.satisfy_general(lambda c: c.startswith("client_redirect_url = ")) - # Sometimes there's a UI auth session ID, it seems to be OK to attempt - # to always satisfy this. v.satisfy_general(lambda c: c.startswith("ui_auth_session_id = ")) - v.satisfy_general(self._verify_expiry) + satisfy_expiry(v, self._clock.time_msec) v.verify(macaroon, self._macaroon_secret_key) # Extract the session data from the token. - nonce = self._get_value_from_macaroon(macaroon, "nonce") - idp_id = self._get_value_from_macaroon(macaroon, "idp_id") - client_redirect_url = self._get_value_from_macaroon( - macaroon, "client_redirect_url" - ) - try: - ui_auth_session_id = self._get_value_from_macaroon( - macaroon, "ui_auth_session_id" - ) # type: Optional[str] - except ValueError: - ui_auth_session_id = None - + nonce = get_value_from_macaroon(macaroon, "nonce") + idp_id = get_value_from_macaroon(macaroon, "idp_id") + client_redirect_url = get_value_from_macaroon(macaroon, "client_redirect_url") + ui_auth_session_id = get_value_from_macaroon(macaroon, "ui_auth_session_id") return OidcSessionData( nonce=nonce, idp_id=idp_id, @@ -1084,33 +1074,6 @@ class OidcSessionTokenGenerator: ui_auth_session_id=ui_auth_session_id, ) - def _get_value_from_macaroon(self, macaroon: pymacaroons.Macaroon, key: str) -> str: - """Extracts a caveat value from a macaroon token. - - Args: - macaroon: the token - key: the key of the caveat to extract - - Returns: - The extracted value - - Raises: - ValueError: if the caveat was not in the macaroon - """ - prefix = key + " = " - for caveat in macaroon.caveats: - if caveat.caveat_id.startswith(prefix): - return caveat.caveat_id[len(prefix) :] - raise ValueError("No %s caveat in macaroon" % (key,)) - - def _verify_expiry(self, caveat: str) -> bool: - prefix = "time < " - if not caveat.startswith(prefix): - return False - expiry = int(caveat[len(prefix) :]) - now = self._clock.time_msec() - return now < expiry - @attr.s(frozen=True, slots=True) class OidcSessionData: @@ -1125,8 +1088,8 @@ class OidcSessionData: # The URL the client gave when it initiated the flow. 
("" if this is a UI Auth) client_redirect_url = attr.ib(type=str) - # The session ID of the ongoing UI Auth (None if this is a login) - ui_auth_session_id = attr.ib(type=Optional[str], default=None) + # The session ID of the ongoing UI Auth ("" if this is a login) + ui_auth_session_id = attr.ib(type=str) UserAttributeDict = TypedDict( diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py index 80e28bdcbe..8a22dab54a 100644 --- a/synapse/handlers/sso.py +++ b/synapse/handlers/sso.py @@ -456,6 +456,7 @@ class SsoHandler: await self._auth_handler.complete_sso_login( user_id, + auth_provider_id, request, client_redirect_url, extra_login_attributes, @@ -886,6 +887,7 @@ class SsoHandler: await self._auth_handler.complete_sso_login( user_id, + session.auth_provider_id, request, session.client_redirect_url, session.extra_login_attributes, diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index db2d400b7e..781e02fbbb 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -203,11 +203,26 @@ class ModuleApi: ) def generate_short_term_login_token( - self, user_id: str, duration_in_ms: int = (2 * 60 * 1000) + self, + user_id: str, + duration_in_ms: int = (2 * 60 * 1000), + auth_provider_id: str = "", ) -> str: - """Generate a login token suitable for m.login.token authentication""" + """Generate a login token suitable for m.login.token authentication + + Args: + user_id: gives the ID of the user that the token is for + + duration_in_ms: the time that the token will be valid for + + auth_provider_id: the ID of the SSO IdP that the user used to authenticate + to get this token, if any. This is encoded in the token so that + /login can report stats on number of successful logins by IdP. + """ return self._hs.get_macaroon_generator().generate_short_term_login_token( - user_id, duration_in_ms + user_id, + auth_provider_id, + duration_in_ms, ) @defer.inlineCallbacks @@ -276,6 +291,7 @@ class ModuleApi: """ self._auth_handler._complete_sso_login( registered_user_id, + "", request, client_redirect_url, ) @@ -286,6 +302,7 @@ class ModuleApi: request: SynapseRequest, client_redirect_url: str, new_user: bool = False, + auth_provider_id: str = "", ): """Complete a SSO login by redirecting the user to a page to confirm whether they want their access token sent to `client_redirect_url`, or redirect them to that @@ -299,9 +316,15 @@ class ModuleApi: redirect them directly if whitelisted). new_user: set to true to use wording for the consent appropriate to a user who has just registered. + auth_provider_id: the ID of the SSO IdP which was used to log in. This + is used to track counts of sucessful logins by IdP. 
""" await self._auth_handler.complete_sso_login( - registered_user_id, request, client_redirect_url, new_user=new_user + registered_user_id, + auth_provider_id, + request, + client_redirect_url, + new_user=new_user, ) @defer.inlineCallbacks diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 925edfc402..1ec3a47ffb 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -283,12 +283,10 @@ class LoginRestServlet(RestServlet): """ token = login_submission["token"] auth_handler = self.auth_handler - user_id = await auth_handler.validate_short_term_login_token_and_get_user_id( - token - ) + res = await auth_handler.validate_short_term_login_token(token) return await self._complete_login( - user_id, login_submission, self.auth_handler._sso_login_callback + res.user_id, login_submission, self.auth_handler._sso_login_callback ) async def _do_jwt_login(self, login_submission: JsonDict) -> Dict[str, str]: diff --git a/synapse/util/macaroons.py b/synapse/util/macaroons.py new file mode 100644 index 0000000000..12cdd53327 --- /dev/null +++ b/synapse/util/macaroons.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Quentin Gliech +# Copyright 2021 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities for manipulating macaroons""" + +from typing import Callable, Optional + +import pymacaroons +from pymacaroons.exceptions import MacaroonVerificationFailedException + + +def get_value_from_macaroon(macaroon: pymacaroons.Macaroon, key: str) -> str: + """Extracts a caveat value from a macaroon token. + + Checks that there is exactly one caveat of the form "key = " in the macaroon, + and returns the extracted value. + + Args: + macaroon: the token + key: the key of the caveat to extract + + Returns: + The extracted value + + Raises: + MacaroonVerificationFailedException: if there are conflicting values for the + caveat in the macaroon, or if the caveat was not found in the macaroon. + """ + prefix = key + " = " + result = None # type: Optional[str] + for caveat in macaroon.caveats: + if not caveat.caveat_id.startswith(prefix): + continue + + val = caveat.caveat_id[len(prefix) :] + + if result is None: + # first time we found this caveat: record the value + result = val + elif val != result: + # on subsequent occurrences, raise if the value is different. + raise MacaroonVerificationFailedException( + "Conflicting values for caveat " + key + ) + + if result is not None: + return result + + # If the caveat is not there, we raise a MacaroonVerificationFailedException. + # Note that it is insecure to generate a macaroon without all the caveats you + # might need (because there is nothing stopping people from adding extra caveats), + # so if the caveat isn't there, something odd must be going on. 
+ raise MacaroonVerificationFailedException("No %s caveat in macaroon" % (key,)) + + +def satisfy_expiry(v: pymacaroons.Verifier, get_time_ms: Callable[[], int]) -> None: + """Make a macaroon verifier which accepts 'time' caveats + + Builds a caveat verifier which will accept unexpired 'time' caveats, and adds it to + the given macaroon verifier. + + Args: + v: the macaroon verifier + get_time_ms: a callable which will return the timestamp after which the caveat + should be considered expired. Normally the current time. + """ + + def verify_expiry_caveat(caveat: str): + time_msec = get_time_ms() + prefix = "time < " + if not caveat.startswith(prefix): + return False + expiry = int(caveat[len(prefix) :]) + return time_msec < expiry + + v.satisfy_general(verify_expiry_caveat) diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index 0e42013bb9..c9f889b511 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -68,38 +68,45 @@ class AuthTestCase(unittest.HomeserverTestCase): v.verify(macaroon, self.hs.config.macaroon_secret_key) def test_short_term_login_token_gives_user_id(self): - token = self.macaroon_generator.generate_short_term_login_token("a_user", 5000) - user_id = self.get_success( - self.auth_handler.validate_short_term_login_token_and_get_user_id(token) + token = self.macaroon_generator.generate_short_term_login_token( + "a_user", "", 5000 ) - self.assertEqual("a_user", user_id) + res = self.get_success(self.auth_handler.validate_short_term_login_token(token)) + self.assertEqual("a_user", res.user_id) + self.assertEqual("", res.auth_provider_id) # when we advance the clock, the token should be rejected self.reactor.advance(6) self.get_failure( - self.auth_handler.validate_short_term_login_token_and_get_user_id(token), + self.auth_handler.validate_short_term_login_token(token), AuthError, ) + def test_short_term_login_token_gives_auth_provider(self): + token = self.macaroon_generator.generate_short_term_login_token( + "a_user", auth_provider_id="my_idp" + ) + res = self.get_success(self.auth_handler.validate_short_term_login_token(token)) + self.assertEqual("a_user", res.user_id) + self.assertEqual("my_idp", res.auth_provider_id) + def test_short_term_login_token_cannot_replace_user_id(self): - token = self.macaroon_generator.generate_short_term_login_token("a_user", 5000) + token = self.macaroon_generator.generate_short_term_login_token( + "a_user", "", 5000 + ) macaroon = pymacaroons.Macaroon.deserialize(token) - user_id = self.get_success( - self.auth_handler.validate_short_term_login_token_and_get_user_id( - macaroon.serialize() - ) + res = self.get_success( + self.auth_handler.validate_short_term_login_token(macaroon.serialize()) ) - self.assertEqual("a_user", user_id) + self.assertEqual("a_user", res.user_id) # add another "user_id" caveat, which might allow us to override the # user_id. 
macaroon.add_first_party_caveat("user_id = b_user") self.get_failure( - self.auth_handler.validate_short_term_login_token_and_get_user_id( - macaroon.serialize() - ), + self.auth_handler.validate_short_term_login_token(macaroon.serialize()), AuthError, ) @@ -113,7 +120,7 @@ class AuthTestCase(unittest.HomeserverTestCase): ) self.get_success( - self.auth_handler.validate_short_term_login_token_and_get_user_id( + self.auth_handler.validate_short_term_login_token( self._get_macaroon().serialize() ) ) @@ -135,7 +142,7 @@ class AuthTestCase(unittest.HomeserverTestCase): return_value=make_awaitable(self.large_number_of_users) ) self.get_failure( - self.auth_handler.validate_short_term_login_token_and_get_user_id( + self.auth_handler.validate_short_term_login_token( self._get_macaroon().serialize() ), ResourceLimitError, @@ -159,7 +166,7 @@ class AuthTestCase(unittest.HomeserverTestCase): ResourceLimitError, ) self.get_failure( - self.auth_handler.validate_short_term_login_token_and_get_user_id( + self.auth_handler.validate_short_term_login_token( self._get_macaroon().serialize() ), ResourceLimitError, @@ -175,7 +182,7 @@ class AuthTestCase(unittest.HomeserverTestCase): ) ) self.get_success( - self.auth_handler.validate_short_term_login_token_and_get_user_id( + self.auth_handler.validate_short_term_login_token( self._get_macaroon().serialize() ) ) @@ -197,11 +204,13 @@ class AuthTestCase(unittest.HomeserverTestCase): return_value=make_awaitable(self.small_number_of_users) ) self.get_success( - self.auth_handler.validate_short_term_login_token_and_get_user_id( + self.auth_handler.validate_short_term_login_token( self._get_macaroon().serialize() ) ) def _get_macaroon(self): - token = self.macaroon_generator.generate_short_term_login_token("user_a", 5000) + token = self.macaroon_generator.generate_short_term_login_token( + "user_a", "", 5000 + ) return pymacaroons.Macaroon.deserialize(token) diff --git a/tests/handlers/test_cas.py b/tests/handlers/test_cas.py index 6f992291b8..7975af243c 100644 --- a/tests/handlers/test_cas.py +++ b/tests/handlers/test_cas.py @@ -66,7 +66,7 @@ class CasHandlerTestCase(HomeserverTestCase): # check that the auth handler got called as expected auth_handler.complete_sso_login.assert_called_once_with( - "@test_user:test", request, "redirect_uri", None, new_user=True + "@test_user:test", "cas", request, "redirect_uri", None, new_user=True ) def test_map_cas_user_to_existing_user(self): @@ -89,7 +89,7 @@ class CasHandlerTestCase(HomeserverTestCase): # check that the auth handler got called as expected auth_handler.complete_sso_login.assert_called_once_with( - "@test_user:test", request, "redirect_uri", None, new_user=False + "@test_user:test", "cas", request, "redirect_uri", None, new_user=False ) # Subsequent calls should map to the same mxid. 
@@ -98,7 +98,7 @@ class CasHandlerTestCase(HomeserverTestCase): self.handler._handle_cas_response(request, cas_response, "redirect_uri", "") ) auth_handler.complete_sso_login.assert_called_once_with( - "@test_user:test", request, "redirect_uri", None, new_user=False + "@test_user:test", "cas", request, "redirect_uri", None, new_user=False ) def test_map_cas_user_to_invalid_localpart(self): @@ -116,7 +116,7 @@ class CasHandlerTestCase(HomeserverTestCase): # check that the auth handler got called as expected auth_handler.complete_sso_login.assert_called_once_with( - "@f=c3=b6=c3=b6:test", request, "redirect_uri", None, new_user=True + "@f=c3=b6=c3=b6:test", "cas", request, "redirect_uri", None, new_user=True ) @override_config( @@ -160,7 +160,7 @@ class CasHandlerTestCase(HomeserverTestCase): # check that the auth handler got called as expected auth_handler.complete_sso_login.assert_called_once_with( - "@test_user:test", request, "redirect_uri", None, new_user=True + "@test_user:test", "cas", request, "redirect_uri", None, new_user=True ) diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py index cf1de28fa9..02d4b2de0d 100644 --- a/tests/handlers/test_oidc.py +++ b/tests/handlers/test_oidc.py @@ -13,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. import json -from typing import Optional from urllib.parse import parse_qs, urlparse from mock import ANY, Mock, patch @@ -23,6 +22,7 @@ import pymacaroons from synapse.handlers.sso import MappingException from synapse.server import HomeServer from synapse.types import UserID +from synapse.util.macaroons import get_value_from_macaroon from tests.test_utils import FakeResponse, get_awaitable_result, simple_async_mock from tests.unittest import HomeserverTestCase, override_config @@ -360,15 +360,9 @@ class OidcHandlerTestCase(HomeserverTestCase): self.assertEqual(name, b"oidc_session") macaroon = pymacaroons.Macaroon.deserialize(cookie) - state = self.handler._token_generator._get_value_from_macaroon( - macaroon, "state" - ) - nonce = self.handler._token_generator._get_value_from_macaroon( - macaroon, "nonce" - ) - redirect = self.handler._token_generator._get_value_from_macaroon( - macaroon, "client_redirect_url" - ) + state = get_value_from_macaroon(macaroon, "state") + nonce = get_value_from_macaroon(macaroon, "nonce") + redirect = get_value_from_macaroon(macaroon, "client_redirect_url") self.assertEqual(params["state"], [state]) self.assertEqual(params["nonce"], [nonce]) @@ -434,7 +428,7 @@ class OidcHandlerTestCase(HomeserverTestCase): self.get_success(self.handler.handle_oidc_callback(request)) auth_handler.complete_sso_login.assert_called_once_with( - expected_user_id, request, client_redirect_url, None, new_user=True + expected_user_id, "oidc", request, client_redirect_url, None, new_user=True ) self.provider._exchange_code.assert_called_once_with(code) self.provider._parse_id_token.assert_called_once_with(token, nonce=nonce) @@ -465,7 +459,7 @@ class OidcHandlerTestCase(HomeserverTestCase): self.get_success(self.handler.handle_oidc_callback(request)) auth_handler.complete_sso_login.assert_called_once_with( - expected_user_id, request, client_redirect_url, None, new_user=False + expected_user_id, "oidc", request, client_redirect_url, None, new_user=False ) self.provider._exchange_code.assert_called_once_with(code) self.provider._parse_id_token.assert_not_called() @@ -651,6 +645,7 @@ class OidcHandlerTestCase(HomeserverTestCase): 
auth_handler.complete_sso_login.assert_called_once_with( "@foo:test", + "oidc", request, client_redirect_url, {"phone": "1234567"}, @@ -668,7 +663,7 @@ class OidcHandlerTestCase(HomeserverTestCase): } self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) auth_handler.complete_sso_login.assert_called_once_with( - "@test_user:test", ANY, ANY, None, new_user=True + "@test_user:test", "oidc", ANY, ANY, None, new_user=True ) auth_handler.complete_sso_login.reset_mock() @@ -679,7 +674,7 @@ class OidcHandlerTestCase(HomeserverTestCase): } self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) auth_handler.complete_sso_login.assert_called_once_with( - "@test_user_2:test", ANY, ANY, None, new_user=True + "@test_user_2:test", "oidc", ANY, ANY, None, new_user=True ) auth_handler.complete_sso_login.reset_mock() @@ -716,14 +711,14 @@ class OidcHandlerTestCase(HomeserverTestCase): } self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) auth_handler.complete_sso_login.assert_called_once_with( - user.to_string(), ANY, ANY, None, new_user=False + user.to_string(), "oidc", ANY, ANY, None, new_user=False ) auth_handler.complete_sso_login.reset_mock() # Subsequent calls should map to the same mxid. self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) auth_handler.complete_sso_login.assert_called_once_with( - user.to_string(), ANY, ANY, None, new_user=False + user.to_string(), "oidc", ANY, ANY, None, new_user=False ) auth_handler.complete_sso_login.reset_mock() @@ -738,7 +733,7 @@ class OidcHandlerTestCase(HomeserverTestCase): } self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) auth_handler.complete_sso_login.assert_called_once_with( - user.to_string(), ANY, ANY, None, new_user=False + user.to_string(), "oidc", ANY, ANY, None, new_user=False ) auth_handler.complete_sso_login.reset_mock() @@ -774,7 +769,7 @@ class OidcHandlerTestCase(HomeserverTestCase): self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) auth_handler.complete_sso_login.assert_called_once_with( - "@TEST_USER_2:test", ANY, ANY, None, new_user=False + "@TEST_USER_2:test", "oidc", ANY, ANY, None, new_user=False ) def test_map_userinfo_to_invalid_localpart(self): @@ -810,7 +805,7 @@ class OidcHandlerTestCase(HomeserverTestCase): # test_user is already taken, so test_user1 gets registered instead. 
auth_handler.complete_sso_login.assert_called_once_with( - "@test_user1:test", ANY, ANY, None, new_user=True + "@test_user1:test", "oidc", ANY, ANY, None, new_user=True ) auth_handler.complete_sso_login.reset_mock() @@ -866,7 +861,7 @@ class OidcHandlerTestCase(HomeserverTestCase): state: str, nonce: str, client_redirect_url: str, - ui_auth_session_id: Optional[str] = None, + ui_auth_session_id: str = "", ) -> str: from synapse.handlers.oidc_handler import OidcSessionData @@ -909,6 +904,7 @@ async def _make_callback_with_userinfo( idp_id="oidc", nonce="nonce", client_redirect_url=client_redirect_url, + ui_auth_session_id="", ), ) request = _build_callback_request("code", state, session) diff --git a/tests/handlers/test_saml.py b/tests/handlers/test_saml.py index 029af2853e..30efd43b40 100644 --- a/tests/handlers/test_saml.py +++ b/tests/handlers/test_saml.py @@ -131,7 +131,7 @@ class SamlHandlerTestCase(HomeserverTestCase): # check that the auth handler got called as expected auth_handler.complete_sso_login.assert_called_once_with( - "@test_user:test", request, "redirect_uri", None, new_user=True + "@test_user:test", "saml", request, "redirect_uri", None, new_user=True ) @override_config({"saml2_config": {"grandfathered_mxid_source_attribute": "mxid"}}) @@ -157,7 +157,7 @@ class SamlHandlerTestCase(HomeserverTestCase): # check that the auth handler got called as expected auth_handler.complete_sso_login.assert_called_once_with( - "@test_user:test", request, "", None, new_user=False + "@test_user:test", "saml", request, "", None, new_user=False ) # Subsequent calls should map to the same mxid. @@ -166,7 +166,7 @@ class SamlHandlerTestCase(HomeserverTestCase): self.handler._handle_authn_response(request, saml_response, "") ) auth_handler.complete_sso_login.assert_called_once_with( - "@test_user:test", request, "", None, new_user=False + "@test_user:test", "saml", request, "", None, new_user=False ) def test_map_saml_response_to_invalid_localpart(self): @@ -214,7 +214,7 @@ class SamlHandlerTestCase(HomeserverTestCase): # test_user is already taken, so test_user1 gets registered instead. auth_handler.complete_sso_login.assert_called_once_with( - "@test_user1:test", request, "", None, new_user=True + "@test_user1:test", "saml", request, "", None, new_user=True ) auth_handler.complete_sso_login.reset_mock() @@ -310,7 +310,7 @@ class SamlHandlerTestCase(HomeserverTestCase): # check that the auth handler got called as expected auth_handler.complete_sso_login.assert_called_once_with( - "@test_user:test", request, "redirect_uri", None, new_user=True + "@test_user:test", "saml", request, "redirect_uri", None, new_user=True ) -- cgit 1.5.1 From df425c2c63d969778f725c9053dd93e57ee854e1 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 4 Mar 2021 16:39:27 +0000 Subject: Prometheus metrics for logins and registrations (#9511) Add prom metrics for number of users successfully registering and logging in, by SSO provider. --- changelog.d/9511.feature | 1 + synapse/handlers/register.py | 35 +++++++++++++++++++++++++++++++++-- synapse/handlers/sso.py | 1 + synapse/rest/client/v1/login.py | 10 ++++++++-- 4 files changed, 43 insertions(+), 4 deletions(-) create mode 100644 changelog.d/9511.feature diff --git a/changelog.d/9511.feature b/changelog.d/9511.feature new file mode 100644 index 0000000000..5214b50d41 --- /dev/null +++ b/changelog.d/9511.feature @@ -0,0 +1 @@ +Add prometheus metrics for number of users successfully registering and logging in. 
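For context before the register.py diff below, a minimal sketch (with a
hypothetical metric name) of the labelled prometheus_client Counter pattern
this commit introduces: each distinct combination of label values becomes its
own time series, so per-IdP counts come for free.

```
from prometheus_client import Counter

# Hypothetical example counter; the real ones are declared in the diff below.
example_login_counter = Counter(
    "example_user_logins_total",
    "Number of user logins (since restart)",
    ["guest", "auth_provider"],
)

# One time series per (guest, auth_provider) pair, e.g. after an OIDC login:
example_login_counter.labels(guest=False, auth_provider="oidc").inc()
```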
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 3cda89657e..b66f8756b8 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -18,6 +18,8 @@ import logging from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple +from prometheus_client import Counter + from synapse import types from synapse.api.constants import MAX_USERID_LENGTH, EventTypes, JoinRules, LoginType from synapse.api.errors import AuthError, Codes, ConsentNotGivenError, SynapseError @@ -41,6 +43,19 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) +registration_counter = Counter( + "synapse_user_registrations_total", + "Number of new users registered (since restart)", + ["guest", "shadow_banned", "auth_provider"], +) + +login_counter = Counter( + "synapse_user_logins_total", + "Number of user logins (since restart)", + ["guest", "auth_provider"], +) + + class RegistrationHandler(BaseHandler): def __init__(self, hs: "HomeServer"): super().__init__(hs) @@ -156,6 +171,7 @@ class RegistrationHandler(BaseHandler): bind_emails: Iterable[str] = [], by_admin: bool = False, user_agent_ips: Optional[List[Tuple[str, str]]] = None, + auth_provider_id: Optional[str] = None, ) -> str: """Registers a new client on the server. @@ -181,8 +197,10 @@ class RegistrationHandler(BaseHandler): admin api, otherwise False. user_agent_ips: Tuples of IP addresses and user-agents used during the registration process. + auth_provider_id: The SSO IdP the user used, if any (just used for the + prometheus metrics). Returns: - The registere user_id. + The registered user_id. Raises: SynapseError if there was a problem registering. """ @@ -280,6 +298,12 @@ class RegistrationHandler(BaseHandler): # if user id is taken, just generate another fail_count += 1 + registration_counter.labels( + guest=make_guest, + shadow_banned=shadow_banned, + auth_provider=(auth_provider_id or ""), + ).inc() + if not self.hs.config.user_consent_at_registration: if not self.hs.config.auto_join_rooms_for_guests and make_guest: logger.info( @@ -638,6 +662,7 @@ class RegistrationHandler(BaseHandler): initial_display_name: Optional[str], is_guest: bool = False, is_appservice_ghost: bool = False, + auth_provider_id: Optional[str] = None, ) -> Tuple[str, str]: """Register a device for a user and generate an access token. @@ -648,7 +673,8 @@ class RegistrationHandler(BaseHandler): device_id: The device ID to check, or None to generate a new one. initial_display_name: An optional display name for the device. is_guest: Whether this is a guest account - + auth_provider_id: The SSO IdP the user used, if any (just used for the + prometheus metrics). 
Returns: Tuple of device ID and access token """ @@ -687,6 +713,11 @@ class RegistrationHandler(BaseHandler): is_appservice_ghost=is_appservice_ghost, ) + login_counter.labels( + guest=is_guest, + auth_provider=(auth_provider_id or ""), + ).inc() + return (registered_device_id, access_token) async def post_registration_actions( diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py index 8a22dab54a..6ef459acff 100644 --- a/synapse/handlers/sso.py +++ b/synapse/handlers/sso.py @@ -606,6 +606,7 @@ class SsoHandler: default_display_name=attributes.display_name, bind_emails=attributes.emails, user_agent_ips=[(user_agent, ip_address)], + auth_provider_id=auth_provider_id, ) await self._store.record_user_external_id( diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 1ec3a47ffb..34bc1bd49b 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -219,6 +219,7 @@ class LoginRestServlet(RestServlet): callback: Optional[Callable[[Dict[str, str]], Awaitable[None]]] = None, create_non_existent_users: bool = False, ratelimit: bool = True, + auth_provider_id: Optional[str] = None, ) -> Dict[str, str]: """Called when we've successfully authed the user and now need to actually login them in (e.g. create devices). This gets called on @@ -234,6 +235,8 @@ class LoginRestServlet(RestServlet): create_non_existent_users: Whether to create the user if they don't exist. Defaults to False. ratelimit: Whether to ratelimit the login request. + auth_provider_id: The SSO IdP the user used, if any (just used for the + prometheus metrics). Returns: result: Dictionary of account information after successful login. @@ -256,7 +259,7 @@ class LoginRestServlet(RestServlet): device_id = login_submission.get("device_id") initial_display_name = login_submission.get("initial_device_display_name") device_id, access_token = await self.registration_handler.register_device( - user_id, device_id, initial_display_name + user_id, device_id, initial_display_name, auth_provider_id=auth_provider_id ) result = { @@ -286,7 +289,10 @@ class LoginRestServlet(RestServlet): res = await auth_handler.validate_short_term_login_token(token) return await self._complete_login( - res.user_id, login_submission, self.auth_handler._sso_login_callback + res.user_id, + login_submission, + self.auth_handler._sso_login_callback, + auth_provider_id=res.auth_provider_id, ) async def _do_jwt_login(self, login_submission: JsonDict) -> Dict[str, str]: -- cgit 1.5.1 From 8a4b3738f3dcf49f70c08204ad457559dcef4112 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 4 Mar 2021 16:40:18 +0000 Subject: Replace `last_*_pdu_age` metrics with timestamps (#9540) Following the advice at https://prometheus.io/docs/practices/instrumentation/#timestamps-not-time-since, it's preferable to export unix timestamps, not ages. There doesn't seem to be any particular naming convention for timestamp metrics. 
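To illustrate the recommendation, a minimal sketch (hypothetical metric name,
not the gauges added below) of exporting a unix timestamp and deriving the age
at query time instead of at export time:

```
from prometheus_client import Gauge

# Hypothetical example gauge; the real ones are declared in the diff below.
last_event_ts = Gauge(
    "example_last_event_time",
    "Unix timestamp of the last event received from the given domain",
    ["server_name"],
)

def record_event(origin: str, origin_server_ts_ms: int) -> None:
    # Export the timestamp itself (in seconds). The age can then be computed
    # at query time, e.g. `time() - example_last_event_time` in PromQL, and
    # stays meaningful even if the exporter stops updating the gauge.
    last_event_ts.labels(server_name=origin).set(origin_server_ts_ms / 1000)
```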
--- changelog.d/9540.feature | 1 + changelog.d/9540.removal | 1 + synapse/federation/federation_server.py | 10 ++++------ synapse/federation/sender/transaction_manager.py | 11 +++++------ 4 files changed, 11 insertions(+), 12 deletions(-) create mode 100644 changelog.d/9540.feature create mode 100644 changelog.d/9540.removal diff --git a/changelog.d/9540.feature b/changelog.d/9540.feature new file mode 100644 index 0000000000..5417e51b93 --- /dev/null +++ b/changelog.d/9540.feature @@ -0,0 +1 @@ +Add `synapse_federation_last_sent_pdu_time` and `synapse_federation_last_received_pdu_time` prometheus metrics, which monitor federation delays by reporting the timestamps of messages sent and received to a set of remote servers. diff --git a/changelog.d/9540.removal b/changelog.d/9540.removal new file mode 100644 index 0000000000..d54f553cb9 --- /dev/null +++ b/changelog.d/9540.removal @@ -0,0 +1 @@ +The `synapse_federation_last_sent_pdu_age` and `synapse_federation_last_received_pdu_age` prometheus metrics have been removed. They are replaced by `synapse_federation_last_sent_pdu_time` and `synapse_federation_last_received_pdu_time`. diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 2f832b47f6..362895bf42 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -90,10 +90,9 @@ pdu_process_time = Histogram( "Time taken to process an event", ) - -last_pdu_age_metric = Gauge( - "synapse_federation_last_received_pdu_age", - "The age (in seconds) of the last PDU successfully received from the given domain", +last_pdu_ts_metric = Gauge( + "synapse_federation_last_received_pdu_time", + "The timestamp of the last PDU which was successfully received from the given domain", labelnames=("server_name",), ) @@ -369,8 +368,7 @@ class FederationServer(FederationBase): ) if newest_pdu_ts and origin in self._federation_metrics_domains: - newest_pdu_age = self._clock.time_msec() - newest_pdu_ts - last_pdu_age_metric.labels(server_name=origin).set(newest_pdu_age / 1000) + last_pdu_ts_metric.labels(server_name=origin).set(newest_pdu_ts / 1000) return pdu_results diff --git a/synapse/federation/sender/transaction_manager.py b/synapse/federation/sender/transaction_manager.py index 763aff296c..2a9cd063c4 100644 --- a/synapse/federation/sender/transaction_manager.py +++ b/synapse/federation/sender/transaction_manager.py @@ -36,9 +36,9 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) -last_pdu_age_metric = Gauge( - "synapse_federation_last_sent_pdu_age", - "The age (in seconds) of the last PDU successfully sent to the given domain", +last_pdu_ts_metric = Gauge( + "synapse_federation_last_sent_pdu_time", + "The timestamp of the last PDU which was successfully sent to the given domain", labelnames=("server_name",), ) @@ -187,9 +187,8 @@ class TransactionManager: if success and pdus and destination in self._federation_metrics_domains: last_pdu = pdus[-1] - last_pdu_age = self.clock.time_msec() - last_pdu.origin_server_ts - last_pdu_age_metric.labels(server_name=destination).set( - last_pdu_age / 1000 + last_pdu_ts_metric.labels(server_name=destination).set( + last_pdu.origin_server_ts / 1000 ) set_tag(tags.ERROR, not success) -- cgit 1.5.1 From e5da770ccea03bc386c476913951ee220d5cf04e Mon Sep 17 00:00:00 2001 From: Ben Banfield-Zanin Date: Fri, 5 Mar 2021 12:07:50 +0000 Subject: Add additional SAML2 upgrade notes (#9550) --- UPGRADE.rst | 7 +++++++ changelog.d/9550.doc | 1 + 2 files changed, 8 insertions(+) create mode 
100644 changelog.d/9550.doc

diff --git a/UPGRADE.rst b/UPGRADE.rst
index 031e02bda9..8bc2ff91ab 100644
--- a/UPGRADE.rst
+++ b/UPGRADE.rst
@@ -124,6 +124,13 @@ This version changes the URI used for callbacks from OAuth2 and SAML2 identity p
    need to add ``[synapse public baseurl]/_synapse/client/saml2/authn_response`` as a permitted
    "ACS location" (also known as "allowed callback URLs") at the identity provider.
 
+   The "Issuer" in the "AuthnRequest" to the SAML2 identity provider is also updated to
+   ``[synapse public baseurl]/_synapse/client/saml2/metadata.xml``. If your SAML2 identity
+   provider uses this property to validate or otherwise identify Synapse, its configuration
+   will need to be updated to use the new URL. Alternatively you could create a new, separate
+   "EntityDescriptor" in your SAML2 identity provider with the new URLs and leave the URLs in
+   the existing "EntityDescriptor" as they were.
+
 Changes to HTML templates
 -------------------------
 
diff --git a/changelog.d/9550.doc b/changelog.d/9550.doc
new file mode 100644
index 0000000000..adbbeb0ae4
--- /dev/null
+++ b/changelog.d/9550.doc
@@ -0,0 +1 @@
+Improve the SAML2 upgrade notes for 1.27.0.
-- 
cgit 1.5.1


From 0fc4eb103a1587d861ae388c6b30bcc04851e70c Mon Sep 17 00:00:00 2001
From: Leo Bärring
Date: Sat, 6 Mar 2021 12:49:19 +0100
Subject: Update reverse proxy to add OpenBSD relayd example configuration. (#9508)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Update reverse proxy to add OpenBSD relayd example configuration.

Signed-off-by: Leo Bärring
---
 README.rst            |  5 +++--
 changelog.d/9508.doc  |  1 +
 docs/reverse_proxy.md | 51 +++++++++++++++++++++++++++++++++++++++++++++++++--
 3 files changed, 53 insertions(+), 4 deletions(-)
 create mode 100644 changelog.d/9508.doc

diff --git a/README.rst b/README.rst
index d872b11f57..6a1e713590 100644
--- a/README.rst
+++ b/README.rst
@@ -183,8 +183,9 @@ Using a reverse proxy with Synapse
 
 It is recommended to put a reverse proxy such as
 `nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
 `Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
-`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_ or
-`HAProxy <https://www.haproxy.org/>`_ in front of Synapse. One advantage of
+`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
+`HAProxy <https://www.haproxy.org/>`_ or
+`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
 doing so is that it means that you can expose the default https port (443) to
 Matrix clients without needing to run Synapse with root privileges.
 
diff --git a/changelog.d/9508.doc b/changelog.d/9508.doc
new file mode 100644
index 0000000000..a17a8faecf
--- /dev/null
+++ b/changelog.d/9508.doc
@@ -0,0 +1 @@
+Add relayd entry to reverse proxy example configurations.
diff --git a/docs/reverse_proxy.md b/docs/reverse_proxy.md
index 81e5a68a36..860afd5a04 100644
--- a/docs/reverse_proxy.md
+++ b/docs/reverse_proxy.md
@@ -3,8 +3,9 @@
 It is recommended to put a reverse proxy such as
 [nginx](https://nginx.org/en/docs/http/ngx_http_proxy_module.html),
 [Apache](https://httpd.apache.org/docs/current/mod/mod_proxy_http.html),
-[Caddy](https://caddyserver.com/docs/quick-starts/reverse-proxy) or
-[HAProxy](https://www.haproxy.org/) in front of Synapse. One advantage
+[Caddy](https://caddyserver.com/docs/quick-starts/reverse-proxy),
+[HAProxy](https://www.haproxy.org/) or
+[relayd](https://man.openbsd.org/relayd.8) in front of Synapse. One advantage
 of doing so is that it means that you can expose the default https port
 (443) to Matrix clients without needing to run Synapse with root
 privileges.
@@ -162,6 +163,52 @@ backend matrix
 server matrix 127.0.0.1:8008
 ```
 
+### Relayd
+
+```
+table <webserver> { 127.0.0.1 }
+table <matrixserver> { 127.0.0.1 }
+
+http protocol "https" {
+    tls { no tlsv1.0, ciphers "HIGH" }
+    tls keypair "example.com"
+    match header set "X-Forwarded-For" value "$REMOTE_ADDR"
+    match header set "X-Forwarded-Proto" value "https"
+
+    # set CORS header for .well-known/matrix/server, .well-known/matrix/client
+    # httpd does not support setting headers, so do it here
+    match request path "/.well-known/matrix/*" tag "matrix-cors"
+    match response tagged "matrix-cors" header set "Access-Control-Allow-Origin" value "*"
+
+    pass quick path "/_matrix/*" forward to <matrixserver>
+    pass quick path "/_synapse/client/*" forward to <matrixserver>
+
+    # pass on non-matrix traffic to webserver
+    pass forward to <webserver>
+}
+
+relay "https_traffic" {
+    listen on egress port 443 tls
+    protocol "https"
+    forward to <matrixserver> port 8008 check tcp
+    forward to <webserver> port 8080 check tcp
+}
+
+http protocol "matrix" {
+    tls { no tlsv1.0, ciphers "HIGH" }
+    tls keypair "example.com"
+    block
+    pass quick path "/_matrix/*" forward to <matrixserver>
+    pass quick path "/_synapse/client/*" forward to <matrixserver>
+}
+
+relay "matrix_federation" {
+    listen on egress port 8448 tls
+    protocol "matrix"
+    forward to <matrixserver> port 8008 check tcp
+}
+```
+
 ## Homeserver Configuration
 
 You will also want to set `bind_addresses: ['127.0.0.1']` and
-- 
cgit 1.5.1


From 58114f8a17a5b52a9b90b89b3c7d9b595307c9a8 Mon Sep 17 00:00:00 2001
From: Patrick Cloke
Date: Mon, 8 Mar 2021 08:25:43 -0500
Subject: Create a SynapseReactor type which incorporates the necessary reactor interfaces. (#9528)

This helps fix some type hints when running with Twisted 21.2.0.

---
 changelog.d/9528.misc                              |  1 +
 synapse/handlers/acme.py                           |  4 +++-
 synapse/http/client.py                             |  5 +++--
 synapse/http/federation/matrix_federation_agent.py |  3 ++-
 synapse/http/matrixfederationclient.py             |  8 ++++----
 synapse/replication/tcp/redis.py                   |  2 +-
 synapse/server.py                                  |  5 ++---
 synapse/types.py                                   | 16 ++++++++++++++++
 8 files changed, 32 insertions(+), 12 deletions(-)
 create mode 100644 changelog.d/9528.misc

diff --git a/changelog.d/9528.misc b/changelog.d/9528.misc
new file mode 100644
index 0000000000..14c7b78dd9
--- /dev/null
+++ b/changelog.d/9528.misc
@@ -0,0 +1 @@
+Fix incorrect type hints.
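A reduced sketch of the pattern this commit introduces (hypothetical interface
name; the real composite, ISynapseReactor, is added to synapse/types.py below):
compose exactly the Twisted reactor interfaces the code relies on into one zope
Interface, so a single annotation can require all of those capabilities.

```
from zope.interface import Interface

from twisted.internet.interfaces import IReactorTCP, IReactorTime

# Note: mypy-zope needs the class to inherit directly from Interface to be
# recognised as an interface (see the comment in the synapse/types.py diff).
class IMiniReactor(IReactorTCP, IReactorTime, Interface):
    """Just the reactor capabilities this example needs."""

def now(reactor: IMiniReactor) -> float:
    # Both IReactorTime.seconds() and IReactorTCP.connectTCP() are guaranteed
    # by the composed interface, so type checkers accept either call here.
    return reactor.seconds()
```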
diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py index 5ecb2da1ac..132be238dd 100644 --- a/synapse/handlers/acme.py +++ b/synapse/handlers/acme.py @@ -73,7 +73,9 @@ class AcmeHandler: "Listening for ACME requests on %s:%i", host, self.hs.config.acme_port ) try: - self.reactor.listenTCP(self.hs.config.acme_port, srv, interface=host) + self.reactor.listenTCP( + self.hs.config.acme_port, srv, backlog=50, interface=host + ) except twisted.internet.error.CannotListenError as e: check_bind_error(e, host, bind_addresses) diff --git a/synapse/http/client.py b/synapse/http/client.py index 72901e3f95..af34d583ad 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -63,6 +63,7 @@ from synapse.http import QuieterFileBodyProducer, RequestTimedOutError, redact_u from synapse.http.proxyagent import ProxyAgent from synapse.logging.context import make_deferred_yieldable from synapse.logging.opentracing import set_tag, start_active_span, tags +from synapse.types import ISynapseReactor from synapse.util import json_decoder from synapse.util.async_helpers import timeout_deferred @@ -199,7 +200,7 @@ class _IPBlacklistingResolver: return r -@implementer(IReactorPluggableNameResolver) +@implementer(ISynapseReactor) class BlacklistingReactorWrapper: """ A Reactor wrapper which will prevent DNS resolution to blacklisted IP @@ -324,7 +325,7 @@ class SimpleHttpClient: # filters out blacklisted IP addresses, to prevent DNS rebinding. self.reactor = BlacklistingReactorWrapper( hs.get_reactor(), self._ip_whitelist, self._ip_blacklist - ) + ) # type: ISynapseReactor else: self.reactor = hs.get_reactor() diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py index b07aa59c08..5935a125fd 100644 --- a/synapse/http/federation/matrix_federation_agent.py +++ b/synapse/http/federation/matrix_federation_agent.py @@ -35,6 +35,7 @@ from synapse.http.client import BlacklistingAgentWrapper from synapse.http.federation.srv_resolver import Server, SrvResolver from synapse.http.federation.well_known_resolver import WellKnownResolver from synapse.logging.context import make_deferred_yieldable, run_in_background +from synapse.types import ISynapseReactor from synapse.util import Clock logger = logging.getLogger(__name__) @@ -68,7 +69,7 @@ class MatrixFederationAgent: def __init__( self, - reactor: IReactorCore, + reactor: ISynapseReactor, tls_client_options_factory: Optional[FederationPolicyForHTTPS], user_agent: bytes, ip_blacklist: IPSet, diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 0f107714ea..da6866addf 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -59,7 +59,7 @@ from synapse.logging.opentracing import ( start_active_span, tags, ) -from synapse.types import JsonDict +from synapse.types import ISynapseReactor, JsonDict from synapse.util import json_decoder from synapse.util.async_helpers import timeout_deferred from synapse.util.metrics import Measure @@ -237,14 +237,14 @@ class MatrixFederationHttpClient: # addresses, to prevent DNS rebinding. 
self.reactor = BlacklistingReactorWrapper( hs.get_reactor(), None, hs.config.federation_ip_range_blacklist - ) + ) # type: ISynapseReactor user_agent = hs.version_string if hs.config.user_agent_suffix: user_agent = "%s %s" % (user_agent, hs.config.user_agent_suffix) user_agent = user_agent.encode("ascii") - self.agent = MatrixFederationAgent( + federation_agent = MatrixFederationAgent( self.reactor, tls_client_options_factory, user_agent, @@ -254,7 +254,7 @@ class MatrixFederationHttpClient: # Use a BlacklistingAgentWrapper to prevent circumventing the IP # blacklist via IP literals in server names self.agent = BlacklistingAgentWrapper( - self.agent, + federation_agent, ip_blacklist=hs.config.federation_ip_range_blacklist, ) diff --git a/synapse/replication/tcp/redis.py b/synapse/replication/tcp/redis.py index 0e6155cf53..7560706b4b 100644 --- a/synapse/replication/tcp/redis.py +++ b/synapse/replication/tcp/redis.py @@ -328,6 +328,6 @@ def lazyConnection( factory.continueTrying = reconnect reactor = hs.get_reactor() - reactor.connectTCP(host, port, factory, 30) + reactor.connectTCP(host, port, factory, timeout=30, bindAddress=None) return factory.handler diff --git a/synapse/server.py b/synapse/server.py index afd7cd72e7..369cc88026 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -36,7 +36,6 @@ from typing import ( cast, ) -import twisted.internet.base import twisted.internet.tcp from twisted.internet import defer from twisted.mail.smtp import sendmail @@ -130,7 +129,7 @@ from synapse.server_notices.worker_server_notices_sender import ( from synapse.state import StateHandler, StateResolutionHandler from synapse.storage import Databases, DataStore, Storage from synapse.streams.events import EventSources -from synapse.types import DomainSpecificString +from synapse.types import DomainSpecificString, ISynapseReactor from synapse.util import Clock from synapse.util.distributor import Distributor from synapse.util.ratelimitutils import FederationRateLimiter @@ -291,7 +290,7 @@ class HomeServer(metaclass=abc.ABCMeta): for i in self.REQUIRED_ON_BACKGROUND_TASK_STARTUP: getattr(self, "get_" + i + "_handler")() - def get_reactor(self) -> twisted.internet.base.ReactorBase: + def get_reactor(self) -> ISynapseReactor: """ Fetch the Twisted reactor in use by this HomeServer. """ diff --git a/synapse/types.py b/synapse/types.py index 721343f0b5..0216d213c7 100644 --- a/synapse/types.py +++ b/synapse/types.py @@ -35,6 +35,14 @@ from typing import ( import attr from signedjson.key import decode_verify_key_bytes from unpaddedbase64 import decode_base64 +from zope.interface import Interface + +from twisted.internet.interfaces import ( + IReactorCore, + IReactorPluggableNameResolver, + IReactorTCP, + IReactorTime, +) from synapse.api.errors import Codes, SynapseError from synapse.util.stringutils import parse_and_validate_server_name @@ -67,6 +75,14 @@ MutableStateMap = MutableMapping[StateKey, T] JsonDict = Dict[str, Any] +# Note that this seems to require inheriting *directly* from Interface in order +# for mypy-zope to realize it is an interface. 
+class ISynapseReactor( + IReactorTCP, IReactorPluggableNameResolver, IReactorTime, IReactorCore, Interface +): + """The interfaces necessary for Synapse to function.""" + + class Requester( namedtuple( "Requester", -- cgit 1.5.1 From 15c788e22d87de8f9780d2c8a7304fc0e7d506ab Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 Mar 2021 13:52:13 +0000 Subject: 1.29.0 --- CHANGES.md | 7 +++++-- debian/changelog | 7 +++++-- synapse/__init__.py | 2 +- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 15d2894bc6..a30afe6572 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,9 +1,12 @@ -Synapse 1.29.0rc1 (2021-03-04) -============================== +Synapse 1.29.0 (2021-03-08) +=========================== Note that synapse now expects an `X-Forwarded-Proto` header when used with a reverse proxy. Please see [UPGRADE.rst](UPGRADE.rst#upgrading-to-v1290) for more details on this change. +Synapse 1.29.0rc1 (2021-03-04) +============================== + Features -------- diff --git a/debian/changelog b/debian/changelog index 04cd0d0d68..3feefd8a1c 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,9 +1,12 @@ -matrix-synapse-py3 (1.29.0) UNRELEASED; urgency=medium +matrix-synapse-py3 (1.29.0) stable; urgency=medium [ Jonathan de Jong ] * Remove the python -B flag (don't generate bytecode) in scripts and documentation. - -- Synapse Packaging team Fri, 26 Feb 2021 14:41:31 +0100 + [ Synapse Packaging team ] + * New synapse release 1.29.0. + + -- Synapse Packaging team Mon, 08 Mar 2021 13:51:50 +0000 matrix-synapse-py3 (1.28.0) stable; urgency=medium diff --git a/synapse/__init__.py b/synapse/__init__.py index 2c24d4ae03..56ca888862 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -48,7 +48,7 @@ try: except ImportError: pass -__version__ = "1.29.0rc1" +__version__ = "1.29.0" if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)): # We import here so that we don't have to install a bunch of deps when -- cgit 1.5.1 From 4de1c3572818a68ded9e7054ca23dbfb531a6283 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 Mar 2021 13:59:17 +0000 Subject: Fixup changelog --- CHANGES.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index a30afe6572..99e314c68d 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -4,6 +4,9 @@ Synapse 1.29.0 (2021-03-08) Note that synapse now expects an `X-Forwarded-Proto` header when used with a reverse proxy. Please see [UPGRADE.rst](UPGRADE.rst#upgrading-to-v1290) for more details on this change. +No significant changes. + + Synapse 1.29.0rc1 (2021-03-04) ============================== -- cgit 1.5.1 From cb7fc7523e5288cda7845961cf49d5b58d345210 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Mon, 8 Mar 2021 09:21:36 -0500 Subject: Add a basic test for purging rooms. (#9541) Unfortunately this doesn't test re-joining the room since that requires having another homeserver to query over federation, which isn't easily doable in unit tests. --- changelog.d/9541.misc | 1 + tests/storage/test_purge.py | 71 ++++++++++++++++++++++++++++----------------- 2 files changed, 46 insertions(+), 26 deletions(-) create mode 100644 changelog.d/9541.misc diff --git a/changelog.d/9541.misc b/changelog.d/9541.misc new file mode 100644 index 0000000000..a82bef3431 --- /dev/null +++ b/changelog.d/9541.misc @@ -0,0 +1 @@ +Add an additional test for purging a room. 
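In outline, the new test drives the purge machinery through the storage layer rather than over HTTP. A minimal sketch of the flow it checks, assuming a `store`/`storage` pair obtained as in the test's `prepare` (via `hs.get_datastore()` and `hs.get_storage()`):

```python
from synapse.api.errors import NotFoundError


async def purge_room_and_check(store, storage, room_id: str, event_id: str) -> None:
    # Delete all state and events for the room...
    await storage.purge_events.purge_room(room_id)
    # ...after which looking up any of its events raises NotFoundError.
    try:
        await store.get_event(event_id)
    except NotFoundError:
        pass  # expected: the event has been purged
```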
diff --git a/tests/storage/test_purge.py b/tests/storage/test_purge.py
index a06ad2c03e..41af8c4847 100644
--- a/tests/storage/test_purge.py
+++ b/tests/storage/test_purge.py
@@ -13,9 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from twisted.internet import defer
-
-from synapse.api.errors import NotFoundError
+from synapse.api.errors import NotFoundError, SynapseError
 from synapse.rest.client.v1 import room
 
 from tests.unittest import HomeserverTestCase
@@ -33,9 +31,12 @@ class PurgeTests(HomeserverTestCase):
     def prepare(self, reactor, clock, hs):
         self.room_id = self.helper.create_room_as(self.user_id)
 
-    def test_purge(self):
+        self.store = hs.get_datastore()
+        self.storage = self.hs.get_storage()
+
+    def test_purge_history(self):
         """
-        Purging a room will delete everything before the topological point.
+        Purging a room's history will delete everything before the topological point.
         """
         # Send four messages to the room
         first = self.helper.send(self.room_id, body="test1")
@@ -43,30 +44,27 @@ class PurgeTests(HomeserverTestCase):
         third = self.helper.send(self.room_id, body="test3")
         last = self.helper.send(self.room_id, body="test4")
 
-        store = self.hs.get_datastore()
-        storage = self.hs.get_storage()
-
         # Get the topological token
         token = self.get_success(
-            store.get_topological_token_for_event(last["event_id"])
+            self.store.get_topological_token_for_event(last["event_id"])
        )
         token_str = self.get_success(token.to_string(self.hs.get_datastore()))
 
         # Purge everything before this topological token
         self.get_success(
-            storage.purge_events.purge_history(self.room_id, token_str, True)
+            self.storage.purge_events.purge_history(self.room_id, token_str, True)
         )
 
         # 1-3 should fail and last will succeed, meaning that 1-3 are deleted
         # and last is not.
-        self.get_failure(store.get_event(first["event_id"]), NotFoundError)
-        self.get_failure(store.get_event(second["event_id"]), NotFoundError)
-        self.get_failure(store.get_event(third["event_id"]), NotFoundError)
-        self.get_success(store.get_event(last["event_id"]))
+        self.get_failure(self.store.get_event(first["event_id"]), NotFoundError)
+        self.get_failure(self.store.get_event(second["event_id"]), NotFoundError)
+        self.get_failure(self.store.get_event(third["event_id"]), NotFoundError)
+        self.get_success(self.store.get_event(last["event_id"]))
 
-    def test_purge_wont_delete_extrems(self):
+    def test_purge_history_wont_delete_extrems(self):
         """
-        Purging a room will delete everything before the topological point.
+        Purging a room's history will fail, and delete nothing, if the given point is ahead of the forward extremities.
""" # Send four messages to the room first = self.helper.send(self.room_id, body="test1") @@ -74,22 +72,43 @@ class PurgeTests(HomeserverTestCase): third = self.helper.send(self.room_id, body="test3") last = self.helper.send(self.room_id, body="test4") - storage = self.hs.get_datastore() - # Set the topological token higher than it should be token = self.get_success( - storage.get_topological_token_for_event(last["event_id"]) + self.store.get_topological_token_for_event(last["event_id"]) ) event = "t{}-{}".format(token.topological + 1, token.stream + 1) # Purge everything before this topological token - purge = defer.ensureDeferred(storage.purge_history(self.room_id, event, True)) - self.pump() - f = self.failureResultOf(purge) + f = self.get_failure( + self.storage.purge_events.purge_history(self.room_id, event, True), + SynapseError, + ) self.assertIn("greater than forward", f.value.args[0]) # Try and get the events - self.get_success(storage.get_event(first["event_id"])) - self.get_success(storage.get_event(second["event_id"])) - self.get_success(storage.get_event(third["event_id"])) - self.get_success(storage.get_event(last["event_id"])) + self.get_success(self.store.get_event(first["event_id"])) + self.get_success(self.store.get_event(second["event_id"])) + self.get_success(self.store.get_event(third["event_id"])) + self.get_success(self.store.get_event(last["event_id"])) + + def test_purge_room(self): + """ + Purging a room will delete everything about it. + """ + # Send four messages to the room + first = self.helper.send(self.room_id, body="test1") + + # Get the current room state. + state_handler = self.hs.get_state_handler() + create_event = self.get_success( + state_handler.get_current_state(self.room_id, "m.room.create", "") + ) + self.assertIsNotNone(create_event) + + # Purge everything before this topological token + self.get_success(self.storage.purge_events.purge_room(self.room_id)) + + # The events aren't found. + self.store._invalidate_get_event_cache(create_event.event_id) + self.get_failure(self.store.get_event(create_event.event_id), NotFoundError) + self.get_failure(self.store.get_event(first["event_id"]), NotFoundError) -- cgit 1.5.1 From 7076eee4b9a1a3fccb660c2435c155f3d9a67435 Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Mon, 8 Mar 2021 16:34:38 +0100 Subject: Add type hints to purge room and server notice admin API. (#9520) --- changelog.d/9520.misc | 1 + synapse/rest/admin/purge_room_servlet.py | 15 +++++++++------ synapse/rest/admin/server_notice_servlet.py | 23 ++++++++++++++--------- 3 files changed, 24 insertions(+), 15 deletions(-) create mode 100644 changelog.d/9520.misc diff --git a/changelog.d/9520.misc b/changelog.d/9520.misc new file mode 100644 index 0000000000..825ba5bbc1 --- /dev/null +++ b/changelog.d/9520.misc @@ -0,0 +1 @@ +Add type hints to purge room and server notice admin API. \ No newline at end of file diff --git a/synapse/rest/admin/purge_room_servlet.py b/synapse/rest/admin/purge_room_servlet.py index 8b7bb6d44e..49966ee3e0 100644 --- a/synapse/rest/admin/purge_room_servlet.py +++ b/synapse/rest/admin/purge_room_servlet.py @@ -12,13 +12,20 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import TYPE_CHECKING, Tuple + from synapse.http.servlet import ( RestServlet, assert_params_in_dict, parse_json_object_from_request, ) +from synapse.http.site import SynapseRequest from synapse.rest.admin import assert_requester_is_admin from synapse.rest.admin._base import admin_patterns +from synapse.types import JsonDict + +if TYPE_CHECKING: + from synapse.server import HomeServer class PurgeRoomServlet(RestServlet): @@ -36,16 +43,12 @@ class PurgeRoomServlet(RestServlet): PATTERNS = admin_patterns("/purge_room$") - def __init__(self, hs): - """ - Args: - hs (synapse.server.HomeServer): server - """ + def __init__(self, hs: "HomeServer"): self.hs = hs self.auth = hs.get_auth() self.pagination_handler = hs.get_pagination_handler() - async def on_POST(self, request): + async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) body = parse_json_object_from_request(request) diff --git a/synapse/rest/admin/server_notice_servlet.py b/synapse/rest/admin/server_notice_servlet.py index 375d055445..f495666f4a 100644 --- a/synapse/rest/admin/server_notice_servlet.py +++ b/synapse/rest/admin/server_notice_servlet.py @@ -12,17 +12,24 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from typing import TYPE_CHECKING, Optional, Tuple + from synapse.api.constants import EventTypes from synapse.api.errors import SynapseError +from synapse.http.server import HttpServer from synapse.http.servlet import ( RestServlet, assert_params_in_dict, parse_json_object_from_request, ) +from synapse.http.site import SynapseRequest from synapse.rest.admin import assert_requester_is_admin from synapse.rest.admin._base import admin_patterns from synapse.rest.client.transactions import HttpTransactionCache -from synapse.types import UserID +from synapse.types import JsonDict, UserID + +if TYPE_CHECKING: + from synapse.server import HomeServer class SendServerNoticeServlet(RestServlet): @@ -44,17 +51,13 @@ class SendServerNoticeServlet(RestServlet): } """ - def __init__(self, hs): - """ - Args: - hs (synapse.server.HomeServer): server - """ + def __init__(self, hs: "HomeServer"): self.hs = hs self.auth = hs.get_auth() self.txns = HttpTransactionCache(hs) self.snm = hs.get_server_notices_manager() - def register(self, json_resource): + def register(self, json_resource: HttpServer): PATTERN = "/send_server_notice" json_resource.register_paths( "POST", admin_patterns(PATTERN + "$"), self.on_POST, self.__class__.__name__ @@ -66,7 +69,9 @@ class SendServerNoticeServlet(RestServlet): self.__class__.__name__, ) - async def on_POST(self, request, txn_id=None): + async def on_POST( + self, request: SynapseRequest, txn_id: Optional[str] = None + ) -> Tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) body = parse_json_object_from_request(request) assert_params_in_dict(body, ("user_id", "content")) @@ -90,7 +95,7 @@ class SendServerNoticeServlet(RestServlet): return 200, {"event_id": event.event_id} - def on_PUT(self, request, txn_id): + def on_PUT(self, request: SynapseRequest, txn_id: str) -> Tuple[int, JsonDict]: return self.txns.fetch_or_execute_request( request, self.on_POST, request, txn_id ) -- cgit 1.5.1 From b2c4d3d72165e98b6d9c1f797d5f7013280f515d Mon Sep 17 00:00:00 2001 From: Will Hunt Date: Mon, 8 Mar 2021 16:35:04 +0000 Subject: Warn that /register will soon require a type when called 
with an access token (#9559) This notice is giving a heads up to the planned spec compliance fix https://github.com/matrix-org/synapse/pull/9548. --- CHANGES.md | 6 ++++++ changelog.d/9559.removal | 1 + 2 files changed, 7 insertions(+) create mode 100644 changelog.d/9559.removal diff --git a/CHANGES.md b/CHANGES.md index 99e314c68d..3f0c5685ea 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,9 @@ +Removal warning +--------------- + +Note that this release deprecates the ability for appservices to call `POST /_matrix/client/r0/register` without the body parameter `type`. Appservice developers should use a `type` value of `m.login.application_service` as per the spec. In future releases, calling this endpoint with an access token but +without a valid type will fail. + Synapse 1.29.0 (2021-03-08) =========================== diff --git a/changelog.d/9559.removal b/changelog.d/9559.removal new file mode 100644 index 0000000000..f97bf56dc0 --- /dev/null +++ b/changelog.d/9559.removal @@ -0,0 +1 @@ +Registering an Application Service user without using the `m.login.application_service` login type will be unsupported in an upcoming Synapse release. -- cgit 1.5.1 From d6196efafcc312472464c882ab630bc3fbf7bd37 Mon Sep 17 00:00:00 2001 From: Jonathan de Jong Date: Mon, 8 Mar 2021 20:00:07 +0100 Subject: Add ResponseCache tests. (#9458) --- changelog.d/9458.misc | 1 + synapse/appservice/api.py | 2 +- synapse/federation/federation_server.py | 13 ++-- synapse/handlers/initial_sync.py | 2 +- synapse/handlers/room.py | 2 +- synapse/handlers/room_list.py | 4 +- synapse/handlers/sync.py | 2 +- synapse/replication/http/_base.py | 9 ++- synapse/util/caches/response_cache.py | 10 +-- tests/util/caches/test_responsecache.py | 131 ++++++++++++++++++++++++++++++++ 10 files changed, 156 insertions(+), 20 deletions(-) create mode 100644 changelog.d/9458.misc create mode 100644 tests/util/caches/test_responsecache.py diff --git a/changelog.d/9458.misc b/changelog.d/9458.misc new file mode 100644 index 0000000000..8ceeed1352 --- /dev/null +++ b/changelog.d/9458.misc @@ -0,0 +1 @@ +Add tests to ResponseCache. 
\ No newline at end of file diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 93c2aabcca..9d3bbe3b8b 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -90,7 +90,7 @@ class ApplicationServiceApi(SimpleHttpClient): self.clock = hs.get_clock() self.protocol_meta_cache = ResponseCache( - hs, "as_protocol_meta", timeout_ms=HOUR_IN_MS + hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS ) # type: ResponseCache[Tuple[str, str]] async def query_user(self, service, user_id): diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 362895bf42..7657697bfa 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -22,6 +22,7 @@ from typing import ( Awaitable, Callable, Dict, + Iterable, List, Optional, Tuple, @@ -98,7 +99,7 @@ last_pdu_ts_metric = Gauge( class FederationServer(FederationBase): - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__(hs) self.auth = hs.get_auth() @@ -118,7 +119,7 @@ class FederationServer(FederationBase): # We cache results for transaction with the same ID self._transaction_resp_cache = ResponseCache( - hs, "fed_txn_handler", timeout_ms=30000 + hs.get_clock(), "fed_txn_handler", timeout_ms=30000 ) # type: ResponseCache[Tuple[str, str]] self.transaction_actions = TransactionActions(self.store) @@ -128,10 +129,10 @@ class FederationServer(FederationBase): # We cache responses to state queries, as they take a while and often # come in waves. self._state_resp_cache = ResponseCache( - hs, "state_resp", timeout_ms=30000 + hs.get_clock(), "state_resp", timeout_ms=30000 ) # type: ResponseCache[Tuple[str, str]] self._state_ids_resp_cache = ResponseCache( - hs, "state_ids_resp", timeout_ms=30000 + hs.get_clock(), "state_ids_resp", timeout_ms=30000 ) # type: ResponseCache[Tuple[str, str]] self._federation_metrics_domains = ( @@ -453,7 +454,9 @@ class FederationServer(FederationBase): self, room_id: str, event_id: str ) -> Dict[str, list]: if event_id: - pdus = await self.handler.get_state_for_pdu(room_id, event_id) + pdus = await self.handler.get_state_for_pdu( + room_id, event_id + ) # type: Iterable[EventBase] else: pdus = (await self.state.get_current_state(room_id)).values() diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py index 71a5076672..13f8152283 100644 --- a/synapse/handlers/initial_sync.py +++ b/synapse/handlers/initial_sync.py @@ -48,7 +48,7 @@ class InitialSyncHandler(BaseHandler): self.clock = hs.get_clock() self.validator = EventValidator() self.snapshot_cache = ResponseCache( - hs, "initial_sync_cache" + hs.get_clock(), "initial_sync_cache" ) # type: ResponseCache[Tuple[str, Optional[StreamToken], Optional[StreamToken], str, Optional[int], bool, bool]] self._event_serializer = hs.get_event_client_serializer() self.storage = hs.get_storage() diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index a488df10d6..4b3d0d72e3 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -121,7 +121,7 @@ class RoomCreationHandler(BaseHandler): # succession, only process the first attempt and return its result to # subsequent requests self._upgrade_response_cache = ResponseCache( - hs, "room_upgrade", timeout_ms=FIVE_MINUTES_IN_MS + hs.get_clock(), "room_upgrade", timeout_ms=FIVE_MINUTES_IN_MS ) # type: ResponseCache[Tuple[str, str]] self._server_notices_mxid = hs.config.server_notices_mxid diff --git a/synapse/handlers/room_list.py 
b/synapse/handlers/room_list.py index 14f14db449..8bfc46c654 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -44,10 +44,10 @@ class RoomListHandler(BaseHandler): super().__init__(hs) self.enable_room_list_search = hs.config.enable_room_list_search self.response_cache = ResponseCache( - hs, "room_list" + hs.get_clock(), "room_list" ) # type: ResponseCache[Tuple[Optional[int], Optional[str], ThirdPartyInstanceID]] self.remote_response_cache = ResponseCache( - hs, "remote_room_list", timeout_ms=30 * 1000 + hs.get_clock(), "remote_room_list", timeout_ms=30 * 1000 ) # type: ResponseCache[Tuple[str, Optional[int], Optional[str], bool, Optional[str]]] async def get_local_public_room_list( diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 4e8ed7b33f..f50257cd57 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -244,7 +244,7 @@ class SyncHandler: self.event_sources = hs.get_event_sources() self.clock = hs.get_clock() self.response_cache = ResponseCache( - hs, "sync" + hs.get_clock(), "sync" ) # type: ResponseCache[Tuple[Any, ...]] self.state = hs.get_state_handler() self.auth = hs.get_auth() diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index 8a3f113e76..b7aa0c280f 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -18,7 +18,7 @@ import logging import re import urllib from inspect import signature -from typing import Dict, List, Tuple +from typing import TYPE_CHECKING, Dict, List, Tuple from prometheus_client import Counter, Gauge @@ -28,6 +28,9 @@ from synapse.logging.opentracing import inject_active_span_byte_dict, trace from synapse.util.caches.response_cache import ResponseCache from synapse.util.stringutils import random_string +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) _pending_outgoing_requests = Gauge( @@ -88,10 +91,10 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta): CACHE = True RETRY_ON_TIMEOUT = True - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): if self.CACHE: self.response_cache = ResponseCache( - hs, "repl." + self.NAME, timeout_ms=30 * 60 * 1000 + hs.get_clock(), "repl." + self.NAME, timeout_ms=30 * 60 * 1000 ) # type: ResponseCache[str] # We reserve `instance_name` as a parameter to sending requests, so we diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index 32228f42ee..46ea8e0964 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -13,17 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -from typing import TYPE_CHECKING, Any, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, Callable, Dict, Generic, Optional, TypeVar from twisted.internet import defer from synapse.logging.context import make_deferred_yieldable, run_in_background +from synapse.util import Clock from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches import register_cache -if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer - logger = logging.getLogger(__name__) T = TypeVar("T") @@ -37,11 +35,11 @@ class ResponseCache(Generic[T]): used rather than trying to compute a new response. """ - def __init__(self, hs: "HomeServer", name: str, timeout_ms: float = 0): + def __init__(self, clock: Clock, name: str, timeout_ms: float = 0): # Requests that haven't finished yet. 
        self.pending_result_cache = {}  # type: Dict[T, ObservableDeferred]
 
-        self.clock = hs.get_clock()
+        self.clock = clock
         self.timeout_sec = timeout_ms / 1000.0
 
         self._name = name
diff --git a/tests/util/caches/test_responsecache.py b/tests/util/caches/test_responsecache.py
new file mode 100644
index 0000000000..f9a187b8de
--- /dev/null
+++ b/tests/util/caches/test_responsecache.py
@@ -0,0 +1,131 @@
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.util.caches.response_cache import ResponseCache
+
+from tests.server import get_clock
+from tests.unittest import TestCase
+
+
+class DeferredCacheTestCase(TestCase):
+    """
+    A TestCase class for ResponseCache.
+
+    The test-case function naming has some logic to its parts; here are some notes about it:
+    wait: Denotes tests that have an element of "waiting" before their wrapped result becomes available
+            (Generally these just use .delayed_return instead of .instant_return in their wrapped call.)
+    expire: Denotes tests that test expiry after assured existence.
+            (These have a cache with a short timeout_ms=, shorter than the time the test advances the clock by.)
+    """
+
+    def setUp(self):
+        self.reactor, self.clock = get_clock()
+
+    def with_cache(self, name: str, ms: int = 0) -> ResponseCache:
+        return ResponseCache(self.clock, name, timeout_ms=ms)
+
+    @staticmethod
+    async def instant_return(o: str) -> str:
+        return o
+
+    async def delayed_return(self, o: str) -> str:
+        await self.clock.sleep(1)
+        return o
+
+    def test_cache_hit(self):
+        cache = self.with_cache("keeping_cache", ms=9001)
+
+        expected_result = "howdy"
+
+        wrap_d = cache.wrap(0, self.instant_return, expected_result)
+
+        self.assertEqual(
+            expected_result,
+            self.successResultOf(wrap_d),
+            "initial wrap result should be the same",
+        )
+        self.assertEqual(
+            expected_result,
+            self.successResultOf(cache.get(0)),
+            "cache should have the result",
+        )
+
+    def test_cache_miss(self):
+        cache = self.with_cache("trashing_cache", ms=0)
+
+        expected_result = "howdy"
+
+        wrap_d = cache.wrap(0, self.instant_return, expected_result)
+
+        self.assertEqual(
+            expected_result,
+            self.successResultOf(wrap_d),
+            "initial wrap result should be the same",
+        )
+        self.assertIsNone(cache.get(0), "cache should not have the result now")
+
+    def test_cache_expire(self):
+        cache = self.with_cache("short_cache", ms=1000)
+
+        expected_result = "howdy"
+
+        wrap_d = cache.wrap(0, self.instant_return, expected_result)
+
+        self.assertEqual(expected_result, self.successResultOf(wrap_d))
+        self.assertEqual(
+            expected_result,
+            self.successResultOf(cache.get(0)),
+            "cache should still have the result",
+        )
+
+        # cache eviction timer is handled
+        self.reactor.pump((2,))
+
+        self.assertIsNone(cache.get(0), "cache should not have the result now")
+
+    def test_cache_wait_hit(self):
+        cache = self.with_cache("neutral_cache")
+
+        expected_result = "howdy"
+
+        wrap_d = cache.wrap(0, self.delayed_return, expected_result)
+        self.assertNoResult(wrap_d)
+
+        # function
wakes up, returns result + self.reactor.pump((2,)) + + self.assertEqual(expected_result, self.successResultOf(wrap_d)) + + def test_cache_wait_expire(self): + cache = self.with_cache("medium_cache", ms=3000) + + expected_result = "howdy" + + wrap_d = cache.wrap(0, self.delayed_return, expected_result) + self.assertNoResult(wrap_d) + + # stop at 1 second to callback cache eviction callLater at that time, then another to set time at 2 + self.reactor.pump((1, 1)) + + self.assertEqual(expected_result, self.successResultOf(wrap_d)) + self.assertEqual( + expected_result, + self.successResultOf(cache.get(0)), + "cache should still have the result", + ) + + # (1 + 1 + 2) > 3.0, cache eviction timer is handled + self.reactor.pump((2,)) + + self.assertIsNone(cache.get(0), "cache should not have the result now") -- cgit 1.5.1 From 0764d0c6e575793ca506cf021aff3c4b9e0a5972 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Mon, 8 Mar 2021 21:51:59 +0000 Subject: quick config comment tweak to clarify allow_profile_lookup_over_federation --- synapse/config/server.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/synapse/config/server.py b/synapse/config/server.py index 2afca36e7d..5f8910b6e1 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -841,8 +841,7 @@ class ServerConfig(Config): # Whether to require authentication to retrieve profile data (avatars, # display names) of other users through the client API. Defaults to # 'false'. Note that profile data is also available via the federation - # API, so this setting is of limited value if federation is enabled on - # the server. + # API, unless allow_profile_lookup_over_federation is set to false. # #require_auth_for_profile_requests: true -- cgit 1.5.1 From 9898470e7d7b1b2f3c0dfa7d07832ec4662221da Mon Sep 17 00:00:00 2001 From: Jonathan de Jong Date: Tue, 9 Mar 2021 12:09:31 +0100 Subject: Add logging to ObservableDeferred callbacks (#9523) --- changelog.d/9523.misc | 1 + synapse/util/async_helpers.py | 26 ++++++++++++++++++-------- 2 files changed, 19 insertions(+), 8 deletions(-) create mode 100644 changelog.d/9523.misc diff --git a/changelog.d/9523.misc b/changelog.d/9523.misc new file mode 100644 index 0000000000..f03e939efb --- /dev/null +++ b/changelog.d/9523.misc @@ -0,0 +1 @@ +Add extra logging to ObservableDeferred when callbacks throw exceptions. \ No newline at end of file diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index 719e35b78d..f33c115844 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -76,11 +76,16 @@ class ObservableDeferred: def callback(r): object.__setattr__(self, "_result", (True, r)) while self._observers: + observer = self._observers.pop() try: - # TODO: Handle errors here. - self._observers.pop().callback(r) - except Exception: - pass + observer.callback(r) + except Exception as e: + logger.exception( + "%r threw an exception on .callback(%r), ignoring...", + observer, + r, + exc_info=e, + ) return r def errback(f): @@ -90,11 +95,16 @@ class ObservableDeferred: # traces when we `await` on one of the observer deferreds. f.value.__failure__ = f + observer = self._observers.pop() try: - # TODO: Handle errors here. 
- self._observers.pop().errback(f) - except Exception: - pass + observer.errback(f) + except Exception as e: + logger.exception( + "%r threw an exception on .errback(%r), ignoring...", + observer, + f, + exc_info=e, + ) if consumeErrors: return None -- cgit 1.5.1 From 22db45bd4d567832d26f10ee2e104f1e886576b3 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Tue, 9 Mar 2021 11:11:42 +0000 Subject: Prevent the config-lint script erroring out on any sample_config changes (#9562) I noticed that I'd occasionally have `scripts-dev/lint.sh` fail when messing about with config options in my PR. The script calls `scripts-dev/config-lint.sh`, which attempts some validation on the sample config. It does this by using `sed` to edit the sample_config, and then seeing if the file changed using `git diff`. The problem is: if you changed the sample_config as part of your commit, this script will error regardless. This PR attempts to change the check so that existing, unstaged changes to the sample_config will not cause the script to report an invalid file. --- changelog.d/9562.misc | 1 + scripts-dev/config-lint.sh | 9 +++++++-- 2 files changed, 8 insertions(+), 2 deletions(-) create mode 100644 changelog.d/9562.misc diff --git a/changelog.d/9562.misc b/changelog.d/9562.misc new file mode 100644 index 0000000000..2f0133bff0 --- /dev/null +++ b/changelog.d/9562.misc @@ -0,0 +1 @@ +Fix spurious errors reported by the `config-lint.sh` script. \ No newline at end of file diff --git a/scripts-dev/config-lint.sh b/scripts-dev/config-lint.sh index 189ca66535..9132160463 100755 --- a/scripts-dev/config-lint.sh +++ b/scripts-dev/config-lint.sh @@ -2,9 +2,14 @@ # Find linting errors in Synapse's default config file. # Exits with 0 if there are no problems, or another code otherwise. +# cd to the root of the repository +cd `dirname $0`/.. + +# Restore backup of sample config upon script exit +trap "mv docs/sample_config.yaml.bak docs/sample_config.yaml" EXIT + # Fix non-lowercase true/false values sed -i.bak -E "s/: +True/: true/g; s/: +False/: false/g;" docs/sample_config.yaml -rm docs/sample_config.yaml.bak # Check if anything changed -git diff --exit-code docs/sample_config.yaml +diff docs/sample_config.yaml docs/sample_config.yaml.bak -- cgit 1.5.1 From 576c91c7c14ba3960b13213f94108189e9f361dc Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 9 Mar 2021 11:40:33 +0000 Subject: Fixup sample config After 0764d0c6e575793ca506cf021aff3c4b9e0a5972 --- docs/sample_config.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 4dbef41b7e..c95a4f5970 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -89,8 +89,7 @@ pid_file: DATADIR/homeserver.pid # Whether to require authentication to retrieve profile data (avatars, # display names) of other users through the client API. Defaults to # 'false'. Note that profile data is also available via the federation -# API, so this setting is of limited value if federation is enabled on -# the server. +# API, unless allow_profile_lookup_over_federation is set to false. # #require_auth_for_profile_requests: true -- cgit 1.5.1 From 3ce650057d3dcbca66ac9fb672594580f29c6e37 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 9 Mar 2021 07:34:55 -0500 Subject: Add a list of hashes to ignore during git blame. (#9560) The hashes are from commits due to auto-formatting, e.g. running black. 
git can be configured to use this automatically by running the following:

    git config blame.ignoreRevsFile .git-blame-ignore-revs
---
 .git-blame-ignore-revs | 8 ++++++++
 changelog.d/9560.misc  | 1 +
 setup.cfg              | 1 +
 3 files changed, 10 insertions(+)
 create mode 100644 .git-blame-ignore-revs
 create mode 100644 changelog.d/9560.misc

diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 0000000000..83ddd568c2
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,8 @@
+# Black reformatting (#5482).
+32e7c9e7f20b57dd081023ac42d6931a8da9b3a3
+
+# Target Python 3.5 with black (#8664).
+aff1eb7c671b0a3813407321d2702ec46c71fa56
+
+# Update black to 20.8b1 (#9381).
+0a00b7ff14890987f09112a2ae696c61001e6cf1
diff --git a/changelog.d/9560.misc b/changelog.d/9560.misc
new file mode 100644
index 0000000000..57a698f846
--- /dev/null
+++ b/changelog.d/9560.misc
@@ -0,0 +1 @@
+Add a `.git-blame-ignore-revs` file with the hashes of auto-formatting.
diff --git a/setup.cfg b/setup.cfg
index f46e43fad0..5e301c2cd7 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -3,6 +3,7 @@ test_suite = tests
 
 [check-manifest]
 ignore =
+    .git-blame-ignore-revs
     contrib
     contrib/*
     docs/*
-- 
cgit 1.5.1


From 075c16b4103e7368e304496f3016d3b826b3ee92 Mon Sep 17 00:00:00 2001
From: Patrick Cloke <clokep@users.noreply.github.com>
Date: Tue, 9 Mar 2021 07:37:09 -0500
Subject: Handle image transparency better when thumbnailing. (#9473)

Properly uses RGBA mode for 1- and 8-bit images with transparency
(instead of RGB mode).
---
 changelog.d/9473.bugfix                   |  1 +
 synapse/rest/media/v1/thumbnailer.py      | 11 ++++++++---
 tests/rest/media/v1/test_media_storage.py | 29 +++++++++++++++++++++--------
 3 files changed, 30 insertions(+), 11 deletions(-)
 create mode 100644 changelog.d/9473.bugfix

diff --git a/changelog.d/9473.bugfix b/changelog.d/9473.bugfix
new file mode 100644
index 0000000000..71fb487cf2
--- /dev/null
+++ b/changelog.d/9473.bugfix
@@ -0,0 +1 @@
+Fix long-standing bug when generating thumbnails for some images with transparency: `TypeError: cannot unpack non-iterable int object`.
diff --git a/synapse/rest/media/v1/thumbnailer.py b/synapse/rest/media/v1/thumbnailer.py
index 07903e4017..988f52c78f 100644
--- a/synapse/rest/media/v1/thumbnailer.py
+++ b/synapse/rest/media/v1/thumbnailer.py
@@ -96,9 +96,14 @@ class Thumbnailer:
     def _resize(self, width: int, height: int) -> Image:
         # 1-bit or 8-bit color palette images need converting to RGB
         # otherwise they will be scaled using nearest neighbour which
-        # looks awful
-        if self.image.mode in ["1", "P"]:
-            self.image = self.image.convert("RGB")
+        # looks awful.
+        #
+        # If the image has transparency, use RGBA instead.
+        if self.image.mode in ["1", "L", "P"]:
+            mode = "RGB"
+            if self.image.info.get("transparency", None) is not None:
+                mode = "RGBA"
+            self.image = self.image.convert(mode)
         return self.image.resize((width, height), Image.ANTIALIAS)
 
     def scale(self, width: int, height: int, output_type: str) -> BytesIO:
diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py
index 36d1e6bc4a..9f77125fd4 100644
--- a/tests/rest/media/v1/test_media_storage.py
+++ b/tests/rest/media/v1/test_media_storage.py
@@ -105,7 +105,7 @@ class MediaStorageTests(unittest.HomeserverTestCase):
         self.assertEqual(test_body, body)
 
 
-@attr.s
+@attr.s(slots=True, frozen=True)
 class _TestImage:
     """An image for testing thumbnailing with the expected results
 
@@ -117,13 +117,15 @@ class _TestImage:
     test should just check for success.
expected_scaled: The expected bytes from scaled thumbnailing, or None if test should just check for a valid image returned. + expected_found: True if the file should exist on the server, or False if + a 404 is expected. """ data = attr.ib(type=bytes) content_type = attr.ib(type=bytes) extension = attr.ib(type=bytes) - expected_cropped = attr.ib(type=Optional[bytes]) - expected_scaled = attr.ib(type=Optional[bytes]) + expected_cropped = attr.ib(type=Optional[bytes], default=None) + expected_scaled = attr.ib(type=Optional[bytes], default=None) expected_found = attr.ib(default=True, type=bool) @@ -153,6 +155,21 @@ class _TestImage: ), ), ), + # small png with transparency. + ( + _TestImage( + unhexlify( + b"89504e470d0a1a0a0000000d49484452000000010000000101000" + b"00000376ef9240000000274524e5300010194fdae0000000a4944" + b"4154789c636800000082008177cd72b60000000049454e44ae426" + b"082" + ), + b"image/png", + b".png", + # Note that we don't check the output since it varies across + # different versions of Pillow. + ), + ), # small lossless webp ( _TestImage( @@ -162,8 +179,6 @@ class _TestImage: ), b"image/webp", b".webp", - None, - None, ), ), # an empty file @@ -172,9 +187,7 @@ class _TestImage: b"", b"image/gif", b".gif", - None, - None, - False, + expected_found=False, ), ), ], -- cgit 1.5.1 From 7fdc6cefb3017f3c4bbe1d824e1de249ac29d219 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 9 Mar 2021 07:41:32 -0500 Subject: Fix additional type hints. (#9543) Type hint fixes due to Twisted 21.2.0 adding type hints. --- changelog.d/9543.misc | 1 + synapse/config/logger.py | 5 ++++- synapse/federation/federation_server.py | 2 +- synapse/handlers/pagination.py | 2 +- synapse/http/federation/well_known_resolver.py | 3 ++- synapse/logging/context.py | 6 ++++-- tests/replication/_base.py | 27 ++++++++++++++++---------- tests/server.py | 2 +- tests/test_utils/logging_setup.py | 2 +- 9 files changed, 32 insertions(+), 18 deletions(-) create mode 100644 changelog.d/9543.misc diff --git a/changelog.d/9543.misc b/changelog.d/9543.misc new file mode 100644 index 0000000000..14c7b78dd9 --- /dev/null +++ b/changelog.d/9543.misc @@ -0,0 +1 @@ +Fix incorrect type hints. 
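Most of the fixes below follow one pattern: where Twisted 21.2.0's new type stubs expect an object providing a particular zope interface, declare that interface explicitly. A minimal sketch of the log-observer case (the observer body here is illustrative only; the real observer lives in `synapse/config/logger.py`):

```python
from zope.interface import implementer

from twisted.logger import ILogObserver


@implementer(ILogObserver)
def observer(event: dict) -> None:
    # Declaring ILogObserver satisfies the stricter stubs when this function
    # is registered as a log observer.
    print(event.get("log_text", ""))
```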
diff --git a/synapse/config/logger.py b/synapse/config/logger.py index e56cf846f5..999aecce5c 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -21,8 +21,10 @@ import threading from string import Template import yaml +from zope.interface import implementer from twisted.logger import ( + ILogObserver, LogBeginner, STDLibLogObserver, eventAsText, @@ -227,7 +229,8 @@ def _setup_stdlib_logging(config, log_config_path, logBeginner: LogBeginner) -> threadlocal = threading.local() - def _log(event): + @implementer(ILogObserver) + def _log(event: dict) -> None: if "log_text" in event: if event["log_text"].startswith("DNSDatagramProtocol starting on "): return diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 7657697bfa..ffc735ba25 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -361,7 +361,7 @@ class FederationServer(FederationBase): logger.error( "Failed to handle PDU %s", event_id, - exc_info=(f.type, f.value, f.getTracebackObject()), + exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore ) await concurrently_execute( diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index 059064a4eb..66dc886c81 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -285,7 +285,7 @@ class PaginationHandler: except Exception: f = Failure() logger.error( - "[purge] failed", exc_info=(f.type, f.value, f.getTracebackObject()) + "[purge] failed", exc_info=(f.type, f.value, f.getTracebackObject()) # type: ignore ) self._purges_by_id[purge_id].status = PurgeStatus.STATUS_FAILED finally: diff --git a/synapse/http/federation/well_known_resolver.py b/synapse/http/federation/well_known_resolver.py index 4def7d7633..ecd63e6596 100644 --- a/synapse/http/federation/well_known_resolver.py +++ b/synapse/http/federation/well_known_resolver.py @@ -322,7 +322,8 @@ def _cache_period_from_headers( def _parse_cache_control(headers: Headers) -> Dict[bytes, Optional[bytes]]: cache_controls = {} - for hdr in headers.getRawHeaders(b"cache-control", []): + cache_control_headers = headers.getRawHeaders(b"cache-control") or [] + for hdr in cache_control_headers: for directive in hdr.split(b","): splits = [x.strip() for x in directive.split(b"=", 1)] k = splits[0].lower() diff --git a/synapse/logging/context.py b/synapse/logging/context.py index 78e27bfb00..1a7ea4fa96 100644 --- a/synapse/logging/context.py +++ b/synapse/logging/context.py @@ -669,7 +669,7 @@ def preserve_fn(f): return g -def run_in_background(f, *args, **kwargs): +def run_in_background(f, *args, **kwargs) -> defer.Deferred: """Calls a function, ensuring that the current context is restored after return from the function, and that the sentinel context is set once the deferred returned by the function completes. @@ -697,8 +697,10 @@ def run_in_background(f, *args, **kwargs): if isinstance(res, types.CoroutineType): res = defer.ensureDeferred(res) + # At this point we should have a Deferred, if not then f was a synchronous + # function, wrap it in a Deferred for consistency. 
if not isinstance(res, defer.Deferred): - return res + return defer.succeed(res) if res.called and not res.paused: # The function should have maintained the logcontext, so we can diff --git a/tests/replication/_base.py b/tests/replication/_base.py index f6a6aed35e..20940c8107 100644 --- a/tests/replication/_base.py +++ b/tests/replication/_base.py @@ -22,6 +22,7 @@ from twisted.internet.protocol import Protocol from twisted.internet.task import LoopingCall from twisted.web.http import HTTPChannel from twisted.web.resource import Resource +from twisted.web.server import Request, Site from synapse.app.generic_worker import ( GenericWorkerReplicationHandler, @@ -32,7 +33,10 @@ from synapse.http.site import SynapseRequest, SynapseSite from synapse.replication.http import ReplicationRestResource from synapse.replication.tcp.handler import ReplicationCommandHandler from synapse.replication.tcp.protocol import ClientReplicationStreamProtocol -from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory +from synapse.replication.tcp.resource import ( + ReplicationStreamProtocolFactory, + ServerReplicationStreamProtocol, +) from synapse.server import HomeServer from synapse.util import Clock @@ -59,7 +63,9 @@ class BaseStreamTestCase(unittest.HomeserverTestCase): # build a replication server server_factory = ReplicationStreamProtocolFactory(hs) self.streamer = hs.get_replication_streamer() - self.server = server_factory.buildProtocol(None) + self.server = server_factory.buildProtocol( + None + ) # type: ServerReplicationStreamProtocol # Make a new HomeServer object for the worker self.reactor.lookups["testserv"] = "1.2.3.4" @@ -155,9 +161,7 @@ class BaseStreamTestCase(unittest.HomeserverTestCase): request_factory = OneShotRequestFactory() # Set up the server side protocol - channel = _PushHTTPChannel(self.reactor) - channel.requestFactory = request_factory - channel.site = self.site + channel = _PushHTTPChannel(self.reactor, request_factory, self.site) # Connect client to server and vice versa. client_to_server_transport = FakeTransport( @@ -188,8 +192,9 @@ class BaseStreamTestCase(unittest.HomeserverTestCase): fetching updates for given stream. """ + path = request.path # type: bytes # type: ignore self.assertRegex( - request.path, + path, br"^/_synapse/replication/get_repl_stream_updates/%s/[^/]+$" % (stream_name.encode("ascii"),), ) @@ -390,9 +395,7 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase): request_factory = OneShotRequestFactory() # Set up the server side protocol - channel = _PushHTTPChannel(self.reactor) - channel.requestFactory = request_factory - channel.site = self._hs_to_site[hs] + channel = _PushHTTPChannel(self.reactor, request_factory, self._hs_to_site[hs]) # Connect client to server and vice versa. client_to_server_transport = FakeTransport( @@ -475,9 +478,13 @@ class _PushHTTPChannel(HTTPChannel): makes it very hard to test. 
""" - def __init__(self, reactor: IReactorTime): + def __init__( + self, reactor: IReactorTime, request_factory: Callable[..., Request], site: Site + ): super().__init__() self.reactor = reactor + self.requestFactory = request_factory + self.site = site self._pull_to_push_producer = None # type: Optional[_PullToPushProducer] diff --git a/tests/server.py b/tests/server.py index 939a0008ca..863f6da738 100644 --- a/tests/server.py +++ b/tests/server.py @@ -188,7 +188,7 @@ class FakeSite: def make_request( reactor, - site: Site, + site: Union[Site, FakeSite], method, path, content=b"", diff --git a/tests/test_utils/logging_setup.py b/tests/test_utils/logging_setup.py index 52ae5c5713..74568b34f8 100644 --- a/tests/test_utils/logging_setup.py +++ b/tests/test_utils/logging_setup.py @@ -28,7 +28,7 @@ class ToTwistedHandler(logging.Handler): def emit(self, record): log_entry = self.format(record) log_level = record.levelname.lower().replace("warning", "warn") - self.tx_log.emit( + self.tx_log.emit( # type: ignore twisted.logger.LogLevel.levelWithName(log_level), "{entry}", entry=log_entry ) -- cgit 1.5.1 From 9cd18cc5886a5f44625c1c4730f146ec189b833e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 9 Mar 2021 13:15:12 +0000 Subject: Retry 5xx errors in federation client (#9567) Fixes #8915 --- changelog.d/9567.bugfix | 1 + synapse/http/matrixfederationclient.py | 7 ++++--- 2 files changed, 5 insertions(+), 3 deletions(-) create mode 100644 changelog.d/9567.bugfix diff --git a/changelog.d/9567.bugfix b/changelog.d/9567.bugfix new file mode 100644 index 0000000000..e7322c2b5e --- /dev/null +++ b/changelog.d/9567.bugfix @@ -0,0 +1 @@ +Fix bug where federation requests were not correctly retried on 5xx responses. diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index da6866addf..5f01ebd3d4 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -534,9 +534,10 @@ class MatrixFederationHttpClient: response.code, response_phrase, body ) - # Retry if the error is a 429 (Too Many Requests), - # otherwise just raise a standard HttpResponseException - if response.code == 429: + # Retry if the error is a 5xx or a 429 (Too Many + # Requests), otherwise just raise a standard + # `HttpResponseException` + if 500 <= response.code < 600 or response.code == 429: raise RequestSendFailed(exc, can_retry=True) from exc else: raise exc -- cgit 1.5.1 From eaada74075a4567c489fff6ae2206f2af8298fd4 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 9 Mar 2021 15:03:37 +0000 Subject: JWT OIDC secrets for Sign in with Apple (#9549) Apple had to be special. They want a client secret which is generated from an EC key. Fixes #9220. Also fixes #9212 while I'm here. 
--- MANIFEST.in | 7 +- changelog.d/9549.feature | 1 + docs/openid.md | 42 +++++++- docs/sample_config.yaml | 21 +++- synapse/config/_base.py | 38 +++++++- synapse/config/_base.pyi | 2 + synapse/config/oidc_config.py | 89 +++++++++++++++-- synapse/handlers/oidc_handler.py | 101 ++++++++++++++++++- tests/handlers/oidc_test_key.p8 | 5 + tests/handlers/oidc_test_key.pub.pem | 4 + tests/handlers/test_oidc.py | 181 ++++++++++++++++++++++++++++++----- 11 files changed, 444 insertions(+), 47 deletions(-) create mode 100644 changelog.d/9549.feature create mode 100644 tests/handlers/oidc_test_key.p8 create mode 100644 tests/handlers/oidc_test_key.pub.pem diff --git a/MANIFEST.in b/MANIFEST.in index 120ce5b776..25d1cb758e 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -20,9 +20,10 @@ recursive-include scripts * recursive-include scripts-dev * recursive-include synapse *.pyi recursive-include tests *.py -include tests/http/ca.crt -include tests/http/ca.key -include tests/http/server.key +recursive-include tests *.pem +recursive-include tests *.p8 +recursive-include tests *.crt +recursive-include tests *.key recursive-include synapse/res * recursive-include synapse/static *.css diff --git a/changelog.d/9549.feature b/changelog.d/9549.feature new file mode 100644 index 0000000000..709e61eced --- /dev/null +++ b/changelog.d/9549.feature @@ -0,0 +1 @@ +Add support for generating JSON Web Tokens dynamically for use as OIDC client secrets. diff --git a/docs/openid.md b/docs/openid.md index 263bc9f6f8..01205d1220 100644 --- a/docs/openid.md +++ b/docs/openid.md @@ -386,7 +386,7 @@ oidc_providers: config: subject_claim: "id" localpart_template: "{{ user.login }}" - display_name_template: "{{ user.full_name }}" + display_name_template: "{{ user.full_name }}" ``` ### XWiki @@ -401,8 +401,7 @@ oidc_providers: idp_name: "XWiki" issuer: "https://myxwikihost/xwiki/oidc/" client_id: "your-client-id" # TO BE FILLED - # Needed until https://github.com/matrix-org/synapse/issues/9212 is fixed - client_secret: "dontcare" + client_auth_method: none scopes: ["openid", "profile"] user_profile_method: "userinfo_endpoint" user_mapping_provider: @@ -410,3 +409,40 @@ oidc_providers: localpart_template: "{{ user.preferred_username }}" display_name_template: "{{ user.name }}" ``` + +## Apple + +Configuring "Sign in with Apple" (SiWA) requires an Apple Developer account. + +You will need to create a new "Services ID" for SiWA, and create and download a +private key with "SiWA" enabled. + +As well as the private key file, you will need: + * Client ID: the "identifier" you gave the "Services ID" + * Team ID: a 10-character ID associated with your developer account. + * Key ID: the 10-character identifier for the key. + +https://help.apple.com/developer-account/?lang=en#/dev77c875b7e has more +documentation on setting up SiWA. 
+ +The synapse config will look like this: + +```yaml + - idp_id: apple + idp_name: Apple + issuer: "https://appleid.apple.com" + client_id: "your-client-id" # Set to the "identifier" for your "ServicesID" + client_auth_method: "client_secret_post" + client_secret_jwt_key: + key_file: "/path/to/AuthKey_KEYIDCODE.p8" # point to your key file + jwt_header: + alg: ES256 + kid: "KEYIDCODE" # Set to the 10-char Key ID + jwt_payload: + iss: TEAMIDCODE # Set to the 10-char Team ID + scopes: ["name", "email", "openid"] + authorization_endpoint: https://appleid.apple.com/auth/authorize?response_mode=form_post + user_mapping_provider: + config: + email_template: "{{ user.email }}" +``` diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index c95a4f5970..c32ee4a897 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -1779,7 +1779,26 @@ saml2_config: # # client_id: Required. oauth2 client id to use. # -# client_secret: Required. oauth2 client secret to use. +# client_secret: oauth2 client secret to use. May be omitted if +# client_secret_jwt_key is given, or if client_auth_method is 'none'. +# +# client_secret_jwt_key: Alternative to client_secret: details of a key used +# to create a JSON Web Token to be used as an OAuth2 client secret. If +# given, must be a dictionary with the following properties: +# +# key: a pem-encoded signing key. Must be a suitable key for the +# algorithm specified. Required unless 'key_file' is given. +# +# key_file: the path to file containing a pem-encoded signing key file. +# Required unless 'key' is given. +# +# jwt_header: a dictionary giving properties to include in the JWT +# header. Must include the key 'alg', giving the algorithm used to +# sign the JWT, such as "ES256", using the JWA identifiers in +# RFC7518. +# +# jwt_payload: an optional dictionary giving properties to include in +# the JWT payload. Normally this should include an 'iss' key. # # client_auth_method: auth method to use when exchanging the token. Valid # values are 'client_secret_basic' (default), 'client_secret_post' and diff --git a/synapse/config/_base.py b/synapse/config/_base.py index 4026966711..ba9cd63cf2 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -212,9 +212,8 @@ class Config: @classmethod def read_file(cls, file_path, config_name): - cls.check_file(file_path, config_name) - with open(file_path) as file_stream: - return file_stream.read() + """Deprecated: call read_file directly""" + return read_file(file_path, (config_name,)) def read_template(self, filename: str) -> jinja2.Template: """Load a template file from disk. @@ -894,4 +893,35 @@ class RoutableShardedWorkerHandlingConfig(ShardedWorkerHandlingConfig): return self._get_instance(key) -__all__ = ["Config", "RootConfig", "ShardedWorkerHandlingConfig"] +def read_file(file_path: Any, config_path: Iterable[str]) -> str: + """Check the given file exists, and read it into a string + + If it does not, emit an error indicating the problem + + Args: + file_path: the file to be read + config_path: where in the configuration file_path came from, so that a useful + error can be emitted if it does not exist. + Returns: + content of the file. + Raises: + ConfigError if there is a problem reading the file. 
+ """ + if not isinstance(file_path, str): + raise ConfigError("%r is not a string", config_path) + + try: + os.stat(file_path) + with open(file_path) as file_stream: + return file_stream.read() + except OSError as e: + raise ConfigError("Error accessing file %r" % (file_path,), config_path) from e + + +__all__ = [ + "Config", + "RootConfig", + "ShardedWorkerHandlingConfig", + "RoutableShardedWorkerHandlingConfig", + "read_file", +] diff --git a/synapse/config/_base.pyi b/synapse/config/_base.pyi index db16c86f50..e896fd34e2 100644 --- a/synapse/config/_base.pyi +++ b/synapse/config/_base.pyi @@ -152,3 +152,5 @@ class ShardedWorkerHandlingConfig: class RoutableShardedWorkerHandlingConfig(ShardedWorkerHandlingConfig): def get_instance(self, key: str) -> str: ... + +def read_file(file_path: Any, config_path: Iterable[str]) -> str: ... diff --git a/synapse/config/oidc_config.py b/synapse/config/oidc_config.py index a27594befc..7f5e449eb2 100644 --- a/synapse/config/oidc_config.py +++ b/synapse/config/oidc_config.py @@ -15,7 +15,7 @@ # limitations under the License. from collections import Counter -from typing import Iterable, Optional, Tuple, Type +from typing import Iterable, Mapping, Optional, Tuple, Type import attr @@ -25,7 +25,7 @@ from synapse.types import Collection, JsonDict from synapse.util.module_loader import load_module from synapse.util.stringutils import parse_and_validate_mxc_uri -from ._base import Config, ConfigError +from ._base import Config, ConfigError, read_file DEFAULT_USER_MAPPING_PROVIDER = "synapse.handlers.oidc_handler.JinjaOidcMappingProvider" @@ -97,7 +97,26 @@ class OIDCConfig(Config): # # client_id: Required. oauth2 client id to use. # - # client_secret: Required. oauth2 client secret to use. + # client_secret: oauth2 client secret to use. May be omitted if + # client_secret_jwt_key is given, or if client_auth_method is 'none'. + # + # client_secret_jwt_key: Alternative to client_secret: details of a key used + # to create a JSON Web Token to be used as an OAuth2 client secret. If + # given, must be a dictionary with the following properties: + # + # key: a pem-encoded signing key. Must be a suitable key for the + # algorithm specified. Required unless 'key_file' is given. + # + # key_file: the path to file containing a pem-encoded signing key file. + # Required unless 'key' is given. + # + # jwt_header: a dictionary giving properties to include in the JWT + # header. Must include the key 'alg', giving the algorithm used to + # sign the JWT, such as "ES256", using the JWA identifiers in + # RFC7518. + # + # jwt_payload: an optional dictionary giving properties to include in + # the JWT payload. Normally this should include an 'iss' key. # # client_auth_method: auth method to use when exchanging the token. 
Valid # values are 'client_secret_basic' (default), 'client_secret_post' and @@ -240,7 +259,7 @@ class OIDCConfig(Config): # jsonschema definition of the configuration settings for an oidc identity provider OIDC_PROVIDER_CONFIG_SCHEMA = { "type": "object", - "required": ["issuer", "client_id", "client_secret"], + "required": ["issuer", "client_id"], "properties": { "idp_id": { "type": "string", @@ -262,6 +281,30 @@ OIDC_PROVIDER_CONFIG_SCHEMA = { "issuer": {"type": "string"}, "client_id": {"type": "string"}, "client_secret": {"type": "string"}, + "client_secret_jwt_key": { + "type": "object", + "required": ["jwt_header"], + "oneOf": [ + {"required": ["key"]}, + {"required": ["key_file"]}, + ], + "properties": { + "key": {"type": "string"}, + "key_file": {"type": "string"}, + "jwt_header": { + "type": "object", + "required": ["alg"], + "properties": { + "alg": {"type": "string"}, + }, + "additionalProperties": {"type": "string"}, + }, + "jwt_payload": { + "type": "object", + "additionalProperties": {"type": "string"}, + }, + }, + }, "client_auth_method": { "type": "string", # the following list is the same as the keys of @@ -404,6 +447,20 @@ def _parse_oidc_config_dict( "idp_icon must be a valid MXC URI", config_path + ("idp_icon",) ) from e + client_secret_jwt_key_config = oidc_config.get("client_secret_jwt_key") + client_secret_jwt_key = None # type: Optional[OidcProviderClientSecretJwtKey] + if client_secret_jwt_key_config is not None: + keyfile = client_secret_jwt_key_config.get("key_file") + if keyfile: + key = read_file(keyfile, config_path + ("client_secret_jwt_key",)) + else: + key = client_secret_jwt_key_config["key"] + client_secret_jwt_key = OidcProviderClientSecretJwtKey( + key=key, + jwt_header=client_secret_jwt_key_config["jwt_header"], + jwt_payload=client_secret_jwt_key_config.get("jwt_payload", {}), + ) + return OidcProviderConfig( idp_id=idp_id, idp_name=oidc_config.get("idp_name", "OIDC"), @@ -412,7 +469,8 @@ def _parse_oidc_config_dict( discover=oidc_config.get("discover", True), issuer=oidc_config["issuer"], client_id=oidc_config["client_id"], - client_secret=oidc_config["client_secret"], + client_secret=oidc_config.get("client_secret"), + client_secret_jwt_key=client_secret_jwt_key, client_auth_method=oidc_config.get("client_auth_method", "client_secret_basic"), scopes=oidc_config.get("scopes", ["openid"]), authorization_endpoint=oidc_config.get("authorization_endpoint"), @@ -427,6 +485,18 @@ def _parse_oidc_config_dict( ) +@attr.s(slots=True, frozen=True) +class OidcProviderClientSecretJwtKey: + # a pem-encoded signing key + key = attr.ib(type=str) + + # properties to include in the JWT header + jwt_header = attr.ib(type=Mapping[str, str]) + + # properties to include in the JWT payload. + jwt_payload = attr.ib(type=Mapping[str, str]) + + @attr.s(slots=True, frozen=True) class OidcProviderConfig: # a unique identifier for this identity provider. Used in the 'user_external_ids' @@ -452,8 +522,13 @@ class OidcProviderConfig: # oauth2 client id to use client_id = attr.ib(type=str) - # oauth2 client secret to use - client_secret = attr.ib(type=str) + # oauth2 client secret to use. if `None`, use client_secret_jwt_key to generate + # a secret. + client_secret = attr.ib(type=Optional[str]) + + # key to use to construct a JWT to use as a client secret. May be `None` if + # `client_secret` is set. + client_secret_jwt_key = attr.ib(type=Optional[OidcProviderClientSecretJwtKey]) # auth method to use when exchanging the token. 
diff --git a/synapse/handlers/oidc_handler.py b/synapse/handlers/oidc_handler.py
index b4a74390cc..825fadb76f 100644
--- a/synapse/handlers/oidc_handler.py
+++ b/synapse/handlers/oidc_handler.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2020 Quentin Gliech
+# Copyright 2021 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,13 +15,13 @@
 # limitations under the License.
 import inspect
 import logging
-from typing import TYPE_CHECKING, Dict, Generic, List, Optional, TypeVar
+from typing import TYPE_CHECKING, Dict, Generic, List, Optional, TypeVar, Union
 from urllib.parse import urlencode
 
 import attr
 import pymacaroons
 from authlib.common.security import generate_token
-from authlib.jose import JsonWebToken
+from authlib.jose import JsonWebToken, jwt
 from authlib.oauth2.auth import ClientAuth
 from authlib.oauth2.rfc6749.parameters import prepare_grant_uri
 from authlib.oidc.core import CodeIDToken, ImplicitIDToken, UserInfo
@@ -35,12 +36,15 @@ from typing_extensions import TypedDict
 from twisted.web.client import readBody
 
 from synapse.config import ConfigError
-from synapse.config.oidc_config import OidcProviderConfig
+from synapse.config.oidc_config import (
+    OidcProviderClientSecretJwtKey,
+    OidcProviderConfig,
+)
 from synapse.handlers.sso import MappingException, UserAttributes
 from synapse.http.site import SynapseRequest
 from synapse.logging.context import make_deferred_yieldable
 from synapse.types import JsonDict, UserID, map_username_to_mxid_localpart
-from synapse.util import json_decoder
+from synapse.util import Clock, json_decoder
 from synapse.util.caches.cached_call import RetryOnExceptionCachedCall
 from synapse.util.macaroons import get_value_from_macaroon, satisfy_expiry
 
@@ -276,9 +280,21 @@ class OidcProvider:
         self._scopes = provider.scopes
         self._user_profile_method = provider.user_profile_method
+
+        client_secret = None  # type: Union[None, str, JwtClientSecret]
+        if provider.client_secret:
+            client_secret = provider.client_secret
+        elif provider.client_secret_jwt_key:
+            client_secret = JwtClientSecret(
+                provider.client_secret_jwt_key,
+                provider.client_id,
+                provider.issuer,
+                hs.get_clock(),
+            )
+
         self._client_auth = ClientAuth(
             provider.client_id,
-            provider.client_secret,
+            client_secret,
             provider.client_auth_method,
         )  # type: ClientAuth
         self._client_auth_method = provider.client_auth_method
@@ -977,6 +993,81 @@ class OidcProvider:
         return str(remote_user_id)
 
 
+# number of seconds a newly-generated client secret should be valid for
+CLIENT_SECRET_VALIDITY_SECONDS = 3600
+
+# minimum remaining validity on a client secret before we should generate a new one
+CLIENT_SECRET_MIN_VALIDITY_SECONDS = 600
+
+
+class JwtClientSecret:
+    """A class which generates a new client secret on demand, based on a JWK
+
+    This implementation is designed to comply with the requirements for Apple Sign in:
+    https://developer.apple.com/documentation/sign_in_with_apple/generate_and_validate_tokens#3262048
+
+    It looks like those requirements are based on https://tools.ietf.org/html/rfc7523,
+    but it's worth noting that we still put the generated secret in the "client_secret"
+    field (or rather, wherever client_auth_method puts it) rather than in a
+    client_assertion field in the body as that RFC seems to require.
+ """ + + def __init__( + self, + key: OidcProviderClientSecretJwtKey, + oauth_client_id: str, + oauth_issuer: str, + clock: Clock, + ): + self._key = key + self._oauth_client_id = oauth_client_id + self._oauth_issuer = oauth_issuer + self._clock = clock + self._cached_secret = b"" + self._cached_secret_replacement_time = 0 + + def __str__(self): + # if client_auth_method is client_secret_basic, then ClientAuth.prepare calls + # encode_client_secret_basic, which calls "{}".format(secret), which ends up + # here. + return self._get_secret().decode("ascii") + + def __bytes__(self): + # if client_auth_method is client_secret_post, then ClientAuth.prepare calls + # encode_client_secret_post, which ends up here. + return self._get_secret() + + def _get_secret(self) -> bytes: + now = self._clock.time() + + # if we have enough validity on our existing secret, use it + if now < self._cached_secret_replacement_time: + return self._cached_secret + + issued_at = int(now) + expires_at = issued_at + CLIENT_SECRET_VALIDITY_SECONDS + + # we copy the configured header because jwt.encode modifies it. + header = dict(self._key.jwt_header) + + # see https://tools.ietf.org/html/rfc7523#section-3 + payload = { + "sub": self._oauth_client_id, + "aud": self._oauth_issuer, + "iat": issued_at, + "exp": expires_at, + **self._key.jwt_payload, + } + logger.info( + "Generating new JWT for %s: %s %s", self._oauth_issuer, header, payload + ) + self._cached_secret = jwt.encode(header, payload, self._key.key) + self._cached_secret_replacement_time = ( + expires_at - CLIENT_SECRET_MIN_VALIDITY_SECONDS + ) + return self._cached_secret + + class OidcSessionTokenGenerator: """Methods for generating and checking OIDC Session cookies.""" diff --git a/tests/handlers/oidc_test_key.p8 b/tests/handlers/oidc_test_key.p8 new file mode 100644 index 0000000000..bb92976333 --- /dev/null +++ b/tests/handlers/oidc_test_key.p8 @@ -0,0 +1,5 @@ +-----BEGIN PRIVATE KEY----- +MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgrHMvFcFjFhei6gHp +Gfy4C8+6z7634MZbC7SSx4a17GahRANCAATp0YxEzGUXuqszggiFxczDdPgDpCJA +P18rRuN7FLwZDuzYQPb8zVd8eGh4BqxjiVocICnVWyaSWD96N00I96SW +-----END PRIVATE KEY----- diff --git a/tests/handlers/oidc_test_key.pub.pem b/tests/handlers/oidc_test_key.pub.pem new file mode 100644 index 0000000000..176d4a4b4b --- /dev/null +++ b/tests/handlers/oidc_test_key.pub.pem @@ -0,0 +1,4 @@ +-----BEGIN PUBLIC KEY----- +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE6dGMRMxlF7qrM4IIhcXMw3T4A6Qi +QD9fK0bjexS8GQ7s2ED2/M1XfHhoeAasY4laHCAp1Vsmklg/ejdNCPeklg== +-----END PUBLIC KEY----- diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py index 02d4b2de0d..5e9c9c2e88 100644 --- a/tests/handlers/test_oidc.py +++ b/tests/handlers/test_oidc.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py
index 02d4b2de0d..5e9c9c2e88 100644
--- a/tests/handlers/test_oidc.py
+++ b/tests/handlers/test_oidc.py
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import json
+import os
 from urllib.parse import parse_qs, urlparse
 
 from mock import ANY, Mock, patch
 
@@ -50,7 +51,18 @@ WELL_KNOWN = ISSUER + ".well-known/openid-configuration"
 JWKS_URI = ISSUER + ".well-known/jwks.json"
 
 # config for common cases
-COMMON_CONFIG = {
+DEFAULT_CONFIG = {
+    "enabled": True,
+    "client_id": CLIENT_ID,
+    "client_secret": CLIENT_SECRET,
+    "issuer": ISSUER,
+    "scopes": SCOPES,
+    "user_mapping_provider": {"module": __name__ + ".TestMappingProvider"},
+}
+
+# extends the default config with explicit OAuth2 endpoints instead of using discovery
+EXPLICIT_ENDPOINT_CONFIG = {
+    **DEFAULT_CONFIG,
     "discover": False,
     "authorization_endpoint": AUTHORIZATION_ENDPOINT,
     "token_endpoint": TOKEN_ENDPOINT,
@@ -107,6 +119,32 @@ async def get_json(url):
         return {"keys": []}
 
 
+def _key_file_path() -> str:
+    """path to a file containing the private half of a test key"""
+
+    # this key was generated with:
+    #   openssl ecparam -name prime256v1 -genkey -noout |
+    #       openssl pkcs8 -topk8 -nocrypt -out oidc_test_key.p8
+    #
+    # we use PKCS8 rather than SEC-1 (which is what openssl ecparam spits out), because
+    # that's what Apple use, and we want to be sure that we work with Apple's keys.
+    #
+    # (For the record: both PKCS8 and SEC-1 specify (different) ways of representing
+    # keys using ASN.1. Both are then typically formatted using PEM, which says: use the
+    # base64-encoded DER encoding of ASN.1, with headers and footers. But we don't
+    # really need to care about any of that.)
+    return os.path.join(os.path.dirname(__file__), "oidc_test_key.p8")
+
+
+def _public_key_file_path() -> str:
+    """path to a file containing the public half of a test key"""
+    # this was generated with:
+    #   openssl ec -in oidc_test_key.p8 -pubout -out oidc_test_key.pub.pem
+    #
+    # See above about where oidc_test_key.p8 came from
+    return os.path.join(os.path.dirname(__file__), "oidc_test_key.pub.pem")
+
+
 class OidcHandlerTestCase(HomeserverTestCase):
     if not HAS_OIDC:
         skip = "requires OIDC"
@@ -114,20 +152,6 @@ class OidcHandlerTestCase(HomeserverTestCase):
     def default_config(self):
         config = super().default_config()
         config["public_baseurl"] = BASE_URL
-        oidc_config = {
-            "enabled": True,
-            "client_id": CLIENT_ID,
-            "client_secret": CLIENT_SECRET,
-            "issuer": ISSUER,
-            "scopes": SCOPES,
-            "user_mapping_provider": {"module": __name__ + ".TestMappingProvider"},
-        }
-
-        # Update this config with what's in the default config so that
-        # override_config works as expected.
-        oidc_config.update(config.get("oidc_config", {}))
-        config["oidc_config"] = oidc_config
-
         return config
 
     def make_homeserver(self, reactor, clock):
@@ -170,13 +194,14 @@ class OidcHandlerTestCase(HomeserverTestCase):
         self.render_error.reset_mock()
         return args
 
+    @override_config({"oidc_config": DEFAULT_CONFIG})
     def test_config(self):
         """Basic config correctly sets up the callback URL and client auth."""
         self.assertEqual(self.provider._callback_url, CALLBACK_URL)
         self.assertEqual(self.provider._client_auth.client_id, CLIENT_ID)
         self.assertEqual(self.provider._client_auth.client_secret, CLIENT_SECRET)
 
-    @override_config({"oidc_config": {"discover": True}})
+    @override_config({"oidc_config": {**DEFAULT_CONFIG, "discover": True}})
     def test_discovery(self):
         """The handler should discover the endpoints from the OIDC discovery document."""
         # This would throw if some metadata were invalid
@@ -195,13 +220,13 @@ class OidcHandlerTestCase(HomeserverTestCase):
         self.get_success(self.provider.load_metadata())
         self.http_client.get_json.assert_not_called()
 
-    @override_config({"oidc_config": COMMON_CONFIG})
+    @override_config({"oidc_config": EXPLICIT_ENDPOINT_CONFIG})
     def test_no_discovery(self):
         """When discovery is disabled, it should not try to load the discovery document."""
         self.get_success(self.provider.load_metadata())
         self.http_client.get_json.assert_not_called()
 
-    @override_config({"oidc_config": COMMON_CONFIG})
+    @override_config({"oidc_config": EXPLICIT_ENDPOINT_CONFIG})
     def test_load_jwks(self):
         """JWKS loading is done once (then cached) if used."""
         jwks = self.get_success(self.provider.load_jwks())
@@ -236,6 +261,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
             self.http_client.get_json.assert_not_called()
             self.assertEqual(jwks, {"keys": []})
 
+    @override_config({"oidc_config": DEFAULT_CONFIG})
     def test_validate_config(self):
         """Provider metadata is extensively validated."""
         h = self.provider
@@ -318,13 +344,14 @@ class OidcHandlerTestCase(HomeserverTestCase):
             # Shouldn't raise with a valid userinfo, even without jwks
             force_load_metadata()
 
-    @override_config({"oidc_config": {"skip_verification": True}})
+    @override_config({"oidc_config": {**DEFAULT_CONFIG, "skip_verification": True}})
     def test_skip_verification(self):
         """Provider metadata validation can be disabled by config."""
         with self.metadata_edit({"issuer": "http://insecure"}):
             # This should not throw
             get_awaitable_result(self.provider.load_metadata())
 
+    @override_config({"oidc_config": DEFAULT_CONFIG})
     def test_redirect_request(self):
         """The redirect request has the right arguments & generates a valid session cookie."""
         req = Mock(spec=["cookies"])
@@ -368,6 +395,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
         self.assertEqual(params["nonce"], [nonce])
         self.assertEqual(redirect, "http://client/redirect")
 
+    @override_config({"oidc_config": DEFAULT_CONFIG})
     def test_callback_error(self):
         """Errors from the provider returned in the callback are displayed."""
         request = Mock(args={})
@@ -379,6 +407,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
         self.get_success(self.handler.handle_oidc_callback(request))
         self.assertRenderedError("invalid_client", "some description")
 
+    @override_config({"oidc_config": DEFAULT_CONFIG})
     def test_callback(self):
        """Code callback works and displays errors if something went wrong.
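[Editorial note: the refactor above replaces the baked-in `oidc_config` with a `DEFAULT_CONFIG` dict that each test applies and extends via `@override_config`. One subtlety of the `{**DEFAULT_CONFIG, ...}` pattern used throughout, worth calling out with a sketch (not code from the patch): the dict spread is a shallow merge, so nested dicts such as `user_mapping_provider` are replaced wholesale rather than merged — which is why the tests that override the mapping provider restate it completely.]

```python
# Shallow-merge behaviour of the {**DEFAULT_CONFIG, ...} pattern used above.
base = {"client_id": "id", "user_mapping_provider": {"module": "a.B"}}

variant = {**base, "discover": True}
# top-level keys are inherited: variant["client_id"] == "id"

replaced = {**base, "user_mapping_provider": {"config": {"localpart_template": "x"}}}
# the nested dict is replaced outright, so the "module" key from `base` is gone
assert "module" not in replaced["user_mapping_provider"]
```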
@@ -480,6 +509,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
         self.get_success(self.handler.handle_oidc_callback(request))
         self.assertRenderedError("invalid_request")
 
+    @override_config({"oidc_config": DEFAULT_CONFIG})
     def test_callback_session(self):
         """The callback verifies the session presence and validity"""
         request = Mock(spec=["args", "getCookie", "cookies"])
@@ -522,7 +552,9 @@ class OidcHandlerTestCase(HomeserverTestCase):
         self.get_success(self.handler.handle_oidc_callback(request))
         self.assertRenderedError("invalid_request")
 
-    @override_config({"oidc_config": {"client_auth_method": "client_secret_post"}})
+    @override_config(
+        {"oidc_config": {**DEFAULT_CONFIG, "client_auth_method": "client_secret_post"}}
+    )
     def test_exchange_code(self):
         """Code exchange behaves correctly and handles various error scenarios."""
         token = {"type": "bearer"}
@@ -607,9 +639,105 @@ class OidcHandlerTestCase(HomeserverTestCase):
 
     @override_config(
         {
             "oidc_config": {
+                "enabled": True,
+                "client_id": CLIENT_ID,
+                "issuer": ISSUER,
+                "client_auth_method": "client_secret_post",
+                "client_secret_jwt_key": {
+                    "key_file": _key_file_path(),
+                    "jwt_header": {"alg": "ES256", "kid": "ABC789"},
+                    "jwt_payload": {"iss": "DEFGHI"},
+                },
+            }
+        }
+    )
+    def test_exchange_code_jwt_key(self):
+        """Test that code exchange works with a JWK client secret."""
+        from authlib.jose import jwt
+
+        token = {"type": "bearer"}
+        self.http_client.request = simple_async_mock(
+            return_value=FakeResponse(
+                code=200, phrase=b"OK", body=json.dumps(token).encode("utf-8")
+            )
+        )
+        code = "code"
+
+        # advance the clock a bit before we start, so we aren't working with zero
+        # timestamps.
+        self.reactor.advance(1000)
+        start_time = self.reactor.seconds()
+        ret = self.get_success(self.provider._exchange_code(code))
+
+        self.assertEqual(ret, token)
+
+        # the request should have hit the token endpoint
+        kwargs = self.http_client.request.call_args[1]
+        self.assertEqual(kwargs["method"], "POST")
+        self.assertEqual(kwargs["uri"], TOKEN_ENDPOINT)
+
+        # the client secret provided to the token endpoint should be a JWT which
+        # can be checked with the public key
+        args = parse_qs(kwargs["data"].decode("utf-8"))
+        secret = args["client_secret"][0]
+        with open(_public_key_file_path()) as f:
+            key = f.read()
+        claims = jwt.decode(secret, key)
+        self.assertEqual(claims.header["kid"], "ABC789")
+        self.assertEqual(claims["aud"], ISSUER)
+        self.assertEqual(claims["iss"], "DEFGHI")
+        self.assertEqual(claims["sub"], CLIENT_ID)
+        self.assertEqual(claims["iat"], start_time)
+        self.assertGreater(claims["exp"], start_time)
+
+        # check the rest of the POSTed data
+        self.assertEqual(args["grant_type"], ["authorization_code"])
+        self.assertEqual(args["code"], [code])
+        self.assertEqual(args["client_id"], [CLIENT_ID])
+        self.assertEqual(args["redirect_uri"], [CALLBACK_URL])
+
+    @override_config(
+        {
+            "oidc_config": {
+                "enabled": True,
+                "client_id": CLIENT_ID,
+                "issuer": ISSUER,
+                "client_auth_method": "none",
+            }
+        }
+    )
+    def test_exchange_code_no_auth(self):
+        """Test that code exchange works with no client secret."""
+        token = {"type": "bearer"}
+        self.http_client.request = simple_async_mock(
+            return_value=FakeResponse(
+                code=200, phrase=b"OK", body=json.dumps(token).encode("utf-8")
+            )
+        )
+        code = "code"
+        ret = self.get_success(self.provider._exchange_code(code))
+
+        self.assertEqual(ret, token)
+
+        # the request should have hit the token endpoint
+        kwargs = self.http_client.request.call_args[1]
+        self.assertEqual(kwargs["method"], "POST")
self.assertEqual(kwargs["uri"], TOKEN_ENDPOINT) + + # check the POSTed data + args = parse_qs(kwargs["data"].decode("utf-8")) + self.assertEqual(args["grant_type"], ["authorization_code"]) + self.assertEqual(args["code"], [code]) + self.assertEqual(args["client_id"], [CLIENT_ID]) + self.assertEqual(args["redirect_uri"], [CALLBACK_URL]) + + @override_config( + { + "oidc_config": { + **DEFAULT_CONFIG, "user_mapping_provider": { "module": __name__ + ".TestMappingProviderExtra" - } + }, } } ) @@ -652,6 +780,7 @@ class OidcHandlerTestCase(HomeserverTestCase): new_user=True, ) + @override_config({"oidc_config": DEFAULT_CONFIG}) def test_map_userinfo_to_user(self): """Ensure that mapping the userinfo returned from a provider to an MXID works properly.""" auth_handler = self.hs.get_auth_handler() @@ -692,7 +821,7 @@ class OidcHandlerTestCase(HomeserverTestCase): "Mapping provider does not support de-duplicating Matrix IDs", ) - @override_config({"oidc_config": {"allow_existing_users": True}}) + @override_config({"oidc_config": {**DEFAULT_CONFIG, "allow_existing_users": True}}) def test_map_userinfo_to_existing_user(self): """Existing users can log in with OpenID Connect when allow_existing_users is True.""" store = self.hs.get_datastore() @@ -772,6 +901,7 @@ class OidcHandlerTestCase(HomeserverTestCase): "@TEST_USER_2:test", "oidc", ANY, ANY, None, new_user=False ) + @override_config({"oidc_config": DEFAULT_CONFIG}) def test_map_userinfo_to_invalid_localpart(self): """If the mapping provider generates an invalid localpart it should be rejected.""" self.get_success( @@ -782,9 +912,10 @@ class OidcHandlerTestCase(HomeserverTestCase): @override_config( { "oidc_config": { + **DEFAULT_CONFIG, "user_mapping_provider": { "module": __name__ + ".TestMappingProviderFailures" - } + }, } } ) @@ -829,6 +960,7 @@ class OidcHandlerTestCase(HomeserverTestCase): "mapping_error", "Unable to generate a Matrix ID from the SSO response" ) + @override_config({"oidc_config": DEFAULT_CONFIG}) def test_empty_localpart(self): """Attempts to map onto an empty localpart should be rejected.""" userinfo = { @@ -841,9 +973,10 @@ class OidcHandlerTestCase(HomeserverTestCase): @override_config( { "oidc_config": { + **DEFAULT_CONFIG, "user_mapping_provider": { "config": {"localpart_template": "{{ user.username }}"} - } + }, } } ) -- cgit 1.5.1 From e9df3f496b80e77125837951c0a0b56c3f5bf9a2 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Tue, 9 Mar 2021 15:15:52 +0000 Subject: Link to the List user's media admin API from media Admin API docs (#9571) Earlier [I was convinced](https://github.com/matrix-org/synapse/issues/9565) that we didn't have an Admin API for listing media uploaded by a user. Foolishly I was looking under the Media Admin API documentation, instead of the User Admin API documentation. I thought it'd be helpful to link to the latter so others don't hit the same dead end :) --- changelog.d/9571.doc | 1 + docs/admin_api/media_admin_api.md | 16 ++++++++++++++-- 2 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 changelog.d/9571.doc diff --git a/changelog.d/9571.doc b/changelog.d/9571.doc new file mode 100644 index 0000000000..1bba72e7d0 --- /dev/null +++ b/changelog.d/9571.doc @@ -0,0 +1 @@ +Link to the "List user's media" admin API from the media admin API docs. 
diff --git a/docs/admin_api/media_admin_api.md b/docs/admin_api/media_admin_api.md
index 90faeaaef0..9dbec68c19 100644
--- a/docs/admin_api/media_admin_api.md
+++ b/docs/admin_api/media_admin_api.md
@@ -1,5 +1,7 @@
 # Contents
-- [List all media in a room](#list-all-media-in-a-room)
+- [Querying media](#querying-media)
+  * [List all media in a room](#list-all-media-in-a-room)
+  * [List all media uploaded by a user](#list-all-media-uploaded-by-a-user)
 - [Quarantine media](#quarantine-media)
   * [Quarantining media by ID](#quarantining-media-by-id)
   * [Quarantining media in a room](#quarantining-media-in-a-room)
@@ -10,7 +12,11 @@
   * [Delete local media by date or size](#delete-local-media-by-date-or-size)
 - [Purge Remote Media API](#purge-remote-media-api)
 
-# List all media in a room
+# Querying media
+
+These APIs allow extracting media information from the homeserver.
+
+## List all media in a room
 
 This API gets a list of known media in a room. However, it only shows media
 from unencrypted events or rooms.
@@ -36,6 +42,12 @@ The API returns a JSON body like the following:
 }
 ```
 
+## List all media uploaded by a user
+
+Listing all media that has been uploaded by a local user can be achieved through
+the use of the [List media of a user](user_admin_api.rst#list-media-of-a-user)
+Admin API.
+
 # Quarantine media
 
 Quarantining media means that it is marked as inaccessible to users. It applies
-- cgit 1.5.1
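[Editorial note: for readers who want to try the querying APIs documented above, a hedged sketch follows. The room-media endpoint path is elided by the diff context here and is assumed from the surrounding `media_admin_api.md` conventions; the server name, room ID and token are placeholders, and authentication uses an admin access token as with other Synapse admin APIs.]

```python
import requests
from urllib.parse import quote

# Assumed endpoint path; the authoritative path is in the elided part of
# media_admin_api.md. <server>, <room_id> and the token are placeholders.
SERVER = "https://synapse.example.com"
ROOM_ID = "!room_id:example.com"
TOKEN = "<admin_access_token>"

resp = requests.get(
    # room IDs contain "!" and ":", so URL-encode them
    f"{SERVER}/_synapse/admin/v1/room/{quote(ROOM_ID, safe='')}/media",
    headers={"Authorization": f"Bearer {TOKEN}"},
)
resp.raise_for_status()
# per the docs above, the JSON body lists known media in the room
print(resp.json())
```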