Diffstat (limited to 'synapse')
 -rw-r--r--  synapse/api/auth/msc3861_delegated.py      |  91
 -rw-r--r--  synapse/api/ratelimiting.py                |   7
 -rw-r--r--  synapse/replication/tcp/client.py          |  12
 -rw-r--r--  synapse/rest/admin/__init__.py             |   3
 -rw-r--r--  synapse/rest/admin/oidc.py                 |  55
 -rw-r--r--  synapse/storage/databases/main/cache.py    |  13
 -rw-r--r--  synapse/storage/databases/main/devices.py  |   9
 -rw-r--r--  synapse/util/caches/expiringcache.py       |  22
 8 files changed, 10 insertions, 202 deletions
diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py
index 14cba50c90..ef5d3f9b81 100644
--- a/synapse/api/auth/msc3861_delegated.py
+++ b/synapse/api/auth/msc3861_delegated.py
@@ -28,7 +28,6 @@ from twisted.web.http_headers import Headers
 from synapse.api.auth.base import BaseAuth
 from synapse.api.errors import (
     AuthError,
-    Codes,
     HttpResponseException,
     InvalidClientTokenError,
     OAuthInsufficientScopeError,
@@ -40,7 +39,6 @@ from synapse.logging.context import make_deferred_yieldable
 from synapse.types import Requester, UserID, create_requester
 from synapse.util import json_decoder
 from synapse.util.caches.cached_call import RetryOnExceptionCachedCall
-from synapse.util.caches.expiringcache import ExpiringCache
 
 if TYPE_CHECKING:
     from synapse.server import HomeServer
@@ -109,20 +107,13 @@ class MSC3861DelegatedAuth(BaseAuth):
         assert self._config.client_id, "No client_id provided"
         assert auth_method is not None, "Invalid client_auth_method provided"
 
+        self._clock = hs.get_clock()
         self._http_client = hs.get_proxied_http_client()
         self._hostname = hs.hostname
         self._admin_token = self._config.admin_token
 
         self._issuer_metadata = RetryOnExceptionCachedCall(self._load_metadata)
 
-        self._clock = hs.get_clock()
-        self._token_cache: ExpiringCache[str, IntrospectionToken] = ExpiringCache(
-            cache_name="introspection_token_cache",
-            clock=self._clock,
-            max_len=10000,
-            expiry_ms=5 * 60 * 1000,
-        )
-
         if isinstance(auth_method, PrivateKeyJWTWithKid):
             # Use the JWK as the client secret when using the private_key_jwt method
             assert self._config.jwk, "No JWK provided"
@@ -161,20 +152,6 @@ class MSC3861DelegatedAuth(BaseAuth):
         Returns:
             The introspection response
         """
-        # check the cache before doing a request
-        introspection_token = self._token_cache.get(token, None)
-
-        if introspection_token:
-            # check the expiration field of the token (if it exists)
-            exp = introspection_token.get("exp", None)
-            if exp:
-                time_now = self._clock.time()
-                expired = time_now > exp
-                if not expired:
-                    return introspection_token
-            else:
-                return introspection_token
-
         metadata = await self._issuer_metadata.get()
         introspection_endpoint = metadata.get("introspection_endpoint")
         raw_headers: Dict[str, str] = {
@@ -188,10 +165,7 @@ class MSC3861DelegatedAuth(BaseAuth):
 
         # Fill the body/headers with credentials
         uri, raw_headers, body = self._client_auth.prepare(
-            method="POST",
-            uri=introspection_endpoint,
-            headers=raw_headers,
-            body=body,
+            method="POST", uri=introspection_endpoint, headers=raw_headers, body=body
         )
         headers = Headers({k: [v] for (k, v) in raw_headers.items()})
 
@@ -233,20 +207,10 @@ class MSC3861DelegatedAuth(BaseAuth):
                 "The introspection endpoint returned an invalid JSON response."
             )
 
-        expiration = resp.get("exp", None)
-        if expiration:
-            if self._clock.time() > expiration:
-                raise InvalidClientTokenError("Token is expired.")
-
-        introspection_token = IntrospectionToken(**resp)
-
-        # add token to cache
-        self._token_cache[token] = introspection_token
-
-        return introspection_token
+        return IntrospectionToken(**resp)
 
     async def is_server_admin(self, requester: Requester) -> bool:
-        return SCOPE_SYNAPSE_ADMIN in requester.scope
+        return "urn:synapse:admin:*" in requester.scope
 
     async def get_user_by_req(
         self,
@@ -263,36 +227,6 @@ class MSC3861DelegatedAuth(BaseAuth):
             # so that we don't provision the user if they don't have enough permission:
             requester = await self.get_user_by_access_token(access_token, allow_expired)
 
-            # Allow impersonation by an admin user using `_oidc_admin_impersonate_user_id` query parameter
-            if request.args is not None:
-                user_id_params = request.args.get(b"_oidc_admin_impersonate_user_id")
-                if user_id_params:
-                    if await self.is_server_admin(requester):
-                        user_id_str = user_id_params[0].decode("ascii")
-                        impersonated_user_id = UserID.from_string(user_id_str)
-                        logging.info(f"Admin impersonation of user {user_id_str}")
-                        requester = create_requester(
-                            user_id=impersonated_user_id,
-                            scope=[SCOPE_MATRIX_API],
-                            authenticated_entity=requester.user.to_string(),
-                        )
-                    else:
-                        raise AuthError(
-                            401,
-                            "Impersonation not possible by a non admin user",
-                        )
-
-            # Deny the request if the user account is locked.
-            if not allow_locked and await self.store.get_user_locked_status(
-                requester.user.to_string()
-            ):
-                raise AuthError(
-                    401,
-                    "User account has been locked",
-                    errcode=Codes.USER_LOCKED,
-                    additional_fields={"soft_logout": True},
-                )
-
         if not allow_guest and requester.is_guest:
             raise OAuthInsufficientScopeError([SCOPE_MATRIX_API])
 
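The hunk above removes query-parameter-driven admin impersonation. For context, a rough sketch of how a caller used it; only the `_oidc_admin_impersonate_user_id` parameter name and the admin-scope requirement come from the removed code, while the URL and identifiers below are placeholders.

    # Illustrative only: with an admin-scoped token, the removed branch let a request
    # act as another user by adding the _oidc_admin_impersonate_user_id query parameter.
    import requests

    resp = requests.get(
        "https://synapse.example.com/_matrix/client/v3/account/whoami",
        params={"_oidc_admin_impersonate_user_id": "@alice:example.com"},
        headers={"Authorization": "Bearer <admin-scoped access token>"},
    )
    print(resp.json())  # reported the impersonated user while the feature existed
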
@@ -309,14 +243,14 @@ class MSC3861DelegatedAuth(BaseAuth):
             # XXX: This is a temporary solution so that the admin API can be called by
             # the OIDC provider. This will be removed once we have OIDC client
             # credentials grant support in matrix-authentication-service.
-            logging.info("Admin token used")
+            logging.info("Admin toked used")
             # XXX: that user doesn't exist and won't be provisioned.
             # This is mostly fine for admin calls, but we should also think about doing
             # requesters without a user_id.
             admin_user = UserID("__oidc_admin", self._hostname)
             return create_requester(
                 user_id=admin_user,
-                scope=[SCOPE_SYNAPSE_ADMIN],
+                scope=["urn:synapse:admin:*"],
             )
 
         try:
@@ -438,16 +372,3 @@ class MSC3861DelegatedAuth(BaseAuth):
             scope=scope,
             is_guest=(has_guest_scope and not has_user_scope),
         )
-
-    def invalidate_cached_tokens(self, keys: List[str]) -> None:
-        """
-        Invalidate the entry(s) in the introspection token cache corresponding to the given key
-        """
-        for key in keys:
-            self._token_cache.invalidate(key)
-
-    def invalidate_token_cache(self) -> None:
-        """
-        Invalidate the entire token cache.
-        """
-        self._token_cache.invalidate_all()
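For reference, the caching behaviour deleted above boils down to the following sketch. It assumes only what the removed hunks show: entries keyed by the raw access token, a five-minute cache lifetime, and a check of the response's optional `exp` field against the homeserver clock. The class and method names are illustrative, not Synapse API.

    # Illustrative stand-in for the removed ExpiringCache-based introspection cache.
    # `clock` is anything with a time() method returning seconds, like hs.get_clock().
    from typing import Any, Dict, Optional, Tuple

    class IntrospectionTokenCache:
        def __init__(self, clock: Any, expiry_seconds: int = 5 * 60) -> None:
            self._clock = clock
            self._expiry = expiry_seconds
            # token -> (introspection response, time the entry was cached)
            self._entries: Dict[str, Tuple[Dict[str, Any], float]] = {}

        def get(self, token: str) -> Optional[Dict[str, Any]]:
            entry = self._entries.get(token)
            if entry is None:
                return None
            response, cached_at = entry
            now = self._clock.time()
            exp = response.get("exp")
            # Drop the entry once the cache slot or the token's own `exp` has passed.
            if now - cached_at > self._expiry or (exp is not None and now > exp):
                del self._entries[token]
                return None
            return response

        def put(self, token: str, response: Dict[str, Any]) -> None:
            self._entries[token] = (response, self._clock.time())
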
diff --git a/synapse/api/ratelimiting.py b/synapse/api/ratelimiting.py
index 887b214d64..02ae45e8b3 100644
--- a/synapse/api/ratelimiting.py
+++ b/synapse/api/ratelimiting.py
@@ -40,7 +40,7 @@ class Ratelimiter:
     - the cost C of this request in tokens.
     Then, if there is room in the bucket for C tokens (T + C <= `burst_count`),
     the request is permitted and `cost` tokens are added to the bucket.
-    Otherwise the request is denied, and the bucket continues to hold T tokens.
+    Otherwise, the request is denied, and the bucket continues to hold T tokens.
 
     This means that the limiter enforces an average request frequency of `rate_hz`,
     while accumulating a buffer of up to `burst_count` requests which can be consumed
@@ -55,9 +55,10 @@ class Ratelimiter:
     request.
 
     Args:
+        store: The datastore providing get_ratelimit_for_user.
         clock: A homeserver clock, for retrieving the current time
-        rate_hz: The long term number of actions that can be performed in a second.
-        burst_count: How many actions that can be performed before being limited.
+        cfg: The ratelimit configuration for this rate limiter including the
+            allowed rate and burst count.
     """
 
     def __init__(
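The docstring above describes a leaky-bucket check: the bucket drains at `rate_hz` tokens per second, and a request of cost C is allowed only while T + C <= `burst_count`. A minimal, self-contained sketch of that arithmetic follows; it is a toy, not the Ratelimiter class, and it ignores the store/cfg arguments and per-user overrides.

    # Toy leaky bucket mirroring the docstring: tokens drain at rate_hz per second,
    # and a request is allowed only if its cost still fits under burst_count.
    import time

    class ToyBucket:
        def __init__(self, rate_hz: float, burst_count: float) -> None:
            self.rate_hz = rate_hz
            self.burst_count = burst_count
            self.tokens = 0.0
            self.last_update = time.monotonic()

        def try_action(self, cost: float = 1.0) -> bool:
            now = time.monotonic()
            # Leak whatever has drained since the last check.
            self.tokens = max(0.0, self.tokens - (now - self.last_update) * self.rate_hz)
            self.last_update = now
            if self.tokens + cost <= self.burst_count:
                self.tokens += cost
                return True
            return False

    bucket = ToyBucket(rate_hz=0.5, burst_count=3)
    print([bucket.try_action() for _ in range(5)])  # [True, True, True, False, False]
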
diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py
index 51285e6d33..ca8a76f77c 100644
--- a/synapse/replication/tcp/client.py
+++ b/synapse/replication/tcp/client.py
@@ -28,7 +28,6 @@ from synapse.logging.context import PreserveLoggingContext, make_deferred_yielda
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.replication.tcp.streams import (
     AccountDataStream,
-    CachesStream,
     DeviceListsStream,
     PushersStream,
     PushRulesStream,
@@ -76,7 +75,6 @@ class ReplicationDataHandler:
         self._instance_name = hs.get_instance_name()
         self._typing_handler = hs.get_typing_handler()
         self._state_storage_controller = hs.get_storage_controllers().state
-        self.auth = hs.get_auth()
 
         self._notify_pushers = hs.config.worker.start_pushers
         self._pusher_pool = hs.get_pusherpool()
@@ -224,16 +222,6 @@ class ReplicationDataHandler:
                 self._state_storage_controller.notify_event_un_partial_stated(
                     row.event_id
                 )
-        # invalidate the introspection token cache
-        elif stream_name == CachesStream.NAME:
-            for row in rows:
-                if row.cache_func == "introspection_token_invalidation":
-                    if row.keys[0] is None:
-                        # invalidate the whole cache
-                        # mypy ignore - the token cache is defined on MSC3861DelegatedAuth
-                        self.auth.invalidate_token_cache()  # type: ignore[attr-defined]
-                    else:
-                        self.auth.invalidate_cached_tokens(row.keys)  # type: ignore[attr-defined]
 
         await self._presence_handler.process_replication_rows(
             stream_name, instance_name, token, rows
diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py
index 94170715fb..0d42c89ff7 100644
--- a/synapse/rest/admin/__init__.py
+++ b/synapse/rest/admin/__init__.py
@@ -47,7 +47,6 @@ from synapse.rest.admin.federation import (
     ListDestinationsRestServlet,
 )
 from synapse.rest.admin.media import ListMediaInRoom, register_servlets_for_media_repo
-from synapse.rest.admin.oidc import OIDCTokenRevocationRestServlet
 from synapse.rest.admin.registration_tokens import (
     ListRegistrationTokensRestServlet,
     NewRegistrationTokenRestServlet,
@@ -298,8 +297,6 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
     BackgroundUpdateRestServlet(hs).register(http_server)
     BackgroundUpdateStartJobRestServlet(hs).register(http_server)
     ExperimentalFeaturesRestServlet(hs).register(http_server)
-    if hs.config.experimental.msc3861.enabled:
-        OIDCTokenRevocationRestServlet(hs).register(http_server)
 
 
 def register_servlets_for_client_rest_resource(
diff --git a/synapse/rest/admin/oidc.py b/synapse/rest/admin/oidc.py
deleted file mode 100644
index 64d2d40550..0000000000
--- a/synapse/rest/admin/oidc.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2023 The Matrix.org Foundation C.I.C
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-from http import HTTPStatus
-from typing import TYPE_CHECKING, Dict, Tuple
-
-from synapse.http.servlet import RestServlet
-from synapse.http.site import SynapseRequest
-from synapse.rest.admin._base import admin_patterns, assert_requester_is_admin
-
-if TYPE_CHECKING:
-    from synapse.server import HomeServer
-
-
-class OIDCTokenRevocationRestServlet(RestServlet):
-    """
-    Delete a given token introspection response - identified by the `jti` field - from the
-    introspection token cache when a token is revoked at the authorizing server
-    """
-
-    PATTERNS = admin_patterns("/OIDC_token_revocation/(?P<token_id>[^/]*)")
-
-    def __init__(self, hs: "HomeServer"):
-        super().__init__()
-        auth = hs.get_auth()
-
-        # If this endpoint is loaded then we must have enabled delegated auth.
-        from synapse.api.auth.msc3861_delegated import MSC3861DelegatedAuth
-
-        assert isinstance(auth, MSC3861DelegatedAuth)
-
-        self.auth = auth
-        self.store = hs.get_datastores().main
-
-    async def on_DELETE(
-        self, request: SynapseRequest, token_id: str
-    ) -> Tuple[HTTPStatus, Dict]:
-        await assert_requester_is_admin(self.auth, request)
-
-        self.auth._token_cache.invalidate(token_id)
-
-        # make sure we invalidate the cache on any workers
-        await self.store.stream_introspection_token_invalidation((token_id,))
-
-        return HTTPStatus.OK, {}
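The deleted servlet exposed cache revocation as an admin DELETE endpoint. A hedged usage sketch, assuming the default admin_patterns prefix of /_synapse/admin/v1; the hostname, token, and jti value are placeholders.

    # Illustrative call to the removed endpoint: the path component is the `jti` of
    # the introspection response to evict from the cache on all workers.
    import requests

    resp = requests.delete(
        "https://synapse.example.com/_synapse/admin/v1/OIDC_token_revocation/<jti>",
        headers={"Authorization": "Bearer <server-admin access token>"},
    )
    assert resp.status_code == 200  # the servlet replied HTTPStatus.OK with an empty body
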
diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py
index 18905e07b6..2fbd389c71 100644
--- a/synapse/storage/databases/main/cache.py
+++ b/synapse/storage/databases/main/cache.py
@@ -584,19 +584,6 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
         else:
             return 0
 
-    async def stream_introspection_token_invalidation(
-        self, key: Tuple[Optional[str]]
-    ) -> None:
-        """
-        Stream an invalidation request for the introspection token cache to workers
-
-        Args:
-            key: token_id of the introspection token to remove from the cache
-        """
-        await self.send_invalidation_to_replication(
-            "introspection_token_invalidation", key
-        )
-
     @wrap_as_background_process("clean_up_old_cache_invalidations")
     async def _clean_up_cache_invalidation_wrapper(self) -> None:
         """
diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py
index 7208fc8b33..324fdfa892 100644
--- a/synapse/storage/databases/main/devices.py
+++ b/synapse/storage/databases/main/devices.py
@@ -33,7 +33,6 @@ from typing_extensions import Literal
 
 from synapse.api.constants import EduTypes
 from synapse.api.errors import Codes, StoreError
-from synapse.config.homeserver import HomeServerConfig
 from synapse.logging.opentracing import (
     get_active_span_text_map,
     set_tag,
@@ -1664,7 +1663,6 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
         self.device_id_exists_cache: LruCache[
             Tuple[str, str], Literal[True]
         ] = LruCache(cache_name="device_id_exists", max_size=10000)
-        self.config: HomeServerConfig = hs.config
 
     async def store_device(
         self,
@@ -1778,13 +1776,6 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
         for device_id in device_ids:
             self.device_id_exists_cache.invalidate((user_id, device_id))
 
-        # TODO: don't nuke the entire cache once there is a way to associate
-        #  device_id -> introspection_token
-        if self.config.experimental.msc3861.enabled:
-            # mypy ignore - the token cache is defined on MSC3861DelegatedAuth
-            self.auth._token_cache.invalidate_all()  # type: ignore[attr-defined]
-            await self.stream_introspection_token_invalidation((None,))
-
     async def update_device(
         self, user_id: str, device_id: str, new_display_name: Optional[str] = None
     ) -> None:
diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py
index 9a3e10ddee..01ad02af67 100644
--- a/synapse/util/caches/expiringcache.py
+++ b/synapse/util/caches/expiringcache.py
@@ -140,20 +140,6 @@ class ExpiringCache(Generic[KT, VT]):
 
         return value.value
 
-    def invalidate(self, key: KT) -> None:
-        """
-        Remove the given key from the cache.
-        """
-
-        value = self._cache.pop(key, None)
-        if value:
-            if self.iterable:
-                self.metrics.inc_evictions(
-                    EvictionReason.invalidation, len(value.value)
-                )
-            else:
-                self.metrics.inc_evictions(EvictionReason.invalidation)
-
     def __contains__(self, key: KT) -> bool:
         return key in self._cache
 
@@ -207,14 +193,6 @@ class ExpiringCache(Generic[KT, VT]):
             len(self),
         )
 
-    def invalidate_all(self) -> None:
-        """
-        Remove all items from the cache.
-        """
-        keys = set(self._cache.keys())
-        for key in keys:
-            self._cache.pop(key)
-
     def __len__(self) -> int:
         if self.iterable:
             return sum(len(entry.value) for entry in self._cache.values())