summary refs log tree commit diff
path: root/synapse
diff options
context:
space:
mode:
authorErik Johnston <erikj@element.io>2024-05-14 15:08:46 +0100
committerGitHub <noreply@github.com>2024-05-14 15:08:46 +0100
commit284d85dee34d1d79ff92c38799dabdc28a713793 (patch)
tree72a8916b3b890901e4d5edb2dbe20976c3553a77 /synapse
parentReduce pauses on large device list changes (#17192) (diff)
downloadsynapse-284d85dee34d1d79ff92c38799dabdc28a713793.tar.xz
Cache literal sync filter validation (#17186)
The sliding sync proxy (amongst other things) uses literal JSON blobs as
filters, and repeatedly validating them takes a bunch of CPU.
Diffstat (limited to 'synapse')
-rw-r--r--synapse/rest/client/sync.py14
1 file changed, 13 insertions, 1 deletion
diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py
index 2b103ca6a8..d19aaf0e22 100644
--- a/synapse/rest/client/sync.py
+++ b/synapse/rest/client/sync.py
@@ -47,6 +47,7 @@ from synapse.http.site import SynapseRequest
 from synapse.logging.opentracing import trace_with_opname
 from synapse.types import JsonDict, Requester, StreamToken
 from synapse.util import json_decoder
+from synapse.util.caches.lrucache import LruCache
 
 from ._base import client_patterns, set_timeline_upper_limit
 
@@ -110,6 +111,11 @@ class SyncRestServlet(RestServlet):
         self._msc2654_enabled = hs.config.experimental.msc2654_enabled
         self._msc3773_enabled = hs.config.experimental.msc3773_enabled
 
+        self._json_filter_cache: LruCache[str, bool] = LruCache(
+            max_size=1000,
+            cache_name="sync_valid_filter",
+        )
+
     async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         # This will always be set by the time Twisted calls us.
         assert request.args is not None
@@ -177,7 +183,13 @@ class SyncRestServlet(RestServlet):
                 filter_object = json_decoder.decode(filter_id)
             except Exception:
                 raise SynapseError(400, "Invalid filter JSON", errcode=Codes.NOT_JSON)
-            self.filtering.check_valid_filter(filter_object)
+
+            # We cache the validation, as this can get quite expensive if people use
+            # a literal json blob as a query param.
+            if not self._json_filter_cache.get(filter_id):
+                self.filtering.check_valid_filter(filter_object)
+                self._json_filter_cache[filter_id] = True
+
             set_timeline_upper_limit(
                 filter_object, self.hs.config.server.filter_timeline_limit
             )