author    | Andrew Morgan <andrew@amorgan.xyz> | 2019-01-24 12:44:27 +0000
committer | Andrew Morgan <andrew@amorgan.xyz> | 2019-01-24 12:44:27 +0000
commit    | 068aa1d22840a1154bb8fbdd445a8c36b290db91
tree      | bf745b5f0f2b7e20563eeab4fd735da76a746c9a
parent    | Use native UPSERTs where possible (#4306)
download  | synapse-068aa1d22840a1154bb8fbdd445a8c36b290db91.tar.xz
Time out filtered room dir queries after 60s
-rw-r--r-- | synapse/handlers/room_list.py | 14
1 file changed, 12 insertions(+), 2 deletions(-)
diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py
index dc88620885..ea63fb604c 100644
--- a/synapse/handlers/room_list.py
+++ b/synapse/handlers/room_list.py
@@ -31,6 +31,7 @@ from synapse.util.caches.descriptors import cachedInlineCallbacks
 from synapse.util.caches.response_cache import ResponseCache
 
 from ._base import BaseHandler
+from datetime import datetime, timedelta
 
 logger = logging.getLogger(__name__)
 
@@ -73,8 +74,13 @@ class RoomListHandler(BaseHandler):
             # We explicitly don't bother caching searches or requests for
             # appservice specific lists.
             logger.info("Bypassing cache as search request.")
+
+            # XXX: Quick hack to stop room directory queries taking too long.
+            # Timeout request after 60s. Probably want a more fundamental
+            # solution at some point
+            timeout = datetime.now() + timedelta(seconds=60)
             return self._get_public_room_list(
-                limit, since_token, search_filter, network_tuple=network_tuple,
+                limit, since_token, search_filter, network_tuple=network_tuple, timeout=timeout,
             )
 
         key = (limit, since_token, network_tuple)
@@ -87,7 +93,8 @@ class RoomListHandler(BaseHandler):
     @defer.inlineCallbacks
     def _get_public_room_list(self, limit=None, since_token=None,
                               search_filter=None,
-                              network_tuple=EMPTY_THIRD_PARTY_ID,):
+                              network_tuple=EMPTY_THIRD_PARTY_ID,
+                              timeout=None,):
         if since_token and since_token != "END":
             since_token = RoomListNextBatch.from_token(since_token)
         else:
@@ -202,6 +209,9 @@ class RoomListHandler(BaseHandler):
         chunk = []
 
         for i in range(0, len(rooms_to_scan), step):
+            if timeout and datetime.now() > timeout:
+                raise Exception("Timed out searching room directory")
+
             batch = rooms_to_scan[i:i + step]
             logger.info("Processing %i rooms for result", len(batch))
             yield concurrently_execute(
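Taken out of its Twisted/Synapse context, the change boils down to a simple deadline pattern: compute an absolute cut-off before the scan starts, then check it once per batch of work and abort when it has passed. The sketch below illustrates only that pattern; the names (`process_in_batches`, `handle_batch`, `items`) are illustrative stand-ins, not part of the Synapse codebase.

```python
# Minimal sketch of the deadline pattern used in this commit (illustrative,
# not Synapse code).
from datetime import datetime, timedelta


def handle_batch(batch):
    # Stand-in for the real per-batch work (concurrently_execute in Synapse).
    return [item * 2 for item in batch]


def process_in_batches(items, step=10, timeout_seconds=60):
    """Process `items` in batches of `step`, giving up after `timeout_seconds`."""
    # Compute an absolute deadline up front, mirroring
    # `timeout = datetime.now() + timedelta(seconds=60)` in the diff.
    deadline = datetime.now() + timedelta(seconds=timeout_seconds)

    results = []
    for i in range(0, len(items), step):
        # Check the deadline once per batch, as the diff does at the top of
        # the rooms_to_scan loop, rather than once per item.
        if datetime.now() > deadline:
            raise Exception("Timed out processing batches")

        batch = items[i:i + step]
        results.extend(handle_batch(batch))
    return results


if __name__ == "__main__":
    print(process_in_batches(list(range(25)), step=10, timeout_seconds=60))
```

As the XXX comment in the diff notes, this is a quick hack rather than a fundamental fix: the filtered room directory query is simply aborted with a bare `Exception` after 60 seconds, and a more principled solution is deferred.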