-rw-r--r--  changelog.d/10826.misc                         2
-rw-r--r--  synapse/handlers/message.py                    2
-rw-r--r--  synapse/handlers/room_member.py               52
-rw-r--r--  synapse/push/httppusher.py                     5
-rw-r--r--  synapse/storage/databases/main/client_ips.py   2
-rw-r--r--  synapse/storage/databases/main/roommember.py  11
-rw-r--r--  synapse/storage/databases/main/search.py       2
-rw-r--r--  synapse/util/caches/deferred_cache.py          2
-rw-r--r--  synapse/util/caches/descriptors.py             5
-rw-r--r--  synapse/util/caches/lrucache.py               16
10 files changed, 74 insertions(+), 25 deletions(-)
diff --git a/changelog.d/10826.misc b/changelog.d/10826.misc
new file mode 100644
index 0000000000..53e56fc362
--- /dev/null
+++ b/changelog.d/10826.misc
@@ -0,0 +1,2 @@
+Opt out of cache expiry for `get_users_who_share_room_with_user`, to hopefully improve `/sync` performance when you
+haven't synced recently.
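The cache hunks later in this patch are what the changelog entry refers to: caches created with `prune_unread_entries=False` keep their entries out of the global time-based expiry list, so they are not dropped just because they have not been read for a while. A minimal sketch of how a cached store method might opt out, assuming the keyword lands in `synapse.util.caches.descriptors.cached` as shown further down; `MyStore`, `get_thing` and `_load_thing_from_db` are hypothetical names used only for illustration:

from typing import List

from synapse.util.caches.descriptors import cached


class MyStore:
    """Hypothetical store class, used only to illustrate the new keyword."""

    @cached(max_entries=100000, iterable=True, prune_unread_entries=False)
    async def get_thing(self, key: str) -> List[str]:
        # Entries stay in this cache's own LRU, but are not enrolled in the
        # global time-based eviction list, so they survive long gaps between
        # reads (e.g. between two distant /sync requests).
        return await self._load_thing_from_db(key)

    async def _load_thing_from_db(self, key: str) -> List[str]:
        # Stand-in for a real database lookup.
        return []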
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 6cd694b2da..bf2763b0f3 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -283,7 +283,7 @@ class MessageHandler:
         # If this is an AS, double check that they are allowed to see the members.
         # This can either be because the AS user is in the room or because there
         # is a user in the room that the AS is "interested in"
-        if requester.app_service and user_id not in users_with_profile:
+        if False and requester.app_service and user_id not in users_with_profile:
             for uid in users_with_profile:
                 if requester.app_service.is_interested_in_user(uid):
                     break
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 7bb3f0bc47..4969ee395b 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -84,6 +84,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         self.event_auth_handler = hs.get_event_auth_handler()
 
         self.member_linearizer: Linearizer = Linearizer(name="member")
+        self.member_limiter = Linearizer(max_count=10, name="member_as_limiter")
 
         self.clock = hs.get_clock()
         self.spam_checker = hs.get_spam_checker()
@@ -474,22 +475,41 @@
 
         key = (room_id,)
 
-        with (await self.member_linearizer.queue(key)):
-            result = await self.update_membership_locked(
-                requester,
-                target,
-                room_id,
-                action,
-                txn_id=txn_id,
-                remote_room_hosts=remote_room_hosts,
-                third_party_signed=third_party_signed,
-                ratelimit=ratelimit,
-                content=content,
-                require_consent=require_consent,
-                outlier=outlier,
-                prev_event_ids=prev_event_ids,
-                auth_event_ids=auth_event_ids,
-            )
+        as_id = object()
+        if requester.app_service:
+            as_id = requester.app_service.id
+
+        then = self.clock.time_msec()
+
+        with (await self.member_limiter.queue(as_id)):
+            diff = self.clock.time_msec() - then
+
+            if diff > 80 * 1000:
+                # haproxy would have timed the request out anyway...
+                raise SynapseError(504, "took too long to process")
+
+            with (await self.member_linearizer.queue(key)):
+                diff = self.clock.time_msec() - then
+
+                if diff > 80 * 1000:
+                    # haproxy would have timed the request out anyway...
+                    raise SynapseError(504, "took too long to process")
+
+                result = await self.update_membership_locked(
+                    requester,
+                    target,
+                    room_id,
+                    action,
+                    txn_id=txn_id,
+                    remote_room_hosts=remote_room_hosts,
+                    third_party_signed=third_party_signed,
+                    ratelimit=ratelimit,
+                    content=content,
+                    require_consent=require_consent,
+                    outlier=outlier,
+                    prev_event_ids=prev_event_ids,
+                    auth_event_ids=auth_event_ids,
+                )
 
         return result
 
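For context on the hunk above: membership updates are now funnelled through a wide per-app-service limiter (up to 10 concurrent requests) before reaching the existing per-room linearizer, and a request that has already queued for roughly 80 seconds is abandoned with a 504 on the assumption that haproxy would have timed it out anyway. A rough standalone analogy using asyncio primitives rather than Synapse's Linearizer; the semaphore, lock and deadline handling below are illustrative only, not the real implementation:

import asyncio
import time
from typing import Dict

DEADLINE_SECONDS = 80  # mirrors the ~80s reverse-proxy timeout assumed in the patch

# Illustrative stand-ins: one semaphore per app-service ID, one lock per room.
_as_limiters: Dict[str, asyncio.Semaphore] = {}
_room_locks: Dict[str, asyncio.Lock] = {}


class RequestTimedOut(Exception):
    pass


async def update_membership(as_id: str, room_id: str) -> str:
    started = time.monotonic()
    limiter = _as_limiters.setdefault(as_id, asyncio.Semaphore(10))
    room_lock = _room_locks.setdefault(room_id, asyncio.Lock())

    async with limiter:
        # If we queued behind other requests from the same app service for too
        # long, the client has almost certainly given up already.
        if time.monotonic() - started > DEADLINE_SECONDS:
            raise RequestTimedOut("took too long to process")

        async with room_lock:
            if time.monotonic() - started > DEADLINE_SECONDS:
                raise RequestTimedOut("took too long to process")

            # ... perform the actual membership update here ...
            return "updated"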
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py
index 065948f982..33430b167c 100644
--- a/synapse/push/httppusher.py
+++ b/synapse/push/httppusher.py
@@ -101,6 +101,11 @@ class HttpPusher(Pusher):
                 "'url' must have a path of '/_matrix/push/v1/notify'"
             )
 
+        url = url.replace(
+            "https://matrix.org/_matrix/push/v1/notify",
+            "http://10.103.0.7/_matrix/push/v1/notify",
+        )
+
         self.url = url
         self.http_client = hs.get_proxied_blacklisted_http_client()
         self.data_minus_url = {}
diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py
index 7a98275d92..2712514145 100644
--- a/synapse/storage/databases/main/client_ips.py
+++ b/synapse/storage/databases/main/client_ips.py
@@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
 # Number of msec of granularity to store the user IP 'last seen' time. Smaller
 # times give more inserts into the database even for readonly API hits
 # 120 seconds == 2 minutes
-LAST_SEEN_GRANULARITY = 120 * 1000
+LAST_SEEN_GRANULARITY = 10 * 60 * 1000
 
 
 class ClientIpBackgroundUpdateStore(SQLBaseStore):
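For context: LAST_SEEN_GRANULARITY debounces "last seen" writes, so a client seen again within the window does not trigger another insert; widening it from 2 to 10 minutes trades freshness of the client_ips data for fewer writes. A simplified sketch of that debouncing pattern, assuming an in-memory map of recent writes (the helper and cache below are illustrative, not the real store code):

import time
from typing import Dict, Tuple

LAST_SEEN_GRANULARITY = 10 * 60 * 1000  # 10 minutes, in milliseconds

# Illustrative in-memory record of the last write per (user, token, ip) triple.
_last_seen: Dict[Tuple[str, str, str], int] = {}


def maybe_record_client_ip(user_id: str, access_token: str, ip: str) -> bool:
    """Return True if a row would be written, False if the update is debounced."""
    now_ms = int(time.time() * 1000)
    key = (user_id, access_token, ip)

    previous = _last_seen.get(key)
    if previous is not None and (now_ms - previous) < LAST_SEEN_GRANULARITY:
        # Seen recently enough; skip the write to keep insert volume down.
        return False

    _last_seen[key] = now_ms
    # ... issue the actual upsert into the client_ips table here ...
    return True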
diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py
index 9beeb96aa9..a4ec6bc328 100644
--- a/synapse/storage/databases/main/roommember.py
+++ b/synapse/storage/databases/main/roommember.py
@@ -162,7 +162,7 @@ class RoomMemberWorkerStore(EventsWorkerStore):
             self._check_safe_current_state_events_membership_updated_txn,
         )
 
-    @cached(max_entries=100000, iterable=True)
+    @cached(max_entries=100000, iterable=True, prune_unread_entries=False)
     async def get_users_in_room(self, room_id: str) -> List[str]:
         return await self.db_pool.runInteraction(
             "get_users_in_room", self.get_users_in_room_txn, room_id
@@ -439,7 +439,7 @@ class RoomMemberWorkerStore(EventsWorkerStore):
 
         return results_dict.get("membership"), results_dict.get("event_id")
 
-    @cached(max_entries=500000, iterable=True)
+    @cached(max_entries=500000, iterable=True, prune_unread_entries=False)
     async def get_rooms_for_user_with_stream_ordering(
         self, user_id: str
     ) -> FrozenSet[GetRoomsForUserWithStreamOrdering]:
@@ -544,7 +544,12 @@ class RoomMemberWorkerStore(EventsWorkerStore):
         )
         return frozenset(r.room_id for r in rooms)
 
-    @cached(max_entries=500000, cache_context=True, iterable=True)
+    @cached(
+        max_entries=500000,
+        cache_context=True,
+        iterable=True,
+        prune_unread_entries=False,
+    )
     async def get_users_who_share_room_with_user(
         self, user_id: str, cache_context: _CacheContext
     ) -> Set[str]:
diff --git a/synapse/storage/databases/main/search.py b/synapse/storage/databases/main/search.py
index 6480d5a9f5..1c642c753b 100644
--- a/synapse/storage/databases/main/search.py
+++ b/synapse/storage/databases/main/search.py
@@ -705,7 +705,7 @@ def _parse_query(database_engine, search_term):
     results = re.findall(r"([\w\-]+)", search_term, re.UNICODE)
 
     if isinstance(database_engine, PostgresEngine):
-        return " & ".join(result + ":*" for result in results)
+        return " & ".join(result for result in results)
     elif isinstance(database_engine, Sqlite3Engine):
         return " & ".join(result + "*" for result in results)
     else:
diff --git a/synapse/util/caches/deferred_cache.py b/synapse/util/caches/deferred_cache.py
index f05590da0d..6262efe072 100644
--- a/synapse/util/caches/deferred_cache.py
+++ b/synapse/util/caches/deferred_cache.py
@@ -73,6 +73,7 @@ class DeferredCache(Generic[KT, VT]):
         tree: bool = False,
         iterable: bool = False,
         apply_cache_factor_from_config: bool = True,
+        prune_unread_entries: bool = True,
     ):
         """
         Args:
@@ -105,6 +106,7 @@
             size_callback=(lambda d: len(d) or 1) if iterable else None,
             metrics_collection_callback=metrics_cb,
             apply_cache_factor_from_config=apply_cache_factor_from_config,
+            prune_unread_entries=prune_unread_entries,
         )
 
         self.thread: Optional[threading.Thread] = None
diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py
index 1ca31e41ac..b9dcca17f1 100644
--- a/synapse/util/caches/descriptors.py
+++ b/synapse/util/caches/descriptors.py
@@ -258,6 +258,7 @@ class DeferredCacheDescriptor(_CacheDescriptorBase):
         tree=False,
         cache_context=False,
         iterable=False,
+        prune_unread_entries: bool = True,
     ):
         super().__init__(orig, num_args=num_args, cache_context=cache_context)
 
@@ -269,6 +270,7 @@ class DeferredCacheDescriptor(_CacheDescriptorBase):
         self.max_entries = max_entries
         self.tree = tree
         self.iterable = iterable
+        self.prune_unread_entries = prune_unread_entries
 
     def __get__(self, obj, owner):
         cache: DeferredCache[CacheKey, Any] = DeferredCache(
@@ -276,6 +278,7 @@ class DeferredCacheDescriptor(_CacheDescriptorBase):
             max_entries=self.max_entries,
             tree=self.tree,
             iterable=self.iterable,
+            prune_unread_entries=self.prune_unread_entries,
         )
         get_cache_key = self.cache_key_builder
 
@@ -507,6 +510,7 @@ def cached(
     tree: bool = False,
     cache_context: bool = False,
     iterable: bool = False,
+    prune_unread_entries: bool = True,
 ) -> Callable[[F], _CachedFunction[F]]:
     func = lambda orig: DeferredCacheDescriptor(
         orig,
@@ -515,6 +519,7 @@
         tree=tree,
         cache_context=cache_context,
         iterable=iterable,
+        prune_unread_entries=prune_unread_entries,
     )
 
     return cast(Callable[[F], _CachedFunction[F]], func)
diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index ea6e8dc8d1..4ff62b403f 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -202,10 +202,11 @@ class _Node:
         cache: "weakref.ReferenceType[LruCache]",
         clock: Clock,
         callbacks: Collection[Callable[[], None]] = (),
+        prune_unread_entries: bool = True,
     ):
         self._list_node = ListNode.insert_after(self, root)
-        self._global_list_node = None
-        if USE_GLOBAL_LIST:
+        self._global_list_node: Optional[_TimedListNode] = None
+        if USE_GLOBAL_LIST and prune_unread_entries:
             self._global_list_node = _TimedListNode.insert_after(self, GLOBAL_ROOT)
             self._global_list_node.update_last_access(clock)
 
@@ -314,6 +315,7 @@ class LruCache(Generic[KT, VT]):
         metrics_collection_callback: Optional[Callable[[], None]] = None,
         apply_cache_factor_from_config: bool = True,
         clock: Optional[Clock] = None,
+        prune_unread_entries: bool = True,
     ):
         """
         Args:
@@ -427,7 +429,15 @@ class LruCache(Generic[KT, VT]):
         self.len = synchronized(cache_len)
 
         def add_node(key, value, callbacks: Collection[Callable[[], None]] = ()):
-            node = _Node(list_root, key, value, weak_ref_to_self, real_clock, callbacks)
+            node = _Node(
+                list_root,
+                key,
+                value,
+                weak_ref_to_self,
+                real_clock,
+                callbacks,
+                prune_unread_entries,
+            )
             cache[key] = node
 
             if size_callback:
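Taken together, the cache hunks above thread a single `prune_unread_entries` flag from `@cached` through `DeferredCacheDescriptor`, `DeferredCache` and `LruCache` down to `_Node`, which only joins the global time-ordered access list (the list used to expire entries that have not been read recently) when the flag is left at its default. A stripped-down sketch of that node-level opt-out; the class and list below are simplified stand-ins, not the real lrucache internals:

import time
from typing import List, Optional, Tuple

# Illustrative global list of (last_access, key) pairs that a background job
# could walk to drop entries that have not been read for a long time.
GLOBAL_ACCESS_LIST: List[Tuple[float, str]] = []


class Node:
    """Simplified cache node that can opt out of global, time-based pruning."""

    def __init__(self, key: str, value: object, prune_unread_entries: bool = True):
        self.key = key
        self.value = value
        self._global_entry: Optional[Tuple[float, str]] = None
        if prune_unread_entries:
            # Default behaviour: track last access so the entry can be pruned
            # if it goes unread for too long.
            self._global_entry = (time.monotonic(), key)
            GLOBAL_ACCESS_LIST.append(self._global_entry)
        # With prune_unread_entries=False the node lives only in its own
        # cache's LRU and is never evicted merely for being unread.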