| Field | Value | Date |
|---|---|---|
| author | Erik Johnston <erikj@element.io> | 2024-06-06 17:46:52 +0100 |
| committer | GitHub <noreply@github.com> | 2024-06-06 17:46:52 +0100 |
| commit | a963f579de02ffd4347cd3f013bd6fce816ba89c (patch) | |
| tree | 17f3e283f3cb78f8d96b16fb964fbe606f489d57 /synapse/handlers/e2e_keys.py | |
| parent | Always return OTK counts (#17275) (diff) | |
| download | synapse-a963f579de02ffd4347cd3f013bd6fce816ba89c.tar.xz | |
Don't try and resync devices for down hosts (#17273)
It's just a waste of time if we won't even query the remote host, as it's marked as down.
Diffstat (limited to '')
-rw-r--r-- | synapse/handlers/e2e_keys.py | 24 |
1 file changed, 18 insertions, 6 deletions
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index 560530a7b3..7d4feecaf1 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -45,7 +45,10 @@ from synapse.types import (
 from synapse.util import json_decoder
 from synapse.util.async_helpers import Linearizer, concurrently_execute
 from synapse.util.cancellation import cancellable
-from synapse.util.retryutils import NotRetryingDestination
+from synapse.util.retryutils import (
+    NotRetryingDestination,
+    filter_destinations_by_retry_limiter,
+)
 
 if TYPE_CHECKING:
     from synapse.server import HomeServer
@@ -268,10 +271,8 @@ class E2eKeysHandler:
             "%d destinations to query devices for", len(remote_queries_not_in_cache)
         )
 
-        async def _query(
-            destination_queries: Tuple[str, Dict[str, Iterable[str]]]
-        ) -> None:
-            destination, queries = destination_queries
+        async def _query(destination: str) -> None:
+            queries = remote_queries_not_in_cache[destination]
             return await self._query_devices_for_destination(
                 results,
                 cross_signing_keys,
@@ -281,9 +282,20 @@ class E2eKeysHandler:
                 timeout,
             )
 
+        # Only try and fetch keys for destinations that are not marked as
+        # down.
+        filtered_destinations = await filter_destinations_by_retry_limiter(
+            remote_queries_not_in_cache.keys(),
+            self.clock,
+            self.store,
+            # Let's give an arbitrary grace period for those hosts that are
+            # only recently down
+            retry_due_within_ms=60 * 1000,
+        )
+
         await concurrently_execute(
             _query,
-            remote_queries_not_in_cache.items(),
+            filtered_destinations,
             10,
             delay_cancellation=True,
         )
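For readers who want to see the shape of the change outside of Synapse's internals, here is a minimal sketch of the pattern the diff adopts: consult the per-destination retry/backoff state, drop hosts that are still marked as down (allowing a short grace period), and only then fan the remaining device queries out with bounded concurrency. Everything below is a simplified, hypothetical stand-in rather than Synapse's real code: `retry_timings`, `query_destination`, and the local versions of `filter_destinations_by_retry_limiter` and `concurrently_execute` only mirror the roles of the storage-backed and federation helpers used in the diff.

```python
import asyncio
import time
from typing import Awaitable, Callable, Dict, Iterable, List

# Hypothetical retry state: destination -> timestamp (ms) at which the host
# may next be contacted. In Synapse this lives in the database and is read
# by the real filter_destinations_by_retry_limiter.
retry_timings: Dict[str, int] = {
    "down.example.org": int(time.time() * 1000) + 3_600_000,  # backed off ~1h
    "up.example.org": 0,  # no backoff recorded
}


def filter_destinations_by_retry_limiter(
    destinations: Iterable[str], retry_due_within_ms: int = 0
) -> List[str]:
    """Keep destinations whose retry is due now, or due within the grace period."""
    cutoff = int(time.time() * 1000) + retry_due_within_ms
    return [d for d in destinations if retry_timings.get(d, 0) <= cutoff]


async def query_destination(destination: str) -> None:
    # Placeholder for the real federation device/key query.
    print(f"querying {destination}")
    await asyncio.sleep(0.1)


async def concurrently_execute(
    func: Callable[[str], Awaitable[None]], args: Iterable[str], limit: int
) -> None:
    """Run func over args with at most `limit` calls in flight."""
    semaphore = asyncio.Semaphore(limit)

    async def _wrap(arg: str) -> None:
        async with semaphore:
            await func(arg)

    await asyncio.gather(*(_wrap(a) for a in args))


async def main() -> None:
    remote_queries = {"down.example.org": ["DEVICE1"], "up.example.org": ["DEVICE2"]}
    # Skip hosts the retry limiter still considers down, with a 60s grace
    # period matching retry_due_within_ms=60 * 1000 in the diff.
    destinations = filter_destinations_by_retry_limiter(
        remote_queries.keys(), retry_due_within_ms=60 * 1000
    )
    await concurrently_execute(query_destination, destinations, 10)


asyncio.run(main())
```

The grace period means a destination whose backoff is about to expire is still queried, so a host that has only just recovered is not skipped, while hosts with long outstanding backoffs are left alone until the retry limiter says they are due.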