author | Patrick Cloke <patrickc@matrix.org> | 2021-03-02 09:43:34 -0500
---|---|---
committer | Patrick Cloke <patrickc@matrix.org> | 2021-03-02 09:43:34 -0500
commit | aee10768d8a9bf6c3df80859d2c135607aa1bf80 (patch) |
tree | 3a57573d9add15ea26f5ba16f43072aaebca1f0d /synapse/util |
parent | Re-run rejected metadata background update. (#9503) (diff) |
download | synapse-aee10768d8a9bf6c3df80859d2c135607aa1bf80.tar.xz |
Revert "Fix #8518 (sync requests being cached wrongly on timeout) (#9358)"
This reverts commit f5c93fc9931e4029bbd8000f398b6f39d67a8c46. It is being backed out because of a regression (#9507) and because additional review feedback has been provided.
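For context, here is a minimal sketch of the conditional-caching path that this revert removes. The caller, key, and callback names below are assumptions for illustration, not taken from this commit; only the `ResponseCache` constructor and `wrap_conditional()` signature come from the diff. The idea of the reverted change was that a caller could attach a `should_cache` predicate so that, for example, an empty sync response produced only because the request timed out would not be kept in the cache.

```python
# Hypothetical caller sketch (names assumed, not from this commit) showing the
# wrap_conditional() API that this revert removes.
from synapse.util.caches.response_cache import ResponseCache


def fetch_with_conditional_cache(cache: ResponseCache, request_key, compute_sync_response):
    """Return a Deferred for the response, caching it only if it is non-empty."""

    def should_cache(response):
        # Pre-revert behaviour: the entry was kept for timeout_ms only if every
        # registered predicate returned True for the completed result.
        return bool(response)

    return cache.wrap_conditional(
        request_key,            # cache key
        should_cache,           # predicate applied to the completed result
        compute_sync_response,  # callback run if nothing is cached or in flight
        request_key,            # *args forwarded to the callback
    )
```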
Diffstat (limited to 'synapse/util')
-rw-r--r-- | synapse/util/caches/response_cache.py | 34 |
1 file changed, 2 insertions, 32 deletions
diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py
index 53f85195a7..32228f42ee 100644
--- a/synapse/util/caches/response_cache.py
+++ b/synapse/util/caches/response_cache.py
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
-from typing import TYPE_CHECKING, Any, Callable, Dict, Generic, Optional, Set, TypeVar
+from typing import TYPE_CHECKING, Any, Callable, Dict, Generic, Optional, TypeVar
 
 from twisted.internet import defer
 
@@ -40,7 +40,6 @@ class ResponseCache(Generic[T]):
     def __init__(self, hs: "HomeServer", name: str, timeout_ms: float = 0):
         # Requests that haven't finished yet.
         self.pending_result_cache = {}  # type: Dict[T, ObservableDeferred]
-        self.pending_conditionals = {}  # type: Dict[T, Set[Callable[[Any], bool]]]
         self.clock = hs.get_clock()
         self.timeout_sec = timeout_ms / 1000.0
 
@@ -102,11 +101,7 @@ class ResponseCache(Generic[T]):
         self.pending_result_cache[key] = result
 
         def remove(r):
-            should_cache = all(
-                func(r) for func in self.pending_conditionals.pop(key, [])
-            )
-
-            if self.timeout_sec and should_cache:
+            if self.timeout_sec:
                 self.clock.call_later(
                     self.timeout_sec, self.pending_result_cache.pop, key, None
                 )
@@ -117,31 +112,6 @@ class ResponseCache(Generic[T]):
         result.addBoth(remove)
         return result.observe()
 
-    def add_conditional(self, key: T, conditional: Callable[[Any], bool]):
-        self.pending_conditionals.setdefault(key, set()).add(conditional)
-
-    def wrap_conditional(
-        self,
-        key: T,
-        should_cache: Callable[[Any], bool],
-        callback: "Callable[..., Any]",
-        *args: Any,
-        **kwargs: Any
-    ) -> defer.Deferred:
-        """The same as wrap(), but adds a conditional to the final execution.
-
-        When the final execution completes, *all* conditionals need to return True for it to properly cache,
-        else it'll not be cached in a timed fashion.
-        """
-
-        # See if there's already a result on this key that hasn't yet completed. Due to the single-threaded nature of
-        # python, adding a key immediately in the same execution thread will not cause a race condition.
-        result = self.get(key)
-        if not result or isinstance(result, defer.Deferred) and not result.called:
-            self.add_conditional(key, should_cache)
-
-        return self.wrap(key, callback, *args, **kwargs)
-
     def wrap(
         self, key: T, callback: "Callable[..., Any]", *args: Any, **kwargs: Any
     ) -> defer.Deferred:
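After this revert, only the unconditional `wrap()` path remains. A minimal sketch of that restored behaviour follows (the caller and argument names are assumptions; the `wrap()` signature and the timeout handling come from the diff above): whatever the callback returns is cached under the key and expires `timeout_sec` seconds after it completes, with no predicate consulted.

```python
# Sketch of the post-revert behaviour (caller names are assumptions).
from synapse.util.caches.response_cache import ResponseCache


def get_or_compute(cache: ResponseCache, key, compute, *args):
    # If a result for `key` is already cached or still in flight, wrap() returns
    # a Deferred for that existing result; otherwise it runs compute(*args) and
    # caches the outcome unconditionally, expiring it timeout_sec seconds after
    # the callback finishes.
    return cache.wrap(key, compute, *args)
```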