diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py
index 7f68289723..066fa423fd 100644
--- a/synapse/util/caches/response_cache.py
+++ b/synapse/util/caches/response_cache.py
@@ -41,6 +41,18 @@ class ResponseCache(object):
return len(self.pending_result_cache)
def get(self, key):
+ """Look up the given key.
+
+ Returns a deferred which doesn't follow the synapse logcontext rules,
+ so you'll probably want to make_deferred_yieldable it.
+
+ Args:
+ key (str):
+
+ Returns:
+ twisted.internet.defer.Deferred|None: None if there is no entry
+ for this key; otherwise a deferred result.
+ """
result = self.pending_result_cache.get(key)
if result is not None:
self._metrics.inc_hits()
@@ -50,6 +62,26 @@ class ResponseCache(object):
return None
def set(self, key, deferred):
+ """Set the entry for the given key to the given deferred.
+
+ *deferred* should run its callbacks in the sentinel logcontext (ie,
+ you should wrap normal synapse deferreds with
+ logcontext.run_in_background).
+
+ Returns a new Deferred which also doesn't follow the synapse logcontext
+    rules, so you will want to make_deferred_yieldable it.
+
+ (TODO: before using this more widely, it might make sense to refactor
+ it and get() so that they do the necessary wrapping rather than having
+ to do it everywhere ResponseCache is used.)
+
+ Args:
+ key (str):
+ deferred (twisted.internet.defer.Deferred):
+
+ Returns:
+ twisted.internet.defer.Deferred
+ """
result = ObservableDeferred(deferred, consumeErrors=True)
self.pending_result_cache[key] = result
diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py
index 941d873ab8..2ff46090a6 100644
--- a/synapse/util/caches/stream_change_cache.py
+++ b/synapse/util/caches/stream_change_cache.py
@@ -16,7 +16,7 @@
from synapse.util.caches import register_cache, CACHE_SIZE_FACTOR
-from blist import sorteddict
+from sortedcontainers import SortedDict
import logging
@@ -35,7 +35,7 @@ class StreamChangeCache(object):
def __init__(self, name, current_stream_pos, max_size=10000, prefilled_cache={}):
self._max_size = int(max_size * CACHE_SIZE_FACTOR)
self._entity_to_key = {}
- self._cache = sorteddict()
+ self._cache = SortedDict()
self._earliest_known_stream_pos = current_stream_pos
self.name = name
self.metrics = register_cache(self.name, self._cache)