diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 78ba5f25ea..4b1ec687c9 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -48,24 +48,30 @@ def cached(max_entries=1000):
The wrapped function has an additional member, a callable called
"invalidate". This can be used to remove individual entries from the cache.
+
+ The wrapped function has another additional callable, called "prefill",
+ which can be used to insert values into the cache specifically, without
+ calling the calculation function.
"""
def wrap(orig):
cache = {}
- @defer.inlineCallbacks
- def wrapped(self, key):
- if key in cache:
- defer.returnValue(cache[key])
-
- ret = yield orig(self, key)
-
+ def prefill(key, value):
while len(cache) > max_entries:
# TODO(paul): This feels too biased. However, a random index
# would be a bit inefficient, walking the list of keys just
# to ignore most of them?
del cache[cache.keys()[0]]
- cache[key] = ret;
+ cache[key] = value
+
+ @defer.inlineCallbacks
+ def wrapped(self, key):
+ if key in cache:
+ defer.returnValue(cache[key])
+
+ ret = yield orig(self, key)
+ prefill(key, ret)
defer.returnValue(ret)
def invalidate(key):
@@ -73,6 +79,7 @@ def cached(max_entries=1000):
del cache[key]
wrapped.invalidate = invalidate
+ wrapped.prefill = prefill
return wrapped
return wrap
diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py
index 057f798640..fb306cb784 100644
--- a/tests/storage/test__base.py
+++ b/tests/storage/test__base.py
@@ -87,3 +87,17 @@ class CacheDecoratorTestCase(unittest.TestCase):
self.assertTrue(callcount[0] >= 14,
msg="Expected callcount >= 14, got %d" % (callcount[0]))
+
+ @defer.inlineCallbacks
+ def test_prefill(self):
+ callcount = [0]
+
+ @cached()
+ def func(self, key):
+ callcount[0] += 1
+ return key
+
+ func.prefill("foo", 123)
+
+ self.assertEquals((yield func(self, "foo")), 123)
+ self.assertEquals(callcount[0], 0)