summary refs log tree commit diff
path: root/synapse
diff options
context:
space:
mode:
author: Paul "LeoNerd" Evans <paul@matrix.org> 2015-03-25 19:05:34 +0000
committer: Paul "LeoNerd" Evans <paul@matrix.org> 2015-03-25 19:05:34 +0000
commit: 9ba6487b3fe985c4ec84b02d9804aea7e2df6c40 (patch)
tree: 031159b1398664dc27ed6bc7a0d6917924258312 /synapse
parent: Implement the 'key in dict' test for LruCache() (diff)
download: synapse-9ba6487b3fe985c4ec84b02d9804aea7e2df6c40.tar.xz
Allow a choice of LRU behaviour for Cache() by using LruCache() or OrderedDict()
Diffstat (limited to 'synapse')
-rw-r--r--synapse/storage/_base.py20
 1 file changed, 12 insertions(+), 8 deletions(-)
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 27ea65a0f6..6fa63f052e 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -55,10 +55,14 @@ cache_counter = metrics.register_cache(
 
 class Cache(object):
 
-    def __init__(self, name, max_entries=1000, keylen=1):
-        self.cache = OrderedDict()
+    def __init__(self, name, max_entries=1000, keylen=1, lru=False):
+        if lru:
+            self.cache = LruCache(max_size=max_entries)
+            self.max_entries = None
+        else:
+            self.cache = OrderedDict()
+            self.max_entries = max_entries
 
-        self.max_entries = max_entries
         self.name = name
         self.keylen = keylen
 
@@ -82,8 +86,9 @@ class Cache(object):
         if len(keyargs) != self.keylen:
             raise ValueError("Expected a key to have %d items", self.keylen)
 
-        while len(self.cache) > self.max_entries:
-            self.cache.popitem(last=False)
+        if self.max_entries is not None:
+            while len(self.cache) >= self.max_entries:
+                self.cache.popitem(last=False)
 
         self.cache[keyargs] = value
 
@@ -94,9 +99,7 @@ class Cache(object):
         self.cache.pop(keyargs, None)
 
 
-# TODO(paul):
-#  * consider other eviction strategies - LRU?
-def cached(max_entries=1000, num_args=1):
+def cached(max_entries=1000, num_args=1, lru=False):
     """ A method decorator that applies a memoizing cache around the function.
 
     The function is presumed to take zero or more arguments, which are used in
@@ -115,6 +118,7 @@ def cached(max_entries=1000, num_args=1):
             name=orig.__name__,
             max_entries=max_entries,
             keylen=num_args,
+            lru=lru,
         )
 
         @functools.wraps(orig)