author     David Baker <dave@matrix.org>  2016-01-21 19:16:25 +0000
committer  David Baker <dave@matrix.org>  2016-01-21 19:16:25 +0000
commit     f1f81221205cf2ec101f96234050569d6419fd6b (patch)
tree       8cd4e21847d0c63ae70c869ac08575dcc441931e /synapse/util/caches/lrucache.py
parent     Merge pull request #517 from matrix-org/erikj/push_only_room (diff)
download   synapse-f1f81221205cf2ec101f96234050569d6419fd6b.tar.xz
Change LRUCache to be tree-based so we can delete subtrees.
Diffstat (limited to 'synapse/util/caches/lrucache.py')
-rw-r--r--  synapse/util/caches/lrucache.py  43
1 files changed, 35 insertions, 8 deletions
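
The TreeCache imported in the diff below stores cache entries in a nested mapping keyed by fixed-length tuples, so popping a key prefix removes and returns the whole subtree beneath it; enumerate_leaves() then walks that subtree so each leaf can be unlinked from the LRU list. A minimal sketch of the idea, not the actual synapse.util.caches.treecache module:

# Hypothetical sketch of a tree-backed mapping keyed by tuples; the real
# synapse.util.caches.treecache.TreeCache may differ in detail.
class TreeCacheSketch(object):
    def __init__(self):
        self.root = {}

    def __setitem__(self, key, value):
        # Descend through all but the last key component, creating
        # intermediate dicts as needed, then store the leaf value.
        node = self.root
        for part in key[:-1]:
            node = node.setdefault(part, {})
        node[key[-1]] = value

    def get(self, key, default=None):
        node = self.root
        for part in key:
            if part not in node:
                return default
            node = node[part]
        return node

    def pop(self, key, default=None):
        # Popping a full-length key removes one leaf; popping a shorter
        # prefix removes (and returns) the entire subtree under it.
        node = self.root
        for part in key[:-1]:
            node = node.get(part)
            if node is None:
                return default
        return node.pop(key[-1], default)
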
diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index 0122b0bb3f..0feceb298a 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -17,11 +17,23 @@
 from functools import wraps
 import threading
 
+from synapse.util.caches.treecache import TreeCache
+
+
+def enumerate_leaves(node, depth):
+    if depth == 0:
+        yield node
+    else:
+        for n in node.values():
+            for m in enumerate_leaves(n, depth - 1):
+                yield m
+
 
 class LruCache(object):
     """Least-recently-used cache."""
-    def __init__(self, max_size):
-        cache = {}
+    def __init__(self, max_size, keylen):
+        cache = TreeCache()
+        self.size = 0
         list_root = []
         list_root[:] = [list_root, list_root, None, None]
 
@@ -44,6 +56,7 @@ class LruCache(object):
             prev_node[NEXT] = node
             next_node[PREV] = node
             cache[key] = node
+            self.size += 1
 
         def move_node_to_front(node):
             prev_node = node[PREV]
@@ -62,7 +75,7 @@ class LruCache(object):
             next_node = node[NEXT]
             prev_node[NEXT] = next_node
             next_node[PREV] = prev_node
-            cache.pop(node[KEY], None)
+            self.size -= 1
 
         @synchronized
         def cache_get(key, default=None):
@@ -81,8 +94,10 @@ class LruCache(object):
                 node[VALUE] = value
             else:
                 add_node(key, value)
-                if len(cache) > max_size:
-                    delete_node(list_root[PREV])
+                if self.size > max_size:
+                    todelete = list_root[PREV]
+                    delete_node(todelete)
+                    cache.pop(todelete[KEY], None)
 
         @synchronized
         def cache_set_default(key, value):
@@ -91,8 +106,10 @@ class LruCache(object):
                 return node[VALUE]
             else:
                 add_node(key, value)
-                if len(cache) > max_size:
-                    delete_node(list_root[PREV])
+                if self.size > max_size:
+                    todelete = list_root[PREV]
+                    delete_node(todelete)
+                    cache.pop(todelete[KEY], None)
                 return value
 
         @synchronized
@@ -100,11 +117,20 @@ class LruCache(object):
             node = cache.get(key, None)
             if node:
                 delete_node(node)
+                cache.pop(node[KEY], None)
                 return node[VALUE]
             else:
                 return default
 
         @synchronized
+        def cache_del_multi(key):
+            popped = cache.pop(key)
+            if popped is None:
+                return
+            for leaf in enumerate_leaves(popped, keylen - len(key)):
+                delete_node(leaf)
+
+        @synchronized
         def cache_clear():
             list_root[NEXT] = list_root
             list_root[PREV] = list_root
@@ -112,7 +138,7 @@ class LruCache(object):
 
         @synchronized
         def cache_len():
-            return len(cache)
+            return self.size
 
         @synchronized
         def cache_contains(key):
@@ -123,6 +149,7 @@ class LruCache(object):
         self.set = cache_set
         self.setdefault = cache_set_default
         self.pop = cache_pop
+        self.del_multi = cache_del_multi
         self.len = cache_len
         self.contains = cache_contains
         self.clear = cache_clear
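
A hedged usage sketch of the new tree-keyed behaviour (the keys and values below are illustrative, not taken from Synapse): with keylen=2 every cache key is a 2-tuple, and del_multi() called with a 1-tuple prefix evicts every entry sharing that prefix in one call.

from synapse.util.caches.lrucache import LruCache

cache = LruCache(1000, 2)  # max_size=1000, keylen=2
cache.set(("!room:example.org", "@alice:example.org"), "rule-a")
cache.set(("!room:example.org", "@bob:example.org"), "rule-b")
cache.set(("!other:example.org", "@carol:example.org"), "rule-c")

# Evict everything cached under the first room in one pass: the TreeCache
# pop returns the subtree, and each leaf is unlinked from the LRU list.
cache.del_multi(("!room:example.org",))

assert cache.len() == 1
assert cache.contains(("!other:example.org", "@carol:example.org"))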