From f1f81221205cf2ec101f96234050569d6419fd6b Mon Sep 17 00:00:00 2001
From: David Baker
Date: Thu, 21 Jan 2016 19:16:25 +0000
Subject: Change LRUCache to be tree-based so we can delete subtrees.

---
 synapse/util/caches/lrucache.py | 43 +++++++++++++++++++++++++++++++++--------
 1 file changed, 35 insertions(+), 8 deletions(-)

(limited to 'synapse/util/caches/lrucache.py')

diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index 0122b0bb3f..0feceb298a 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -17,11 +17,23 @@ from functools import wraps
 import threading
 
+from synapse.util.caches.treecache import TreeCache
+
+
+def enumerate_leaves(node, depth):
+    if depth == 0:
+        yield node
+    else:
+        for n in node.values():
+            for m in enumerate_leaves(n, depth - 1):
+                yield m
+
 
 class LruCache(object):
     """Least-recently-used cache."""
-    def __init__(self, max_size):
-        cache = {}
+    def __init__(self, max_size, keylen):
+        cache = TreeCache()
+        self.size = 0
         list_root = []
         list_root[:] = [list_root, list_root, None, None]
 
@@ -44,6 +56,7 @@ class LruCache(object):
             prev_node[NEXT] = node
             next_node[PREV] = node
             cache[key] = node
+            self.size += 1
 
         def move_node_to_front(node):
             prev_node = node[PREV]
@@ -62,7 +75,7 @@ class LruCache(object):
             next_node = node[NEXT]
             prev_node[NEXT] = next_node
             next_node[PREV] = prev_node
-            cache.pop(node[KEY], None)
+            self.size -= 1
 
         @synchronized
         def cache_get(key, default=None):
@@ -81,8 +94,10 @@ class LruCache(object):
                 node[VALUE] = value
             else:
                 add_node(key, value)
-                if len(cache) > max_size:
-                    delete_node(list_root[PREV])
+                if self.size > max_size:
+                    todelete = list_root[PREV]
+                    delete_node(todelete)
+                    cache.pop(todelete[KEY], None)
 
         @synchronized
         def cache_set_default(key, value):
@@ -91,8 +106,10 @@ class LruCache(object):
                 return node[VALUE]
             else:
                 add_node(key, value)
-                if len(cache) > max_size:
-                    delete_node(list_root[PREV])
+                if self.size > max_size:
+                    todelete = list_root[PREV]
+                    delete_node(todelete)
+                    cache.pop(todelete[KEY], None)
                 return value
 
         @synchronized
@@ -100,10 +117,19 @@ class LruCache(object):
             node = cache.get(key, None)
             if node:
                 delete_node(node)
+                cache.pop(node[KEY], None)
                 return node[VALUE]
             else:
                 return default
 
+        @synchronized
+        def cache_del_multi(key):
+            popped = cache.pop(key)
+            if popped is None:
+                return
+            for leaf in enumerate_leaves(popped, keylen - len(key)):
+                delete_node(leaf)
+
         @synchronized
         def cache_clear():
             list_root[NEXT] = list_root
@@ -112,7 +138,7 @@ class LruCache(object):
 
         @synchronized
         def cache_len():
-            return len(cache)
+            return self.size
 
         @synchronized
         def cache_contains(key):
@@ -123,6 +149,7 @@ class LruCache(object):
         self.set = cache_set
         self.setdefault = cache_set_default
         self.pop = cache_pop
+        self.del_multi = cache_del_multi
        self.len = cache_len
        self.contains = cache_contains
        self.clear = cache_clear
--
cgit 1.5.1
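TreeCache itself is not part of this patch, but enumerate_leaves() only makes sense against a nested-dict layout in which each component of a tuple key selects one level of the tree. The standalone sketch below (illustrative only, not synapse's TreeCache implementation) shows that shape, and why popping a key prefix hands cache_del_multi a subtree whose leaves are exactly the LRU list nodes it has to unlink:

    def enumerate_leaves(node, depth):
        # Walk `depth` levels of nested dicts and yield every leaf value.
        if depth == 0:
            yield node
        else:
            for n in node.values():
                for m in enumerate_leaves(n, depth - 1):
                    yield m

    def tree_set(root, key, value):
        # Store `value` under the tuple `key`, creating intermediate dicts as needed.
        node = root
        for part in key[:-1]:
            node = node.setdefault(part, {})
        node[key[-1]] = value

    root = {}
    tree_set(root, ("room1", "user_a"), "node_a")
    tree_set(root, ("room1", "user_b"), "node_b")
    tree_set(root, ("room2", "user_a"), "node_c")

    # With keylen=2, popping the one-element prefix ("room1",) returns a subtree;
    # its leaves are what cache_del_multi then passes to delete_node(), one by one.
    subtree = root.pop("room1")
    print(sorted(enumerate_leaves(subtree, 2 - 1)))  # ['node_a', 'node_b']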
From 10f76dc5da47c49a4191d8113b3c0615224eb9fd Mon Sep 17 00:00:00 2001
From: David Baker
Date: Fri, 22 Jan 2016 12:10:33 +0000
Subject: Make LRU cache not default to treecache & add options to use it

---
 synapse/storage/event_push_actions.py |  2 +-
 synapse/util/caches/descriptors.py    | 20 ++++++++++++++------
 synapse/util/caches/lrucache.py       |  9 +++++----
 tests/util/test_lrucache.py           |  3 ++-
 4 files changed, 22 insertions(+), 12 deletions(-)

(limited to 'synapse/util/caches/lrucache.py')

diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py
index 6a212c630b..a05c4f84cf 100644
--- a/synapse/storage/event_push_actions.py
+++ b/synapse/storage/event_push_actions.py
@@ -53,7 +53,7 @@ class EventPushActionsStore(SQLBaseStore):
             f,
         )
 
-    @cachedInlineCallbacks(num_args=3, lru=True)
+    @cachedInlineCallbacks(num_args=3, lru=True, tree=True)
     def get_unread_event_push_actions_by_room_for_user(
             self, room_id, user_id, last_read_event_id
     ):
diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py
index f4a2b4e590..88e56e3302 100644
--- a/synapse/util/caches/descriptors.py
+++ b/synapse/util/caches/descriptors.py
@@ -17,6 +17,7 @@ import logging
 
 from synapse.util.async import ObservableDeferred
 from synapse.util import unwrapFirstError
 from synapse.util.caches.lrucache import LruCache
+from synapse.util.caches.treecache import TreeCache
 
 from . import caches_by_name, DEBUG_CACHES, cache_counter
@@ -36,9 +37,12 @@ _CacheSentinel = object()
 
 
 class Cache(object):
-    def __init__(self, name, max_entries=1000, keylen=1, lru=True):
+    def __init__(self, name, max_entries=1000, keylen=1, lru=True, tree=False):
         if lru:
-            self.cache = LruCache(max_size=max_entries, keylen=keylen)
+            cache_type = TreeCache if tree else dict
+            self.cache = LruCache(
+                max_size=max_entries, keylen=keylen, cache_type=cache_type
+            )
             self.max_entries = None
         else:
             self.cache = OrderedDict()
@@ -131,7 +135,7 @@ class CacheDescriptor(object):
     which can be used to insert values into the cache specifically, without
     calling the calculation function.
     """
-    def __init__(self, orig, max_entries=1000, num_args=1, lru=True,
+    def __init__(self, orig, max_entries=1000, num_args=1, lru=True, tree=False,
                  inlineCallbacks=False):
         self.orig = orig
 
@@ -143,6 +147,7 @@ class CacheDescriptor(object):
         self.max_entries = max_entries
         self.num_args = num_args
         self.lru = lru
+        self.tree = tree
 
         self.arg_names = inspect.getargspec(orig).args[1:num_args+1]
 
@@ -158,6 +163,7 @@ class CacheDescriptor(object):
             max_entries=self.max_entries,
             keylen=self.num_args,
             lru=self.lru,
+            tree=self.tree,
         )
 
     def __get__(self, obj, objtype=None):
@@ -331,21 +337,23 @@ class CacheListDescriptor(object):
         return wrapped
 
 
-def cached(max_entries=1000, num_args=1, lru=True):
+def cached(max_entries=1000, num_args=1, lru=True, tree=False):
     return lambda orig: CacheDescriptor(
         orig,
         max_entries=max_entries,
         num_args=num_args,
-        lru=lru
+        lru=lru,
+        tree=tree,
     )
 
 
-def cachedInlineCallbacks(max_entries=1000, num_args=1, lru=False):
+def cachedInlineCallbacks(max_entries=1000, num_args=1, lru=False, tree=False):
     return lambda orig: CacheDescriptor(
         orig,
         max_entries=max_entries,
         num_args=num_args,
         lru=lru,
+        tree=tree,
         inlineCallbacks=True,
     )
 
diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index 0feceb298a..23e86ec110 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -17,8 +17,6 @@ from functools import wraps
 import threading
 
-from synapse.util.caches.treecache import TreeCache
-
 
 def enumerate_leaves(node, depth):
     if depth == 0:
@@ -31,8 +29,8 @@ def enumerate_leaves(node, depth):
 
 class LruCache(object):
     """Least-recently-used cache."""
-    def __init__(self, max_size, keylen):
-        cache = TreeCache()
+    def __init__(self, max_size, keylen, cache_type=dict):
+        cache = cache_type()
         self.size = 0
         list_root = []
         list_root[:] = [list_root, list_root, None, None]
@@ -124,6 +122,9 @@ class LruCache(object):
 
         @synchronized
         def cache_del_multi(key):
+            """
+            This will only work if constructed with cache_type=TreeCache
+            """
             popped = cache.pop(key)
             if popped is None:
                 return
diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py
index fca2e98983..bcad1d4258 100644
--- a/tests/util/test_lrucache.py
+++ b/tests/util/test_lrucache.py
@@ -17,6 +17,7 @@ from .. import unittest
 
 from synapse.util.caches.lrucache import LruCache
+from synapse.util.caches.treecache import TreeCache
 
 
 class LruCacheTestCase(unittest.TestCase):
@@ -54,7 +55,7 @@ class LruCacheTestCase(unittest.TestCase):
         self.assertEquals(cache.pop(("key",)), None)
 
     def test_del_multi(self):
-        cache = LruCache(4, 2)
+        cache = LruCache(4, 2, cache_type=TreeCache)
         cache[("animal", "cat")] = "mew"
         cache[("animal", "dog")] = "woof"
         cache[("vehicles", "car")] = "vroom"
--
cgit 1.5.1
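The key change in this commit is that the backing store for LruCache becomes pluggable and the tree layout is opt-in per cache. A rough standalone sketch of that plumbing, using stand-in names (FakeTreeCache, FakeLruCache and build_cache are illustrative, not synapse's own classes):

    class FakeTreeCache(dict):
        # Stand-in for synapse.util.caches.treecache.TreeCache in this sketch.
        pass

    class FakeLruCache(object):
        def __init__(self, max_size, keylen, cache_type=dict):
            # Mirrors the new LruCache signature: the caller picks the store type.
            self.max_size = max_size
            self.keylen = keylen
            self.cache = cache_type()  # plain dict unless a tree is requested

    def build_cache(max_entries=1000, keylen=1, tree=False):
        # Mirrors the pattern in Cache.__init__: tree=True selects the tree store.
        cache_type = FakeTreeCache if tree else dict
        return FakeLruCache(max_size=max_entries, keylen=keylen, cache_type=cache_type)

    flat = build_cache()                         # dict-backed, the default
    per_room = build_cache(keylen=3, tree=True)  # tree-backed, as used for push actions
    print(type(flat.cache).__name__)             # dict
    print(type(per_room.cache).__name__)         # FakeTreeCache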
From d552861346d6f2f3d50fa0aff3e239d17cf9b7c0 Mon Sep 17 00:00:00 2001
From: David Baker
Date: Fri, 22 Jan 2016 12:18:14 +0000
Subject: Revert all the bits changing keys of eeverything that used LRUCaches
 to tuples

---
 synapse/push/push_rule_evaluator.py     |  6 ++---
 synapse/util/caches/dictionary_cache.py | 10 ++++----
 synapse/util/caches/lrucache.py         |  2 +-
 tests/storage/test__base.py             | 26 +++++++++----------
 tests/util/test_lrucache.py             | 44 ++++++++++++++++-----------------
 5 files changed, 44 insertions(+), 44 deletions(-)

(limited to 'synapse/util/caches/lrucache.py')

diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py
index 27b0de4f66..dca018af95 100644
--- a/synapse/push/push_rule_evaluator.py
+++ b/synapse/push/push_rule_evaluator.py
@@ -309,14 +309,14 @@ def _flatten_dict(d, prefix=[], result={}):
     return result
 
 
-regex_cache = LruCache(5000, 1)
+regex_cache = LruCache(5000)
 
 
 def _compile_regex(regex_str):
-    r = regex_cache.get((regex_str,), None)
+    r = regex_cache.get(regex_str, None)
     if r:
         return r
 
     r = re.compile(regex_str, flags=re.IGNORECASE)
-    regex_cache[(regex_str,)] = r
+    regex_cache[regex_str] = r
     return r
diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py
index b7964467eb..f92d80542b 100644
--- a/synapse/util/caches/dictionary_cache.py
+++ b/synapse/util/caches/dictionary_cache.py
@@ -32,7 +32,7 @@ class DictionaryCache(object):
     """
 
     def __init__(self, name, max_entries=1000):
-        self.cache = LruCache(max_size=max_entries, keylen=1)
+        self.cache = LruCache(max_size=max_entries)
 
         self.name = name
         self.sequence = 0
@@ -56,7 +56,7 @@ class DictionaryCache(object):
         )
 
     def get(self, key, dict_keys=None):
-        entry = self.cache.get((key,), self.sentinel)
+        entry = self.cache.get(key, self.sentinel)
         if entry is not self.sentinel:
             cache_counter.inc_hits(self.name)
 
@@ -78,7 +78,7 @@ class DictionaryCache(object):
         # Increment the sequence number so that any SELECT statements that
         # raced with the INSERT don't update the cache (SYN-369)
         self.sequence += 1
-        self.cache.pop((key,), None)
+        self.cache.pop(key, None)
 
     def invalidate_all(self):
         self.check_thread()
@@ -96,8 +96,8 @@ class DictionaryCache(object):
             self._update_or_insert(key, value)
 
     def _update_or_insert(self, key, value):
-        entry = self.cache.setdefault((key,), DictionaryEntry(False, {}))
+        entry = self.cache.setdefault(key, DictionaryEntry(False, {}))
         entry.value.update(value)
 
     def _insert(self, key, value):
-        self.cache[(key,)] = DictionaryEntry(True, value)
+        self.cache[key] = DictionaryEntry(True, value)
diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index 23e86ec110..5f9405c95f 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -29,7 +29,7 @@ def enumerate_leaves(node, depth):
 
 class LruCache(object):
     """Least-recently-used cache."""
-    def __init__(self, max_size, keylen, cache_type=dict):
+    def __init__(self, max_size, keylen=1, cache_type=dict):
         cache = cache_type()
         self.size = 0
         list_root = []
diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py
index c4e4c9b4bf..219288621d 100644
--- a/tests/storage/test__base.py
+++ b/tests/storage/test__base.py
@@ -56,42 +56,42 @@ class CacheTestCase(unittest.TestCase):
     def test_eviction(self):
         cache = Cache("test", max_entries=2)
 
-        cache.prefill((1,), "one")
-        cache.prefill((2,), "two")
-        cache.prefill((3,), "three")  # 1 will be evicted
+        cache.prefill(1, "one")
+        cache.prefill(2, "two")
+        cache.prefill(3, "three")  # 1 will be evicted
 
         failed = False
         try:
-            cache.get((1,))
+            cache.get(1)
         except KeyError:
             failed = True
 
         self.assertTrue(failed)
 
-        cache.get((2,))
-        cache.get((3,))
+        cache.get(2)
+        cache.get(3)
 
     def test_eviction_lru(self):
         cache = Cache("test", max_entries=2, lru=True)
 
-        cache.prefill((1,), "one")
-        cache.prefill((2,), "two")
+        cache.prefill(1, "one")
+        cache.prefill(2, "two")
 
         # Now access 1 again, thus causing 2 to be least-recently used
-        cache.get((1,))
+        cache.get(1)
 
-        cache.prefill((3,), "three")
+        cache.prefill(3, "three")
 
         failed = False
         try:
-            cache.get((2,))
+            cache.get(2)
         except KeyError:
             failed = True
 
         self.assertTrue(failed)
 
-        cache.get((1,))
-        cache.get((3,))
+        cache.get(1)
+        cache.get(3)
 
 
 class CacheDecoratorTestCase(unittest.TestCase):
diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py
index bcad1d4258..2cd3d26454 100644
--- a/tests/util/test_lrucache.py
+++ b/tests/util/test_lrucache.py
@@ -22,37 +22,37 @@ from synapse.util.caches.treecache import TreeCache
 
 class LruCacheTestCase(unittest.TestCase):
 
     def test_get_set(self):
-        cache = LruCache(1, 1)
-        cache[("key",)] = "value"
-        self.assertEquals(cache.get(("key",)), "value")
-        self.assertEquals(cache[("key",)], "value")
+        cache = LruCache(1)
+        cache["key"] = "value"
+        self.assertEquals(cache.get("key"), "value")
+        self.assertEquals(cache["key"], "value")
 
     def test_eviction(self):
-        cache = LruCache(2, 1)
-        cache[(1,)] = 1
-        cache[(2,)] = 2
+        cache = LruCache(2)
+        cache[1] = 1
+        cache[2] = 2
 
-        self.assertEquals(cache.get((1,)), 1)
-        self.assertEquals(cache.get((2,)), 2)
+        self.assertEquals(cache.get(1), 1)
+        self.assertEquals(cache.get(2), 2)
 
-        cache[(3,)] = 3
+        cache[3] = 3
 
-        self.assertEquals(cache.get((1,)), None)
-        self.assertEquals(cache.get((2,)), 2)
-        self.assertEquals(cache.get((3,)), 3)
+        self.assertEquals(cache.get(1), None)
+        self.assertEquals(cache.get(2), 2)
+        self.assertEquals(cache.get(3), 3)
 
     def test_setdefault(self):
-        cache = LruCache(1, 1)
-        self.assertEquals(cache.setdefault(("key",), 1), 1)
-        self.assertEquals(cache.get(("key",)), 1)
-        self.assertEquals(cache.setdefault(("key",), 2), 1)
-        self.assertEquals(cache.get(("key",)), 1)
+        cache = LruCache(1)
+        self.assertEquals(cache.setdefault("key", 1), 1)
+        self.assertEquals(cache.get("key"), 1)
+        self.assertEquals(cache.setdefault("key", 2), 1)
+        self.assertEquals(cache.get("key"), 1)
 
     def test_pop(self):
-        cache = LruCache(1, 1)
-        cache[("key",)] = 1
-        self.assertEquals(cache.pop(("key",)), 1)
-        self.assertEquals(cache.pop(("key",)), None)
+        cache = LruCache(1)
+        cache["key"] = 1
+        self.assertEquals(cache.pop("key"), 1)
+        self.assertEquals(cache.pop("key"), None)
 
     def test_del_multi(self):
         cache = LruCache(4, 2, cache_type=TreeCache)
--
cgit 1.5.1
From cd80019eec69172f9753e0da25ad22d2f7c434db Mon Sep 17 00:00:00 2001
From: David Baker
Date: Fri, 22 Jan 2016 12:21:13 +0000
Subject: docs

---
 synapse/util/caches/lrucache.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

(limited to 'synapse/util/caches/lrucache.py')

diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index 5f9405c95f..cc6c25a712 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -28,7 +28,11 @@ def enumerate_leaves(node, depth):
 
 
 class LruCache(object):
-    """Least-recently-used cache."""
+    """
+    Least-recently-used cache.
+    Supports del_multi only if cache_type=TreeCache
+    If cache_type=TreeCache, all keys must be tuples.
+    """
     def __init__(self, max_size, keylen=1, cache_type=dict):
         cache = cache_type()
         self.size = 0
--
cgit 1.5.1
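Taken together with the new docstring, the intended contract is: tuple keys of length keylen, and del_multi takes a shorter prefix tuple to drop a whole subtree. A usage sketch, assuming a synapse checkout that includes these commits (the keys and values here are made up):

    from synapse.util.caches.lrucache import LruCache
    from synapse.util.caches.treecache import TreeCache

    cache = LruCache(10, 2, cache_type=TreeCache)   # keylen=2, tree-backed
    cache[("animal", "cat")] = "mew"
    cache[("animal", "dog")] = "woof"
    cache[("vehicles", "car")] = "vroom"

    # Deleting the ("animal",) prefix removes every entry underneath it at once.
    cache.del_multi(("animal",))
    print(cache.get(("animal", "cat")))    # None
    print(cache.get(("vehicles", "car")))  # vroom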
From 7cd418d38e472563d8df6546b3227dcfafe3e145 Mon Sep 17 00:00:00 2001
From: David Baker
Date: Fri, 22 Jan 2016 13:40:37 +0000
Subject: Don't add the member functiopn if we're not using treecache

---
 synapse/util/caches/lrucache.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

(limited to 'synapse/util/caches/lrucache.py')

diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index cc6c25a712..e6a66dc041 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -17,6 +17,8 @@ from functools import wraps
 import threading
 
+from synapse.util.caches.treecache import TreeCache
+
 
 def enumerate_leaves(node, depth):
     if depth == 0:
@@ -154,7 +156,8 @@ class LruCache(object):
         self.set = cache_set
         self.setdefault = cache_set_default
         self.pop = cache_pop
-        self.del_multi = cache_del_multi
+        if cache_type is TreeCache:
+            self.del_multi = cache_del_multi
         self.len = cache_len
         self.contains = cache_contains
         self.clear = cache_clear
--
cgit 1.5.1
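After this final commit, del_multi is only bound when the cache was built with cache_type=TreeCache, so a caller can feature-detect it rather than assume it exists; a minimal sketch, again assuming a checkout that includes these commits:

    from synapse.util.caches.lrucache import LruCache
    from synapse.util.caches.treecache import TreeCache

    flat = LruCache(10)                           # dict-backed: no del_multi attribute
    tree = LruCache(10, 2, cache_type=TreeCache)  # tree-backed: del_multi is bound

    print(hasattr(flat, "del_multi"))  # False
    print(hasattr(tree, "del_multi"))  # True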