author:     Erik Johnston <erik@matrix.org>    2016-08-19 15:58:52 +0100
committer:  Erik Johnston <erik@matrix.org>    2016-08-19 16:09:16 +0100
commit:     45fd2c8942009a634cf38a90ca1f306aae7022fc (patch)
tree:       a37d4431236537d669209c10281e3d798fcc21fa /synapse/util
parent:     Rename to on_invalidate (diff)
download:   synapse-45fd2c8942009a634cf38a90ca1f306aae7022fc.tar.xz
Ensure invalidation list does not grow unboundedly
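
Why this matters (my summary, not text from the commit): each `LruCache` node keeps a collection of invalidation callbacks, and `cache_get`/`cache_set` re-register the caller's callback on every hit, so a hot entry previously accumulated an ever-growing list of identical callbacks. The sketch below is an illustration only, not the Synapse implementation; `Node`, `cache_get` and `on_invalidate` are simplified stand-ins for the `_Node`/`LruCache` code touched in the diff.

```python
# Illustrative sketch: why the per-node callback container is now a set.

class Node(object):
    def __init__(self, value):
        self.value = value
        self.callbacks = set()  # was `[]` (a list) before this commit


def cache_get(node, callback=None):
    # Every cache hit re-registers the caller's invalidation callback.
    if callback is not None:
        node.callbacks.add(callback)  # list.append() would keep duplicating it
    return node.value


def on_invalidate():
    print("entry invalidated")


node = Node("cached value")
for _ in range(10000):  # a hot entry read many times with the same callback
    cache_get(node, on_invalidate)

# Duplicate registrations collapse in a set; the old list would now hold
# 10000 copies of the same callback.
print(len(node.callbacks))  # -> 1
```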
Diffstat (limited to 'synapse/util')
-rw-r--r--  synapse/util/caches/descriptors.py  20
-rw-r--r--  synapse/util/caches/lrucache.py     16
2 files changed, 16 insertions, 20 deletions
diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py
index e93ff40dc0..8dba61d49f 100644
--- a/synapse/util/caches/descriptors.py
+++ b/synapse/util/caches/descriptors.py
@@ -25,6 +25,7 @@ from synapse.util.logcontext import (
 from . import DEBUG_CACHES, register_cache
 
 from twisted.internet import defer
+from collections import namedtuple
 
 import os
 import functools
@@ -210,16 +211,17 @@ class CacheDescriptor(object):
             # whenever we are invalidated
             invalidate_callback = kwargs.pop("on_invalidate", None)
 
-            # Add our own `cache_context` to argument list if the wrapped function
-            # has asked for one
-            self_context = _CacheContext(cache, None)
+            # Add temp cache_context so inspect.getcallargs doesn't explode
             if self.add_cache_context:
-                kwargs["cache_context"] = self_context
+                kwargs["cache_context"] = None
 
             arg_dict = inspect.getcallargs(self.orig, obj, *args, **kwargs)
             cache_key = tuple(arg_dict[arg_nm] for arg_nm in self.arg_names)
 
-            self_context.key = cache_key
+            # Add our own `cache_context` to argument list if the wrapped function
+            # has asked for one
+            if self.add_cache_context:
+                kwargs["cache_context"] = _CacheContext(cache, cache_key)
 
             try:
                 cached_result_d = cache.get(cache_key, callback=invalidate_callback)
@@ -414,13 +416,7 @@ class CacheListDescriptor(object):
         return wrapped
 
 
-class _CacheContext(object):
-    __slots__ = ["cache", "key"]
-
-    def __init__(self, cache, key):
-        self.cache = cache
-        self.key = key
-
+class _CacheContext(namedtuple("_CacheContext", ("cache", "key"))):
     def invalidate(self):
         self.cache.invalidate(self.key)
 
diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index a5a827b4d1..9c4c679175 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -32,7 +32,7 @@ def enumerate_leaves(node, depth):
 class _Node(object):
     __slots__ = ["prev_node", "next_node", "key", "value", "callbacks"]
 
-    def __init__(self, prev_node, next_node, key, value, callbacks=[]):
+    def __init__(self, prev_node, next_node, key, value, callbacks=set()):
         self.prev_node = prev_node
         self.next_node = next_node
         self.key = key
@@ -66,7 +66,7 @@ class LruCache(object):
 
             return inner
 
-        def add_node(key, value, callbacks=[]):
+        def add_node(key, value, callbacks=set()):
             prev_node = list_root
             next_node = prev_node.next_node
             node = _Node(prev_node, next_node, key, value, callbacks)
@@ -94,7 +94,7 @@ class LruCache(object):
 
             for cb in node.callbacks:
                 cb()
-            node.callbacks = []
+            node.callbacks.clear()
 
         @synchronized
         def cache_get(key, default=None, callback=None):
@@ -102,7 +102,7 @@ class LruCache(object):
             if node is not None:
                 move_node_to_front(node)
                 if callback:
-                    node.callbacks.append(callback)
+                    node.callbacks.add(callback)
                 return node.value
             else:
                 return default
@@ -114,18 +114,18 @@ class LruCache(object):
                 if value != node.value:
                     for cb in node.callbacks:
                         cb()
-                    node.callbacks = []
+                    node.callbacks.clear()
 
                 if callback:
-                    node.callbacks.append(callback)
+                    node.callbacks.add(callback)
 
                 move_node_to_front(node)
                 node.value = value
             else:
                 if callback:
-                    callbacks = [callback]
+                    callbacks = set([callback])
                 else:
-                    callbacks = []
+                    callbacks = set()
                 add_node(key, value, callbacks)
             if len(cache) > max_size:
                 todelete = list_root.prev_node
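
A note on the `descriptors.py` half of the change (my reading of the diff, not text from the commit): moving callbacks into a set only bounds growth if repeated registrations of the 'same' callback compare and hash equal. Rebuilding `_CacheContext` on a `namedtuple` gives it value-based equality and hashing, so two contexts built from the same cache and key are interchangeable; on the Python 2 interpreter Synapse targeted at the time, bound methods hash and compare via their `__self__`/`__func__`, so the `cache_context.invalidate` callbacks passed as `on_invalidate` should collapse the same way. A minimal sketch, with `DummyCache` standing in for Synapse's real `Cache` class:

```python
from collections import namedtuple


class _CacheContext(namedtuple("_CacheContext", ("cache", "key"))):
    # Mirrors the class after this commit: __eq__/__hash__ come from the
    # (cache, key) tuple rather than from object identity.
    def invalidate(self):
        self.cache.invalidate(self.key)


class DummyCache(object):
    # Illustration only -- stands in for synapse.util.caches.descriptors.Cache.
    def invalidate(self, key):
        print("invalidating %r" % (key,))


cache = DummyCache()
a = _CacheContext(cache, ("!room:example.org",))
b = _CacheContext(cache, ("!room:example.org",))

# Equal contexts compare and hash the same, so duplicates collapse in a set.
print(a == b, hash(a) == hash(b))  # -> True True
print(len(set([a, b])))            # -> 1
a.invalidate()                     # -> invalidating ('!room:example.org',)
```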