author | Mark Haines <mark.haines@matrix.org> | 2015-04-29 13:15:14 +0100 |
---|---|---|
committer | Mark Haines <mark.haines@matrix.org> | 2015-04-29 13:15:14 +0100 |
commit | 4ad8b451559682645c818f6c1180e5f9d42a7eeb (patch) | |
tree | 94fe0beaaaa4ed8c1bb9451f1274df819d9ce8d7 /synapse/util/lrucache.py | |
parent | Update the query format used by keyring to match current key v2 spec (diff) | |
parent | Mention that postgres databases must have the correct charset encoding (diff) | |
download | synapse-4ad8b451559682645c818f6c1180e5f9d42a7eeb.tar.xz | |
Merge branch 'develop' into key_distribution
Conflicts: synapse/config/homeserver.py
Diffstat (limited to 'synapse/util/lrucache.py')
-rw-r--r-- | synapse/util/lrucache.py | 20 |
1 file changed, 20 insertions, 0 deletions
```diff
diff --git a/synapse/util/lrucache.py b/synapse/util/lrucache.py
index 2f7b615f78..96163c90f1 100644
--- a/synapse/util/lrucache.py
+++ b/synapse/util/lrucache.py
@@ -14,6 +14,10 @@
 # limitations under the License.
 
+from functools import wraps
+import threading
+
+
 class LruCache(object):
     """Least-recently-used cache."""
     # TODO(mjark) Add mutex for linked list for thread safety.
 
@@ -24,6 +28,16 @@ class LruCache(object):
 
         PREV, NEXT, KEY, VALUE = 0, 1, 2, 3
 
+        lock = threading.Lock()
+
+        def synchronized(f):
+            @wraps(f)
+            def inner(*args, **kwargs):
+                with lock:
+                    return f(*args, **kwargs)
+
+            return inner
+
         def add_node(key, value):
             prev_node = list_root
             next_node = prev_node[NEXT]
@@ -51,6 +65,7 @@ class LruCache(object):
             next_node[PREV] = prev_node
             cache.pop(node[KEY], None)
 
+        @synchronized
         def cache_get(key, default=None):
             node = cache.get(key, None)
             if node is not None:
@@ -59,6 +74,7 @@ class LruCache(object):
             else:
                 return default
 
+        @synchronized
         def cache_set(key, value):
             node = cache.get(key, None)
             if node is not None:
@@ -69,6 +85,7 @@ class LruCache(object):
                 if len(cache) > max_size:
                     delete_node(list_root[PREV])
 
+        @synchronized
         def cache_set_default(key, value):
             node = cache.get(key, None)
             if node is not None:
@@ -79,6 +96,7 @@ class LruCache(object):
                     delete_node(list_root[PREV])
                 return value
 
+        @synchronized
         def cache_pop(key, default=None):
             node = cache.get(key, None)
             if node:
@@ -87,9 +105,11 @@ class LruCache(object):
             else:
                 return default
 
+        @synchronized
         def cache_len():
             return len(cache)
 
+        @synchronized
         def cache_contains(key):
             return key in cache
 
```
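The change defines a `synchronized` decorator inside `__init__` that captures a single `threading.Lock`, so every closure it wraps is serialised through the same lock. A minimal standalone sketch of that pattern (a hypothetical `Counter`, not Synapse code) looks like this:

```python
# Sketch of the locking pattern introduced above: closure-based "methods"
# wrapped by a `synchronized` decorator that serialises every call through
# one shared threading.Lock. Hypothetical example, not Synapse code.
from functools import wraps
import threading


class Counter(object):
    def __init__(self):
        value = [0]                  # mutable state captured by the closures
        lock = threading.Lock()      # one lock shared by all wrapped closures

        def synchronized(f):
            @wraps(f)
            def inner(*args, **kwargs):
                with lock:           # same lock for every decorated function
                    return f(*args, **kwargs)
            return inner

        @synchronized
        def increment():
            value[0] += 1            # read-modify-write, safe under the lock

        @synchronized
        def get():
            return value[0]

        # Expose the wrapped closures as the object's public API.
        self.increment = increment
        self.get = get


if __name__ == "__main__":
    counter = Counter()
    workers = [
        threading.Thread(target=lambda: [counter.increment() for _ in range(10000)])
        for _ in range(4)
    ]
    for t in workers:
        t.start()
    for t in workers:
        t.join()
    print(counter.get())             # 40000: increments never interleave
```

Because the lock lives in the enclosing scope rather than on `self`, callers cannot bypass it; every decorated closure takes the same lock before touching the shared state.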