Diffstat (limited to 'synapse/util')
-rw-r--r--   synapse/util/async.py      | 19
-rw-r--r--   synapse/util/lrucache.py   | 27
-rw-r--r--   synapse/util/retryutils.py |  2
3 files changed, 47 insertions, 1 deletions
diff --git a/synapse/util/async.py b/synapse/util/async.py
index c4fe5d522f..d8febdb90c 100644
--- a/synapse/util/async.py
+++ b/synapse/util/async.py
@@ -32,3 +32,22 @@ def run_on_reactor():
     iteration of the main loop
     """
     return sleep(0)
+
+
+def create_observer(deferred):
+    """Creates a deferred that observes the result or failure of the given
+    deferred *without* affecting the given deferred.
+    """
+    d = defer.Deferred()
+
+    def callback(r):
+        d.callback(r)
+        return r
+
+    def errback(f):
+        d.errback(f)
+        return f
+
+    deferred.addCallbacks(callback, errback)
+
+    return d
diff --git a/synapse/util/lrucache.py b/synapse/util/lrucache.py
index 65d5792907..96163c90f1 100644
--- a/synapse/util/lrucache.py
+++ b/synapse/util/lrucache.py
@@ -14,6 +14,10 @@
 # limitations under the License.
 
 
+from functools import wraps
+import threading
+
+
 class LruCache(object):
     """Least-recently-used cache."""
     # TODO(mjark) Add mutex for linked list for thread safety.
@@ -24,6 +28,16 @@ class LruCache(object):
 
         PREV, NEXT, KEY, VALUE = 0, 1, 2, 3
 
+        lock = threading.Lock()
+
+        def synchronized(f):
+            @wraps(f)
+            def inner(*args, **kwargs):
+                with lock:
+                    return f(*args, **kwargs)
+
+            return inner
+
         def add_node(key, value):
             prev_node = list_root
             next_node = prev_node[NEXT]
@@ -51,6 +65,7 @@ class LruCache(object):
             next_node[PREV] = prev_node
             cache.pop(node[KEY], None)
 
+        @synchronized
         def cache_get(key, default=None):
             node = cache.get(key, None)
             if node is not None:
@@ -59,6 +74,7 @@ class LruCache(object):
             else:
                 return default
 
+        @synchronized
         def cache_set(key, value):
             node = cache.get(key, None)
             if node is not None:
@@ -69,6 +85,7 @@ class LruCache(object):
                 if len(cache) > max_size:
                     delete_node(list_root[PREV])
 
+        @synchronized
         def cache_set_default(key, value):
             node = cache.get(key, None)
             if node is not None:
@@ -79,6 +96,7 @@ class LruCache(object):
                     delete_node(list_root[PREV])
                 return value
 
+        @synchronized
         def cache_pop(key, default=None):
             node = cache.get(key, None)
             if node:
@@ -87,15 +105,21 @@ class LruCache(object):
             else:
                 return default
 
+        @synchronized
         def cache_len():
             return len(cache)
 
+        @synchronized
+        def cache_contains(key):
+            return key in cache
+
         self.sentinel = object()
         self.get = cache_get
         self.set = cache_set
         self.setdefault = cache_set_default
         self.pop = cache_pop
         self.len = cache_len
+        self.contains = cache_contains
 
     def __getitem__(self, key):
         result = self.get(key, self.sentinel)
@@ -114,3 +138,6 @@ class LruCache(object):
 
     def __len__(self):
         return self.len()
+
+    def __contains__(self, key):
+        return self.contains(key)
diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py
index 4e82232796..a42138f556 100644
--- a/synapse/util/retryutils.py
+++ b/synapse/util/retryutils.py
@@ -60,7 +60,7 @@ def get_retry_limiter(destination, clock, store, **kwargs):
 
     if retry_timings:
         retry_last_ts, retry_interval = (
-            retry_timings.retry_last_ts, retry_timings.retry_interval
+            retry_timings["retry_last_ts"], retry_timings["retry_interval"]
         )
 
         now = int(clock.time_msec())
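
Not part of the patch, but a rough usage sketch of the two new helpers may help review. It assumes the unmodified LruCache(max_size) constructor from the rest of the file; everything else follows from the hunks above.

from twisted.internet import defer

from synapse.util.async import create_observer
from synapse.util.lrucache import LruCache

# create_observer: watch a deferred without consuming its result or failure.
original = defer.Deferred()
observer = create_observer(original)
seen = []
observer.addCallback(seen.append)
original.callback("hello")
assert seen == ["hello"]           # the observer saw the result...
original.addCallback(seen.append)
assert seen == ["hello", "hello"]  # ...and the original still delivers it

# LruCache: accessors are now serialised by the threading.Lock-based
# @synchronized decorator, and membership tests work via __contains__.
cache = LruCache(2)
cache.set("a", 1)
cache.set("b", 2)
assert "a" in cache
cache.set("c", 3)                  # exceeds max_size, evicts the LRU entry
assert "a" not in cache
assert "b" in cache and "c" in cache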