summary refs log tree commit diff
path: root/synapse
diff options
context:
space:
mode:
Diffstat (limited to 'synapse')
-rw-r--r--synapse/metrics/metric.py22
-rw-r--r--synapse/util/caches/descriptors.py44
2 files changed, 46 insertions, 20 deletions
diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py
index 368fc24984..6f82b360bc 100644
--- a/synapse/metrics/metric.py
+++ b/synapse/metrics/metric.py
@@ -15,6 +15,7 @@
 
 
 from itertools import chain
+from collections import Counter
 
 
 # TODO(paul): I can't believe Python doesn't have one of these
@@ -55,30 +56,29 @@ class CounterMetric(BaseMetric):
     """The simplest kind of metric; one that stores a monotonically-increasing
     integer that counts events."""
 
+    __slots__ = ("counts",)
+
     def __init__(self, *args, **kwargs):
         super(CounterMetric, self).__init__(*args, **kwargs)
 
-        self.counts = {}
+        self.counts = Counter()
 
         # Scalar metrics are never empty
         if self.is_scalar():
             self.counts[()] = 0
 
     def inc_by(self, incr, *values):
-        if len(values) != self.dimension():
-            raise ValueError(
-                "Expected as many values to inc() as labels (%d)" % (self.dimension())
-            )
+        # NOTE(review): the dimension() validation was removed from this hot
+        # path for performance; callers must pass exactly as many label
+        # values as the metric has dimensions.
 
         # TODO: should assert that the tag values are all strings
 
-        if values not in self.counts:
-            self.counts[values] = incr
-        else:
-            self.counts[values] += incr
+        self.counts[values] += incr
 
     def inc(self, *values):
-        self.inc_by(1, *values)
+        self.counts[values] += 1
 
     def render_item(self, k):
         return ["%s%s %d" % (self.name, self._render_key(k), self.counts[k])]
@@ -132,6 +132,8 @@ class CacheMetric(object):
     This metric generates standard metric name pairs, so that monitoring rules
     can easily be applied to measure hit ratio."""
 
+    __slots__ = ("name", "hits", "total", "size")
+
     def __init__(self, name, size_callback, labels=[]):
         self.name = name
 
diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py
index 758f5982b0..4bbb16ed3c 100644
--- a/synapse/util/caches/descriptors.py
+++ b/synapse/util/caches/descriptors.py
@@ -32,6 +32,7 @@ import os
 import functools
 import inspect
 import threading
+import itertools
 
 logger = logging.getLogger(__name__)
 
@@ -43,6 +44,14 @@ CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))
 
 
 class Cache(object):
+    __slots__ = (
+        "cache",
+        "max_entries",
+        "name",
+        "keylen",
+        "sequence",
+        "thread",
+    )
 
     def __init__(self, name, max_entries=1000, keylen=1, lru=True, tree=False):
         if lru:
@@ -293,16 +302,21 @@ class CacheListDescriptor(object):
 
             # results maps arg -> result for lookups that have already
             # completed; cached_defers maps arg -> deferred resulting in a
             # 2-tuple (`arg`, `result`)
-            cached = {}
+            results = {}
+            cached_defers = {}
             missing = []
             for arg in list_args:
                 key = list(keyargs)
                 key[self.list_pos] = arg
 
                 try:
-                    res = cache.get(tuple(key)).observe()
-                    res.addCallback(lambda r, arg: (arg, r), arg)
-                    cached[arg] = res
+                    res = cache.get(tuple(key))
+                    if not res.called:
+                        res = res.observe()
+                        res.addCallback(lambda r, arg: (arg, r), arg)
+                        cached_defers[arg] = res
+                    else:
+                        results[arg] = res.result
                 except KeyError:
                     missing.append(arg)
 
@@ -340,12 +354,22 @@ class CacheListDescriptor(object):
                     res = observer.observe()
                     res.addCallback(lambda r, arg: (arg, r), arg)
 
-                    cached[arg] = res
-
-            return preserve_context_over_deferred(defer.gatherResults(
-                cached.values(),
-                consumeErrors=True,
-            ).addErrback(unwrapFirstError).addCallback(lambda res: dict(res)))
+                    cached_defers[arg] = res
+
+            if cached_defers:
+                return preserve_context_over_deferred(defer.gatherResults(
+                    cached_defers.values(),
+                    consumeErrors=True,
+                ).addCallback(
+                    lambda res: {
+                        k: v
+                        for k, v in itertools.chain(results.items(), res)
+                    }
+                )).addErrback(
+                    unwrapFirstError
+                )
+            else:
+                return results
 
         obj.__dict__[self.orig.__name__] = wrapped