-rw-r--r--  changelog.d/12389.misc                          |  1
-rw-r--r--  synapse/metrics/__init__.py                     | 16
-rw-r--r--  synapse/metrics/_gc.py                          |  6
-rw-r--r--  synapse/metrics/_reactor_metrics.py             |  4
-rw-r--r--  synapse/metrics/_types.py                       | 31
-rw-r--r--  synapse/metrics/background_process_metrics.py   |  3
-rw-r--r--  synapse/metrics/jemalloc.py                     | 20
-rw-r--r--  synapse/storage/databases/main/events.py        |  4
8 files changed, 67 insertions, 18 deletions
diff --git a/changelog.d/12389.misc b/changelog.d/12389.misc
new file mode 100644
index 0000000000..00e3a5d758
--- /dev/null
+++ b/changelog.d/12389.misc
@@ -0,0 +1 @@
+Update type annotations for compatibility with prometheus_client 0.14.
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index d321946aa2..fffd83546c 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -1,4 +1,5 @@
 # Copyright 2015, 2016 OpenMarket Ltd
+# Copyright 2022 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -52,12 +53,13 @@ from synapse.metrics._exposition import (
     start_http_server,
 )
 from synapse.metrics._gc import MIN_TIME_BETWEEN_GCS, install_gc_manager
+from synapse.metrics._types import Collector
 
 logger = logging.getLogger(__name__)
 
 METRICS_PREFIX = "/_synapse/metrics"
 
-all_gauges: "Dict[str, Union[LaterGauge, InFlightGauge]]" = {}
+all_gauges: Dict[str, Collector] = {}
 
 HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat")
 
@@ -78,11 +80,10 @@ RegistryProxy = cast(CollectorRegistry, _RegistryProxy)
 
 
 @attr.s(slots=True, hash=True, auto_attribs=True)
-class LaterGauge:
-
+class LaterGauge(Collector):
     name: str
     desc: str
-    labels: Optional[Iterable[str]] = attr.ib(hash=False)
+    labels: Optional[Sequence[str]] = attr.ib(hash=False)
     # callback: should either return a value (if there are no labels for this metric),
     # or dict mapping from a label tuple to a value
     caller: Callable[
@@ -125,7 +126,7 @@ class LaterGauge:
 MetricsEntry = TypeVar("MetricsEntry")
 
 
-class InFlightGauge(Generic[MetricsEntry]):
+class InFlightGauge(Generic[MetricsEntry], Collector):
     """Tracks number of things (e.g. requests, Measure blocks, etc) in flight
     at any given time.
 
@@ -246,7 +247,7 @@ class InFlightGauge(Generic[MetricsEntry]):
         all_gauges[self.name] = self
 
 
-class GaugeBucketCollector:
+class GaugeBucketCollector(Collector):
     """Like a Histogram, but the buckets are Gauges which are updated atomically.
 
     The data is updated by calling `update_data` with an iterable of measurements.
@@ -340,7 +341,7 @@ class GaugeBucketCollector:
 #
 
 
-class CPUMetrics:
+class CPUMetrics(Collector):
     def __init__(self) -> None:
         ticks_per_sec = 100
         try:
@@ -470,6 +471,7 @@ def register_threadpool(name: str, threadpool: ThreadPool) -> None:
 
 
 __all__ = [
+    "Collector",
     "MetricsResource",
     "generate_latest",
     "start_http_server",
diff --git a/synapse/metrics/_gc.py b/synapse/metrics/_gc.py
index 2bc909efa0..b7d47ce3e7 100644
--- a/synapse/metrics/_gc.py
+++ b/synapse/metrics/_gc.py
@@ -30,6 +30,8 @@ from prometheus_client.core import (
 
 from twisted.internet import task
 
+from synapse.metrics._types import Collector
+
 """Prometheus metrics for garbage collection"""
 
 
@@ -71,7 +73,7 @@ gc_time = Histogram(
 )
 
 
-class GCCounts:
+class GCCounts(Collector):
     def collect(self) -> Iterable[Metric]:
         cm = GaugeMetricFamily("python_gc_counts", "GC object counts", labels=["gen"])
         for n, m in enumerate(gc.get_count()):
@@ -135,7 +137,7 @@ def install_gc_manager() -> None:
 #
 
 
-class PyPyGCStats:
+class PyPyGCStats(Collector):
     def collect(self) -> Iterable[Metric]:
 
         # @stats is a pretty-printer object with __str__() returning a nice table,
diff --git a/synapse/metrics/_reactor_metrics.py b/synapse/metrics/_reactor_metrics.py
index f38f798313..a2c6e6842d 100644
--- a/synapse/metrics/_reactor_metrics.py
+++ b/synapse/metrics/_reactor_metrics.py
@@ -21,6 +21,8 @@ from prometheus_client.core import REGISTRY, GaugeMetricFamily
 
 from twisted.internet import reactor
 
+from synapse.metrics._types import Collector
+
 #
 # Twisted reactor metrics
 #
@@ -54,7 +56,7 @@ class EpollWrapper:
         return getattr(self._poller, item)
 
 
-class ReactorLastSeenMetric:
+class ReactorLastSeenMetric(Collector):
     def __init__(self, epoll_wrapper: EpollWrapper):
         self._epoll_wrapper = epoll_wrapper
 
diff --git a/synapse/metrics/_types.py b/synapse/metrics/_types.py
new file mode 100644
index 0000000000..dc5aa49397
--- /dev/null
+++ b/synapse/metrics/_types.py
@@ -0,0 +1,31 @@
+# Copyright 2022 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from abc import ABC, abstractmethod
+from typing import Iterable
+
+from prometheus_client import Metric
+
+try:
+    from prometheus_client.registry import Collector
+except ImportError:
+    # prometheus_client.Collector is new as of prometheus 0.14. We redefine it here
+    # for compatibility with earlier versions.
+    class _Collector(ABC):
+        @abstractmethod
+        def collect(self) -> Iterable[Metric]:
+            pass
+
+    Collector = _Collector  # type: ignore
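For context, a sketch of how this shim is meant to be used (illustrative, not part of the patch; the collector class and metric name are hypothetical): a custom collector subclasses Collector, implements collect(), and registers itself, which satisfies the Collector ABC introduced in prometheus_client 0.14 while behaving the same on older releases.

    from typing import Iterable, List

    from prometheus_client import Metric
    from prometheus_client.core import REGISTRY, GaugeMetricFamily

    from synapse.metrics._types import Collector


    class QueueDepthCollector(Collector):
        """Hypothetical collector reporting the depth of an in-memory queue."""

        def __init__(self, queue: List[object]) -> None:
            self._queue = queue

        def collect(self) -> Iterable[Metric]:
            # Build a fresh gauge on every scrape from the current queue length.
            gauge = GaugeMetricFamily("myapp_queue_depth", "Items currently queued")
            gauge.add_metric([], float(len(self._queue)))
            yield gauge


    work_queue: List[object] = []
    REGISTRY.register(QueueDepthCollector(work_queue))
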
diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py
index 53c508af91..f61396bb79 100644
--- a/synapse/metrics/background_process_metrics.py
+++ b/synapse/metrics/background_process_metrics.py
@@ -46,6 +46,7 @@ from synapse.logging.opentracing import (
     noop_context_manager,
     start_active_span,
 )
+from synapse.metrics._types import Collector
 
 if TYPE_CHECKING:
     import resource
@@ -127,7 +128,7 @@ _background_processes_active_since_last_scrape: "Set[_BackgroundProcess]" = set(
 _bg_metrics_lock = threading.Lock()
 
 
-class _Collector:
+class _Collector(Collector):
     """A custom metrics collector for the background process metrics.
 
     Ensures that all of the metrics are up-to-date with any in-flight processes
diff --git a/synapse/metrics/jemalloc.py b/synapse/metrics/jemalloc.py
index 98ed9c0829..6bc329f04a 100644
--- a/synapse/metrics/jemalloc.py
+++ b/synapse/metrics/jemalloc.py
@@ -16,11 +16,13 @@ import ctypes
 import logging
 import os
 import re
-from typing import Iterable, Optional
+from typing import Iterable, Optional, overload
 
-from prometheus_client import Metric
+from prometheus_client import REGISTRY, Metric
+from typing_extensions import Literal
 
-from synapse.metrics import REGISTRY, GaugeMetricFamily
+from synapse.metrics import GaugeMetricFamily
+from synapse.metrics._types import Collector
 
 logger = logging.getLogger(__name__)
 
@@ -59,6 +61,16 @@ def _setup_jemalloc_stats() -> None:
 
     jemalloc = ctypes.CDLL(jemalloc_path)
 
+    @overload
+    def _mallctl(
+        name: str, read: Literal[True] = True, write: Optional[int] = None
+    ) -> int:
+        ...
+
+    @overload
+    def _mallctl(name: str, read: Literal[False], write: Optional[int] = None) -> None:
+        ...
+
     def _mallctl(
         name: str, read: bool = True, write: Optional[int] = None
     ) -> Optional[int]:
@@ -134,7 +146,7 @@ def _setup_jemalloc_stats() -> None:
         except Exception as e:
             logger.warning("Failed to reload jemalloc stats: %s", e)
 
-    class JemallocCollector:
+    class JemallocCollector(Collector):
         """Metrics for internal jemalloc stats."""
 
         def collect(self) -> Iterable[Metric]:
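An aside on the @overload/Literal pattern added for _mallctl above (illustrative, not part of the patch): the two overloads let mypy narrow the return type from Optional[int] at each call site, depending on the literal value passed for read. A self-contained sketch of the same pattern, using a hypothetical function:

    from typing import Optional, overload

    from typing_extensions import Literal


    @overload
    def read_counter(name: str, read: Literal[True] = True) -> int:
        ...


    @overload
    def read_counter(name: str, read: Literal[False]) -> None:
        ...


    def read_counter(name: str, read: bool = True) -> Optional[int]:
        # Hypothetical implementation: every counter reads as zero.
        return 0 if read else None


    allocated = read_counter("stats.allocated")   # mypy infers int
    read_counter("epoch.advance", read=False)     # mypy infers None
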
diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py
index d253243125..57489c30f1 100644
--- a/synapse/storage/databases/main/events.py
+++ b/synapse/storage/databases/main/events.py
@@ -200,9 +200,7 @@ class PersistEventsStore:
             if stream >= 0:
                 # backfilled events have negative stream orderings, so we don't
                 # want to set the event_persisted_position to that.
-                synapse.metrics.event_persisted_position.set(
-                    events_and_contexts[-1][0].internal_metadata.stream_ordering
-                )
+                synapse.metrics.event_persisted_position.set(stream)
 
             for event, context in events_and_contexts:
                 if context.app_service: