diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index fef2846669..f237b8a236 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -46,7 +46,7 @@ logger = logging.getLogger(__name__)
METRICS_PREFIX = "/_synapse/metrics"
running_on_pypy = platform.python_implementation() == "PyPy"
-all_gauges = {} # type: Dict[str, Union[LaterGauge, InFlightGauge]]
+all_gauges: "Dict[str, Union[LaterGauge, InFlightGauge]]" = {}
HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat")
@@ -130,7 +130,7 @@ class InFlightGauge:
)
# Counts number of in flight blocks for a given set of label values
- self._registrations = {} # type: Dict
+ self._registrations: Dict = {}
# Protects access to _registrations
self._lock = threading.Lock()
@@ -248,7 +248,7 @@ class GaugeBucketCollector:
# We initially set this to None. We won't report metrics until
# this has been initialised after a successful data update
- self._metric = None # type: Optional[GaugeHistogramMetricFamily]
+ self._metric: Optional[GaugeHistogramMetricFamily] = None
registry.register(self)
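
The hunks above all follow the same pattern: the PEP 484 `# type:` comment moves into a PEP 526 inline variable annotation, and the annotation is written as a string when it forward-references a class (such as `LaterGauge` or `InFlightGauge`) that is defined later in the module, since module-level annotations are evaluated at import time. A minimal sketch of both forms, using hypothetical names rather than Synapse's:

```python
from typing import Dict, Optional

# Old form, as a type comment:
#     pending = {}  # type: Dict[str, int]
# New form, as an inline variable annotation:
pending: Dict[str, int] = {}

# Values initialised to None keep their Optional annotation inline:
latest_value: Optional[int] = None

# Module-level annotations are evaluated at import time, so a name defined
# further down the module must be quoted as a forward reference:
registry: "Dict[str, Widget]" = {}


class Widget:
    """Hypothetical class defined after the annotation that references it."""
```
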
diff --git a/synapse/metrics/_exposition.py b/synapse/metrics/_exposition.py
index 8002be56e0..7e49d0d02c 100644
--- a/synapse/metrics/_exposition.py
+++ b/synapse/metrics/_exposition.py
@@ -125,7 +125,7 @@ def generate_latest(registry, emit_help=False):
)
output.append("# TYPE {0} {1}\n".format(mname, mtype))
- om_samples = {} # type: Dict[str, List[str]]
+ om_samples: Dict[str, List[str]] = {}
for s in metric.samples:
for suffix in ["_created", "_gsum", "_gcount"]:
if s.name == metric.name + suffix:
diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py
index de96ca0821..4455fa71a8 100644
--- a/synapse/metrics/background_process_metrics.py
+++ b/synapse/metrics/background_process_metrics.py
@@ -93,7 +93,7 @@ _background_process_db_sched_duration = Counter(
# map from description to a counter, so that we can name our logcontexts
# incrementally. (It actually duplicates _background_process_start_count, but
# it's much simpler to do so than to try to combine them.)
-_background_process_counts = {} # type: Dict[str, int]
+_background_process_counts: Dict[str, int] = {}
# Set of all running background processes that became active since the
# last time metrics were scraped (i.e. background processes that performed some
@@ -103,7 +103,7 @@ _background_process_counts = {} # type: Dict[str, int]
# background processes stacking up behind a lock or linearizer, where we then
# only need to iterate over and update metrics for the processes that have
# actually been active and can ignore the idle ones.
-_background_processes_active_since_last_scrape = set() # type: Set[_BackgroundProcess]
+_background_processes_active_since_last_scrape: "Set[_BackgroundProcess]" = set()
# A lock that covers the above set and dict
_bg_metrics_lock = threading.Lock()
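
The comment block above describes a scrape-time optimisation: background processes add themselves to a set (guarded by the lock) whenever they do work, and the collector only iterates over that set, clearing it afterwards, so idle processes cost nothing per scrape. A rough sketch of that idea, with hypothetical names and none of Synapse's actual plumbing:

```python
import threading
from typing import Set


class _Process:
    """Hypothetical stand-in for a background process's metrics state."""

    def update_metrics(self) -> None:
        pass  # flush this process's counters into the exported metrics


_active_since_last_scrape: Set[_Process] = set()
_lock = threading.Lock()


def note_activity(proc: _Process) -> None:
    """Called whenever a process does work, so the next scrape picks it up."""
    with _lock:
        _active_since_last_scrape.add(proc)


def collect() -> None:
    """At scrape time, update only the processes that were actually active."""
    with _lock:
        active = set(_active_since_last_scrape)
        _active_since_last_scrape.clear()
    for proc in active:
        proc.update_metrics()
```
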