From 5cace20bf1f3c50ea50d56a938c4eee5825f4b84 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 10 Nov 2021 15:06:54 -0500 Subject: Add missing type hints to `synapse.app`. (#11287) --- synapse/app/_base.py | 140 ++++++++++++++++++++++++++++++++------------------- 1 file changed, 89 insertions(+), 51 deletions(-) (limited to 'synapse/app/_base.py') diff --git a/synapse/app/_base.py b/synapse/app/_base.py index f2c1028b5d..573bb487b2 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -22,13 +22,27 @@ import socket import sys import traceback import warnings -from typing import TYPE_CHECKING, Awaitable, Callable, Iterable +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Collection, + Dict, + Iterable, + List, + NoReturn, + Tuple, + cast, +) from cryptography.utils import CryptographyDeprecationWarning -from typing_extensions import NoReturn import twisted -from twisted.internet import defer, error, reactor +from twisted.internet import defer, error, reactor as _reactor +from twisted.internet.interfaces import IOpenSSLContextFactory, IReactorSSL, IReactorTCP +from twisted.internet.protocol import ServerFactory +from twisted.internet.tcp import Port from twisted.logger import LoggingFile, LogLevel from twisted.protocols.tls import TLSMemoryBIOFactory from twisted.python.threadpool import ThreadPool @@ -48,6 +62,7 @@ from synapse.logging.context import PreserveLoggingContext from synapse.metrics import register_threadpool from synapse.metrics.background_process_metrics import wrap_as_background_process from synapse.metrics.jemalloc import setup_jemalloc_stats +from synapse.types import ISynapseReactor from synapse.util.caches.lrucache import setup_expire_lru_cache_entries from synapse.util.daemonize import daemonize_process from synapse.util.gai_resolver import GAIResolver @@ -57,33 +72,44 @@ from synapse.util.versionstring import get_version_string if TYPE_CHECKING: from synapse.server import HomeServer +# Twisted injects the global reactor to make it easier to import, this confuses +# mypy which thinks it is a module. Tell it that it is a more proper type. +reactor = cast(ISynapseReactor, _reactor) + + logger = logging.getLogger(__name__) # list of tuples of function, args list, kwargs dict -_sighup_callbacks = [] +_sighup_callbacks: List[ + Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]] +] = [] -def register_sighup(func, *args, **kwargs): +def register_sighup(func: Callable[..., None], *args: Any, **kwargs: Any) -> None: """ Register a function to be called when a SIGHUP occurs. Args: - func (function): Function to be called when sent a SIGHUP signal. + func: Function to be called when sent a SIGHUP signal. *args, **kwargs: args and kwargs to be passed to the target function. """ _sighup_callbacks.append((func, args, kwargs)) -def start_worker_reactor(appname, config, run_command=reactor.run): +def start_worker_reactor( + appname: str, + config: HomeServerConfig, + run_command: Callable[[], None] = reactor.run, +) -> None: """Run the reactor in the main process Daemonizes if necessary, and then configures some resources, before starting the reactor. Pulls configuration from the 'worker' settings in 'config'.
Args: - appname (str): application name which will be sent to syslog - config (synapse.config.Config): config object - run_command (Callable[]): callable that actually runs the reactor + appname: application name which will be sent to syslog + config: config object + run_command: callable that actually runs the reactor """ logger = logging.getLogger(config.worker.worker_app) @@ -101,32 +127,32 @@ def start_worker_reactor(appname, config, run_command=reactor.run): def start_reactor( - appname, - soft_file_limit, - gc_thresholds, - pid_file, - daemonize, - print_pidfile, - logger, - run_command=reactor.run, -): + appname: str, + soft_file_limit: int, + gc_thresholds: Tuple[int, int, int], + pid_file: str, + daemonize: bool, + print_pidfile: bool, + logger: logging.Logger, + run_command: Callable[[], None] = reactor.run, +) -> None: """Run the reactor in the main process Daemonizes if necessary, and then configures some resources, before starting the reactor Args: - appname (str): application name which will be sent to syslog - soft_file_limit (int): + appname: application name which will be sent to syslog + soft_file_limit: gc_thresholds: - pid_file (str): name of pid file to write to if daemonize is True - daemonize (bool): true to run the reactor in a background process - print_pidfile (bool): whether to print the pid file, if daemonize is True - logger (logging.Logger): logger instance to pass to Daemonize - run_command (Callable[]): callable that actually runs the reactor + pid_file: name of pid file to write to if daemonize is True + daemonize: true to run the reactor in a background process + print_pidfile: whether to print the pid file, if daemonize is True + logger: logger instance to pass to Daemonize + run_command: callable that actually runs the reactor """ - def run(): + def run() -> None: logger.info("Running") setup_jemalloc_stats() change_resource_limit(soft_file_limit) @@ -185,7 +211,7 @@ def redirect_stdio_to_logs() -> None: print("Redirected stdout/stderr to logs") -def register_start(cb: Callable[..., Awaitable], *args, **kwargs) -> None: +def register_start(cb: Callable[..., Awaitable], *args: Any, **kwargs: Any) -> None: """Register a callback with the reactor, to be called once it is running This can be used to initialise parts of the system which require an asynchronous @@ -195,7 +221,7 @@ def register_start(cb: Callable[..., Awaitable], *args, **kwargs) -> None: will exit. """ - async def wrapper(): + async def wrapper() -> None: try: await cb(*args, **kwargs) except Exception: @@ -224,7 +250,7 @@ def register_start(cb: Callable[..., Awaitable], *args, **kwargs) -> None: reactor.callWhenRunning(lambda: defer.ensureDeferred(wrapper())) -def listen_metrics(bind_addresses, port): +def listen_metrics(bind_addresses: Iterable[str], port: int) -> None: """ Start Prometheus metrics server. """ @@ -236,11 +262,11 @@ def listen_metrics(bind_addresses, port): def listen_manhole( - bind_addresses: Iterable[str], + bind_addresses: Collection[str], port: int, manhole_settings: ManholeConfig, manhole_globals: dict, -): +) -> None: # twisted.conch.manhole 21.1.0 uses "int_from_bytes", which produces a confusing # warning. It's fixed by https://github.com/twisted/twisted/pull/1522), so # suppress the warning for now. 
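The `_sighup_callbacks` hunk near the top of this patch types a registry of deferred calls as a list of (function, positional args, keyword args) tuples. A minimal self-contained sketch of the same pattern, with illustrative names that are not part of the patch:

    from typing import Any, Callable, Dict, List, Tuple

    # Registry of (function, positional args, keyword args), typed as in the patch.
    _callbacks: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]] = []

    def register(func: Callable[..., None], *args: Any, **kwargs: Any) -> None:
        # Store the callable together with the arguments to invoke it with later.
        _callbacks.append((func, args, kwargs))

    def fire_all() -> None:
        # Invoke every registered callback with its stored arguments.
        for func, args, kwargs in _callbacks:
            func(*args, **kwargs)

    def _reload(name: str, verbose: bool = False) -> None:
        print("reloading", name, verbose)

    register(_reload, "tls", verbose=True)
    fire_all()  # prints: reloading tls True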
@@ -259,12 +285,18 @@ def listen_manhole( ) -def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50): +def listen_tcp( + bind_addresses: Collection[str], + port: int, + factory: ServerFactory, + reactor: IReactorTCP = reactor, + backlog: int = 50, +) -> List[Port]: """ Create a TCP socket for a port and several addresses Returns: - list[twisted.internet.tcp.Port]: listening for TCP connections + list of twisted.internet.tcp.Port listening for TCP connections """ r = [] for address in bind_addresses: @@ -273,12 +305,19 @@ def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50): except error.CannotListenError as e: check_bind_error(e, address, bind_addresses) - return r + # IReactorTCP returns an object implementing IListeningPort from listenTCP, + # but we know it will be a Port instance. + return r # type: ignore[return-value] def listen_ssl( - bind_addresses, port, factory, context_factory, reactor=reactor, backlog=50 -): + bind_addresses: Collection[str], + port: int, + factory: ServerFactory, + context_factory: IOpenSSLContextFactory, + reactor: IReactorSSL = reactor, + backlog: int = 50, +) -> List[Port]: """ Create a TLS-over-TCP socket for a port and several addresses @@ -294,10 +333,13 @@ def listen_ssl( except error.CannotListenError as e: check_bind_error(e, address, bind_addresses) - return r + # IReactorSSL incorrectly declares that an int is returned from listenSSL, + # it actually returns an object implementing IListeningPort, but we know it + # will be a Port instance. + return r # type: ignore[return-value] -def refresh_certificate(hs: "HomeServer"): +def refresh_certificate(hs: "HomeServer") -> None: """ Refresh the TLS certificates that Synapse is using by re-reading them from disk and updating the TLS context factories to use them. @@ -329,7 +371,7 @@ def refresh_certificate(hs: "HomeServer"): logger.info("Context factories updated.") -async def start(hs: "HomeServer"): +async def start(hs: "HomeServer") -> None: """ Start a Synapse server or worker. @@ -360,7 +402,7 @@ async def start(hs: "HomeServer"): if hasattr(signal, "SIGHUP"): @wrap_as_background_process("sighup") - def handle_sighup(*args, **kwargs): + def handle_sighup(*args: Any, **kwargs: Any) -> None: # Tell systemd our state, if we're using it. This will silently fail if # we're not using systemd. sdnotify(b"RELOADING=1") @@ -373,7 +415,7 @@ async def start(hs: "HomeServer"): # We defer running the sighup handlers until next reactor tick. This # is so that we're in a sane state, e.g. flushing the logs may fail # if the sighup happens in the middle of writing a log entry. - def run_sighup(*args, **kwargs): + def run_sighup(*args: Any, **kwargs: Any) -> None: # `callFromThread` should be "signal safe" as well as thread # safe. reactor.callFromThread(handle_sighup, *args, **kwargs) @@ -436,12 +478,8 @@ async def start(hs: "HomeServer"): atexit.register(gc.freeze) -def setup_sentry(hs: "HomeServer"): - """Enable sentry integration, if enabled in configuration - - Args: - hs - """ +def setup_sentry(hs: "HomeServer") -> None: + """Enable sentry integration, if enabled in configuration""" if not hs.config.metrics.sentry_enabled: return @@ -466,7 +504,7 @@ def setup_sentry(hs: "HomeServer"): scope.set_tag("worker_name", name) -def setup_sdnotify(hs: "HomeServer"): +def setup_sdnotify(hs: "HomeServer") -> None: """Adds process state hooks to tell systemd what we are up to.""" # Tell systemd our state, if we're using it.
This will silently fail if @@ -481,7 +519,7 @@ def setup_sdnotify(hs: "HomeServer"): sdnotify_sockaddr = os.getenv("NOTIFY_SOCKET") -def sdnotify(state): +def sdnotify(state: bytes) -> None: """ Send a notification to systemd, if the NOTIFY_SOCKET env var is set. @@ -490,7 +528,7 @@ def sdnotify(state): package which many OSes don't include as a matter of principle. Args: - state (bytes): notification to send + state: notification to send """ if not isinstance(state, bytes): raise TypeError("sdnotify should be called with a bytes") -- cgit 1.5.1 From 84fac0f814f69645ff1ad564ef8294b31203dc95 Mon Sep 17 00:00:00 2001 From: Sean Quah <8349537+squahtx@users.noreply.github.com> Date: Wed, 17 Nov 2021 19:07:02 +0000 Subject: Add type annotations to `synapse.metrics` (#10847) --- changelog.d/10847.misc | 1 + mypy.ini | 3 + synapse/app/_base.py | 2 +- synapse/groups/attestations.py | 4 +- synapse/handlers/typing.py | 2 +- synapse/metrics/__init__.py | 101 ++++++++++++++++++-------- synapse/metrics/_exposition.py | 34 ++++----- synapse/metrics/background_process_metrics.py | 78 +++++++++++++++----- synapse/metrics/jemalloc.py | 10 ++- synapse/storage/database.py | 6 +- synapse/util/caches/expiringcache.py | 2 +- synapse/util/metrics.py | 15 ++-- 12 files changed, 173 insertions(+), 85 deletions(-) create mode 100644 changelog.d/10847.misc (limited to 'synapse/app/_base.py') diff --git a/changelog.d/10847.misc b/changelog.d/10847.misc new file mode 100644 index 0000000000..7933a38dca --- /dev/null +++ b/changelog.d/10847.misc @@ -0,0 +1 @@ +Add type annotations to `synapse.metrics`. diff --git a/mypy.ini b/mypy.ini index f32c6c41a3..308cfd95d8 100644 --- a/mypy.ini +++ b/mypy.ini @@ -160,6 +160,9 @@ disallow_untyped_defs = True [mypy-synapse.handlers.*] disallow_untyped_defs = True +[mypy-synapse.metrics.*] +disallow_untyped_defs = True + [mypy-synapse.push.*] disallow_untyped_defs = True diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 573bb487b2..807ee3d46e 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -402,7 +402,7 @@ async def start(hs: "HomeServer") -> None: if hasattr(signal, "SIGHUP"): @wrap_as_background_process("sighup") - def handle_sighup(*args: Any, **kwargs: Any) -> None: + async def handle_sighup(*args: Any, **kwargs: Any) -> None: # Tell systemd our state, if we're using it. This will silently fail if # we're not using systemd. 
sdnotify(b"RELOADING=1") diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index 53f99031b1..a87896e538 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -40,6 +40,8 @@ from typing import TYPE_CHECKING, Optional, Tuple from signedjson.sign import sign_json +from twisted.internet.defer import Deferred + from synapse.api.errors import HttpResponseException, RequestSendFailed, SynapseError from synapse.metrics.background_process_metrics import run_as_background_process from synapse.types import JsonDict, get_domain_from_id @@ -166,7 +168,7 @@ class GroupAttestionRenewer: return {} - def _start_renew_attestations(self) -> None: + def _start_renew_attestations(self) -> "Deferred[None]": return run_as_background_process("renew_attestations", self._renew_attestations) async def _renew_attestations(self) -> None: diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index 22c6174821..1676ebd057 100644 --- a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -90,7 +90,7 @@ class FollowerTypingHandler: self.wheel_timer = WheelTimer(bucket_size=5000) @wrap_as_background_process("typing._handle_timeouts") - def _handle_timeouts(self) -> None: + async def _handle_timeouts(self) -> None: logger.debug("Checking for typing timeouts") now = self.clock.time_msec() diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 91ee5c8193..ceef57ad88 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -20,10 +20,25 @@ import os import platform import threading import time -from typing import Callable, Dict, Iterable, Mapping, Optional, Tuple, Union +from typing import ( + Any, + Callable, + Dict, + Generic, + Iterable, + Mapping, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) import attr -from prometheus_client import Counter, Gauge, Histogram +from prometheus_client import CollectorRegistry, Counter, Gauge, Histogram, Metric from prometheus_client.core import ( REGISTRY, CounterMetricFamily, @@ -32,6 +47,7 @@ from prometheus_client.core import ( ) from twisted.internet import reactor +from twisted.internet.base import ReactorBase from twisted.python.threadpool import ThreadPool import synapse @@ -54,7 +70,7 @@ HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat") class RegistryProxy: @staticmethod - def collect(): + def collect() -> Iterable[Metric]: for metric in REGISTRY.collect(): if not metric.name.startswith("__"): yield metric @@ -74,7 +90,7 @@ class LaterGauge: ] ) - def collect(self): + def collect(self) -> Iterable[Metric]: g = GaugeMetricFamily(self.name, self.desc, labels=self.labels) @@ -93,10 +109,10 @@ class LaterGauge: yield g - def __attrs_post_init__(self): + def __attrs_post_init__(self) -> None: self._register() - def _register(self): + def _register(self) -> None: if self.name in all_gauges.keys(): logger.warning("%s already registered, reregistering" % (self.name,)) REGISTRY.unregister(all_gauges.pop(self.name)) @@ -105,7 +121,12 @@ class LaterGauge: all_gauges[self.name] = self -class InFlightGauge: +# `MetricsEntry` only makes sense when it is a `Protocol`, +# but `Protocol` can't be used as a `TypeVar` bound. +MetricsEntry = TypeVar("MetricsEntry") + + +class InFlightGauge(Generic[MetricsEntry]): """Tracks number of things (e.g. requests, Measure blocks, etc) in flight at any given time. @@ -115,14 +136,19 @@ class InFlightGauge: callbacks. 
Args: - name (str) - desc (str) - labels (list[str]) - sub_metrics (list[str]): A list of sub metrics that the callbacks - will update. + name + desc + labels + sub_metrics: A list of sub metrics that the callbacks will update. """ - def __init__(self, name, desc, labels, sub_metrics): + def __init__( + self, + name: str, + desc: str, + labels: Sequence[str], + sub_metrics: Sequence[str], + ): self.name = name self.desc = desc self.labels = labels @@ -130,19 +156,25 @@ class InFlightGauge: # Create a class which has the sub_metrics values as attributes, which # default to 0 on initialization. Used to pass to registered callbacks. - self._metrics_class = attr.make_class( + self._metrics_class: Type[MetricsEntry] = attr.make_class( "_MetricsEntry", attrs={x: attr.ib(0) for x in sub_metrics}, slots=True ) # Counts number of in flight blocks for a given set of label values - self._registrations: Dict = {} + self._registrations: Dict[ + Tuple[str, ...], Set[Callable[[MetricsEntry], None]] + ] = {} # Protects access to _registrations self._lock = threading.Lock() self._register_with_collector() - def register(self, key, callback): + def register( + self, + key: Tuple[str, ...], + callback: Callable[[MetricsEntry], None], + ) -> None: """Registers that we've entered a new block with labels `key`. `callback` gets called each time the metrics are collected. The same @@ -158,13 +190,17 @@ class InFlightGauge: with self._lock: self._registrations.setdefault(key, set()).add(callback) - def unregister(self, key, callback): + def unregister( + self, + key: Tuple[str, ...], + callback: Callable[[MetricsEntry], None], + ) -> None: """Registers that we've exited a block with labels `key`.""" with self._lock: self._registrations.setdefault(key, set()).discard(callback) - def collect(self): + def collect(self) -> Iterable[Metric]: """Called by prometheus client when it reads metrics. Note: may be called by a separate thread.
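The `Generic[MetricsEntry]` rework above has to describe a class that is only built at runtime by `attr.make_class`, and the comment earlier in the hunk notes that a `Protocol` would be the natural description but cannot serve as a `TypeVar` bound. A minimal sketch of how the two halves fit together, assuming only the `attr` package; the `Tracker` and `_Entry` names here are illustrative (the patch itself adds a real `_InFlightMetric` Protocol in `synapse/util/metrics.py` further down):

    from typing import Generic, List, Protocol, Type, TypeVar

    import attr

    # The entry type is a plain TypeVar: Protocol classes cannot be TypeVar bounds.
    MetricsEntry = TypeVar("MetricsEntry")

    class Tracker(Generic[MetricsEntry]):
        def __init__(self, sub_metrics: List[str]) -> None:
            # Build a slotted class with one attribute per sub-metric, each
            # defaulting to 0, the same trick InFlightGauge uses.
            self._metrics_class: Type[MetricsEntry] = attr.make_class(
                "_MetricsEntry", attrs={x: attr.ib(0) for x in sub_metrics}, slots=True
            )

        def new_entry(self) -> MetricsEntry:
            return self._metrics_class()

    class _Entry(Protocol):
        real_time_max: float
        real_time_sum: float

    tracker: Tracker[_Entry] = Tracker(["real_time_max", "real_time_sum"])
    entry = tracker.new_entry()
    entry.real_time_sum += 1.5  # attribute access checks against the Protocol
    print(entry.real_time_sum)  # 1.5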
@@ -200,7 +236,7 @@ class InFlightGauge: gauge.add_metric(key, getattr(metrics, name)) yield gauge - def _register_with_collector(self): + def _register_with_collector(self) -> None: if self.name in all_gauges.keys(): logger.warning("%s already registered, reregistering" % (self.name,)) REGISTRY.unregister(all_gauges.pop(self.name)) @@ -230,7 +266,7 @@ class GaugeBucketCollector: name: str, documentation: str, buckets: Iterable[float], - registry=REGISTRY, + registry: CollectorRegistry = REGISTRY, ): """ Args: @@ -257,12 +293,12 @@ class GaugeBucketCollector: registry.register(self) - def collect(self): + def collect(self) -> Iterable[Metric]: # Don't report metrics unless we've already collected some data if self._metric is not None: yield self._metric - def update_data(self, values: Iterable[float]): + def update_data(self, values: Iterable[float]) -> None: """Update the data to be reported by the metric The existing data is cleared, and each measurement in the input is assigned @@ -304,7 +340,7 @@ class GaugeBucketCollector: class CPUMetrics: - def __init__(self): + def __init__(self) -> None: ticks_per_sec = 100 try: # Try and get the system config @@ -314,7 +350,7 @@ class CPUMetrics: self.ticks_per_sec = ticks_per_sec - def collect(self): + def collect(self) -> Iterable[Metric]: if not HAVE_PROC_SELF_STAT: return @@ -364,7 +400,7 @@ gc_time = Histogram( class GCCounts: - def collect(self): + def collect(self) -> Iterable[Metric]: cm = GaugeMetricFamily("python_gc_counts", "GC object counts", labels=["gen"]) for n, m in enumerate(gc.get_count()): cm.add_metric([str(n)], m) @@ -382,7 +418,7 @@ if not running_on_pypy: class PyPyGCStats: - def collect(self): + def collect(self) -> Iterable[Metric]: # @stats is a pretty-printer object with __str__() returning a nice table, # plus some fields that contain data from that table. 
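The `collect(self) -> Iterable[Metric]` signatures above all follow prometheus_client's duck-typed collector interface: any object with a `collect` method that yields metric families can be registered. A minimal sketch of such a collector, with illustrative metric names:

    from typing import Iterable

    from prometheus_client import REGISTRY, Metric, generate_latest
    from prometheus_client.core import GaugeMetricFamily

    class QueueDepthCollector:
        def collect(self) -> Iterable[Metric]:
            # Build a fresh metric family on every scrape.
            g = GaugeMetricFamily("demo_queue_depth", "Items waiting", labels=["queue"])
            g.add_metric(["inbound"], 3)
            g.add_metric(["outbound"], 0)
            yield g

    REGISTRY.register(QueueDepthCollector())
    print(generate_latest(REGISTRY).decode("utf-8")[:200])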
@@ -565,7 +601,7 @@ def register_threadpool(name: str, threadpool: ThreadPool) -> None: class ReactorLastSeenMetric: - def collect(self): + def collect(self) -> Iterable[Metric]: cm = GaugeMetricFamily( "python_twisted_reactor_last_seen", "Seconds since the Twisted reactor was last seen", @@ -584,9 +620,12 @@ MIN_TIME_BETWEEN_GCS = (1.0, 10.0, 30.0) _last_gc = [0.0, 0.0, 0.0] -def runUntilCurrentTimer(reactor, func): +F = TypeVar("F", bound=Callable[..., Any]) + + +def runUntilCurrentTimer(reactor: ReactorBase, func: F) -> F: @functools.wraps(func) - def f(*args, **kwargs): + def f(*args: Any, **kwargs: Any) -> Any: now = reactor.seconds() num_pending = 0 @@ -649,7 +688,7 @@ def runUntilCurrentTimer(reactor, func): return ret - return f + return cast(F, f) try: @@ -677,5 +716,5 @@ __all__ = [ "start_http_server", "LaterGauge", "InFlightGauge", - "BucketCollector", + "GaugeBucketCollector", ] diff --git a/synapse/metrics/_exposition.py b/synapse/metrics/_exposition.py index bb9bcb5592..353d0a63b6 100644 --- a/synapse/metrics/_exposition.py +++ b/synapse/metrics/_exposition.py @@ -25,27 +25,25 @@ import math import threading from http.server import BaseHTTPRequestHandler, HTTPServer from socketserver import ThreadingMixIn -from typing import Dict, List +from typing import Any, Dict, List, Type, Union from urllib.parse import parse_qs, urlparse -from prometheus_client import REGISTRY +from prometheus_client import REGISTRY, CollectorRegistry +from prometheus_client.core import Sample from twisted.web.resource import Resource +from twisted.web.server import Request from synapse.util import caches CONTENT_TYPE_LATEST = "text/plain; version=0.0.4; charset=utf-8" -INF = float("inf") -MINUS_INF = float("-inf") - - -def floatToGoString(d): +def floatToGoString(d: Union[int, float]) -> str: d = float(d) - if d == INF: + if d == math.inf: return "+Inf" - elif d == MINUS_INF: + elif d == -math.inf: return "-Inf" elif math.isnan(d): return "NaN" @@ -60,7 +58,7 @@ def floatToGoString(d): return s -def sample_line(line, name): +def sample_line(line: Sample, name: str) -> str: if line.labels: labelstr = "{{{0}}}".format( ",".join( @@ -82,7 +80,7 @@ def sample_line(line, name): return "{}{} {}{}\n".format(name, labelstr, floatToGoString(line.value), timestamp) -def generate_latest(registry, emit_help=False): +def generate_latest(registry: CollectorRegistry, emit_help: bool = False) -> bytes: # Trigger the cache metrics to be rescraped, which updates the common # metrics but do not produce metrics themselves @@ -187,7 +185,7 @@ class MetricsHandler(BaseHTTPRequestHandler): registry = REGISTRY - def do_GET(self): + def do_GET(self) -> None: registry = self.registry params = parse_qs(urlparse(self.path).query) @@ -207,11 +205,11 @@ class MetricsHandler(BaseHTTPRequestHandler): self.end_headers() self.wfile.write(output) - def log_message(self, format, *args): + def log_message(self, format: str, *args: Any) -> None: """Log nothing.""" @classmethod - def factory(cls, registry): + def factory(cls, registry: CollectorRegistry) -> Type: """Returns a dynamic MetricsHandler class tied to the passed registry. 
""" @@ -236,7 +234,9 @@ class _ThreadingSimpleServer(ThreadingMixIn, HTTPServer): daemon_threads = True -def start_http_server(port, addr="", registry=REGISTRY): +def start_http_server( + port: int, addr: str = "", registry: CollectorRegistry = REGISTRY +) -> None: """Starts an HTTP server for prometheus metrics as a daemon thread""" CustomMetricsHandler = MetricsHandler.factory(registry) httpd = _ThreadingSimpleServer((addr, port), CustomMetricsHandler) @@ -252,10 +252,10 @@ class MetricsResource(Resource): isLeaf = True - def __init__(self, registry=REGISTRY): + def __init__(self, registry: CollectorRegistry = REGISTRY): self.registry = registry - def render_GET(self, request): + def render_GET(self, request: Request) -> bytes: request.setHeader(b"Content-Type", CONTENT_TYPE_LATEST.encode("ascii")) response = generate_latest(self.registry) request.setHeader(b"Content-Length", str(len(response))) diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py index 2ab599a334..53c508af91 100644 --- a/synapse/metrics/background_process_metrics.py +++ b/synapse/metrics/background_process_metrics.py @@ -15,19 +15,37 @@ import logging import threading from functools import wraps -from typing import TYPE_CHECKING, Dict, Optional, Set, Union +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Dict, + Iterable, + Optional, + Set, + Type, + TypeVar, + Union, + cast, +) +from prometheus_client import Metric from prometheus_client.core import REGISTRY, Counter, Gauge from twisted.internet import defer -from synapse.logging.context import LoggingContext, PreserveLoggingContext +from synapse.logging.context import ( + ContextResourceUsage, + LoggingContext, + PreserveLoggingContext, +) from synapse.logging.opentracing import ( SynapseTags, noop_context_manager, start_active_span, ) -from synapse.util.async_helpers import maybe_awaitable if TYPE_CHECKING: import resource @@ -116,7 +134,7 @@ class _Collector: before they are returned. """ - def collect(self): + def collect(self) -> Iterable[Metric]: global _background_processes_active_since_last_scrape # We swap out the _background_processes set with an empty one so that @@ -144,12 +162,12 @@ REGISTRY.register(_Collector()) class _BackgroundProcess: - def __init__(self, desc, ctx): + def __init__(self, desc: str, ctx: LoggingContext): self.desc = desc self._context = ctx - self._reported_stats = None + self._reported_stats: Optional[ContextResourceUsage] = None - def update_metrics(self): + def update_metrics(self) -> None: """Updates the metrics with values from this process.""" new_stats = self._context.get_resource_usage() if self._reported_stats is None: @@ -169,7 +187,16 @@ class _BackgroundProcess: ) -def run_as_background_process(desc: str, func, *args, bg_start_span=True, **kwargs): +R = TypeVar("R") + + +def run_as_background_process( + desc: str, + func: Callable[..., Awaitable[Optional[R]]], + *args: Any, + bg_start_span: bool = True, + **kwargs: Any, +) -> "defer.Deferred[Optional[R]]": """Run the given function in its own logcontext, with resource metrics This should be used to wrap processes which are fired off to run in the @@ -189,11 +216,13 @@ def run_as_background_process(desc: str, func, *args, bg_start_span=True, **kwar args: positional args for func kwargs: keyword args for func - Returns: Deferred which returns the result of func, but note that it does not - follow the synapse logcontext rules. 
+ Returns: + Deferred which returns the result of func, or `None` if func raises. + Note that the returned Deferred does not follow the synapse logcontext + rules. """ - async def run(): + async def run() -> Optional[R]: with _bg_metrics_lock: count = _background_process_counts.get(desc, 0) _background_process_counts[desc] = count + 1 @@ -210,12 +239,13 @@ def run_as_background_process(desc: str, func, *args, bg_start_span=True, **kwar else: ctx = noop_context_manager() with ctx: - return await maybe_awaitable(func(*args, **kwargs)) + return await func(*args, **kwargs) except Exception: logger.exception( "Background process '%s' threw an exception", desc, ) + return None finally: _background_process_in_flight_count.labels(desc).dec() @@ -225,19 +255,24 @@ def run_as_background_process(desc: str, func, *args, bg_start_span=True, **kwar return defer.ensureDeferred(run()) -def wrap_as_background_process(desc): +F = TypeVar("F", bound=Callable[..., Awaitable[Optional[Any]]]) + + +def wrap_as_background_process(desc: str) -> Callable[[F], F]: """Decorator that wraps a function that gets called as a background process. - Equivalent of calling the function with `run_as_background_process` + Equivalent to calling the function with `run_as_background_process` """ - def wrap_as_background_process_inner(func): + def wrap_as_background_process_inner(func: F) -> F: @wraps(func) - def wrap_as_background_process_inner_2(*args, **kwargs): + def wrap_as_background_process_inner_2( + *args: Any, **kwargs: Any + ) -> "defer.Deferred[Optional[R]]": return run_as_background_process(desc, func, *args, **kwargs) - return wrap_as_background_process_inner_2 + return cast(F, wrap_as_background_process_inner_2) return wrap_as_background_process_inner @@ -265,7 +300,7 @@ class BackgroundProcessLoggingContext(LoggingContext): super().__init__("%s-%s" % (name, instance_id)) self._proc = _BackgroundProcess(name, self) - def start(self, rusage: "Optional[resource.struct_rusage]"): + def start(self, rusage: "Optional[resource.struct_rusage]") -> None: """Log context has started running (again).""" super().start(rusage) @@ -276,7 +311,12 @@ class BackgroundProcessLoggingContext(LoggingContext): with _bg_metrics_lock: _background_processes_active_since_last_scrape.add(self._proc) - def __exit__(self, type, value, traceback) -> None: + def __exit__( + self, + type: Optional[Type[BaseException]], + value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: """Log context has finished.""" super().__exit__(type, value, traceback) diff --git a/synapse/metrics/jemalloc.py b/synapse/metrics/jemalloc.py index 29ab6c0229..98ed9c0829 100644 --- a/synapse/metrics/jemalloc.py +++ b/synapse/metrics/jemalloc.py @@ -16,14 +16,16 @@ import ctypes import logging import os import re -from typing import Optional +from typing import Iterable, Optional + +from prometheus_client import Metric from synapse.metrics import REGISTRY, GaugeMetricFamily logger = logging.getLogger(__name__) -def _setup_jemalloc_stats(): +def _setup_jemalloc_stats() -> None: """Checks to see if jemalloc is loaded, and hooks up a collector to record statistics exposed by jemalloc. 
""" @@ -135,7 +137,7 @@ def _setup_jemalloc_stats(): class JemallocCollector: """Metrics for internal jemalloc stats.""" - def collect(self): + def collect(self) -> Iterable[Metric]: _jemalloc_refresh_stats() g = GaugeMetricFamily( @@ -185,7 +187,7 @@ def _setup_jemalloc_stats(): logger.debug("Added jemalloc stats") -def setup_jemalloc_stats(): +def setup_jemalloc_stats() -> None: """Try to setup jemalloc stats, if jemalloc is loaded.""" try: diff --git a/synapse/storage/database.py b/synapse/storage/database.py index d4cab69ebf..0693d39006 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -188,7 +188,7 @@ class LoggingDatabaseConnection: # The type of entry which goes on our after_callbacks and exception_callbacks lists. -_CallbackListEntry = Tuple[Callable[..., None], Iterable[Any], Dict[str, Any]] +_CallbackListEntry = Tuple[Callable[..., object], Iterable[Any], Dict[str, Any]] R = TypeVar("R") @@ -235,7 +235,7 @@ class LoggingTransaction: self.after_callbacks = after_callbacks self.exception_callbacks = exception_callbacks - def call_after(self, callback: Callable[..., None], *args: Any, **kwargs: Any): + def call_after(self, callback: Callable[..., object], *args: Any, **kwargs: Any): """Call the given callback on the main twisted thread after the transaction has finished. Used to invalidate the caches on the correct thread. @@ -247,7 +247,7 @@ class LoggingTransaction: self.after_callbacks.append((callback, args, kwargs)) def call_on_exception( - self, callback: Callable[..., None], *args: Any, **kwargs: Any + self, callback: Callable[..., object], *args: Any, **kwargs: Any ): # if self.exception_callbacks is None, that means that whatever constructed the # LoggingTransaction isn't expecting there to be any callbacks; assert that diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index 6a7e534576..67ee4c693b 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -159,7 +159,7 @@ class ExpiringCache(Generic[KT, VT]): self[key] = value return value - def _prune_cache(self) -> None: + async def _prune_cache(self) -> None: if not self._expiry_ms: # zero expiry time means don't expire. This should never get called # since we have this check in start too. diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py index ad775dfc7d..98ee49af6e 100644 --- a/synapse/util/metrics.py +++ b/synapse/util/metrics.py @@ -56,8 +56,15 @@ block_db_sched_duration = Counter( "synapse_util_metrics_block_db_sched_duration_seconds", "", ["block_name"] ) + +# This is dynamically created in InFlightGauge.__init__. +class _InFlightMetric(Protocol): + real_time_max: float + real_time_sum: float + + # Tracks the number of blocks currently active -in_flight = InFlightGauge( +in_flight: InFlightGauge[_InFlightMetric] = InFlightGauge( "synapse_util_metrics_block_in_flight", "", labels=["block_name"], @@ -65,12 +72,6 @@ in_flight = InFlightGauge( ) -# This is dynamically created in InFlightGauge.__init__. -class _InFlightMetric(Protocol): - real_time_max: float - real_time_sum: float - - T = TypeVar("T", bound=Callable[..., Any]) -- cgit 1.5.1 From f44d729d4ccae61bc0cdd5774acb3233eb5f7c13 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 1 Dec 2021 07:28:23 -0500 Subject: Additional type hints for config module. (#11465) This adds some misc. type hints to helper methods used in the `synapse.config` module. 
--- changelog.d/11465.misc | 1 + synapse/app/_base.py | 3 ++- synapse/config/__main__.py | 3 ++- synapse/config/appservice.py | 23 ++++++++++------- synapse/config/cache.py | 26 ++++++++++--------- synapse/config/cas.py | 5 ++-- synapse/config/database.py | 13 +++++----- synapse/config/logger.py | 24 +++++++++-------- synapse/config/oidc.py | 58 +++++++++++++++++++++--------------------- synapse/config/registration.py | 6 +++-- synapse/config/repository.py | 9 ++++--- synapse/config/saml2.py | 21 +++++++-------- synapse/config/server.py | 20 ++++++++++----- synapse/config/sso.py | 12 ++++----- synapse/config/workers.py | 4 ++- 15 files changed, 129 insertions(+), 99 deletions(-) create mode 100644 changelog.d/11465.misc (limited to 'synapse/app/_base.py') diff --git a/changelog.d/11465.misc b/changelog.d/11465.misc new file mode 100644 index 0000000000..aadc938b2b --- /dev/null +++ b/changelog.d/11465.misc @@ -0,0 +1 @@ +Add missing type hints to `synapse.config` module. diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 807ee3d46e..5fc59c1be1 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -32,6 +32,7 @@ from typing import ( Iterable, List, NoReturn, + Optional, Tuple, cast, ) @@ -129,7 +130,7 @@ def start_worker_reactor( def start_reactor( appname: str, soft_file_limit: int, - gc_thresholds: Tuple[int, int, int], + gc_thresholds: Optional[Tuple[int, int, int]], pid_file: str, daemonize: bool, print_pidfile: bool, diff --git a/synapse/config/__main__.py b/synapse/config/__main__.py index c555f5f914..b2a7a89a35 100644 --- a/synapse/config/__main__.py +++ b/synapse/config/__main__.py @@ -13,12 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. import sys +from typing import List from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig -def main(args): +def main(args: List[str]) -> None: action = args[1] if len(args) > 1 and args[1] == "read" else None # If we're reading a key in the config file, then `args[1]` will be `read` and `args[2]` # will be the key to read. diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py index 1ebea88db2..e4bb7224a4 100644 --- a/synapse/config/appservice.py +++ b/synapse/config/appservice.py @@ -1,4 +1,5 @@ # Copyright 2015, 2016 OpenMarket Ltd +# Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,14 +14,14 @@ # limitations under the License. 
import logging -from typing import Dict +from typing import Dict, List from urllib import parse as urlparse import yaml from netaddr import IPSet from synapse.appservice import ApplicationService -from synapse.types import UserID +from synapse.types import JsonDict, UserID from ._base import Config, ConfigError @@ -30,12 +31,12 @@ logger = logging.getLogger(__name__) class AppServiceConfig(Config): section = "appservice" - def read_config(self, config, **kwargs): + def read_config(self, config, **kwargs) -> None: self.app_service_config_files = config.get("app_service_config_files", []) self.notify_appservices = config.get("notify_appservices", True) self.track_appservice_user_ips = config.get("track_appservice_user_ips", False) - def generate_config_section(cls, **kwargs): + def generate_config_section(cls, **kwargs) -> str: return """\ # A list of application service config files to use # @@ -50,7 +51,9 @@ class AppServiceConfig(Config): """ -def load_appservices(hostname, config_files): +def load_appservices( + hostname: str, config_files: List[str] +) -> List[ApplicationService]: """Returns a list of Application Services from the config files.""" if not isinstance(config_files, list): logger.warning("Expected %s to be a list of AS config files.", config_files) @@ -93,7 +96,9 @@ def load_appservices(hostname, config_files): return appservices -def _load_appservice(hostname, as_info, config_filename): +def _load_appservice( + hostname: str, as_info: JsonDict, config_filename: str +) -> ApplicationService: required_string_fields = ["id", "as_token", "hs_token", "sender_localpart"] for field in required_string_fields: if not isinstance(as_info.get(field), str): @@ -115,9 +120,9 @@ def _load_appservice(hostname, as_info, config_filename): user_id = user.to_string() # Rate limiting for users of this AS is on by default (excludes sender) - rate_limited = True - if isinstance(as_info.get("rate_limited"), bool): - rate_limited = as_info.get("rate_limited") + rate_limited = as_info.get("rate_limited") + if not isinstance(rate_limited, bool): + rate_limited = True # namespace checks if not isinstance(as_info.get("namespaces"), dict): diff --git a/synapse/config/cache.py b/synapse/config/cache.py index f054455534..d9d85f98e1 100644 --- a/synapse/config/cache.py +++ b/synapse/config/cache.py @@ -1,4 +1,4 @@ -# Copyright 2019 Matrix.org Foundation C.I.C. +# Copyright 2019-2021 Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
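The hunks that follow convert `CacheProperties` from an `__init__`-based class to a declarative attrs class. A minimal sketch of the `@attr.s(slots=True, auto_attribs=True)` pattern, with an illustrative environment variable name; note that such defaults are evaluated once, when the class is defined, rather than per instance:

    import os
    from typing import Callable, Optional

    import attr

    @attr.s(slots=True, auto_attribs=True)
    class Properties:
        # Annotated class attributes become attrs fields; this default is
        # computed once, when the class is defined.
        default_factor_size: float = float(os.environ.get("DEMO_CACHE_FACTOR", 0.5))
        resize_callback: Optional[Callable[[], None]] = None

    p = Properties()
    print(p.default_factor_size)  # 0.5 unless DEMO_CACHE_FACTOR is set
    p.resize_callback = lambda: None  # slots=True still allows declared fields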
@@ -17,6 +17,8 @@ import re import threading from typing import Callable, Dict, Optional +import attr + from synapse.python_dependencies import DependencyException, check_requirements from ._base import Config, ConfigError @@ -34,13 +36,13 @@ _DEFAULT_FACTOR_SIZE = 0.5 _DEFAULT_EVENT_CACHE_SIZE = "10K" +@attr.s(slots=True, auto_attribs=True) class CacheProperties: - def __init__(self): - # The default factor size for all caches - self.default_factor_size = float( - os.environ.get(_CACHE_PREFIX, _DEFAULT_FACTOR_SIZE) - ) - self.resize_all_caches_func = None + # The default factor size for all caches + default_factor_size: float = float( + os.environ.get(_CACHE_PREFIX, _DEFAULT_FACTOR_SIZE) + ) + resize_all_caches_func: Optional[Callable[[], None]] = None properties = CacheProperties() @@ -62,7 +64,7 @@ def _canonicalise_cache_name(cache_name: str) -> str: def add_resizable_cache( cache_name: str, cache_resize_callback: Callable[[float], None] -): +) -> None: """Register a cache that's size can dynamically change Args: @@ -91,7 +93,7 @@ class CacheConfig(Config): _environ = os.environ @staticmethod - def reset(): + def reset() -> None: """Resets the caches to their defaults. Used for tests.""" properties.default_factor_size = float( os.environ.get(_CACHE_PREFIX, _DEFAULT_FACTOR_SIZE) @@ -100,7 +102,7 @@ class CacheConfig(Config): with _CACHES_LOCK: _CACHES.clear() - def generate_config_section(self, **kwargs): + def generate_config_section(self, **kwargs) -> str: return """\ ## Caching ## @@ -162,7 +164,7 @@ class CacheConfig(Config): #sync_response_cache_duration: 2m """ - def read_config(self, config, **kwargs): + def read_config(self, config, **kwargs) -> None: self.event_cache_size = self.parse_size( config.get("event_cache_size", _DEFAULT_EVENT_CACHE_SIZE) ) @@ -232,7 +234,7 @@ class CacheConfig(Config): # needing an instance of Config properties.resize_all_caches_func = self.resize_all_caches - def resize_all_caches(self): + def resize_all_caches(self) -> None: """Ensure all cache sizes are up to date For each cache, run the mapped callback function with either diff --git a/synapse/config/cas.py b/synapse/config/cas.py index 3f81814043..6f2754092e 100644 --- a/synapse/config/cas.py +++ b/synapse/config/cas.py @@ -1,4 +1,5 @@ # Copyright 2015, 2016 OpenMarket Ltd +# Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -28,7 +29,7 @@ class CasConfig(Config): section = "cas" - def read_config(self, config, **kwargs): + def read_config(self, config, **kwargs) -> None: cas_config = config.get("cas_config", None) self.cas_enabled = cas_config and cas_config.get("enabled", True) @@ -51,7 +52,7 @@ class CasConfig(Config): self.cas_displayname_attribute = None self.cas_required_attributes = [] - def generate_config_section(self, config_dir_path, server_name, **kwargs): + def generate_config_section(self, config_dir_path, server_name, **kwargs) -> str: return """\ # Enable Central Authentication Service (CAS) for registration and login. # diff --git a/synapse/config/database.py b/synapse/config/database.py index 651e31b576..06ccf15cd9 100644 --- a/synapse/config/database.py +++ b/synapse/config/database.py @@ -1,5 +1,5 @@ # Copyright 2014-2016 OpenMarket Ltd -# Copyright 2020 The Matrix.org Foundation C.I.C. +# Copyright 2020-2021 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,6 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import argparse import logging import os @@ -119,7 +120,7 @@ class DatabaseConfig(Config): self.databases = [] - def read_config(self, config, **kwargs): + def read_config(self, config, **kwargs) -> None: # We *experimentally* support specifying multiple databases via the # `databases` key. This is a map from a label to database config in the # same format as the `database` config option, plus an extra @@ -163,12 +164,12 @@ class DatabaseConfig(Config): self.databases = [DatabaseConnectionConfig("master", database_config)] self.set_databasepath(database_path) - def generate_config_section(self, data_dir_path, **kwargs): + def generate_config_section(self, data_dir_path, **kwargs) -> str: return DEFAULT_CONFIG % { "database_path": os.path.join(data_dir_path, "homeserver.db") } - def read_arguments(self, args): + def read_arguments(self, args: argparse.Namespace) -> None: """ Cases for the cli input: - If no databases are configured and no database_path is set, raise. @@ -194,7 +195,7 @@ class DatabaseConfig(Config): else: logger.warning(NON_SQLITE_DATABASE_PATH_WARNING) - def set_databasepath(self, database_path): + def set_databasepath(self, database_path: str) -> None: if database_path != ":memory:": database_path = self.abspath(database_path) @@ -202,7 +203,7 @@ class DatabaseConfig(Config): self.databases[0].config["args"]["database"] = database_path @staticmethod - def add_arguments(parser): + def add_arguments(parser: argparse.ArgumentParser) -> None: db_group = parser.add_argument_group("database") db_group.add_argument( "-d", diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 63aab0babe..ea69b9bd9b 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -1,4 +1,5 @@ # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,7 +19,7 @@ import os import sys import threading from string import Template -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any, Dict, Optional import yaml from zope.interface import implementer @@ -40,6 +41,7 @@ from synapse.util.versionstring import get_version_string from ._base import Config, ConfigError if TYPE_CHECKING: + from synapse.config.homeserver import HomeServerConfig from synapse.server import HomeServer DEFAULT_LOG_CONFIG = Template( @@ -141,13 +143,13 @@ removed in Synapse 1.3.0. 
You should instead set up a separate log configuration class LoggingConfig(Config): section = "logging" - def read_config(self, config, **kwargs): + def read_config(self, config, **kwargs) -> None: if config.get("log_file"): raise ConfigError(LOG_FILE_ERROR) self.log_config = self.abspath(config.get("log_config")) self.no_redirect_stdio = config.get("no_redirect_stdio", False) - def generate_config_section(self, config_dir_path, server_name, **kwargs): + def generate_config_section(self, config_dir_path, server_name, **kwargs) -> str: log_config = os.path.join(config_dir_path, server_name + ".log.config") return ( """\ @@ -161,14 +163,14 @@ class LoggingConfig(Config): % locals() ) - def read_arguments(self, args): + def read_arguments(self, args: argparse.Namespace) -> None: if args.no_redirect_stdio is not None: self.no_redirect_stdio = args.no_redirect_stdio if args.log_file is not None: raise ConfigError(LOG_FILE_ERROR) @staticmethod - def add_arguments(parser): + def add_arguments(parser: argparse.ArgumentParser) -> None: logging_group = parser.add_argument_group("logging") logging_group.add_argument( "-n", @@ -197,7 +199,9 @@ class LoggingConfig(Config): log_config_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=log_file)) -def _setup_stdlib_logging(config, log_config_path, logBeginner: LogBeginner) -> None: +def _setup_stdlib_logging( + config: "HomeServerConfig", log_config_path: Optional[str], logBeginner: LogBeginner +) -> None: """ Set up Python standard library logging. """ @@ -230,7 +234,7 @@ def _setup_stdlib_logging(config, log_config_path, logBeginner: LogBeginner) -> log_metadata_filter = MetadataFilter({"server_name": config.server.server_name}) old_factory = logging.getLogRecordFactory() - def factory(*args, **kwargs): + def factory(*args: Any, **kwargs: Any) -> logging.LogRecord: record = old_factory(*args, **kwargs) log_context_filter.filter(record) log_metadata_filter.filter(record) @@ -297,7 +301,7 @@ def _load_logging_config(log_config_path: str) -> None: logging.config.dictConfig(log_config) -def _reload_logging_config(log_config_path): +def _reload_logging_config(log_config_path: Optional[str]) -> None: """ Reload the log configuration from the file and apply it. """ @@ -311,8 +315,8 @@ def _reload_logging_config(log_config_path): def setup_logging( hs: "HomeServer", - config, - use_worker_options=False, + config: "HomeServerConfig", + use_worker_options: bool = False, logBeginner: LogBeginner = globalLogBeginner, ) -> None: """ diff --git a/synapse/config/oidc.py b/synapse/config/oidc.py index 42f113cd24..79c400fe30 100644 --- a/synapse/config/oidc.py +++ b/synapse/config/oidc.py @@ -14,7 +14,7 @@ # limitations under the License. 
from collections import Counter -from typing import Collection, Iterable, List, Mapping, Optional, Tuple, Type +from typing import Any, Collection, Iterable, List, Mapping, Optional, Tuple, Type import attr @@ -36,7 +36,7 @@ LEGACY_USER_MAPPING_PROVIDER = "synapse.handlers.oidc_handler.JinjaOidcMappingPr class OIDCConfig(Config): section = "oidc" - def read_config(self, config, **kwargs): + def read_config(self, config, **kwargs) -> None: self.oidc_providers = tuple(_parse_oidc_provider_configs(config)) if not self.oidc_providers: return @@ -66,7 +66,7 @@ class OIDCConfig(Config): # OIDC is enabled if we have a provider return bool(self.oidc_providers) - def generate_config_section(self, config_dir_path, server_name, **kwargs): + def generate_config_section(self, config_dir_path, server_name, **kwargs) -> str: return """\ # List of OpenID Connect (OIDC) / OAuth 2.0 identity providers, for registration # and login. @@ -495,89 +495,89 @@ def _parse_oidc_config_dict( ) -@attr.s(slots=True, frozen=True) +@attr.s(slots=True, frozen=True, auto_attribs=True) class OidcProviderClientSecretJwtKey: # a pem-encoded signing key - key = attr.ib(type=str) + key: str # properties to include in the JWT header - jwt_header = attr.ib(type=Mapping[str, str]) + jwt_header: Mapping[str, str] # properties to include in the JWT payload. - jwt_payload = attr.ib(type=Mapping[str, str]) + jwt_payload: Mapping[str, str] -@attr.s(slots=True, frozen=True) +@attr.s(slots=True, frozen=True, auto_attribs=True) class OidcProviderConfig: # a unique identifier for this identity provider. Used in the 'user_external_ids' # table, as well as the query/path parameter used in the login protocol. - idp_id = attr.ib(type=str) + idp_id: str # user-facing name for this identity provider. - idp_name = attr.ib(type=str) + idp_name: str # Optional MXC URI for icon for this IdP. - idp_icon = attr.ib(type=Optional[str]) + idp_icon: Optional[str] # Optional brand identifier for this IdP. - idp_brand = attr.ib(type=Optional[str]) + idp_brand: Optional[str] # whether the OIDC discovery mechanism is used to discover endpoints - discover = attr.ib(type=bool) + discover: bool # the OIDC issuer. Used to validate tokens and (if discovery is enabled) to # discover the provider's endpoints. - issuer = attr.ib(type=str) + issuer: str # oauth2 client id to use - client_id = attr.ib(type=str) + client_id: str # oauth2 client secret to use. if `None`, use client_secret_jwt_key to generate # a secret. - client_secret = attr.ib(type=Optional[str]) + client_secret: Optional[str] # key to use to construct a JWT to use as a client secret. May be `None` if # `client_secret` is set. - client_secret_jwt_key = attr.ib(type=Optional[OidcProviderClientSecretJwtKey]) + client_secret_jwt_key: Optional[OidcProviderClientSecretJwtKey] # auth method to use when exchanging the token. # Valid values are 'client_secret_basic', 'client_secret_post' and # 'none'. - client_auth_method = attr.ib(type=str) + client_auth_method: str # list of scopes to request - scopes = attr.ib(type=Collection[str]) + scopes: Collection[str] # the oauth2 authorization endpoint. Required if discovery is disabled. - authorization_endpoint = attr.ib(type=Optional[str]) + authorization_endpoint: Optional[str] # the oauth2 token endpoint. Required if discovery is disabled. - token_endpoint = attr.ib(type=Optional[str]) + token_endpoint: Optional[str] # the OIDC userinfo endpoint. Required if discovery is disabled and the # "openid" scope is not requested. 
- userinfo_endpoint = attr.ib(type=Optional[str]) + userinfo_endpoint: Optional[str] # URI where to fetch the JWKS. Required if discovery is disabled and the # "openid" scope is used. - jwks_uri = attr.ib(type=Optional[str]) + jwks_uri: Optional[str] # Whether to skip metadata verification - skip_verification = attr.ib(type=bool) + skip_verification: bool # Whether to fetch the user profile from the userinfo endpoint. Valid # values are: "auto" or "userinfo_endpoint". - user_profile_method = attr.ib(type=str) + user_profile_method: str # whether to allow a user logging in via OIDC to match a pre-existing account # instead of failing - allow_existing_users = attr.ib(type=bool) + allow_existing_users: bool # the class of the user mapping provider - user_mapping_provider_class = attr.ib(type=Type) + user_mapping_provider_class: Type # the config of the user mapping provider - user_mapping_provider_config = attr.ib() + user_mapping_provider_config: Any # required attributes to require in userinfo to allow login/registration - attribute_requirements = attr.ib(type=List[SsoAttributeRequirement]) + attribute_requirements: List[SsoAttributeRequirement] diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 1ddad7cb70..47853199f4 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -1,4 +1,5 @@ # Copyright 2015, 2016 OpenMarket Ltd +# Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,6 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import argparse from typing import Optional from synapse.api.constants import RoomCreationPreset @@ -362,7 +364,7 @@ class RegistrationConfig(Config): ) @staticmethod - def add_arguments(parser): + def add_arguments(parser: argparse.ArgumentParser) -> None: reg_group = parser.add_argument_group("registration") reg_group.add_argument( "--enable-registration", @@ -371,6 +373,6 @@ class RegistrationConfig(Config): help="Enable registration for new users.", ) - def read_arguments(self, args): + def read_arguments(self, args: argparse.Namespace) -> None: if args.enable_registration is not None: self.enable_registration = strtobool(str(args.enable_registration)) diff --git a/synapse/config/repository.py b/synapse/config/repository.py index 69906a98d4..b129b9dd68 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -15,11 +15,12 @@ import logging import os from collections import namedtuple -from typing import Dict, List +from typing import Dict, List, Tuple from urllib.request import getproxies_environment # type: ignore from synapse.config.server import DEFAULT_IP_RANGE_BLACKLIST, generate_ip_set from synapse.python_dependencies import DependencyException, check_requirements +from synapse.types import JsonDict from synapse.util.module_loader import load_module from ._base import Config, ConfigError @@ -57,7 +58,9 @@ MediaStorageProviderConfig = namedtuple( ) -def parse_thumbnail_requirements(thumbnail_sizes): +def parse_thumbnail_requirements( + thumbnail_sizes: List[JsonDict], +) -> Dict[str, Tuple[ThumbnailRequirement, ...]]: """Takes a list of dictionaries with "width", "height", and "method" keys and creates a map from image media types to the thumbnail size, thumbnailing method, and thumbnail media type to precalculate @@ -69,7 +72,7 @@ def parse_thumbnail_requirements(thumbnail_sizes): Dictionary mapping from media type string to list of ThumbnailRequirement tuples. """ - requirements: Dict[str, List] = {} + requirements: Dict[str, List[ThumbnailRequirement]] = {} for size in thumbnail_sizes: width = size["width"] height = size["height"] diff --git a/synapse/config/saml2.py b/synapse/config/saml2.py index ba2b0905ff..ec9d9f65e7 100644 --- a/synapse/config/saml2.py +++ b/synapse/config/saml2.py @@ -1,5 +1,5 @@ # Copyright 2018 New Vector Ltd -# Copyright 2019 The Matrix.org Foundation C.I.C. +# Copyright 2019-2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,10 +14,11 @@ # limitations under the License. import logging -from typing import Any, List +from typing import Any, List, Set from synapse.config.sso import SsoAttributeRequirement from synapse.python_dependencies import DependencyException, check_requirements +from synapse.types import JsonDict from synapse.util.module_loader import load_module, load_python_module from ._base import Config, ConfigError @@ -33,7 +34,7 @@ LEGACY_USER_MAPPING_PROVIDER = ( ) -def _dict_merge(merge_dict, into_dict): +def _dict_merge(merge_dict: dict, into_dict: dict) -> None: """Do a deep merge of two dicts Recursively merges `merge_dict` into `into_dict`: @@ -43,8 +44,8 @@ def _dict_merge(merge_dict, into_dict): the value from `merge_dict`. 
Args: - merge_dict (dict): dict to merge - into_dict (dict): target dict + merge_dict: dict to merge + into_dict: target dict to be modified """ for k, v in merge_dict.items(): if k not in into_dict: @@ -64,7 +65,7 @@ def _dict_merge(merge_dict, into_dict): class SAML2Config(Config): section = "saml2" - def read_config(self, config, **kwargs): + def read_config(self, config, **kwargs) -> None: self.saml2_enabled = False saml2_config = config.get("saml2_config") @@ -183,8 +184,8 @@ class SAML2Config(Config): ) def _default_saml_config_dict( - self, required_attributes: set, optional_attributes: set - ): + self, required_attributes: Set[str], optional_attributes: Set[str] + ) -> JsonDict: """Generate a configuration dictionary with required and optional attributes that will be needed to process new user registration @@ -195,7 +196,7 @@ class SAML2Config(Config): additional information to Synapse user accounts, but are not required Returns: - dict: A SAML configuration dictionary + A SAML configuration dictionary """ import saml2 @@ -222,7 +223,7 @@ class SAML2Config(Config): }, } - def generate_config_section(self, config_dir_path, server_name, **kwargs): + def generate_config_section(self, config_dir_path, server_name, **kwargs) -> str: return """\ ## Single sign-on integration ## diff --git a/synapse/config/server.py b/synapse/config/server.py index 8445e9dd05..ba5b954263 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import argparse import itertools import logging import os.path @@ -27,6 +28,7 @@ from netaddr import AddrFormatError, IPNetwork, IPSet from twisted.conch.ssh.keys import Key from synapse.api.room_versions import KNOWN_ROOM_VERSIONS +from synapse.types import JsonDict from synapse.util.module_loader import load_module from synapse.util.stringutils import parse_and_validate_server_name @@ -1223,7 +1225,7 @@ class ServerConfig(Config): % locals() ) - def read_arguments(self, args): + def read_arguments(self, args: argparse.Namespace) -> None: if args.manhole is not None: self.manhole = args.manhole if args.daemonize is not None: @@ -1232,7 +1234,7 @@ class ServerConfig(Config): self.print_pidfile = args.print_pidfile @staticmethod - def add_arguments(parser): + def add_arguments(parser: argparse.ArgumentParser) -> None: server_group = parser.add_argument_group("server") server_group.add_argument( "-D", @@ -1274,14 +1276,16 @@ class ServerConfig(Config): ) -def is_threepid_reserved(reserved_threepids, threepid): +def is_threepid_reserved( + reserved_threepids: List[JsonDict], threepid: JsonDict +) -> bool: """Check the threepid against the reserved threepid config Args: - reserved_threepids([dict]) - list of reserved threepids - threepid(dict) - The threepid to test for + reserved_threepids: List of reserved threepids + threepid: The threepid to test for Returns: - boolean Is the threepid undertest reserved_user + Is the threepid undertest reserved_user """ for tp in reserved_threepids: @@ -1290,7 +1294,9 @@ def is_threepid_reserved(reserved_threepids, threepid): return False -def read_gc_thresholds(thresholds): +def read_gc_thresholds( + thresholds: Optional[List[Any]], +) -> Optional[Tuple[int, int, int]]: """Reads the three integer thresholds for garbage collection. Ensures that the thresholds are integers if thresholds are supplied. 
""" diff --git a/synapse/config/sso.py b/synapse/config/sso.py index 60aacb13ea..e4a4243261 100644 --- a/synapse/config/sso.py +++ b/synapse/config/sso.py @@ -1,4 +1,4 @@ -# Copyright 2020 The Matrix.org Foundation C.I.C. +# Copyright 2020-2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -29,13 +29,13 @@ https://matrix-org.github.io/synapse/latest/templates.html ---------------------------------------------------------------------------------------""" -@attr.s(frozen=True) +@attr.s(frozen=True, auto_attribs=True) class SsoAttributeRequirement: """Object describing a single requirement for SSO attributes.""" - attribute = attr.ib(type=str) + attribute: str # If a value is not given, than the attribute must simply exist. - value = attr.ib(type=Optional[str]) + value: Optional[str] JSON_SCHEMA = { "type": "object", @@ -49,7 +49,7 @@ class SSOConfig(Config): section = "sso" - def read_config(self, config, **kwargs): + def read_config(self, config, **kwargs) -> None: sso_config: Dict[str, Any] = config.get("sso") or {} # The sso-specific template_dir @@ -106,7 +106,7 @@ class SSOConfig(Config): ) self.sso_client_whitelist.append(login_fallback_url) - def generate_config_section(self, **kwargs): + def generate_config_section(self, **kwargs) -> str: return """\ # Additional settings to use with single-sign on systems such as OpenID Connect, # SAML2 and CAS. diff --git a/synapse/config/workers.py b/synapse/config/workers.py index 4507992031..576f519188 100644 --- a/synapse/config/workers.py +++ b/synapse/config/workers.py @@ -1,4 +1,5 @@ # Copyright 2016 OpenMarket Ltd +# Copyright 2021 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import argparse from typing import List, Union import attr @@ -343,7 +345,7 @@ class WorkerConfig(Config): #worker_replication_secret: "" """ - def read_arguments(self, args): + def read_arguments(self, args: argparse.Namespace) -> None: # We support a bunch of command line arguments that override options in # the config. A lot of these options have a worker_* prefix when running # on workers so we also have to override them when command line options -- cgit 1.5.1