From 30a5076da8ad776c150ad2745b5f34b4446012e0 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Thu, 1 Jun 2023 21:27:18 -0500 Subject: Log when events are (unexpectedly) filtered out of responses in tests (#14213) See https://github.com/matrix-org/synapse/pull/14095#discussion_r990335492 This is useful because when see that a relevant event is an `outlier` or `soft-failed`, then that's a good unexpected indicator explaining why it's not showing up. `filter_events_for_client` is used in `/sync`, `/messages`, `/context` which are all common end-to-end assertion touch points (also notifications, relations). --- synapse/visibility.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) (limited to 'synapse/visibility.py') diff --git a/synapse/visibility.py b/synapse/visibility.py index 468e22f8f6..fc71dc92a4 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -41,7 +41,7 @@ from synapse.types.state import StateFilter from synapse.util import Clock logger = logging.getLogger(__name__) - +filtered_event_logger = logging.getLogger("synapse.visibility.filtered_event_debug") VISIBILITY_PRIORITY = ( HistoryVisibility.WORLD_READABLE, @@ -97,8 +97,8 @@ async def filter_events_for_client( events_before_filtering = events events = [e for e in events if not e.internal_metadata.is_soft_failed()] if len(events_before_filtering) != len(events): - if logger.isEnabledFor(logging.DEBUG): - logger.debug( + if filtered_event_logger.isEnabledFor(logging.DEBUG): + filtered_event_logger.debug( "filter_events_for_client: Filtered out soft-failed events: Before=%s, After=%s", [event.event_id for event in events_before_filtering], [event.event_id for event in events], @@ -319,7 +319,7 @@ def _check_client_allowed_to_see_event( _check_filter_send_to_client(event, clock, retention_policy, sender_ignored) == _CheckFilter.DENIED ): - logger.debug( + filtered_event_logger.debug( "_check_client_allowed_to_see_event(event=%s): Filtered out event because `_check_filter_send_to_client` returned `_CheckFilter.DENIED`", event.event_id, ) @@ -341,7 +341,7 @@ def _check_client_allowed_to_see_event( ) return event - logger.debug( + filtered_event_logger.debug( "_check_client_allowed_to_see_event(event=%s): Filtered out event because it's an outlier", event.event_id, ) @@ -367,7 +367,7 @@ def _check_client_allowed_to_see_event( membership_result = _check_membership(user_id, event, visibility, state, is_peeking) if not membership_result.allowed: - logger.debug( + filtered_event_logger.debug( "_check_client_allowed_to_see_event(event=%s): Filtered out event because the user can't see the event because of their membership, membership_result.allowed=%s membership_result.joined=%s", event.event_id, membership_result.allowed, @@ -378,7 +378,7 @@ def _check_client_allowed_to_see_event( # If the sender has been erased and the user was not joined at the time, we # must only return the redacted form. if sender_erased and not membership_result.joined: - logger.debug( + filtered_event_logger.debug( "_check_client_allowed_to_see_event(event=%s): Returning pruned event because `sender_erased` and the user was not joined at the time", event.event_id, ) -- cgit 1.5.1 From ad3f43be9a597dd4fdf59e0a95e4630e7b9502fe Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 15 Aug 2023 08:11:20 -0400 Subject: Run pyupgrade for python 3.7 & 3.8. 
(#16110) --- changelog.d/16110.misc | 1 + contrib/cmdclient/console.py | 2 +- docker/configure_workers_and_start.py | 2 +- docker/start.py | 2 +- scripts-dev/build_debian_packages.py | 2 +- scripts-dev/check_schema_delta.py | 2 +- scripts-dev/federation_client.py | 2 +- scripts-dev/release.py | 1 - scripts-dev/sign_json.py | 2 +- synapse/__init__.py | 8 +++- synapse/_scripts/synapse_port_db.py | 6 +-- synapse/_scripts/update_synapse_database.py | 2 +- synapse/api/constants.py | 3 +- synapse/handlers/presence.py | 2 +- synapse/handlers/sso.py | 5 ++- synapse/handlers/stats.py | 12 ++++-- synapse/handlers/sync.py | 8 ++-- synapse/logging/_remote.py | 3 +- .../module_api/callbacks/spamchecker_callbacks.py | 48 ++++++---------------- synapse/replication/tcp/handler.py | 2 +- synapse/storage/databases/main/filtering.py | 3 +- synapse/storage/databases/main/keys.py | 2 +- synapse/storage/databases/main/stats.py | 3 +- synapse/storage/engines/_base.py | 2 +- synapse/storage/prepare_database.py | 12 +++++- synapse/types/__init__.py | 3 +- synapse/util/async_helpers.py | 3 +- synapse/util/macaroons.py | 2 +- synapse/util/ratelimitutils.py | 2 +- synapse/visibility.py | 2 +- tests/app/test_phone_stats_home.py | 2 +- tests/crypto/test_keyring.py | 2 +- .../federation/test_matrix_federation_agent.py | 2 +- tests/module_api/test_api.py | 2 +- tests/replication/test_multi_media_repo.py | 2 +- tests/rest/client/test_redactions.py | 10 ++--- tests/rest/client/test_relations.py | 38 ++++++++--------- tests/rest/client/test_rooms.py | 6 +-- tests/server.py | 3 +- tests/storage/test_appservice.py | 6 +-- tests/storage/test_main.py | 2 +- tests/storage/test_room_search.py | 8 ++-- tests/test_visibility.py | 2 +- 43 files changed, 113 insertions(+), 121 deletions(-) create mode 100644 changelog.d/16110.misc (limited to 'synapse/visibility.py') diff --git a/changelog.d/16110.misc b/changelog.d/16110.misc new file mode 100644 index 0000000000..68efe86ddc --- /dev/null +++ b/changelog.d/16110.misc @@ -0,0 +1 @@ +Run `pyupgrade` for Python 3.8+. 
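The debug lines added by the first patch above (#14213) go to a dedicated `synapse.visibility.filtered_event_debug` logger rather than the module-level `synapse.visibility` logger, so they stay silent unless that logger is explicitly enabled. A minimal sketch of switching it on while debugging a test (illustrative only, not part of either patch):

```python
import logging

# Keep the rest of the log at INFO...
logging.basicConfig(level=logging.INFO)

# ...but enable the dedicated logger added in #14213 so that messages about
# soft-failed or outlier events being dropped from /sync, /messages and
# /context responses become visible.
logging.getLogger("synapse.visibility.filtered_event_debug").setLevel(logging.DEBUG)
```

In a YAML log config the equivalent would be a `loggers:` entry for that name with `level: DEBUG`.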
diff --git a/contrib/cmdclient/console.py b/contrib/cmdclient/console.py index 895b2a7af1..710fe25699 100755 --- a/contrib/cmdclient/console.py +++ b/contrib/cmdclient/console.py @@ -769,7 +769,7 @@ def main(server_url, identity_server_url, username, token, config_path): global CONFIG_JSON CONFIG_JSON = config_path # bit cheeky, but just overwrite the global try: - with open(config_path, "r") as config: + with open(config_path) as config: syn_cmd.config = json.load(config) try: http_client.verbose = "on" == syn_cmd.config["verbose"] diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py index dc824038b5..400a7515aa 100755 --- a/docker/configure_workers_and_start.py +++ b/docker/configure_workers_and_start.py @@ -861,7 +861,7 @@ def generate_worker_files( # Then a worker config file convert( "/conf/worker.yaml.j2", - "/conf/workers/{name}.yaml".format(name=worker_name), + f"/conf/workers/{worker_name}.yaml", **worker_config, worker_log_config_filepath=log_config_filepath, using_unix_sockets=using_unix_sockets, diff --git a/docker/start.py b/docker/start.py index ebcc599f04..aebc7e4aaa 100755 --- a/docker/start.py +++ b/docker/start.py @@ -82,7 +82,7 @@ def generate_config_from_template( with open(filename) as handle: value = handle.read() else: - log("Generating a random secret for {}".format(secret)) + log(f"Generating a random secret for {secret}") value = codecs.encode(os.urandom(32), "hex").decode() with open(filename, "w") as handle: handle.write(value) diff --git a/scripts-dev/build_debian_packages.py b/scripts-dev/build_debian_packages.py index bb89ba581c..c03e3418c0 100755 --- a/scripts-dev/build_debian_packages.py +++ b/scripts-dev/build_debian_packages.py @@ -47,7 +47,7 @@ can be passed on the commandline for debugging. projdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) -class Builder(object): +class Builder: def __init__( self, redirect_stdout: bool = False, diff --git a/scripts-dev/check_schema_delta.py b/scripts-dev/check_schema_delta.py index fee4a8bd3d..467be96fdf 100755 --- a/scripts-dev/check_schema_delta.py +++ b/scripts-dev/check_schema_delta.py @@ -43,7 +43,7 @@ def main(force_colors: bool) -> None: diffs: List[git.Diff] = repo.remote().refs.develop.commit.diff(None) # Get the schema version of the local file to check against current schema on develop - with open("synapse/storage/schema/__init__.py", "r") as file: + with open("synapse/storage/schema/__init__.py") as file: local_schema = file.read() new_locals: Dict[str, Any] = {} exec(local_schema, new_locals) diff --git a/scripts-dev/federation_client.py b/scripts-dev/federation_client.py index 63f0b25ddd..5ad334b4d8 100755 --- a/scripts-dev/federation_client.py +++ b/scripts-dev/federation_client.py @@ -247,7 +247,7 @@ def main() -> None: def read_args_from_config(args: argparse.Namespace) -> None: - with open(args.config, "r") as fh: + with open(args.config) as fh: config = yaml.safe_load(fh) if not args.server_name: diff --git a/scripts-dev/release.py b/scripts-dev/release.py index 89ffba8d92..4ac8eaa889 100755 --- a/scripts-dev/release.py +++ b/scripts-dev/release.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright 2020 The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/scripts-dev/sign_json.py b/scripts-dev/sign_json.py index bb217799fb..00cbaf68f5 100755 --- a/scripts-dev/sign_json.py +++ b/scripts-dev/sign_json.py @@ -145,7 +145,7 @@ Example usage: def read_args_from_config(args: argparse.Namespace) -> None: - with open(args.config, "r") as fh: + with open(args.config) as fh: config = yaml.safe_load(fh) if not args.server_name: args.server_name = config["server_name"] diff --git a/synapse/__init__.py b/synapse/__init__.py index 6c1801862b..2f9c22a833 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -25,7 +25,11 @@ from synapse.util.rust import check_rust_lib_up_to_date from synapse.util.stringutils import strtobool # Check that we're not running on an unsupported Python version. -if sys.version_info < (3, 8): +# +# Note that we use an (unneeded) variable here so that pyupgrade doesn't nuke the +# if-statement completely. +py_version = sys.version_info +if py_version < (3, 8): print("Synapse requires Python 3.8 or above.") sys.exit(1) @@ -78,7 +82,7 @@ try: except ImportError: pass -import synapse.util +import synapse.util # noqa: E402 __version__ = synapse.util.SYNAPSE_VERSION diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index 1300aaf63c..49242800b8 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -1205,10 +1205,10 @@ class CursesProgress(Progress): self.total_processed = 0 self.total_remaining = 0 - super(CursesProgress, self).__init__() + super().__init__() def update(self, table: str, num_done: int) -> None: - super(CursesProgress, self).update(table, num_done) + super().update(table, num_done) self.total_processed = 0 self.total_remaining = 0 @@ -1304,7 +1304,7 @@ class TerminalProgress(Progress): """Just prints progress to the terminal""" def update(self, table: str, num_done: int) -> None: - super(TerminalProgress, self).update(table, num_done) + super().update(table, num_done) data = self.tables[table] diff --git a/synapse/_scripts/update_synapse_database.py b/synapse/_scripts/update_synapse_database.py index 0adf94bba6..f97aecf8d5 100644 --- a/synapse/_scripts/update_synapse_database.py +++ b/synapse/_scripts/update_synapse_database.py @@ -38,7 +38,7 @@ class MockHomeserver(HomeServer): DATASTORE_CLASS = DataStore # type: ignore [assignment] def __init__(self, config: HomeServerConfig): - super(MockHomeserver, self).__init__( + super().__init__( hostname=config.server.server_name, config=config, reactor=reactor, diff --git a/synapse/api/constants.py b/synapse/api/constants.py index dc32553d0c..bf311b636d 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -18,8 +18,7 @@ """Contains constants from the specification.""" import enum - -from typing_extensions import Final +from typing import Final # the max size of a (canonical-json-encoded) event MAX_PDU_SIZE = 65536 diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 11dff724e6..e8e9db4b91 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -32,6 +32,7 @@ from typing import ( Any, Callable, Collection, + ContextManager, Dict, Generator, Iterable, @@ -43,7 +44,6 @@ from typing import ( ) from prometheus_client import Counter -from typing_extensions import ContextManager import synapse.metrics from synapse.api.constants import EduTypes, EventTypes, Membership, PresenceState diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py index 
4d29328a74..e9a544e754 100644 --- a/synapse/handlers/sso.py +++ b/synapse/handlers/sso.py @@ -24,13 +24,14 @@ from typing import ( Iterable, List, Mapping, + NoReturn, Optional, Set, ) from urllib.parse import urlencode import attr -from typing_extensions import NoReturn, Protocol +from typing_extensions import Protocol from twisted.web.iweb import IRequest from twisted.web.server import Request @@ -791,7 +792,7 @@ class SsoHandler: if code != 200: raise Exception( - "GET request to download sso avatar image returned {}".format(code) + f"GET request to download sso avatar image returned {code}" ) # upload name includes hash of the image file's content so that we can diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py index 7cabf7980a..3dde19fc81 100644 --- a/synapse/handlers/stats.py +++ b/synapse/handlers/stats.py @@ -14,9 +14,15 @@ # limitations under the License. import logging from collections import Counter -from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Tuple - -from typing_extensions import Counter as CounterType +from typing import ( + TYPE_CHECKING, + Any, + Counter as CounterType, + Dict, + Iterable, + Optional, + Tuple, +) from synapse.api.constants import EventContentFields, EventTypes, Membership from synapse.metrics import event_processing_positions diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index c010405be6..8174248387 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1442,11 +1442,9 @@ class SyncHandler: # Now we have our list of joined room IDs, exclude as configured and freeze joined_room_ids = frozenset( - ( - room_id - for room_id in mutable_joined_room_ids - if room_id not in mutable_rooms_to_exclude - ) + room_id + for room_id in mutable_joined_room_ids + if room_id not in mutable_rooms_to_exclude ) logger.debug( diff --git a/synapse/logging/_remote.py b/synapse/logging/_remote.py index 5a61b21eaf..284fbac524 100644 --- a/synapse/logging/_remote.py +++ b/synapse/logging/_remote.py @@ -18,10 +18,9 @@ import traceback from collections import deque from ipaddress import IPv4Address, IPv6Address, ip_address from math import floor -from typing import Callable, Optional +from typing import Callable, Deque, Optional import attr -from typing_extensions import Deque from zope.interface import implementer from twisted.application.internet import ClientService diff --git a/synapse/module_api/callbacks/spamchecker_callbacks.py b/synapse/module_api/callbacks/spamchecker_callbacks.py index e191450323..32db7cce8d 100644 --- a/synapse/module_api/callbacks/spamchecker_callbacks.py +++ b/synapse/module_api/callbacks/spamchecker_callbacks.py @@ -426,9 +426,7 @@ class SpamCheckerModuleApiCallbacks: generally discouraged as it doesn't support internationalization. """ for callback in self._check_event_for_spam_callbacks: - with Measure( - self.clock, "{}.{}".format(callback.__module__, callback.__qualname__) - ): + with Measure(self.clock, f"{callback.__module__}.{callback.__qualname__}"): res = await delay_cancellation(callback(event)) if res is False or res == self.NOT_SPAM: # This spam-checker accepts the event. 
@@ -481,9 +479,7 @@ class SpamCheckerModuleApiCallbacks: True if the event should be silently dropped """ for callback in self._should_drop_federated_event_callbacks: - with Measure( - self.clock, "{}.{}".format(callback.__module__, callback.__qualname__) - ): + with Measure(self.clock, f"{callback.__module__}.{callback.__qualname__}"): res: Union[bool, str] = await delay_cancellation(callback(event)) if res: return res @@ -505,9 +501,7 @@ class SpamCheckerModuleApiCallbacks: NOT_SPAM if the operation is permitted, [Codes, Dict] otherwise. """ for callback in self._user_may_join_room_callbacks: - with Measure( - self.clock, "{}.{}".format(callback.__module__, callback.__qualname__) - ): + with Measure(self.clock, f"{callback.__module__}.{callback.__qualname__}"): res = await delay_cancellation(callback(user_id, room_id, is_invited)) # Normalize return values to `Codes` or `"NOT_SPAM"`. if res is True or res is self.NOT_SPAM: @@ -546,9 +540,7 @@ class SpamCheckerModuleApiCallbacks: NOT_SPAM if the operation is permitted, Codes otherwise. """ for callback in self._user_may_invite_callbacks: - with Measure( - self.clock, "{}.{}".format(callback.__module__, callback.__qualname__) - ): + with Measure(self.clock, f"{callback.__module__}.{callback.__qualname__}"): res = await delay_cancellation( callback(inviter_userid, invitee_userid, room_id) ) @@ -593,9 +585,7 @@ class SpamCheckerModuleApiCallbacks: NOT_SPAM if the operation is permitted, Codes otherwise. """ for callback in self._user_may_send_3pid_invite_callbacks: - with Measure( - self.clock, "{}.{}".format(callback.__module__, callback.__qualname__) - ): + with Measure(self.clock, f"{callback.__module__}.{callback.__qualname__}"): res = await delay_cancellation( callback(inviter_userid, medium, address, room_id) ) @@ -630,9 +620,7 @@ class SpamCheckerModuleApiCallbacks: userid: The ID of the user attempting to create a room """ for callback in self._user_may_create_room_callbacks: - with Measure( - self.clock, "{}.{}".format(callback.__module__, callback.__qualname__) - ): + with Measure(self.clock, f"{callback.__module__}.{callback.__qualname__}"): res = await delay_cancellation(callback(userid)) if res is True or res is self.NOT_SPAM: continue @@ -666,9 +654,7 @@ class SpamCheckerModuleApiCallbacks: """ for callback in self._user_may_create_room_alias_callbacks: - with Measure( - self.clock, "{}.{}".format(callback.__module__, callback.__qualname__) - ): + with Measure(self.clock, f"{callback.__module__}.{callback.__qualname__}"): res = await delay_cancellation(callback(userid, room_alias)) if res is True or res is self.NOT_SPAM: continue @@ -701,9 +687,7 @@ class SpamCheckerModuleApiCallbacks: room_id: The ID of the room that would be published """ for callback in self._user_may_publish_room_callbacks: - with Measure( - self.clock, "{}.{}".format(callback.__module__, callback.__qualname__) - ): + with Measure(self.clock, f"{callback.__module__}.{callback.__qualname__}"): res = await delay_cancellation(callback(userid, room_id)) if res is True or res is self.NOT_SPAM: continue @@ -742,9 +726,7 @@ class SpamCheckerModuleApiCallbacks: True if the user is spammy. """ for callback in self._check_username_for_spam_callbacks: - with Measure( - self.clock, "{}.{}".format(callback.__module__, callback.__qualname__) - ): + with Measure(self.clock, f"{callback.__module__}.{callback.__qualname__}"): # Make a copy of the user profile object to ensure the spam checker cannot # modify it. 
res = await delay_cancellation(callback(user_profile.copy())) @@ -776,9 +758,7 @@ class SpamCheckerModuleApiCallbacks: """ for callback in self._check_registration_for_spam_callbacks: - with Measure( - self.clock, "{}.{}".format(callback.__module__, callback.__qualname__) - ): + with Measure(self.clock, f"{callback.__module__}.{callback.__qualname__}"): behaviour = await delay_cancellation( callback(email_threepid, username, request_info, auth_provider_id) ) @@ -820,9 +800,7 @@ class SpamCheckerModuleApiCallbacks: """ for callback in self._check_media_file_for_spam_callbacks: - with Measure( - self.clock, "{}.{}".format(callback.__module__, callback.__qualname__) - ): + with Measure(self.clock, f"{callback.__module__}.{callback.__qualname__}"): res = await delay_cancellation(callback(file_wrapper, file_info)) # Normalize return values to `Codes` or `"NOT_SPAM"`. if res is False or res is self.NOT_SPAM: @@ -869,9 +847,7 @@ class SpamCheckerModuleApiCallbacks: """ for callback in self._check_login_for_spam_callbacks: - with Measure( - self.clock, "{}.{}".format(callback.__module__, callback.__qualname__) - ): + with Measure(self.clock, f"{callback.__module__}.{callback.__qualname__}"): res = await delay_cancellation( callback( user_id, diff --git a/synapse/replication/tcp/handler.py b/synapse/replication/tcp/handler.py index a2cabba7b1..38adcbe1d0 100644 --- a/synapse/replication/tcp/handler.py +++ b/synapse/replication/tcp/handler.py @@ -17,6 +17,7 @@ from typing import ( TYPE_CHECKING, Any, Awaitable, + Deque, Dict, Iterable, Iterator, @@ -29,7 +30,6 @@ from typing import ( ) from prometheus_client import Counter -from typing_extensions import Deque from twisted.internet.protocol import ReconnectingClientFactory diff --git a/synapse/storage/databases/main/filtering.py b/synapse/storage/databases/main/filtering.py index fff417f9e3..047de6283a 100644 --- a/synapse/storage/databases/main/filtering.py +++ b/synapse/storage/databases/main/filtering.py @@ -13,10 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, Optional, Tuple, Union, cast from canonicaljson import encode_canonical_json -from typing_extensions import TYPE_CHECKING from synapse.api.errors import Codes, StoreError, SynapseError from synapse.storage._base import SQLBaseStore, db_to_json diff --git a/synapse/storage/databases/main/keys.py b/synapse/storage/databases/main/keys.py index 1666e3c43b..cea32a034a 100644 --- a/synapse/storage/databases/main/keys.py +++ b/synapse/storage/databases/main/keys.py @@ -188,7 +188,7 @@ class KeyStore(SQLBaseStore): # invalidate takes a tuple corresponding to the params of # _get_server_keys_json. _get_server_keys_json only takes one # param, which is itself the 2-tuple (server_name, key_id). 
- self._get_server_keys_json.invalidate((((server_name, key_id),))) + self._get_server_keys_json.invalidate(((server_name, key_id),)) @cached() def _get_server_keys_json( diff --git a/synapse/storage/databases/main/stats.py b/synapse/storage/databases/main/stats.py index f34b7ce8f4..6298f0984d 100644 --- a/synapse/storage/databases/main/stats.py +++ b/synapse/storage/databases/main/stats.py @@ -19,6 +19,7 @@ from itertools import chain from typing import ( TYPE_CHECKING, Any, + Counter, Dict, Iterable, List, @@ -28,8 +29,6 @@ from typing import ( cast, ) -from typing_extensions import Counter - from twisted.internet.defer import DeferredLock from synapse.api.constants import Direction, EventContentFields, EventTypes, Membership diff --git a/synapse/storage/engines/_base.py b/synapse/storage/engines/_base.py index 0363cdc038..0b5b3bf03e 100644 --- a/synapse/storage/engines/_base.py +++ b/synapse/storage/engines/_base.py @@ -145,5 +145,5 @@ class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCM This is not provided by DBAPI2, and so needs engine-specific support. """ - with open(filepath, "rt") as f: + with open(filepath) as f: cls.executescript(cursor, f.read()) diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index 38b7abd801..31501fd573 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -16,10 +16,18 @@ import logging import os import re from collections import Counter -from typing import Collection, Generator, Iterable, List, Optional, TextIO, Tuple +from typing import ( + Collection, + Counter as CounterType, + Generator, + Iterable, + List, + Optional, + TextIO, + Tuple, +) import attr -from typing_extensions import Counter as CounterType from synapse.config.homeserver import HomeServerConfig from synapse.storage.database import LoggingDatabaseConnection, LoggingTransaction diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index 39a1ae4ac3..073f682aca 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -21,6 +21,7 @@ from typing import ( Any, ClassVar, Dict, + Final, List, Mapping, Match, @@ -38,7 +39,7 @@ import attr from immutabledict import immutabledict from signedjson.key import decode_verify_key_bytes from signedjson.types import VerifyKey -from typing_extensions import Final, TypedDict +from typing_extensions import TypedDict from unpaddedbase64 import decode_base64 from zope.interface import Interface diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index 4041e49e71..943ad54456 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -22,6 +22,7 @@ import logging from contextlib import asynccontextmanager from typing import ( Any, + AsyncContextManager, AsyncIterator, Awaitable, Callable, @@ -42,7 +43,7 @@ from typing import ( ) import attr -from typing_extensions import AsyncContextManager, Concatenate, Literal, ParamSpec +from typing_extensions import Concatenate, Literal, ParamSpec from twisted.internet import defer from twisted.internet.defer import CancelledError diff --git a/synapse/util/macaroons.py b/synapse/util/macaroons.py index 644c341e8c..db6c40a3e1 100644 --- a/synapse/util/macaroons.py +++ b/synapse/util/macaroons.py @@ -218,7 +218,7 @@ class MacaroonGenerator: # to avoid validating those as guest tokens, we explicitely verify if # the macaroon includes the "guest = true" caveat. 
is_guest = any( - (caveat.caveat_id == "guest = true" for caveat in macaroon.caveats) + caveat.caveat_id == "guest = true" for caveat in macaroon.caveats ) if not is_guest: diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index 2ad55ac13e..cde4a0780f 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -20,6 +20,7 @@ import typing from typing import ( Any, Callable, + ContextManager, DefaultDict, Dict, Iterator, @@ -33,7 +34,6 @@ from typing import ( from weakref import WeakSet from prometheus_client.core import Counter -from typing_extensions import ContextManager from twisted.internet import defer diff --git a/synapse/visibility.py b/synapse/visibility.py index fc71dc92a4..eac10f6438 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -17,6 +17,7 @@ from enum import Enum, auto from typing import ( Collection, Dict, + Final, FrozenSet, List, Mapping, @@ -27,7 +28,6 @@ from typing import ( ) import attr -from typing_extensions import Final from synapse.api.constants import EventTypes, HistoryVisibility, Membership from synapse.events import EventBase diff --git a/tests/app/test_phone_stats_home.py b/tests/app/test_phone_stats_home.py index 9305b758d7..93af614def 100644 --- a/tests/app/test_phone_stats_home.py +++ b/tests/app/test_phone_stats_home.py @@ -26,7 +26,7 @@ class PhoneHomeR30V2TestCase(HomeserverTestCase): def make_homeserver( self, reactor: ThreadedMemoryReactorClock, clock: Clock ) -> HomeServer: - hs = super(PhoneHomeR30V2TestCase, self).make_homeserver(reactor, clock) + hs = super().make_homeserver(reactor, clock) # We don't want our tests to actually report statistics, so check # that it's not enabled diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 7c63b2ea4c..fdfd4f911d 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -312,7 +312,7 @@ class KeyringTestCase(unittest.HomeserverTestCase): [("server9", get_key_id(key1))] ) result = self.get_success(d) - self.assertEquals(result[("server9", get_key_id(key1))].valid_until_ts, 0) + self.assertEqual(result[("server9", get_key_id(key1))].valid_until_ts, 0) def test_verify_json_dedupes_key_requests(self) -> None: """Two requests for the same key should be deduped.""" diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index aed2a4c07a..6a0b5fc0bd 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -514,7 +514,7 @@ class MatrixFederationAgentTests(unittest.TestCase): self.assertEqual(response.code, 200) # Send the body - request.write('{ "a": 1 }'.encode("ascii")) + request.write(b'{ "a": 1 }') request.finish() self.reactor.pump((0.1,)) diff --git a/tests/module_api/test_api.py b/tests/module_api/test_api.py index b3310abe1b..fe631d7ecb 100644 --- a/tests/module_api/test_api.py +++ b/tests/module_api/test_api.py @@ -757,7 +757,7 @@ class ModuleApiTestCase(BaseModuleApiTestCase): self.assertEqual(channel.json_body["creator"], user_id) # Check room alias. - self.assertEquals(room_alias, f"#foo-bar:{self.module_api.server_name}") + self.assertEqual(room_alias, f"#foo-bar:{self.module_api.server_name}") # Let's try a room with no alias. 
room_id, room_alias = self.get_success( diff --git a/tests/replication/test_multi_media_repo.py b/tests/replication/test_multi_media_repo.py index 1527b4a82d..6e78daa830 100644 --- a/tests/replication/test_multi_media_repo.py +++ b/tests/replication/test_multi_media_repo.py @@ -116,7 +116,7 @@ class MediaRepoShardTestCase(BaseMultiWorkerStreamTestCase): self.assertEqual(request.method, b"GET") self.assertEqual( request.path, - f"/_matrix/media/r0/download/{target}/{media_id}".encode("utf-8"), + f"/_matrix/media/r0/download/{target}/{media_id}".encode(), ) self.assertEqual( request.requestHeaders.getRawHeaders(b"host"), [target.encode("utf-8")] diff --git a/tests/rest/client/test_redactions.py b/tests/rest/client/test_redactions.py index 180b635ea6..4e0a387bd3 100644 --- a/tests/rest/client/test_redactions.py +++ b/tests/rest/client/test_redactions.py @@ -627,8 +627,8 @@ class RedactionsTestCase(HomeserverTestCase): redact_event = timeline[-1] self.assertEqual(redact_event["type"], EventTypes.Redaction) # The redacts key should be in the content and the redacts keys. - self.assertEquals(redact_event["content"]["redacts"], event_id) - self.assertEquals(redact_event["redacts"], event_id) + self.assertEqual(redact_event["content"]["redacts"], event_id) + self.assertEqual(redact_event["redacts"], event_id) # But it isn't actually part of the event. def get_event(txn: LoggingTransaction) -> JsonDict: @@ -642,10 +642,10 @@ class RedactionsTestCase(HomeserverTestCase): event_json = self.get_success( main_datastore.db_pool.runInteraction("get_event", get_event) ) - self.assertEquals(event_json["type"], EventTypes.Redaction) + self.assertEqual(event_json["type"], EventTypes.Redaction) if expect_content: self.assertNotIn("redacts", event_json) - self.assertEquals(event_json["content"]["redacts"], event_id) + self.assertEqual(event_json["content"]["redacts"], event_id) else: - self.assertEquals(event_json["redacts"], event_id) + self.assertEqual(event_json["redacts"], event_id) self.assertNotIn("redacts", event_json["content"]) diff --git a/tests/rest/client/test_relations.py b/tests/rest/client/test_relations.py index 75439416c1..9bfe913e45 100644 --- a/tests/rest/client/test_relations.py +++ b/tests/rest/client/test_relations.py @@ -129,7 +129,7 @@ class BaseRelationsTestCase(unittest.HomeserverTestCase): f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}", access_token=self.user_token, ) - self.assertEquals(200, channel.code, channel.json_body) + self.assertEqual(200, channel.code, channel.json_body) return [ev["event_id"] for ev in channel.json_body["chunk"]] def _get_bundled_aggregations(self) -> JsonDict: @@ -142,7 +142,7 @@ class BaseRelationsTestCase(unittest.HomeserverTestCase): f"/_matrix/client/v3/rooms/{self.room}/event/{self.parent_id}", access_token=self.user_token, ) - self.assertEquals(200, channel.code, channel.json_body) + self.assertEqual(200, channel.code, channel.json_body) return channel.json_body["unsigned"].get("m.relations", {}) def _find_event_in_chunk(self, events: List[JsonDict]) -> JsonDict: @@ -1602,7 +1602,7 @@ class RelationRedactionTestCase(BaseRelationsTestCase): f"/_matrix/client/v1/rooms/{self.room}/threads", access_token=self.user_token, ) - self.assertEquals(200, channel.code, channel.json_body) + self.assertEqual(200, channel.code, channel.json_body) threads = channel.json_body["chunk"] return [ ( @@ -1634,7 +1634,7 @@ class RelationRedactionTestCase(BaseRelationsTestCase): ################################################## # Check the test data is 
configured as expected. # ################################################## - self.assertEquals(self._get_related_events(), list(reversed(thread_replies))) + self.assertEqual(self._get_related_events(), list(reversed(thread_replies))) relations = self._get_bundled_aggregations() self.assertDictContainsSubset( {"count": 3, "current_user_participated": True}, @@ -1655,7 +1655,7 @@ class RelationRedactionTestCase(BaseRelationsTestCase): self._redact(thread_replies.pop()) # The thread should still exist, but the latest event should be updated. - self.assertEquals(self._get_related_events(), list(reversed(thread_replies))) + self.assertEqual(self._get_related_events(), list(reversed(thread_replies))) relations = self._get_bundled_aggregations() self.assertDictContainsSubset( {"count": 2, "current_user_participated": True}, @@ -1674,7 +1674,7 @@ class RelationRedactionTestCase(BaseRelationsTestCase): self._redact(thread_replies.pop(0)) # Nothing should have changed (except the thread count). - self.assertEquals(self._get_related_events(), thread_replies) + self.assertEqual(self._get_related_events(), thread_replies) relations = self._get_bundled_aggregations() self.assertDictContainsSubset( {"count": 1, "current_user_participated": True}, @@ -1691,11 +1691,11 @@ class RelationRedactionTestCase(BaseRelationsTestCase): # Redact the last remaining event. # #################################### self._redact(thread_replies.pop(0)) - self.assertEquals(thread_replies, []) + self.assertEqual(thread_replies, []) # The event should no longer be considered a thread. - self.assertEquals(self._get_related_events(), []) - self.assertEquals(self._get_bundled_aggregations(), {}) + self.assertEqual(self._get_related_events(), []) + self.assertEqual(self._get_bundled_aggregations(), {}) self.assertEqual(self._get_threads(), []) def test_redact_parent_edit(self) -> None: @@ -1749,8 +1749,8 @@ class RelationRedactionTestCase(BaseRelationsTestCase): # The relations are returned. event_ids = self._get_related_events() relations = self._get_bundled_aggregations() - self.assertEquals(event_ids, [related_event_id]) - self.assertEquals( + self.assertEqual(event_ids, [related_event_id]) + self.assertEqual( relations[RelationTypes.REFERENCE], {"chunk": [{"event_id": related_event_id}]}, ) @@ -1772,7 +1772,7 @@ class RelationRedactionTestCase(BaseRelationsTestCase): # The unredacted relation should still exist. event_ids = self._get_related_events() relations = self._get_bundled_aggregations() - self.assertEquals(len(event_ids), 1) + self.assertEqual(len(event_ids), 1) self.assertDictContainsSubset( { "count": 1, @@ -1816,7 +1816,7 @@ class ThreadsTestCase(BaseRelationsTestCase): f"/_matrix/client/v1/rooms/{self.room}/threads", access_token=self.user_token, ) - self.assertEquals(200, channel.code, channel.json_body) + self.assertEqual(200, channel.code, channel.json_body) threads = self._get_threads(channel.json_body) self.assertEqual(threads, [(thread_2, reply_2), (thread_1, reply_1)]) @@ -1829,7 +1829,7 @@ class ThreadsTestCase(BaseRelationsTestCase): f"/_matrix/client/v1/rooms/{self.room}/threads", access_token=self.user_token, ) - self.assertEquals(200, channel.code, channel.json_body) + self.assertEqual(200, channel.code, channel.json_body) # Tuple of (thread ID, latest event ID) for each thread. 
threads = self._get_threads(channel.json_body) self.assertEqual(threads, [(thread_1, reply_3), (thread_2, reply_2)]) @@ -1850,7 +1850,7 @@ class ThreadsTestCase(BaseRelationsTestCase): f"/_matrix/client/v1/rooms/{self.room}/threads?limit=1", access_token=self.user_token, ) - self.assertEquals(200, channel.code, channel.json_body) + self.assertEqual(200, channel.code, channel.json_body) thread_roots = [ev["event_id"] for ev in channel.json_body["chunk"]] self.assertEqual(thread_roots, [thread_2]) @@ -1864,7 +1864,7 @@ class ThreadsTestCase(BaseRelationsTestCase): f"/_matrix/client/v1/rooms/{self.room}/threads?limit=1&from={next_batch}", access_token=self.user_token, ) - self.assertEquals(200, channel.code, channel.json_body) + self.assertEqual(200, channel.code, channel.json_body) thread_roots = [ev["event_id"] for ev in channel.json_body["chunk"]] self.assertEqual(thread_roots, [thread_1], channel.json_body) @@ -1899,7 +1899,7 @@ class ThreadsTestCase(BaseRelationsTestCase): f"/_matrix/client/v1/rooms/{self.room}/threads", access_token=self.user_token, ) - self.assertEquals(200, channel.code, channel.json_body) + self.assertEqual(200, channel.code, channel.json_body) thread_roots = [ev["event_id"] for ev in channel.json_body["chunk"]] self.assertEqual( thread_roots, [thread_3, thread_2, thread_1], channel.json_body @@ -1911,7 +1911,7 @@ class ThreadsTestCase(BaseRelationsTestCase): f"/_matrix/client/v1/rooms/{self.room}/threads?include=participated", access_token=self.user_token, ) - self.assertEquals(200, channel.code, channel.json_body) + self.assertEqual(200, channel.code, channel.json_body) thread_roots = [ev["event_id"] for ev in channel.json_body["chunk"]] self.assertEqual(thread_roots, [thread_2, thread_1], channel.json_body) @@ -1943,6 +1943,6 @@ class ThreadsTestCase(BaseRelationsTestCase): f"/_matrix/client/v1/rooms/{self.room}/threads", access_token=self.user_token, ) - self.assertEquals(200, channel.code, channel.json_body) + self.assertEqual(200, channel.code, channel.json_body) thread_roots = [ev["event_id"] for ev in channel.json_body["chunk"]] self.assertEqual(thread_roots, [thread_1], channel.json_body) diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index 4f6347be15..88e579dc39 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -1362,7 +1362,7 @@ class RoomAppserviceTsParamTestCase(unittest.HomeserverTestCase): # Ensure the event was persisted with the correct timestamp. res = self.get_success(self.main_store.get_event(event_id)) - self.assertEquals(ts, res.origin_server_ts) + self.assertEqual(ts, res.origin_server_ts) def test_send_state_event_ts(self) -> None: """Test sending a state event with a custom timestamp.""" @@ -1384,7 +1384,7 @@ class RoomAppserviceTsParamTestCase(unittest.HomeserverTestCase): # Ensure the event was persisted with the correct timestamp. res = self.get_success(self.main_store.get_event(event_id)) - self.assertEquals(ts, res.origin_server_ts) + self.assertEqual(ts, res.origin_server_ts) def test_send_membership_event_ts(self) -> None: """Test sending a membership event with a custom timestamp.""" @@ -1406,7 +1406,7 @@ class RoomAppserviceTsParamTestCase(unittest.HomeserverTestCase): # Ensure the event was persisted with the correct timestamp. 
res = self.get_success(self.main_store.get_event(event_id)) - self.assertEquals(ts, res.origin_server_ts) + self.assertEqual(ts, res.origin_server_ts) class RoomJoinRatelimitTestCase(RoomBase): diff --git a/tests/server.py b/tests/server.py index c84a524e8c..481fe34c5c 100644 --- a/tests/server.py +++ b/tests/server.py @@ -26,6 +26,7 @@ from typing import ( Any, Awaitable, Callable, + Deque, Dict, Iterable, List, @@ -41,7 +42,7 @@ from typing import ( from unittest.mock import Mock import attr -from typing_extensions import Deque, ParamSpec +from typing_extensions import ParamSpec from zope.interface import implementer from twisted.internet import address, threads, udp diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index 5e1324a169..71302facd1 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -40,7 +40,7 @@ from tests.test_utils import make_awaitable class ApplicationServiceStoreTestCase(unittest.HomeserverTestCase): def setUp(self) -> None: - super(ApplicationServiceStoreTestCase, self).setUp() + super().setUp() self.as_yaml_files: List[str] = [] @@ -71,7 +71,7 @@ class ApplicationServiceStoreTestCase(unittest.HomeserverTestCase): except Exception: pass - super(ApplicationServiceStoreTestCase, self).tearDown() + super().tearDown() def _add_appservice( self, as_token: str, id: str, url: str, hs_token: str, sender: str @@ -110,7 +110,7 @@ class ApplicationServiceStoreTestCase(unittest.HomeserverTestCase): class ApplicationServiceTransactionStoreTestCase(unittest.HomeserverTestCase): def setUp(self) -> None: - super(ApplicationServiceTransactionStoreTestCase, self).setUp() + super().setUp() self.as_yaml_files: List[str] = [] self.hs.config.appservice.app_service_config_files = self.as_yaml_files diff --git a/tests/storage/test_main.py b/tests/storage/test_main.py index 27f450e22d..b8823d6993 100644 --- a/tests/storage/test_main.py +++ b/tests/storage/test_main.py @@ -20,7 +20,7 @@ from tests import unittest class DataStoreTestCase(unittest.HomeserverTestCase): def setUp(self) -> None: - super(DataStoreTestCase, self).setUp() + super().setUp() self.store = self.hs.get_datastores().main diff --git a/tests/storage/test_room_search.py b/tests/storage/test_room_search.py index f183c38477..52ffa91c81 100644 --- a/tests/storage/test_room_search.py +++ b/tests/storage/test_room_search.py @@ -318,14 +318,14 @@ class MessageSearchTest(HomeserverTestCase): result = self.get_success( store.search_msgs([self.room_id], query, ["content.body"]) ) - self.assertEquals( + self.assertEqual( result["count"], 1 if expect_to_contain else 0, f"expected '{query}' to match '{self.PHRASE}'" if expect_to_contain else f"'{query}' unexpectedly matched '{self.PHRASE}'", ) - self.assertEquals( + self.assertEqual( len(result["results"]), 1 if expect_to_contain else 0, "results array length should match count", @@ -336,14 +336,14 @@ class MessageSearchTest(HomeserverTestCase): result = self.get_success( store.search_rooms([self.room_id], query, ["content.body"], 10) ) - self.assertEquals( + self.assertEqual( result["count"], 1 if expect_to_contain else 0, f"expected '{query}' to match '{self.PHRASE}'" if expect_to_contain else f"'{query}' unexpectedly matched '{self.PHRASE}'", ) - self.assertEquals( + self.assertEqual( len(result["results"]), 1 if expect_to_contain else 0, "results array length should match count", diff --git a/tests/test_visibility.py b/tests/test_visibility.py index 9ed330f554..a46c29ddf4 100644 --- a/tests/test_visibility.py +++ 
b/tests/test_visibility.py @@ -31,7 +31,7 @@ TEST_ROOM_ID = "!TEST:ROOM" class FilterEventsForServerTestCase(unittest.HomeserverTestCase): def setUp(self) -> None: - super(FilterEventsForServerTestCase, self).setUp() + super().setUp() self.event_creation_handler = self.hs.get_event_creation_handler() self.event_builder_factory = self.hs.get_event_builder_factory() self._storage_controllers = self.hs.get_storage_controllers() -- cgit 1.5.1 From d38d0dffc94b6269ed7ff5163d60958be3e6c304 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 13 Sep 2023 07:57:19 -0400 Subject: Use StrCollection in additional places. (#16301) --- changelog.d/16301.misc | 1 + synapse/app/_base.py | 12 ++++---- synapse/config/_base.py | 3 +- synapse/events/__init__.py | 5 ++-- synapse/events/builder.py | 8 +++--- synapse/events/validator.py | 6 ++-- synapse/http/client.py | 5 ++-- synapse/http/servlet.py | 33 +++++++++++----------- synapse/metrics/__init__.py | 8 +++--- synapse/notifier.py | 6 ++-- synapse/rest/client/_base.py | 4 +-- synapse/state/__init__.py | 13 ++++----- synapse/state/v1.py | 5 ++-- synapse/state/v2.py | 7 ++--- synapse/storage/databases/main/event_federation.py | 4 +-- synapse/visibility.py | 6 ++-- 16 files changed, 59 insertions(+), 67 deletions(-) create mode 100644 changelog.d/16301.misc (limited to 'synapse/visibility.py') diff --git a/changelog.d/16301.misc b/changelog.d/16301.misc new file mode 100644 index 0000000000..93ceaeafc9 --- /dev/null +++ b/changelog.d/16301.misc @@ -0,0 +1 @@ +Improve type hints. diff --git a/synapse/app/_base.py b/synapse/app/_base.py index a94b57a671..9ac7e4313e 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -27,9 +27,7 @@ from typing import ( Any, Awaitable, Callable, - Collection, Dict, - Iterable, List, NoReturn, Optional, @@ -76,7 +74,7 @@ from synapse.module_api.callbacks.spamchecker_callbacks import load_legacy_spam_ from synapse.module_api.callbacks.third_party_event_rules_callbacks import ( load_legacy_third_party_event_rules, ) -from synapse.types import ISynapseReactor +from synapse.types import ISynapseReactor, StrCollection from synapse.util import SYNAPSE_VERSION from synapse.util.caches.lrucache import setup_expire_lru_cache_entries from synapse.util.daemonize import daemonize_process @@ -278,7 +276,7 @@ def register_start( reactor.callWhenRunning(lambda: defer.ensureDeferred(wrapper())) -def listen_metrics(bind_addresses: Iterable[str], port: int) -> None: +def listen_metrics(bind_addresses: StrCollection, port: int) -> None: """ Start Prometheus metrics server. 
""" @@ -315,7 +313,7 @@ def _set_prometheus_client_use_created_metrics(new_value: bool) -> None: def listen_manhole( - bind_addresses: Collection[str], + bind_addresses: StrCollection, port: int, manhole_settings: ManholeConfig, manhole_globals: dict, @@ -339,7 +337,7 @@ def listen_manhole( def listen_tcp( - bind_addresses: Collection[str], + bind_addresses: StrCollection, port: int, factory: ServerFactory, reactor: IReactorTCP = reactor, @@ -448,7 +446,7 @@ def listen_http( def listen_ssl( - bind_addresses: Collection[str], + bind_addresses: StrCollection, port: int, factory: ServerFactory, context_factory: IOpenSSLContextFactory, diff --git a/synapse/config/_base.py b/synapse/config/_base.py index 58856839e1..c5816105f4 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -26,7 +26,6 @@ from textwrap import dedent from typing import ( Any, ClassVar, - Collection, Dict, Iterable, Iterator, @@ -384,7 +383,7 @@ class RootConfig: config_classes: List[Type[Config]] = [] - def __init__(self, config_files: Collection[str] = ()): + def __init__(self, config_files: StrSequence = ()): # Capture absolute paths here, so we can reload config after we daemonize. self.config_files = [os.path.abspath(path) for path in config_files] diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 35257a3b1b..3c1777b7ec 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -25,7 +25,6 @@ from typing import ( Iterable, List, Optional, - Sequence, Tuple, Type, TypeVar, @@ -408,7 +407,7 @@ class EventBase(metaclass=abc.ABCMeta): def keys(self) -> Iterable[str]: return self._dict.keys() - def prev_event_ids(self) -> Sequence[str]: + def prev_event_ids(self) -> List[str]: """Returns the list of prev event IDs. The order matches the order specified in the event, though there is no meaning to it. @@ -553,7 +552,7 @@ class FrozenEventV2(EventBase): self._event_id = "$" + encode_base64(compute_event_reference_hash(self)[1]) return self._event_id - def prev_event_ids(self) -> Sequence[str]: + def prev_event_ids(self) -> List[str]: """Returns the list of prev event IDs. The order matches the order specified in the event, though there is no meaning to it. diff --git a/synapse/events/builder.py b/synapse/events/builder.py index 14ea0e6640..1165c017ba 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging -from typing import TYPE_CHECKING, Any, Collection, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union import attr from signedjson.types import SigningKey @@ -28,7 +28,7 @@ from synapse.event_auth import auth_types_for_event from synapse.events import EventBase, _EventInternalMetadata, make_event_from_dict from synapse.state import StateHandler from synapse.storage.databases.main import DataStore -from synapse.types import EventID, JsonDict +from synapse.types import EventID, JsonDict, StrCollection from synapse.types.state import StateFilter from synapse.util import Clock from synapse.util.stringutils import random_string @@ -103,7 +103,7 @@ class EventBuilder: async def build( self, - prev_event_ids: Collection[str], + prev_event_ids: StrCollection, auth_event_ids: Optional[List[str]], depth: Optional[int] = None, ) -> EventBase: @@ -136,7 +136,7 @@ class EventBuilder: format_version = self.room_version.event_format # The types of auth/prev events changes between event versions. - prev_events: Union[Collection[str], List[Tuple[str, Dict[str, str]]]] + prev_events: Union[StrCollection, List[Tuple[str, Dict[str, str]]]] auth_events: Union[List[str], List[Tuple[str, Dict[str, str]]]] if format_version == EventFormatVersions.ROOM_V1_V2: auth_events = await self._store.add_event_hashes(auth_event_ids) diff --git a/synapse/events/validator.py b/synapse/events/validator.py index 34625dd7a1..5da50cb0d2 100644 --- a/synapse/events/validator.py +++ b/synapse/events/validator.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import collections.abc -from typing import Iterable, List, Type, Union, cast +from typing import List, Type, Union, cast import jsonschema from pydantic import Field, StrictBool, StrictStr @@ -36,7 +36,7 @@ from synapse.events.utils import ( from synapse.federation.federation_server import server_matches_acl_event from synapse.http.servlet import validate_json_object from synapse.rest.models import RequestBodyModel -from synapse.types import EventID, JsonDict, RoomID, UserID +from synapse.types import EventID, JsonDict, RoomID, StrCollection, UserID class EventValidator: @@ -225,7 +225,7 @@ class EventValidator: self._ensure_state_event(event) - def _ensure_strings(self, d: JsonDict, keys: Iterable[str]) -> None: + def _ensure_strings(self, d: JsonDict, keys: StrCollection) -> None: for s in keys: if s not in d: raise SynapseError(400, "'%s' not in content" % (s,)) diff --git a/synapse/http/client.py b/synapse/http/client.py index ca2cdbc6e2..c750e03b36 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -78,7 +78,7 @@ from synapse.http.replicationagent import ReplicationAgent from synapse.http.types import QueryParams from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.logging.opentracing import set_tag, start_active_span, tags -from synapse.types import ISynapseReactor +from synapse.types import ISynapseReactor, StrSequence from synapse.util import json_decoder from synapse.util.async_helpers import timeout_deferred @@ -108,10 +108,9 @@ RawHeaders = Union[Mapping[str, "RawHeaderValue"], Mapping[bytes, "RawHeaderValu # the value actually has to be a List, but List is invariant so we can't specify that # the entries can either be Lists or bytes. 
RawHeaderValue = Union[ - List[str], + StrSequence, List[bytes], List[Union[str, bytes]], - Tuple[str, ...], Tuple[bytes, ...], Tuple[Union[str, bytes], ...], ] diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index fc62793628..5d79d31579 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -18,7 +18,6 @@ import logging from http import HTTPStatus from typing import ( TYPE_CHECKING, - Iterable, List, Mapping, Optional, @@ -38,7 +37,7 @@ from twisted.web.server import Request from synapse.api.errors import Codes, SynapseError from synapse.http import redact_uri from synapse.http.server import HttpServer -from synapse.types import JsonDict, RoomAlias, RoomID +from synapse.types import JsonDict, RoomAlias, RoomID, StrCollection from synapse.util import json_decoder if TYPE_CHECKING: @@ -340,7 +339,7 @@ def parse_string( name: str, default: str, *, - allowed_values: Optional[Iterable[str]] = None, + allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", ) -> str: ... @@ -352,7 +351,7 @@ def parse_string( name: str, *, required: Literal[True], - allowed_values: Optional[Iterable[str]] = None, + allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", ) -> str: ... @@ -365,7 +364,7 @@ def parse_string( *, default: Optional[str] = None, required: bool = False, - allowed_values: Optional[Iterable[str]] = None, + allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", ) -> Optional[str]: ... @@ -376,7 +375,7 @@ def parse_string( name: str, default: Optional[str] = None, required: bool = False, - allowed_values: Optional[Iterable[str]] = None, + allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", ) -> Optional[str]: """ @@ -485,7 +484,7 @@ def parse_enum( def _parse_string_value( value: bytes, - allowed_values: Optional[Iterable[str]], + allowed_values: Optional[StrCollection], name: str, encoding: str, ) -> str: @@ -511,7 +510,7 @@ def parse_strings_from_args( args: Mapping[bytes, Sequence[bytes]], name: str, *, - allowed_values: Optional[Iterable[str]] = None, + allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", ) -> Optional[List[str]]: ... @@ -523,7 +522,7 @@ def parse_strings_from_args( name: str, default: List[str], *, - allowed_values: Optional[Iterable[str]] = None, + allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", ) -> List[str]: ... @@ -535,7 +534,7 @@ def parse_strings_from_args( name: str, *, required: Literal[True], - allowed_values: Optional[Iterable[str]] = None, + allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", ) -> List[str]: ... @@ -548,7 +547,7 @@ def parse_strings_from_args( default: Optional[List[str]] = None, *, required: bool = False, - allowed_values: Optional[Iterable[str]] = None, + allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", ) -> Optional[List[str]]: ... @@ -559,7 +558,7 @@ def parse_strings_from_args( name: str, default: Optional[List[str]] = None, required: bool = False, - allowed_values: Optional[Iterable[str]] = None, + allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", ) -> Optional[List[str]]: """ @@ -610,7 +609,7 @@ def parse_string_from_args( name: str, default: Optional[str] = None, *, - allowed_values: Optional[Iterable[str]] = None, + allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", ) -> Optional[str]: ... 
@@ -623,7 +622,7 @@ def parse_string_from_args( default: Optional[str] = None, *, required: Literal[True], - allowed_values: Optional[Iterable[str]] = None, + allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", ) -> str: ... @@ -635,7 +634,7 @@ def parse_string_from_args( name: str, default: Optional[str] = None, required: bool = False, - allowed_values: Optional[Iterable[str]] = None, + allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", ) -> Optional[str]: ... @@ -646,7 +645,7 @@ def parse_string_from_args( name: str, default: Optional[str] = None, required: bool = False, - allowed_values: Optional[Iterable[str]] = None, + allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", ) -> Optional[str]: """ @@ -821,7 +820,7 @@ def parse_and_validate_json_object_from_request( return validate_json_object(content, model_type) -def assert_params_in_dict(body: JsonDict, required: Iterable[str]) -> None: +def assert_params_in_dict(body: JsonDict, required: StrCollection) -> None: absent = [] for k in required: if k not in body: diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 39fc629937..3cf2fbc3e2 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -25,7 +25,6 @@ from typing import ( Iterable, Mapping, Optional, - Sequence, Set, Tuple, Type, @@ -49,6 +48,7 @@ import synapse.metrics._reactor_metrics # noqa: F401 from synapse.metrics._gc import MIN_TIME_BETWEEN_GCS, install_gc_manager from synapse.metrics._twisted_exposition import MetricsResource, generate_latest from synapse.metrics._types import Collector +from synapse.types import StrSequence from synapse.util import SYNAPSE_VERSION logger = logging.getLogger(__name__) @@ -81,7 +81,7 @@ class LaterGauge(Collector): name: str desc: str - labels: Optional[Sequence[str]] = attr.ib(hash=False) + labels: Optional[StrSequence] = attr.ib(hash=False) # callback: should either return a value (if there are no labels for this metric), # or dict mapping from a label tuple to a value caller: Callable[ @@ -143,8 +143,8 @@ class InFlightGauge(Generic[MetricsEntry], Collector): self, name: str, desc: str, - labels: Sequence[str], - sub_metrics: Sequence[str], + labels: StrSequence, + sub_metrics: StrSequence, ): self.name = name self.desc = desc diff --git a/synapse/notifier.py b/synapse/notifier.py index 68115bca70..fc39e5c963 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -104,7 +104,7 @@ class _NotifierUserStream: def __init__( self, user_id: str, - rooms: Collection[str], + rooms: StrCollection, current_token: StreamToken, time_now_ms: int, ): @@ -457,7 +457,7 @@ class Notifier: stream_key: str, new_token: Union[int, RoomStreamToken], users: Optional[Collection[Union[str, UserID]]] = None, - rooms: Optional[Collection[str]] = None, + rooms: Optional[StrCollection] = None, ) -> None: """Used to inform listeners that something has happened event wise. 
@@ -529,7 +529,7 @@ class Notifier: user_id: str, timeout: int, callback: Callable[[StreamToken, StreamToken], Awaitable[T]], - room_ids: Optional[Collection[str]] = None, + room_ids: Optional[StrCollection] = None, from_token: StreamToken = StreamToken.START, ) -> T: """Wait until the callback returns a non empty response or the diff --git a/synapse/rest/client/_base.py b/synapse/rest/client/_base.py index 5c1c19e1f3..73c568ef75 100644 --- a/synapse/rest/client/_base.py +++ b/synapse/rest/client/_base.py @@ -20,14 +20,14 @@ from typing import Any, Awaitable, Callable, Iterable, Pattern, Tuple, TypeVar, from synapse.api.errors import InteractiveAuthIncompleteError from synapse.api.urls import CLIENT_API_PREFIX -from synapse.types import JsonDict +from synapse.types import JsonDict, StrCollection logger = logging.getLogger(__name__) def client_patterns( path_regex: str, - releases: Iterable[str] = ("r0", "v3"), + releases: StrCollection = ("r0", "v3"), unstable: bool = True, v1: bool = False, ) -> Iterable[Pattern]: diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index 1b91cf5eaa..e977ed1044 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -20,7 +20,6 @@ from typing import ( Any, Awaitable, Callable, - Collection, DefaultDict, Dict, FrozenSet, @@ -49,7 +48,7 @@ from synapse.logging.opentracing import tag_args, trace from synapse.replication.http.state import ReplicationUpdateCurrentStateRestServlet from synapse.state import v1, v2 from synapse.storage.databases.main.events_worker import EventRedactBehaviour -from synapse.types import StateMap +from synapse.types import StateMap, StrCollection from synapse.types.state import StateFilter from synapse.util.async_helpers import Linearizer from synapse.util.caches.expiringcache import ExpiringCache @@ -197,7 +196,7 @@ class StateHandler: async def compute_state_after_events( self, room_id: str, - event_ids: Collection[str], + event_ids: StrCollection, state_filter: Optional[StateFilter] = None, await_full_state: bool = True, ) -> StateMap[str]: @@ -231,7 +230,7 @@ class StateHandler: return await ret.get_state(self._state_storage_controller, state_filter) async def get_current_user_ids_in_room( - self, room_id: str, latest_event_ids: Collection[str] + self, room_id: str, latest_event_ids: StrCollection ) -> Set[str]: """ Get the users IDs who are currently in a room. @@ -256,7 +255,7 @@ class StateHandler: return await self.store.get_joined_user_ids_from_state(room_id, state) async def get_hosts_in_room_at_events( - self, room_id: str, event_ids: Collection[str] + self, room_id: str, event_ids: StrCollection ) -> FrozenSet[str]: """Get the hosts that were in a room at the given event ids @@ -470,7 +469,7 @@ class StateHandler: @trace @measure_func() async def resolve_state_groups_for_events( - self, room_id: str, event_ids: Collection[str], await_full_state: bool = True + self, room_id: str, event_ids: StrCollection, await_full_state: bool = True ) -> _StateCacheEntry: """Given a list of event_ids this method fetches the state at each event, resolves conflicts between them and returns them. 
@@ -882,7 +881,7 @@ class StateResolutionStore: store: "DataStore" def get_events( - self, event_ids: Collection[str], allow_rejected: bool = False + self, event_ids: StrCollection, allow_rejected: bool = False ) -> Awaitable[Dict[str, EventBase]]: """Get events from the database diff --git a/synapse/state/v1.py b/synapse/state/v1.py index 500e384695..c76a2f082e 100644 --- a/synapse/state/v1.py +++ b/synapse/state/v1.py @@ -17,7 +17,6 @@ import logging from typing import ( Awaitable, Callable, - Collection, Dict, Iterable, List, @@ -32,7 +31,7 @@ from synapse.api.constants import EventTypes from synapse.api.errors import AuthError from synapse.api.room_versions import RoomVersion from synapse.events import EventBase -from synapse.types import MutableStateMap, StateMap +from synapse.types import MutableStateMap, StateMap, StrCollection logger = logging.getLogger(__name__) @@ -45,7 +44,7 @@ async def resolve_events_with_store( room_version: RoomVersion, state_sets: Sequence[StateMap[str]], event_map: Optional[Dict[str, EventBase]], - state_map_factory: Callable[[Collection[str]], Awaitable[Dict[str, EventBase]]], + state_map_factory: Callable[[StrCollection], Awaitable[Dict[str, EventBase]]], ) -> StateMap[str]: """ Args: diff --git a/synapse/state/v2.py b/synapse/state/v2.py index 44c49274a9..1752f95db8 100644 --- a/synapse/state/v2.py +++ b/synapse/state/v2.py @@ -19,7 +19,6 @@ from typing import ( Any, Awaitable, Callable, - Collection, Dict, Generator, Iterable, @@ -39,7 +38,7 @@ from synapse.api.constants import EventTypes from synapse.api.errors import AuthError from synapse.api.room_versions import RoomVersion from synapse.events import EventBase -from synapse.types import MutableStateMap, StateMap +from synapse.types import MutableStateMap, StateMap, StrCollection logger = logging.getLogger(__name__) @@ -56,7 +55,7 @@ class StateResolutionStore(Protocol): # This is usually synapse.state.StateResolutionStore, but it's replaced with a # TestStateResolutionStore in tests. def get_events( - self, event_ids: Collection[str], allow_rejected: bool = False + self, event_ids: StrCollection, allow_rejected: bool = False ) -> Awaitable[Dict[str, EventBase]]: ... 
@@ -366,7 +365,7 @@ async def _get_auth_chain_difference( union = unpersisted_set_ids[0].union(*unpersisted_set_ids[1:]) intersection = unpersisted_set_ids[0].intersection(*unpersisted_set_ids[1:]) - auth_difference_unpersisted_part: Collection[str] = union - intersection + auth_difference_unpersisted_part: StrCollection = union - intersection else: auth_difference_unpersisted_part = () state_sets_ids = [set(state_set.values()) for state_set in state_sets] diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index fab7008a8f..09de8f55e2 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -47,7 +47,7 @@ from synapse.storage.database import ( from synapse.storage.databases.main.events_worker import EventsWorkerStore from synapse.storage.databases.main.signatures import SignatureWorkerStore from synapse.storage.engines import PostgresEngine, Sqlite3Engine -from synapse.types import JsonDict, StrCollection +from synapse.types import JsonDict, StrCollection, StrSequence from synapse.util import json_encoder from synapse.util.caches.descriptors import cached from synapse.util.caches.lrucache import LruCache @@ -1179,7 +1179,7 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas ) @cached(max_entries=5000, iterable=True) - async def get_latest_event_ids_in_room(self, room_id: str) -> Sequence[str]: + async def get_latest_event_ids_in_room(self, room_id: str) -> StrSequence: return await self.db_pool.simple_select_onecol( table="event_forward_extremities", keyvalues={"room_id": room_id}, diff --git a/synapse/visibility.py b/synapse/visibility.py index eac10f6438..f15fdd8314 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -36,7 +36,7 @@ from synapse.events.utils import prune_event from synapse.logging.opentracing import trace from synapse.storage.controllers import StorageControllers from synapse.storage.databases.main import DataStore -from synapse.types import RetentionPolicy, StateMap, get_domain_from_id +from synapse.types import RetentionPolicy, StateMap, StrCollection, get_domain_from_id from synapse.types.state import StateFilter from synapse.util import Clock @@ -150,12 +150,12 @@ async def filter_events_for_client( async def filter_event_for_clients_with_state( store: DataStore, - user_ids: Collection[str], + user_ids: StrCollection, event: EventBase, context: EventContext, is_peeking: bool = False, filter_send_to_client: bool = True, -) -> Collection[str]: +) -> StrCollection: """ Checks to see if an event is visible to the users in the list at the time of the event. -- cgit 1.5.1
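
Editor's note on the pattern above: this patch replaces `Iterable[str]` / `Collection[str]` / `Sequence[str]` annotations with Synapse's `StrCollection` and `StrSequence` aliases from `synapse/types/__init__.py`. The motivation is that a bare `str` is itself a `Sequence[str]` (of its own characters), so the broader annotations let a lone string type-check where a list of strings was intended. The sketch below is a minimal, self-contained illustration of the idea, not Synapse's actual code: the alias definitions are written to mirror Synapse's, and `parse_colour` is a hypothetical stand-in for helpers such as `parse_string`.

    from typing import AbstractSet, List, Optional, Tuple, Union

    # Union aliases that accept the common "collection of strings" shapes but
    # deliberately exclude `str` itself, so mypy rejects a stray bare string
    # where a list/tuple/set of strings was meant.
    StrCollection = Union[Tuple[str, ...], List[str], AbstractSet[str]]
    StrSequence = Union[Tuple[str, ...], List[str]]


    def parse_colour(
        value: str,
        allowed_values: Optional[StrCollection] = None,
    ) -> str:
        """Validate `value` against an optional allow-list (hypothetical helper)."""
        if allowed_values is not None and value not in allowed_values:
            raise ValueError(f"{value!r} is not one of {sorted(allowed_values)}")
        return value


    parse_colour("red", allowed_values=("red", "green"))  # OK: tuple accepted
    parse_colour("red", allowed_values={"red", "green"})  # OK: set accepted
    # parse_colour("red", allowed_values="red")
    #   ^ with Iterable[str] this type-checks but matches single characters;
    #     with StrCollection it is a static type error instead.

Roughly speaking, the patch uses `StrSequence` where ordering or indexing matters (for example metric label names) and `StrCollection` where membership tests and iteration are enough; that split is inferred from the hunks above rather than stated in the commit itself.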