Diffstat (limited to 'packages/overlays/matrix-synapse/patches/0021-Bump-ruff-from-0.7.3-to-0.11.10-18451.patch')
-rw-r--r--  packages/overlays/matrix-synapse/patches/0021-Bump-ruff-from-0.7.3-to-0.11.10-18451.patch  |  1259 -
1 files changed, 0 insertions, 1259 deletions
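
Note: the deleted file carried upstream Synapse PR #18451, a dependabot bump of ruff from 0.7.3 to 0.11.10; besides the poetry.lock and pyproject.toml pin updates, the bulk of the patch is mechanical reformatting produced by the newer ruff. Two of the recurring rewrites, shown as a minimal runnable Python sketch (the data values are made up; the SQL string and the dict-comprehension pattern are taken from the hunks below):

    # Constant-valued dict comprehensions become dict.fromkeys, e.g. in the
    # synapse/handlers/e2e_keys.py hunks. All keys share one value object,
    # which is safe here because the shared value is not mutated per key.
    user_ids = ["@a:example.org", "@b:example.org"]  # hypothetical data
    failure = {"errcode": "M_NOT_FOUND"}
    assert {u: failure for u in user_ids} == dict.fromkeys(user_ids, failure)

    # Implicitly concatenated adjacent string literals are collapsed into a
    # single literal once it fits on one line, e.g. in synapse_port_db.py.
    old_sql = "SELECT count(*) FROM sent_transactions" " WHERE ts >= ?"
    new_sql = "SELECT count(*) FROM sent_transactions WHERE ts >= ?"
    assert old_sql == new_sql
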
diff --git a/packages/overlays/matrix-synapse/patches/0021-Bump-ruff-from-0.7.3-to-0.11.10-18451.patch b/packages/overlays/matrix-synapse/patches/0021-Bump-ruff-from-0.7.3-to-0.11.10-18451.patch
deleted file mode 100644
index f2c0d5c..0000000
--- a/packages/overlays/matrix-synapse/patches/0021-Bump-ruff-from-0.7.3-to-0.11.10-18451.patch
+++ /dev/null
@@ -1,1259 +0,0 @@
-From 9d43bec3268d9a454fe992f25edfc013a50fb9cc Mon Sep 17 00:00:00 2001
-From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
-Date: Tue, 20 May 2025 15:23:30 +0100
-Subject: [PATCH 21/34] Bump ruff from 0.7.3 to 0.11.10 (#18451)
-
-Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
-Co-authored-by: Andrew Morgan <andrew@amorgan.xyz>
-Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
----
- changelog.d/18451.misc | 1 +
- poetry.lock | 40 +++++++++----------
- pyproject.toml | 2 +-
- synapse/_scripts/synapse_port_db.py | 2 +-
- synapse/_scripts/synctl.py | 6 +--
- synapse/app/generic_worker.py | 3 +-
- synapse/app/homeserver.py | 3 +-
- synapse/config/tls.py | 3 +-
- synapse/event_auth.py | 3 +-
- synapse/handlers/e2e_keys.py | 12 ++----
- synapse/handlers/federation.py | 6 +--
- synapse/handlers/message.py | 22 +++++-----
- synapse/handlers/sso.py | 6 +--
- synapse/http/matrixfederationclient.py | 6 +--
- synapse/http/proxyagent.py | 12 +++---
- synapse/http/servlet.py | 6 +--
- synapse/module_api/__init__.py | 6 +--
- synapse/replication/http/_base.py | 6 +--
- synapse/replication/tcp/streams/events.py | 6 +--
- synapse/rest/admin/__init__.py | 3 +-
- synapse/rest/client/receipts.py | 4 +-
- synapse/rest/client/rendezvous.py | 6 +--
- synapse/rest/client/transactions.py | 6 +--
- synapse/storage/background_updates.py | 12 +++---
- synapse/storage/controllers/persist_events.py | 3 +-
- synapse/storage/databases/main/client_ips.py | 12 +++---
- synapse/storage/databases/main/deviceinbox.py | 6 +--
- synapse/storage/databases/main/devices.py | 2 +-
- synapse/storage/databases/main/events.py | 27 ++++++------
- .../storage/databases/main/events_worker.py | 6 +--
- .../databases/main/monthly_active_users.py | 24 +++++------
- .../storage/databases/main/purge_events.py | 3 +-
- .../storage/databases/main/state_deltas.py | 6 +--
- synapse/storage/databases/main/tags.py | 5 +--
- .../storage/databases/main/user_directory.py | 6 +--
- synapse/storage/databases/state/bg_updates.py | 3 +-
- synapse/storage/schema/main/delta/25/fts.py | 3 +-
- synapse/storage/schema/main/delta/27/ts.py | 3 +-
- .../schema/main/delta/31/search_update.py | 3 +-
- .../schema/main/delta/33/event_fields.py | 3 +-
- synapse/types/__init__.py | 3 +-
- synapse/types/state.py | 2 +-
- synapse/util/iterutils.py | 4 +-
- .../test_federation_out_of_band_membership.py | 18 ++++-----
- tests/handlers/test_user_directory.py | 4 +-
- tests/http/test_matrixfederationclient.py | 8 +---
- tests/media/test_media_storage.py | 4 +-
- tests/replication/tcp/streams/test_events.py | 2 +-
- tests/rest/admin/test_room.py | 2 +-
- tests/rest/admin/test_user.py | 4 +-
- .../sliding_sync/test_rooms_timeline.py | 6 +--
- tests/rest/client/test_media.py | 2 +-
- tests/rest/client/utils.py | 6 +--
- tests/rest/media/test_url_preview.py | 2 +-
- tests/server.py | 6 +--
- tests/storage/test_base.py | 2 +-
- tests/storage/test_devices.py | 6 +--
- tests/storage/test_event_federation.py | 2 +-
- tests/test_state.py | 2 +-
- tests/test_utils/logging_setup.py | 2 +-
- 60 files changed, 178 insertions(+), 206 deletions(-)
- create mode 100644 changelog.d/18451.misc
-
-diff --git a/changelog.d/18451.misc b/changelog.d/18451.misc
-new file mode 100644
-index 0000000000..593e83eb7f
---- /dev/null
-+++ b/changelog.d/18451.misc
-@@ -0,0 +1 @@
-+Bump ruff from 0.7.3 to 0.11.10.
-\ No newline at end of file -diff --git a/poetry.lock b/poetry.lock -index 3c53dfb376..ada0646215 100644 ---- a/poetry.lock -+++ b/poetry.lock -@@ -2440,30 +2440,30 @@ files = [ - - [[package]] - name = "ruff" --version = "0.7.3" -+version = "0.11.10" - description = "An extremely fast Python linter and code formatter, written in Rust." - optional = false - python-versions = ">=3.7" - groups = ["dev"] - files = [ -- {file = "ruff-0.7.3-py3-none-linux_armv6l.whl", hash = "sha256:34f2339dc22687ec7e7002792d1f50712bf84a13d5152e75712ac08be565d344"}, -- {file = "ruff-0.7.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:fb397332a1879b9764a3455a0bb1087bda876c2db8aca3a3cbb67b3dbce8cda0"}, -- {file = "ruff-0.7.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:37d0b619546103274e7f62643d14e1adcbccb242efda4e4bdb9544d7764782e9"}, -- {file = "ruff-0.7.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59f0c3ee4d1a6787614e7135b72e21024875266101142a09a61439cb6e38a5"}, -- {file = "ruff-0.7.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:44eb93c2499a169d49fafd07bc62ac89b1bc800b197e50ff4633aed212569299"}, -- {file = "ruff-0.7.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d0242ce53f3a576c35ee32d907475a8d569944c0407f91d207c8af5be5dae4e"}, -- {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6b6224af8b5e09772c2ecb8dc9f3f344c1aa48201c7f07e7315367f6dd90ac29"}, -- {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c50f95a82b94421c964fae4c27c0242890a20fe67d203d127e84fbb8013855f5"}, -- {file = "ruff-0.7.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f3eff9961b5d2644bcf1616c606e93baa2d6b349e8aa8b035f654df252c8c67"}, -- {file = "ruff-0.7.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8963cab06d130c4df2fd52c84e9f10d297826d2e8169ae0c798b6221be1d1d2"}, -- {file = "ruff-0.7.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:61b46049d6edc0e4317fb14b33bd693245281a3007288b68a3f5b74a22a0746d"}, -- {file = "ruff-0.7.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:10ebce7696afe4644e8c1a23b3cf8c0f2193a310c18387c06e583ae9ef284de2"}, -- {file = "ruff-0.7.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3f36d56326b3aef8eeee150b700e519880d1aab92f471eefdef656fd57492aa2"}, -- {file = "ruff-0.7.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5d024301109a0007b78d57ab0ba190087b43dce852e552734ebf0b0b85e4fb16"}, -- {file = "ruff-0.7.3-py3-none-win32.whl", hash = "sha256:4ba81a5f0c5478aa61674c5a2194de8b02652f17addf8dfc40c8937e6e7d79fc"}, -- {file = "ruff-0.7.3-py3-none-win_amd64.whl", hash = "sha256:588a9ff2fecf01025ed065fe28809cd5a53b43505f48b69a1ac7707b1b7e4088"}, -- {file = "ruff-0.7.3-py3-none-win_arm64.whl", hash = "sha256:1713e2c5545863cdbfe2cbce21f69ffaf37b813bfd1fb3b90dc9a6f1963f5a8c"}, -- {file = "ruff-0.7.3.tar.gz", hash = "sha256:e1d1ba2e40b6e71a61b063354d04be669ab0d39c352461f3d789cac68b54a313"}, -+ {file = "ruff-0.11.10-py3-none-linux_armv6l.whl", hash = "sha256:859a7bfa7bc8888abbea31ef8a2b411714e6a80f0d173c2a82f9041ed6b50f58"}, -+ {file = "ruff-0.11.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:968220a57e09ea5e4fd48ed1c646419961a0570727c7e069842edd018ee8afed"}, -+ {file = "ruff-0.11.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1067245bad978e7aa7b22f67113ecc6eb241dca0d9b696144256c3a879663bca"}, -+ {file = "ruff-0.11.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:f4854fd09c7aed5b1590e996a81aeff0c9ff51378b084eb5a0b9cd9518e6cff2"}, -+ {file = "ruff-0.11.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b4564e9f99168c0f9195a0fd5fa5928004b33b377137f978055e40008a082c5"}, -+ {file = "ruff-0.11.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b6a9cc5b62c03cc1fea0044ed8576379dbaf751d5503d718c973d5418483641"}, -+ {file = "ruff-0.11.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:607ecbb6f03e44c9e0a93aedacb17b4eb4f3563d00e8b474298a201622677947"}, -+ {file = "ruff-0.11.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b3a522fa389402cd2137df9ddefe848f727250535c70dafa840badffb56b7a4"}, -+ {file = "ruff-0.11.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f071b0deed7e9245d5820dac235cbdd4ef99d7b12ff04c330a241ad3534319f"}, -+ {file = "ruff-0.11.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a60e3a0a617eafba1f2e4186d827759d65348fa53708ca547e384db28406a0b"}, -+ {file = "ruff-0.11.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:da8ec977eaa4b7bf75470fb575bea2cb41a0e07c7ea9d5a0a97d13dbca697bf2"}, -+ {file = "ruff-0.11.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ddf8967e08227d1bd95cc0851ef80d2ad9c7c0c5aab1eba31db49cf0a7b99523"}, -+ {file = "ruff-0.11.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5a94acf798a82db188f6f36575d80609072b032105d114b0f98661e1679c9125"}, -+ {file = "ruff-0.11.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3afead355f1d16d95630df28d4ba17fb2cb9c8dfac8d21ced14984121f639bad"}, -+ {file = "ruff-0.11.10-py3-none-win32.whl", hash = "sha256:dc061a98d32a97211af7e7f3fa1d4ca2fcf919fb96c28f39551f35fc55bdbc19"}, -+ {file = "ruff-0.11.10-py3-none-win_amd64.whl", hash = "sha256:5cc725fbb4d25b0f185cb42df07ab6b76c4489b4bfb740a175f3a59c70e8a224"}, -+ {file = "ruff-0.11.10-py3-none-win_arm64.whl", hash = "sha256:ef69637b35fb8b210743926778d0e45e1bffa850a7c61e428c6b971549b5f5d1"}, -+ {file = "ruff-0.11.10.tar.gz", hash = "sha256:d522fb204b4959909ecac47da02830daec102eeb100fb50ea9554818d47a5fa6"}, - ] - - [[package]] -@@ -3394,4 +3394,4 @@ user-search = ["pyicu"] - [metadata] - lock-version = "2.1" - python-versions = "^3.9.0" --content-hash = "d71159b19349fdc0b7cd8e06e8c8778b603fc37b941c6df34ddc31746783d94d" -+content-hash = "522f5bacf5610646876452e0e397038dd5c959692d2ab76214431bff78562d01" -diff --git a/pyproject.toml b/pyproject.toml -index 914a5804aa..6ce05805a9 100644 ---- a/pyproject.toml -+++ b/pyproject.toml -@@ -320,7 +320,7 @@ all = [ - # failing on new releases. Keeping lower bounds loose here means that dependabot - # can bump versions without having to update the content-hash in the lockfile. - # This helps prevents merge conflicts when running a batch of dependabot updates. 
--ruff = "0.7.3" -+ruff = "0.11.10" - # Type checking only works with the pydantic.v1 compat module from pydantic v2 - pydantic = "^2" - -diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py -index 438b2ff8a0..573c70696e 100755 ---- a/synapse/_scripts/synapse_port_db.py -+++ b/synapse/_scripts/synapse_port_db.py -@@ -1065,7 +1065,7 @@ class Porter: - - def get_sent_table_size(txn: LoggingTransaction) -> int: - txn.execute( -- "SELECT count(*) FROM sent_transactions" " WHERE ts >= ?", (yesterday,) -+ "SELECT count(*) FROM sent_transactions WHERE ts >= ?", (yesterday,) - ) - result = txn.fetchone() - assert result is not None -diff --git a/synapse/_scripts/synctl.py b/synapse/_scripts/synctl.py -index 688df9485c..2e2aa27a17 100755 ---- a/synapse/_scripts/synctl.py -+++ b/synapse/_scripts/synctl.py -@@ -292,9 +292,9 @@ def main() -> None: - for key in worker_config: - if key == "worker_app": # But we allow worker_app - continue -- assert not key.startswith( -- "worker_" -- ), "Main process cannot use worker_* config" -+ assert not key.startswith("worker_"), ( -+ "Main process cannot use worker_* config" -+ ) - else: - worker_pidfile = worker_config["worker_pid_file"] - worker_cache_factor = worker_config.get("synctl_cache_factor") -diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py -index f495d5b7e4..75c65ccc0d 100644 ---- a/synapse/app/generic_worker.py -+++ b/synapse/app/generic_worker.py -@@ -287,8 +287,7 @@ class GenericWorkerServer(HomeServer): - elif listener.type == "metrics": - if not self.config.metrics.enable_metrics: - logger.warning( -- "Metrics listener configured, but " -- "enable_metrics is not True!" -+ "Metrics listener configured, but enable_metrics is not True!" - ) - else: - if isinstance(listener, TCPListenerConfig): -diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py -index 6da2194cf7..e027b5eaea 100644 ---- a/synapse/app/homeserver.py -+++ b/synapse/app/homeserver.py -@@ -289,8 +289,7 @@ class SynapseHomeServer(HomeServer): - elif listener.type == "metrics": - if not self.config.metrics.enable_metrics: - logger.warning( -- "Metrics listener configured, but " -- "enable_metrics is not True!" -+ "Metrics listener configured, but enable_metrics is not True!" - ) - else: - if isinstance(listener, TCPListenerConfig): -diff --git a/synapse/config/tls.py b/synapse/config/tls.py -index 51dc15eb61..a48d81fdc3 100644 ---- a/synapse/config/tls.py -+++ b/synapse/config/tls.py -@@ -108,8 +108,7 @@ class TlsConfig(Config): - # Raise an error if this option has been specified without any - # corresponding certificates. 
- raise ConfigError( -- "federation_custom_ca_list specified without " -- "any certificate files" -+ "federation_custom_ca_list specified without any certificate files" - ) - - certs = [] -diff --git a/synapse/event_auth.py b/synapse/event_auth.py -index 5ecf493f98..5999c264dc 100644 ---- a/synapse/event_auth.py -+++ b/synapse/event_auth.py -@@ -986,8 +986,7 @@ def _check_power_levels( - if old_level == user_level: - raise AuthError( - 403, -- "You don't have permission to remove ops level equal " -- "to your own", -+ "You don't have permission to remove ops level equal to your own", - ) - - # Check if the old and new levels are greater than the user level -diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py -index 540995e062..f2b2e30bf4 100644 ---- a/synapse/handlers/e2e_keys.py -+++ b/synapse/handlers/e2e_keys.py -@@ -1163,7 +1163,7 @@ class E2eKeysHandler: - devices = devices[user_id] - except SynapseError as e: - failure = _exception_to_failure(e) -- failures[user_id] = {device: failure for device in signatures.keys()} -+ failures[user_id] = dict.fromkeys(signatures.keys(), failure) - return signature_list, failures - - for device_id, device in signatures.items(): -@@ -1303,7 +1303,7 @@ class E2eKeysHandler: - except SynapseError as e: - failure = _exception_to_failure(e) - for user, devicemap in signatures.items(): -- failures[user] = {device_id: failure for device_id in devicemap.keys()} -+ failures[user] = dict.fromkeys(devicemap.keys(), failure) - return signature_list, failures - - for target_user, devicemap in signatures.items(): -@@ -1344,9 +1344,7 @@ class E2eKeysHandler: - # other devices were signed -- mark those as failures - logger.debug("upload signature: too many devices specified") - failure = _exception_to_failure(NotFoundError("Unknown device")) -- failures[target_user] = { -- device: failure for device in other_devices -- } -+ failures[target_user] = dict.fromkeys(other_devices, failure) - - if user_signing_key_id in master_key.get("signatures", {}).get( - user_id, {} -@@ -1367,9 +1365,7 @@ class E2eKeysHandler: - except SynapseError as e: - failure = _exception_to_failure(e) - if device_id is None: -- failures[target_user] = { -- device_id: failure for device_id in devicemap.keys() -- } -+ failures[target_user] = dict.fromkeys(devicemap.keys(), failure) - else: - failures.setdefault(target_user, {})[device_id] = failure - -diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py -index 17dd4af13e..b1640e3246 100644 ---- a/synapse/handlers/federation.py -+++ b/synapse/handlers/federation.py -@@ -1312,9 +1312,9 @@ class FederationHandler: - if state_key is not None: - # the event was not rejected (get_event raises a NotFoundError for rejected - # events) so the state at the event should include the event itself. -- assert ( -- state_map.get((event.type, state_key)) == event.event_id -- ), "State at event did not include event itself" -+ assert state_map.get((event.type, state_key)) == event.event_id, ( -+ "State at event did not include event itself" -+ ) - - # ... but we need the state *before* that event - if "replaces_state" in event.unsigned: -diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py -index 52c61cfa54..ff6eb5a514 100644 ---- a/synapse/handlers/message.py -+++ b/synapse/handlers/message.py -@@ -143,9 +143,9 @@ class MessageHandler: - elif membership == Membership.LEAVE: - key = (event_type, state_key) - # If the membership is not JOIN, then the event ID should exist. 
-- assert ( -- membership_event_id is not None -- ), "check_user_in_room_or_world_readable returned invalid data" -+ assert membership_event_id is not None, ( -+ "check_user_in_room_or_world_readable returned invalid data" -+ ) - room_state = await self._state_storage_controller.get_state_for_events( - [membership_event_id], StateFilter.from_types([key]) - ) -@@ -242,9 +242,9 @@ class MessageHandler: - room_state = await self.store.get_events(state_ids.values()) - elif membership == Membership.LEAVE: - # If the membership is not JOIN, then the event ID should exist. -- assert ( -- membership_event_id is not None -- ), "check_user_in_room_or_world_readable returned invalid data" -+ assert membership_event_id is not None, ( -+ "check_user_in_room_or_world_readable returned invalid data" -+ ) - room_state_events = ( - await self._state_storage_controller.get_state_for_events( - [membership_event_id], state_filter=state_filter -@@ -1266,12 +1266,14 @@ class EventCreationHandler: - # Allow an event to have empty list of prev_event_ids - # only if it has auth_event_ids. - or auth_event_ids -- ), "Attempting to create a non-m.room.create event with no prev_events or auth_event_ids" -+ ), ( -+ "Attempting to create a non-m.room.create event with no prev_events or auth_event_ids" -+ ) - else: - # we now ought to have some prev_events (unless it's a create event). -- assert ( -- builder.type == EventTypes.Create or prev_event_ids -- ), "Attempting to create a non-m.room.create event with no prev_events" -+ assert builder.type == EventTypes.Create or prev_event_ids, ( -+ "Attempting to create a non-m.room.create event with no prev_events" -+ ) - - if for_batch: - assert prev_event_ids is not None -diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py -index 9c0d665461..07827cf95b 100644 ---- a/synapse/handlers/sso.py -+++ b/synapse/handlers/sso.py -@@ -1192,9 +1192,9 @@ class SsoHandler: - """ - - # It is expected that this is the main process. 
-- assert isinstance( -- self._device_handler, DeviceHandler -- ), "revoking SSO sessions can only be called on the main process" -+ assert isinstance(self._device_handler, DeviceHandler), ( -+ "revoking SSO sessions can only be called on the main process" -+ ) - - # Invalidate any running user-mapping sessions - to_delete = [] -diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py -index f6d2536957..88bf98045c 100644 ---- a/synapse/http/matrixfederationclient.py -+++ b/synapse/http/matrixfederationclient.py -@@ -425,9 +425,9 @@ class MatrixFederationHttpClient: - ) - else: - proxy_authorization_secret = hs.config.worker.worker_replication_secret -- assert ( -- proxy_authorization_secret is not None -- ), "`worker_replication_secret` must be set when using `outbound_federation_restricted_to` (used to authenticate requests across workers)" -+ assert proxy_authorization_secret is not None, ( -+ "`worker_replication_secret` must be set when using `outbound_federation_restricted_to` (used to authenticate requests across workers)" -+ ) - federation_proxy_credentials = BearerProxyCredentials( - proxy_authorization_secret.encode("ascii") - ) -diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py -index fd16ee42dd..6817199035 100644 ---- a/synapse/http/proxyagent.py -+++ b/synapse/http/proxyagent.py -@@ -173,9 +173,9 @@ class ProxyAgent(_AgentBase): - self._federation_proxy_endpoint: Optional[IStreamClientEndpoint] = None - self._federation_proxy_credentials: Optional[ProxyCredentials] = None - if federation_proxy_locations: -- assert ( -- federation_proxy_credentials is not None -- ), "`federation_proxy_credentials` are required when using `federation_proxy_locations`" -+ assert federation_proxy_credentials is not None, ( -+ "`federation_proxy_credentials` are required when using `federation_proxy_locations`" -+ ) - - endpoints: List[IStreamClientEndpoint] = [] - for federation_proxy_location in federation_proxy_locations: -@@ -302,9 +302,9 @@ class ProxyAgent(_AgentBase): - parsed_uri.scheme == b"matrix-federation" - and self._federation_proxy_endpoint - ): -- assert ( -- self._federation_proxy_credentials is not None -- ), "`federation_proxy_credentials` are required when using `federation_proxy_locations`" -+ assert self._federation_proxy_credentials is not None, ( -+ "`federation_proxy_credentials` are required when using `federation_proxy_locations`" -+ ) - - # Set a Proxy-Authorization header - if headers is None: -diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py -index ed6ab08336..47d8bd5eaf 100644 ---- a/synapse/http/servlet.py -+++ b/synapse/http/servlet.py -@@ -582,9 +582,9 @@ def parse_enum( - is not one of those allowed values. - """ - # Assert the enum values are strings. 
-- assert all( -- isinstance(e.value, str) for e in E -- ), "parse_enum only works with string values" -+ assert all(isinstance(e.value, str) for e in E), ( -+ "parse_enum only works with string values" -+ ) - str_value = parse_string( - request, - name, -diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py -index bf9532e891..7834da759c 100644 ---- a/synapse/module_api/__init__.py -+++ b/synapse/module_api/__init__.py -@@ -894,9 +894,9 @@ class ModuleApi: - Raises: - synapse.api.errors.AuthError: the access token is invalid - """ -- assert isinstance( -- self._device_handler, DeviceHandler -- ), "invalidate_access_token can only be called on the main process" -+ assert isinstance(self._device_handler, DeviceHandler), ( -+ "invalidate_access_token can only be called on the main process" -+ ) - - # see if the access token corresponds to a device - user_info = yield defer.ensureDeferred( -diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py -index 9aa8d90bfe..0002538680 100644 ---- a/synapse/replication/http/_base.py -+++ b/synapse/replication/http/_base.py -@@ -128,9 +128,9 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta): - - # We reserve `instance_name` as a parameter to sending requests, so we - # assert here that sub classes don't try and use the name. -- assert ( -- "instance_name" not in self.PATH_ARGS -- ), "`instance_name` is a reserved parameter name" -+ assert "instance_name" not in self.PATH_ARGS, ( -+ "`instance_name` is a reserved parameter name" -+ ) - assert ( - "instance_name" - not in signature(self.__class__._serialize_payload).parameters -diff --git a/synapse/replication/tcp/streams/events.py b/synapse/replication/tcp/streams/events.py -index ea0803dfc2..05b55fb033 100644 ---- a/synapse/replication/tcp/streams/events.py -+++ b/synapse/replication/tcp/streams/events.py -@@ -200,9 +200,9 @@ class EventsStream(_StreamFromIdGen): - - # we rely on get_all_new_forward_event_rows strictly honouring the limit, so - # that we know it is safe to just take upper_limit = event_rows[-1][0]. -- assert ( -- len(event_rows) <= target_row_count -- ), "get_all_new_forward_event_rows did not honour row limit" -+ assert len(event_rows) <= target_row_count, ( -+ "get_all_new_forward_event_rows did not honour row limit" -+ ) - - # if we hit the limit on event_updates, there's no point in going beyond the - # last stream_id in the batch for the other sources. 
-diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py -index b1335fed66..e55cdc0470 100644 ---- a/synapse/rest/admin/__init__.py -+++ b/synapse/rest/admin/__init__.py -@@ -207,8 +207,7 @@ class PurgeHistoryRestServlet(RestServlet): - (stream, topo, _event_id) = r - token = "t%d-%d" % (topo, stream) - logger.info( -- "[purge] purging up to token %s (received_ts %i => " -- "stream_ordering %i)", -+ "[purge] purging up to token %s (received_ts %i => stream_ordering %i)", - token, - ts, - stream_ordering, -diff --git a/synapse/rest/client/receipts.py b/synapse/rest/client/receipts.py -index 89203dc45a..4bf93f485c 100644 ---- a/synapse/rest/client/receipts.py -+++ b/synapse/rest/client/receipts.py -@@ -39,9 +39,7 @@ logger = logging.getLogger(__name__) - - class ReceiptRestServlet(RestServlet): - PATTERNS = client_patterns( -- "/rooms/(?P<room_id>[^/]*)" -- "/receipt/(?P<receipt_type>[^/]*)" -- "/(?P<event_id>[^/]*)$" -+ "/rooms/(?P<room_id>[^/]*)/receipt/(?P<receipt_type>[^/]*)/(?P<event_id>[^/]*)$" - ) - CATEGORY = "Receipts requests" - -diff --git a/synapse/rest/client/rendezvous.py b/synapse/rest/client/rendezvous.py -index 02f166b4ea..a1808847f0 100644 ---- a/synapse/rest/client/rendezvous.py -+++ b/synapse/rest/client/rendezvous.py -@@ -44,9 +44,9 @@ class MSC4108DelegationRendezvousServlet(RestServlet): - redirection_target: Optional[str] = ( - hs.config.experimental.msc4108_delegation_endpoint - ) -- assert ( -- redirection_target is not None -- ), "Servlet is only registered if there is a delegation target" -+ assert redirection_target is not None, ( -+ "Servlet is only registered if there is a delegation target" -+ ) - self.endpoint = redirection_target.encode("utf-8") - - async def on_POST(self, request: SynapseRequest) -> None: -diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py -index f791904168..1a57996aec 100644 ---- a/synapse/rest/client/transactions.py -+++ b/synapse/rest/client/transactions.py -@@ -94,9 +94,9 @@ class HttpTransactionCache: - # (appservice and guest users), but does not cover access tokens minted - # by the admin API. Use the access token ID instead. - else: -- assert ( -- requester.access_token_id is not None -- ), "Requester must have an access_token_id" -+ assert requester.access_token_id is not None, ( -+ "Requester must have an access_token_id" -+ ) - return (path, "user_admin", requester.access_token_id) - - def fetch_or_execute_request( -diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py -index a02b4cc9ce..d170bbddaa 100644 ---- a/synapse/storage/background_updates.py -+++ b/synapse/storage/background_updates.py -@@ -739,9 +739,9 @@ class BackgroundUpdater: - c.execute(sql) - - async def updater(progress: JsonDict, batch_size: int) -> int: -- assert isinstance( -- self.db_pool.engine, engines.PostgresEngine -- ), "validate constraint background update registered for non-Postres database" -+ assert isinstance(self.db_pool.engine, engines.PostgresEngine), ( -+ "validate constraint background update registered for non-Postres database" -+ ) - - logger.info("Validating constraint %s to %s", constraint_name, table) - await self.db_pool.runWithConnection(runner) -@@ -900,9 +900,9 @@ class BackgroundUpdater: - on the table. Used to iterate over the table. 
- """ - -- assert isinstance( -- self.db_pool.engine, engines.PostgresEngine -- ), "validate constraint background update registered for non-Postres database" -+ assert isinstance(self.db_pool.engine, engines.PostgresEngine), ( -+ "validate constraint background update registered for non-Postres database" -+ ) - - async def updater(progress: JsonDict, batch_size: int) -> int: - return await self.validate_constraint_and_delete_in_background( -diff --git a/synapse/storage/controllers/persist_events.py b/synapse/storage/controllers/persist_events.py -index 7963905479..f5131fe291 100644 ---- a/synapse/storage/controllers/persist_events.py -+++ b/synapse/storage/controllers/persist_events.py -@@ -870,8 +870,7 @@ class EventsPersistenceStorageController: - # This should only happen for outlier events. - if not ev.internal_metadata.is_outlier(): - raise Exception( -- "Context for new event %s has no state " -- "group" % (ev.event_id,) -+ "Context for new event %s has no state group" % (ev.event_id,) - ) - continue - if ctx.state_group_deltas: -diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py -index e8c322ab5c..69008804bd 100644 ---- a/synapse/storage/databases/main/client_ips.py -+++ b/synapse/storage/databases/main/client_ips.py -@@ -650,9 +650,9 @@ class ClientIpWorkerStore(ClientIpBackgroundUpdateStore, MonthlyActiveUsersWorke - - @wrap_as_background_process("update_client_ips") - async def _update_client_ips_batch(self) -> None: -- assert ( -- self._update_on_this_worker -- ), "This worker is not designated to update client IPs" -+ assert self._update_on_this_worker, ( -+ "This worker is not designated to update client IPs" -+ ) - - # If the DB pool has already terminated, don't try updating - if not self.db_pool.is_running(): -@@ -671,9 +671,9 @@ class ClientIpWorkerStore(ClientIpBackgroundUpdateStore, MonthlyActiveUsersWorke - txn: LoggingTransaction, - to_update: Mapping[Tuple[str, str, str], Tuple[str, Optional[str], int]], - ) -> None: -- assert ( -- self._update_on_this_worker -- ), "This worker is not designated to update client IPs" -+ assert self._update_on_this_worker, ( -+ "This worker is not designated to update client IPs" -+ ) - - # Keys and values for the `user_ips` upsert. 
- user_ips_keys = [] -diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py -index 0612b82b9b..d47833655d 100644 ---- a/synapse/storage/databases/main/deviceinbox.py -+++ b/synapse/storage/databases/main/deviceinbox.py -@@ -200,9 +200,9 @@ class DeviceInboxWorkerStore(SQLBaseStore): - to_stream_id=to_stream_id, - ) - -- assert ( -- last_processed_stream_id == to_stream_id -- ), "Expected _get_device_messages to process all to-device messages up to `to_stream_id`" -+ assert last_processed_stream_id == to_stream_id, ( -+ "Expected _get_device_messages to process all to-device messages up to `to_stream_id`" -+ ) - - return user_id_device_id_to_messages - -diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py -index 3f0b2f5d84..6191f22cd6 100644 ---- a/synapse/storage/databases/main/devices.py -+++ b/synapse/storage/databases/main/devices.py -@@ -1092,7 +1092,7 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore): - ), - ) - -- results: Dict[str, Optional[str]] = {user_id: None for user_id in user_ids} -+ results: Dict[str, Optional[str]] = dict.fromkeys(user_ids) - results.update(rows) - - return results -diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py -index 26fbc1a483..b7cc0433e7 100644 ---- a/synapse/storage/databases/main/events.py -+++ b/synapse/storage/databases/main/events.py -@@ -246,9 +246,9 @@ class PersistEventsStore: - self.is_mine_id = hs.is_mine_id - - # This should only exist on instances that are configured to write -- assert ( -- hs.get_instance_name() in hs.config.worker.writers.events -- ), "Can only instantiate EventsStore on master" -+ assert hs.get_instance_name() in hs.config.worker.writers.events, ( -+ "Can only instantiate EventsStore on master" -+ ) - - # Since we have been configured to write, we ought to have id generators, - # rather than id trackers. 
-@@ -465,9 +465,9 @@ class PersistEventsStore: - missing_membership_event_ids - ) - # There shouldn't be any missing events -- assert ( -- remaining_events.keys() == missing_membership_event_ids -- ), missing_membership_event_ids.difference(remaining_events.keys()) -+ assert remaining_events.keys() == missing_membership_event_ids, ( -+ missing_membership_event_ids.difference(remaining_events.keys()) -+ ) - membership_event_map.update(remaining_events) - - for ( -@@ -534,9 +534,9 @@ class PersistEventsStore: - missing_state_event_ids - ) - # There shouldn't be any missing events -- assert ( -- remaining_events.keys() == missing_state_event_ids -- ), missing_state_event_ids.difference(remaining_events.keys()) -+ assert remaining_events.keys() == missing_state_event_ids, ( -+ missing_state_event_ids.difference(remaining_events.keys()) -+ ) - for event in remaining_events.values(): - current_state_map[(event.type, event.state_key)] = event - -@@ -644,9 +644,9 @@ class PersistEventsStore: - if missing_event_ids: - remaining_events = await self.store.get_events(missing_event_ids) - # There shouldn't be any missing events -- assert ( -- remaining_events.keys() == missing_event_ids -- ), missing_event_ids.difference(remaining_events.keys()) -+ assert remaining_events.keys() == missing_event_ids, ( -+ missing_event_ids.difference(remaining_events.keys()) -+ ) - for event in remaining_events.values(): - current_state_map[(event.type, event.state_key)] = event - -@@ -3448,8 +3448,7 @@ class PersistEventsStore: - # Delete all these events that we've already fetched and now know that their - # prev events are the new backwards extremeties. - query = ( -- "DELETE FROM event_backward_extremities" -- " WHERE event_id = ? AND room_id = ?" -+ "DELETE FROM event_backward_extremities WHERE event_id = ? AND room_id = ?" - ) - backward_extremity_tuples_to_remove = [ - (ev.event_id, ev.room_id) -diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py -index 82b2ad4408..3db4460f57 100644 ---- a/synapse/storage/databases/main/events_worker.py -+++ b/synapse/storage/databases/main/events_worker.py -@@ -824,9 +824,9 @@ class EventsWorkerStore(SQLBaseStore): - - if missing_events_ids: - -- async def get_missing_events_from_cache_or_db() -> ( -- Dict[str, EventCacheEntry] -- ): -+ async def get_missing_events_from_cache_or_db() -> Dict[ -+ str, EventCacheEntry -+ ]: - """Fetches the events in `missing_event_ids` from the database. - - Also creates entries in `self._current_event_fetches` to allow -diff --git a/synapse/storage/databases/main/monthly_active_users.py b/synapse/storage/databases/main/monthly_active_users.py -index 8e948c5e8d..659ee13d71 100644 ---- a/synapse/storage/databases/main/monthly_active_users.py -+++ b/synapse/storage/databases/main/monthly_active_users.py -@@ -304,9 +304,9 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore): - txn: - threepids: List of threepid dicts to reserve - """ -- assert ( -- self._update_on_this_worker -- ), "This worker is not designated to update MAUs" -+ assert self._update_on_this_worker, ( -+ "This worker is not designated to update MAUs" -+ ) - - # XXX what is this function trying to achieve? It upserts into - # monthly_active_users for each *registered* reserved mau user, but why? 
-@@ -340,9 +340,9 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore): - Args: - user_id: user to add/update - """ -- assert ( -- self._update_on_this_worker -- ), "This worker is not designated to update MAUs" -+ assert self._update_on_this_worker, ( -+ "This worker is not designated to update MAUs" -+ ) - - # Support user never to be included in MAU stats. Note I can't easily call this - # from upsert_monthly_active_user_txn because then I need a _txn form of -@@ -379,9 +379,9 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore): - txn: - user_id: user to add/update - """ -- assert ( -- self._update_on_this_worker -- ), "This worker is not designated to update MAUs" -+ assert self._update_on_this_worker, ( -+ "This worker is not designated to update MAUs" -+ ) - - # Am consciously deciding to lock the table on the basis that is ought - # never be a big table and alternative approaches (batching multiple -@@ -409,9 +409,9 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore): - Args: - user_id: the user_id to query - """ -- assert ( -- self._update_on_this_worker -- ), "This worker is not designated to update MAUs" -+ assert self._update_on_this_worker, ( -+ "This worker is not designated to update MAUs" -+ ) - - if self._limit_usage_by_mau or self._mau_stats_only: - # Trial users and guests should not be included as part of MAU group -diff --git a/synapse/storage/databases/main/purge_events.py b/synapse/storage/databases/main/purge_events.py -index ebdeb8fbd7..a11f522f03 100644 ---- a/synapse/storage/databases/main/purge_events.py -+++ b/synapse/storage/databases/main/purge_events.py -@@ -199,8 +199,7 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore): - - # Update backward extremeties - txn.execute_batch( -- "INSERT INTO event_backward_extremities (room_id, event_id)" -- " VALUES (?, ?)", -+ "INSERT INTO event_backward_extremities (room_id, event_id) VALUES (?, ?)", - [(room_id, event_id) for (event_id,) in new_backwards_extrems], - ) - -diff --git a/synapse/storage/databases/main/state_deltas.py b/synapse/storage/databases/main/state_deltas.py -index b90f667da8..00f87cc3a1 100644 ---- a/synapse/storage/databases/main/state_deltas.py -+++ b/synapse/storage/databases/main/state_deltas.py -@@ -98,9 +98,9 @@ class StateDeltasStore(SQLBaseStore): - prev_stream_id = int(prev_stream_id) - - # check we're not going backwards -- assert ( -- prev_stream_id <= max_stream_id -- ), f"New stream id {max_stream_id} is smaller than prev stream id {prev_stream_id}" -+ assert prev_stream_id <= max_stream_id, ( -+ f"New stream id {max_stream_id} is smaller than prev stream id {prev_stream_id}" -+ ) - - if not self._curr_state_delta_stream_cache.has_any_entity_changed( - prev_stream_id -diff --git a/synapse/storage/databases/main/tags.py b/synapse/storage/databases/main/tags.py -index 44f395f315..97b190bccc 100644 ---- a/synapse/storage/databases/main/tags.py -+++ b/synapse/storage/databases/main/tags.py -@@ -274,10 +274,7 @@ class TagsWorkerStore(AccountDataWorkerStore): - assert isinstance(self._account_data_id_gen, AbstractStreamIdGenerator) - - def remove_tag_txn(txn: LoggingTransaction, next_id: int) -> None: -- sql = ( -- "DELETE FROM room_tags " -- " WHERE user_id = ? AND room_id = ? AND tag = ?" -- ) -+ sql = "DELETE FROM room_tags WHERE user_id = ? AND room_id = ? AND tag = ?" 
- txn.execute(sql, (user_id, room_id, tag)) - self._update_revision_txn(txn, user_id, room_id, next_id) - -diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py -index 391f0dd638..2b867cdb6e 100644 ---- a/synapse/storage/databases/main/user_directory.py -+++ b/synapse/storage/databases/main/user_directory.py -@@ -582,9 +582,9 @@ class UserDirectoryBackgroundUpdateStore(StateDeltasStore): - retry_counter: number of failures in refreshing the profile so far. Used for - exponential backoff calculations. - """ -- assert not self.hs.is_mine_id( -- user_id -- ), "Can't mark a local user as a stale remote user." -+ assert not self.hs.is_mine_id(user_id), ( -+ "Can't mark a local user as a stale remote user." -+ ) - - server_name = UserID.from_string(user_id).domain - -diff --git a/synapse/storage/databases/state/bg_updates.py b/synapse/storage/databases/state/bg_updates.py -index 95fd0ae73a..5b594fe8dd 100644 ---- a/synapse/storage/databases/state/bg_updates.py -+++ b/synapse/storage/databases/state/bg_updates.py -@@ -396,8 +396,7 @@ class StateBackgroundUpdateStore(StateGroupBackgroundUpdateStore): - return True, count - - txn.execute( -- "SELECT state_group FROM state_group_edges" -- " WHERE state_group = ?", -+ "SELECT state_group FROM state_group_edges WHERE state_group = ?", - (state_group,), - ) - -diff --git a/synapse/storage/schema/main/delta/25/fts.py b/synapse/storage/schema/main/delta/25/fts.py -index b050cc16a7..c01c1325cb 100644 ---- a/synapse/storage/schema/main/delta/25/fts.py -+++ b/synapse/storage/schema/main/delta/25/fts.py -@@ -75,8 +75,7 @@ def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> - progress_json = json.dumps(progress) - - sql = ( -- "INSERT into background_updates (update_name, progress_json)" -- " VALUES (?, ?)" -+ "INSERT into background_updates (update_name, progress_json) VALUES (?, ?)" - ) - - cur.execute(sql, ("event_search", progress_json)) -diff --git a/synapse/storage/schema/main/delta/27/ts.py b/synapse/storage/schema/main/delta/27/ts.py -index d7f360b6e6..e6e73e1b77 100644 ---- a/synapse/storage/schema/main/delta/27/ts.py -+++ b/synapse/storage/schema/main/delta/27/ts.py -@@ -55,8 +55,7 @@ def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> - progress_json = json.dumps(progress) - - sql = ( -- "INSERT into background_updates (update_name, progress_json)" -- " VALUES (?, ?)" -+ "INSERT into background_updates (update_name, progress_json) VALUES (?, ?)" - ) - - cur.execute(sql, ("event_origin_server_ts", progress_json)) -diff --git a/synapse/storage/schema/main/delta/31/search_update.py b/synapse/storage/schema/main/delta/31/search_update.py -index 0e65c9a841..46355122bb 100644 ---- a/synapse/storage/schema/main/delta/31/search_update.py -+++ b/synapse/storage/schema/main/delta/31/search_update.py -@@ -59,8 +59,7 @@ def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> - progress_json = json.dumps(progress) - - sql = ( -- "INSERT into background_updates (update_name, progress_json)" -- " VALUES (?, ?)" -+ "INSERT into background_updates (update_name, progress_json) VALUES (?, ?)" - ) - - cur.execute(sql, ("event_search_order", progress_json)) -diff --git a/synapse/storage/schema/main/delta/33/event_fields.py b/synapse/storage/schema/main/delta/33/event_fields.py -index 9c02aeda88..53d215337e 100644 ---- a/synapse/storage/schema/main/delta/33/event_fields.py -+++ b/synapse/storage/schema/main/delta/33/event_fields.py 
-@@ -55,8 +55,7 @@ def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> - progress_json = json.dumps(progress) - - sql = ( -- "INSERT into background_updates (update_name, progress_json)" -- " VALUES (?, ?)" -+ "INSERT into background_updates (update_name, progress_json) VALUES (?, ?)" - ) - - cur.execute(sql, ("event_fields_sender_url", progress_json)) -diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py -index e9cdd19868..5549f3c9f8 100644 ---- a/synapse/types/__init__.py -+++ b/synapse/types/__init__.py -@@ -889,8 +889,7 @@ class MultiWriterStreamToken(AbstractMultiWriterStreamToken): - def __str__(self) -> str: - instances = ", ".join(f"{k}: {v}" for k, v in sorted(self.instance_map.items())) - return ( -- f"MultiWriterStreamToken(stream: {self.stream}, " -- f"instances: {{{instances}}})" -+ f"MultiWriterStreamToken(stream: {self.stream}, instances: {{{instances}}})" - ) - - -diff --git a/synapse/types/state.py b/synapse/types/state.py -index e641215f18..6420e050a5 100644 ---- a/synapse/types/state.py -+++ b/synapse/types/state.py -@@ -462,7 +462,7 @@ class StateFilter: - new_types.update({state_type: set() for state_type in minus_wildcards}) - - # insert the plus wildcards -- new_types.update({state_type: None for state_type in plus_wildcards}) -+ new_types.update(dict.fromkeys(plus_wildcards)) - - # insert the specific state keys - for state_type, state_key in plus_state_keys: -diff --git a/synapse/util/iterutils.py b/synapse/util/iterutils.py -index ff6adeb716..0a6a30aab2 100644 ---- a/synapse/util/iterutils.py -+++ b/synapse/util/iterutils.py -@@ -114,7 +114,7 @@ def sorted_topologically( - - # This is implemented by Kahn's algorithm. - -- degree_map = {node: 0 for node in nodes} -+ degree_map = dict.fromkeys(nodes, 0) - reverse_graph: Dict[T, Set[T]] = {} - - for node, edges in graph.items(): -@@ -164,7 +164,7 @@ def sorted_topologically_batched( - persisted. 
- """ - -- degree_map = {node: 0 for node in nodes} -+ degree_map = dict.fromkeys(nodes, 0) - reverse_graph: Dict[T, Set[T]] = {} - - for node, edges in graph.items(): -diff --git a/tests/federation/test_federation_out_of_band_membership.py b/tests/federation/test_federation_out_of_band_membership.py -index a4a266cf06..f77b8fe300 100644 ---- a/tests/federation/test_federation_out_of_band_membership.py -+++ b/tests/federation/test_federation_out_of_band_membership.py -@@ -65,20 +65,20 @@ def required_state_json_to_state_map(required_state: Any) -> StateMap[EventBase] - if isinstance(required_state, list): - for state_event_dict in required_state: - # Yell because we're in a test and this is unexpected -- assert isinstance( -- state_event_dict, dict -- ), "`required_state` should be a list of event dicts" -+ assert isinstance(state_event_dict, dict), ( -+ "`required_state` should be a list of event dicts" -+ ) - - event_type = state_event_dict["type"] - event_state_key = state_event_dict["state_key"] - - # Yell because we're in a test and this is unexpected -- assert isinstance( -- event_type, str -- ), "Each event in `required_state` should have a string `type`" -- assert isinstance( -- event_state_key, str -- ), "Each event in `required_state` should have a string `state_key`" -+ assert isinstance(event_type, str), ( -+ "Each event in `required_state` should have a string `type`" -+ ) -+ assert isinstance(event_state_key, str), ( -+ "Each event in `required_state` should have a string `state_key`" -+ ) - - state_map[(event_type, event_state_key)] = make_event_from_dict( - state_event_dict -diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py -index a9e9d7d7ea..b12ffc3665 100644 ---- a/tests/handlers/test_user_directory.py -+++ b/tests/handlers/test_user_directory.py -@@ -1178,10 +1178,10 @@ class UserDirectoryTestCase(unittest.HomeserverTestCase): - for use_numeric in [False, True]: - if use_numeric: - prefix1 = f"{i}" -- prefix2 = f"{i+1}" -+ prefix2 = f"{i + 1}" - else: - prefix1 = f"a{i}" -- prefix2 = f"a{i+1}" -+ prefix2 = f"a{i + 1}" - - local_user_1 = self.register_user(f"user{char}{prefix1}", "password") - local_user_2 = self.register_user(f"user{char}{prefix2}", "password") -diff --git a/tests/http/test_matrixfederationclient.py b/tests/http/test_matrixfederationclient.py -index e34df54e13..d5ebf10eac 100644 ---- a/tests/http/test_matrixfederationclient.py -+++ b/tests/http/test_matrixfederationclient.py -@@ -436,8 +436,7 @@ class FederationClientTests(HomeserverTestCase): - - # Send it the HTTP response - client.dataReceived( -- b"HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n" -- b"Server: Fake\r\n\r\n" -+ b"HTTP/1.1 200 OK\r\nContent-Type: application/json\r\nServer: Fake\r\n\r\n" - ) - - # Push by enough to time it out -@@ -691,10 +690,7 @@ class FederationClientTests(HomeserverTestCase): - - # Send it a huge HTTP response - protocol.dataReceived( -- b"HTTP/1.1 200 OK\r\n" -- b"Server: Fake\r\n" -- b"Content-Type: application/json\r\n" -- b"\r\n" -+ b"HTTP/1.1 200 OK\r\nServer: Fake\r\nContent-Type: application/json\r\n\r\n" - ) - - self.pump() -diff --git a/tests/media/test_media_storage.py b/tests/media/test_media_storage.py -index 35e16a99ba..31dc32d67e 100644 ---- a/tests/media/test_media_storage.py -+++ b/tests/media/test_media_storage.py -@@ -250,9 +250,7 @@ small_cmyk_jpeg = TestImage( - ) - - small_lossless_webp = TestImage( -- unhexlify( -- b"524946461a000000574542505650384c0d0000002f0000001007" b"1011118888fe0700" -- ), -+ 
unhexlify(b"524946461a000000574542505650384c0d0000002f00000010071011118888fe0700"), - b"image/webp", - b".webp", - ) -diff --git a/tests/replication/tcp/streams/test_events.py b/tests/replication/tcp/streams/test_events.py -index fdc74efb5a..2a0189a4e1 100644 ---- a/tests/replication/tcp/streams/test_events.py -+++ b/tests/replication/tcp/streams/test_events.py -@@ -324,7 +324,7 @@ class EventsStreamTestCase(BaseStreamTestCase): - pls = self.helper.get_state( - self.room_id, EventTypes.PowerLevels, tok=self.user_tok - ) -- pls["users"].update({u: 50 for u in user_ids}) -+ pls["users"].update(dict.fromkeys(user_ids, 50)) - self.helper.send_state( - self.room_id, - EventTypes.PowerLevels, -diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py -index 1d44106bd7..165d175ab2 100644 ---- a/tests/rest/admin/test_room.py -+++ b/tests/rest/admin/test_room.py -@@ -1312,7 +1312,7 @@ class RoomTestCase(unittest.HomeserverTestCase): - # Check that response json body contains a "rooms" key - self.assertTrue( - "rooms" in channel.json_body, -- msg="Response body does not " "contain a 'rooms' key", -+ msg="Response body does not contain a 'rooms' key", - ) - - # Check that 3 rooms were returned -diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py -index 874c29c935..f09f66da00 100644 ---- a/tests/rest/admin/test_user.py -+++ b/tests/rest/admin/test_user.py -@@ -3901,9 +3901,7 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase): - image_data1 = SMALL_PNG - # Resolution: 1×1, MIME type: image/gif, Extension: gif, Size: 35 B - image_data2 = unhexlify( -- b"47494638376101000100800100000000" -- b"ffffff2c00000000010001000002024c" -- b"01003b" -+ b"47494638376101000100800100000000ffffff2c00000000010001000002024c01003b" - ) - # Resolution: 1×1, MIME type: image/bmp, Extension: bmp, Size: 54 B - image_data3 = unhexlify( -diff --git a/tests/rest/client/sliding_sync/test_rooms_timeline.py b/tests/rest/client/sliding_sync/test_rooms_timeline.py -index 2293994793..535420209b 100644 ---- a/tests/rest/client/sliding_sync/test_rooms_timeline.py -+++ b/tests/rest/client/sliding_sync/test_rooms_timeline.py -@@ -309,8 +309,8 @@ class SlidingSyncRoomsTimelineTestCase(SlidingSyncBase): - self.assertEqual( - response_body["rooms"][room_id1]["limited"], - False, -- f'Our `timeline_limit` was {sync_body["lists"]["foo-list"]["timeline_limit"]} ' -- + f'and {len(response_body["rooms"][room_id1]["timeline"])} events were returned in the timeline. ' -+ f"Our `timeline_limit` was {sync_body['lists']['foo-list']['timeline_limit']} " -+ + f"and {len(response_body['rooms'][room_id1]['timeline'])} events were returned in the timeline. " - + str(response_body["rooms"][room_id1]), - ) - # Check to make sure the latest events are returned -@@ -387,7 +387,7 @@ class SlidingSyncRoomsTimelineTestCase(SlidingSyncBase): - response_body["rooms"][room_id1]["limited"], - True, - f"Our `timeline_limit` was {timeline_limit} " -- + f'and {len(response_body["rooms"][room_id1]["timeline"])} events were returned in the timeline. ' -+ + f"and {len(response_body['rooms'][room_id1]['timeline'])} events were returned in the timeline. 
" - + str(response_body["rooms"][room_id1]), - ) - # Check to make sure that the "live" and historical events are returned -diff --git a/tests/rest/client/test_media.py b/tests/rest/client/test_media.py -index 1ea2a5c884..9ad8ecf1cd 100644 ---- a/tests/rest/client/test_media.py -+++ b/tests/rest/client/test_media.py -@@ -1006,7 +1006,7 @@ class URLPreviewTests(unittest.HomeserverTestCase): - data = base64.b64encode(SMALL_PNG) - - end_content = ( -- b"<html><head>" b'<img src="data:image/png;base64,%s" />' b"</head></html>" -+ b'<html><head><img src="data:image/png;base64,%s" /></head></html>' - ) % (data,) - - channel = self.make_request( -diff --git a/tests/rest/client/utils.py b/tests/rest/client/utils.py -index 53f1782d59..280486da08 100644 ---- a/tests/rest/client/utils.py -+++ b/tests/rest/client/utils.py -@@ -716,9 +716,9 @@ class RestHelper: - "/login", - content={"type": "m.login.token", "token": login_token}, - ) -- assert ( -- channel.code == expected_status -- ), f"unexpected status in response: {channel.code}" -+ assert channel.code == expected_status, ( -+ f"unexpected status in response: {channel.code}" -+ ) - return channel.json_body - - def auth_via_oidc( -diff --git a/tests/rest/media/test_url_preview.py b/tests/rest/media/test_url_preview.py -index 103d7662d9..2a7bee19f9 100644 ---- a/tests/rest/media/test_url_preview.py -+++ b/tests/rest/media/test_url_preview.py -@@ -878,7 +878,7 @@ class URLPreviewTests(unittest.HomeserverTestCase): - data = base64.b64encode(SMALL_PNG) - - end_content = ( -- b"<html><head>" b'<img src="data:image/png;base64,%s" />' b"</head></html>" -+ b'<html><head><img src="data:image/png;base64,%s" /></head></html>' - ) % (data,) - - channel = self.make_request( -diff --git a/tests/server.py b/tests/server.py -index 84ed9f68eb..f01708b77f 100644 ---- a/tests/server.py -+++ b/tests/server.py -@@ -225,9 +225,9 @@ class FakeChannel: - new_headers.addRawHeader(k, v) - headers = new_headers - -- assert isinstance( -- headers, Headers -- ), f"headers are of the wrong type: {headers!r}" -+ assert isinstance(headers, Headers), ( -+ f"headers are of the wrong type: {headers!r}" -+ ) - - self.result["headers"] = headers - -diff --git a/tests/storage/test_base.py b/tests/storage/test_base.py -index 9420d03841..11313fc933 100644 ---- a/tests/storage/test_base.py -+++ b/tests/storage/test_base.py -@@ -349,7 +349,7 @@ class SQLBaseStoreTestCase(unittest.TestCase): - ) - - self.mock_txn.execute.assert_called_once_with( -- "UPDATE tablename SET colC = ?, colD = ? WHERE" " colA = ? AND colB = ?", -+ "UPDATE tablename SET colC = ?, colD = ? WHERE colA = ? AND colB = ?", - [3, 4, 1, 2], - ) - -diff --git a/tests/storage/test_devices.py b/tests/storage/test_devices.py -index ba01b038ab..74edca7523 100644 ---- a/tests/storage/test_devices.py -+++ b/tests/storage/test_devices.py -@@ -211,9 +211,9 @@ class DeviceStoreTestCase(HomeserverTestCase): - even if that means leaving an earlier batch one EDU short of the limit. 
- """ - -- assert self.hs.is_mine_id( -- "@user_id:test" -- ), "Test not valid: this MXID should be considered local" -+ assert self.hs.is_mine_id("@user_id:test"), ( -+ "Test not valid: this MXID should be considered local" -+ ) - - self.get_success( - self.store.set_e2e_cross_signing_key( -diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py -index 088f0d24f9..0500c68e9d 100644 ---- a/tests/storage/test_event_federation.py -+++ b/tests/storage/test_event_federation.py -@@ -114,7 +114,7 @@ def get_all_topologically_sorted_orders( - # This is implemented by Kahn's algorithm, and forking execution each time - # we have a choice over which node to consider next. - -- degree_map = {node: 0 for node in nodes} -+ degree_map = dict.fromkeys(nodes, 0) - reverse_graph: Dict[T, Set[T]] = {} - - for node, edges in graph.items(): -diff --git a/tests/test_state.py b/tests/test_state.py -index dce56fe78a..adb72b0730 100644 ---- a/tests/test_state.py -+++ b/tests/test_state.py -@@ -149,7 +149,7 @@ class _DummyStore: - async def get_partial_state_events( - self, event_ids: Collection[str] - ) -> Dict[str, bool]: -- return {e: False for e in event_ids} -+ return dict.fromkeys(event_ids, False) - - async def get_state_group_delta( - self, name: str -diff --git a/tests/test_utils/logging_setup.py b/tests/test_utils/logging_setup.py -index dd40c338d6..d58222a9f6 100644 ---- a/tests/test_utils/logging_setup.py -+++ b/tests/test_utils/logging_setup.py -@@ -48,7 +48,7 @@ def setup_logging() -> None: - - # We exclude `%(asctime)s` from this format because the Twisted logger adds its own - # timestamp -- log_format = "%(name)s - %(lineno)d - " "%(levelname)s - %(request)s - %(message)s" -+ log_format = "%(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s" - - handler = ToTwistedHandler() - formatter = logging.Formatter(log_format) --- -2.49.0 -
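
The most frequent rewrite in the deleted patch is ruff's newer assert layout: rather than parenthesizing the condition and splitting it across lines, the condition stays on one line and the message is parenthesized. A minimal Python sketch (the values are made up; the condition and message mirror the state_deltas.py hunk above):

    prev_stream_id, max_stream_id = 3, 7  # hypothetical values

    # ruff 0.7.3 layout: the condition is wrapped and the message trails it.
    assert (
        prev_stream_id <= max_stream_id
    ), f"New stream id {max_stream_id} is smaller than prev stream id {prev_stream_id}"

    # ruff 0.11.10 layout: one-line condition, parenthesized message.
    assert prev_stream_id <= max_stream_id, (
        f"New stream id {max_stream_id} is smaller than prev stream id {prev_stream_id}"
    )

Both forms are equivalent at runtime; only the formatting differs, which is why the patch could be applied, and later dropped, without any behavioural change.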