-rw-r--r--  .github/workflows/docs-pr-netlify.yaml  2
-rw-r--r--  Cargo.lock  44
-rw-r--r--  changelog.d/16840.misc  1
-rw-r--r--  changelog.d/16892.doc  1
-rw-r--r--  changelog.d/16919.misc  1
-rw-r--r--  changelog.d/16929.misc  2
-rw-r--r--  changelog.d/16949.bugfix  1
-rw-r--r--  changelog.d/16965.doc  1
-rw-r--r--  changelog.d/16966.doc  1
-rw-r--r--  changelog.d/16986.misc  1
-rwxr-xr-x  docker/prefix-log  7
-rw-r--r--  docs/upgrade.md  6
-rw-r--r--  docs/usage/administration/useful_sql_for_admins.md  9
-rw-r--r--  docs/website_files/version-picker.js  26
-rw-r--r--  poetry.lock  218
-rw-r--r--  pyproject.toml  4
-rwxr-xr-x  synapse/_scripts/synapse_port_db.py  22
-rw-r--r--  synapse/api/constants.py  1
-rw-r--r--  synapse/api/room_versions.py  8
-rw-r--r--  synapse/app/homeserver.py  6
-rw-r--r--  synapse/appservice/api.py  18
-rw-r--r--  synapse/config/registration.py  6
-rw-r--r--  synapse/config/repository.py  6
-rw-r--r--  synapse/event_auth.py  3
-rw-r--r--  synapse/events/__init__.py  12
-rw-r--r--  synapse/events/utils.py  6
-rw-r--r--  synapse/federation/federation_server.py  6
-rw-r--r--  synapse/federation/send_queue.py  6
-rw-r--r--  synapse/handlers/account.py  8
-rw-r--r--  synapse/handlers/auth.py  4
-rw-r--r--  synapse/handlers/directory.py  6
-rw-r--r--  synapse/handlers/federation.py  10
-rw-r--r--  synapse/handlers/federation_event.py  6
-rw-r--r--  synapse/handlers/message.py  6
-rw-r--r--  synapse/handlers/presence.py  12
-rw-r--r--  synapse/handlers/profile.py  6
-rw-r--r--  synapse/handlers/relations.py  14
-rw-r--r--  synapse/handlers/room.py  16
-rw-r--r--  synapse/handlers/room_member.py  10
-rw-r--r--  synapse/handlers/sso.py  2
-rw-r--r--  synapse/handlers/sync.py  398
-rw-r--r--  synapse/handlers/worker_lock.py  15
-rw-r--r--  synapse/http/matrixfederationclient.py  18
-rw-r--r--  synapse/http/servlet.py  91
-rw-r--r--  synapse/logging/context.py  12
-rw-r--r--  synapse/logging/opentracing.py  6
-rw-r--r--  synapse/media/media_repository.py  6
-rw-r--r--  synapse/metrics/jemalloc.py  6
-rw-r--r--  synapse/module_api/callbacks/spamchecker_callbacks.py  2
-rw-r--r--  synapse/notifier.py  9
-rw-r--r--  synapse/push/mailer.py  14
-rw-r--r--  synapse/replication/http/_base.py  6
-rw-r--r--  synapse/replication/tcp/external_cache.py  6
-rw-r--r--  synapse/rest/admin/__init__.py  10
-rw-r--r--  synapse/rest/admin/users.py  14
-rw-r--r--  synapse/rest/client/account_data.py  6
-rw-r--r--  synapse/rest/client/sync.py  18
-rw-r--r--  synapse/rest/key/v2/remote_key_resource.py  8
-rw-r--r--  synapse/state/__init__.py  18
-rw-r--r--  synapse/state/v2.py  15
-rw-r--r--  synapse/storage/background_updates.py  6
-rw-r--r--  synapse/storage/controllers/persist_events.py  6
-rw-r--r--  synapse/storage/database.py  30
-rw-r--r--  synapse/storage/databases/main/devices.py  6
-rw-r--r--  synapse/storage/databases/main/end_to_end_keys.py  9
-rw-r--r--  synapse/storage/databases/main/events.py  6
-rw-r--r--  synapse/storage/databases/main/events_worker.py  36
-rw-r--r--  synapse/storage/databases/main/lock.py  6
-rw-r--r--  synapse/storage/databases/main/media_repository.py  6
-rw-r--r--  synapse/storage/databases/main/receipts.py  12
-rw-r--r--  synapse/storage/databases/main/state.py  6
-rw-r--r--  synapse/storage/databases/main/stream.py  18
-rw-r--r--  synapse/storage/databases/main/task_scheduler.py  12
-rw-r--r--  synapse/storage/databases/main/user_directory.py  8
-rw-r--r--  synapse/storage/databases/state/store.py  10
-rw-r--r--  synapse/storage/engines/_base.py  21
-rw-r--r--  synapse/storage/types.py  74
-rw-r--r--  synapse/streams/events.py  9
-rw-r--r--  synapse/synapse_rust/events.pyi  13
-rw-r--r--  synapse/types/__init__.py  14
-rw-r--r--  synapse/util/async_helpers.py  36
-rw-r--r--  synapse/util/caches/dictionary_cache.py  2
-rw-r--r--  synapse/util/caches/expiringcache.py  8
-rw-r--r--  synapse/util/caches/lrucache.py  18
-rw-r--r--  synapse/util/iterutils.py  3
-rw-r--r--  synapse/util/ratelimitutils.py  6
-rw-r--r--  synapse/visibility.py  6
-rw-r--r--  tests/handlers/test_worker_lock.py  23
-rw-r--r--  tests/replication/_base.py  6
-rw-r--r--  tests/rest/client/test_filter.py  2
-rw-r--r--  tests/rest/client/test_rooms.py  12
-rw-r--r--  tests/rest/client/utils.py  6
-rw-r--r--  tests/storage/test_cleanup_extrems.py  18
-rw-r--r--  tests/storage/test_room_search.py  16
-rw-r--r--  tests/unittest.py  3
-rw-r--r--  tests/util/test_linearizer.py  3
-rw-r--r--  tests/utils.py  48
97 files changed, 915 insertions, 812 deletions
diff --git a/.github/workflows/docs-pr-netlify.yaml b/.github/workflows/docs-pr-netlify.yaml
index c42077d3af..3fdf170f20 100644
--- a/.github/workflows/docs-pr-netlify.yaml
+++ b/.github/workflows/docs-pr-netlify.yaml
@@ -14,7 +14,7 @@ jobs:
       # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
       # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
       - name: 📥 Download artifact
-        uses: dawidd6/action-download-artifact@72aaadce3bc708349fc665eee3785cbb1b6e51d0 # v3.1.1
+        uses: dawidd6/action-download-artifact@71072fbb1229e1317f1a8de6b04206afb461bd67 # v3.1.2
         with:
           workflow: docs-pr.yaml
           run_id: ${{ github.event.workflow_run.id }}
diff --git a/Cargo.lock b/Cargo.lock
index d41a498fc7..d2ddf5ab6b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -138,9 +138,9 @@ dependencies = [
 
 [[package]]
 name = "log"
-version = "0.4.20"
+version = "0.4.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
+checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
 
 [[package]]
 name = "memchr"
@@ -187,6 +187,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "portable-atomic"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
+
+[[package]]
 name = "proc-macro2"
 version = "1.0.76"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -197,9 +203,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3"
-version = "0.20.2"
+version = "0.20.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a89dc7a5850d0e983be1ec2a463a171d20990487c3cfcd68b5363f1ee3d6fe0"
+checksum = "53bdbb96d49157e65d45cc287af5f32ffadd5f4761438b527b055fb0d4bb8233"
 dependencies = [
  "anyhow",
  "cfg-if",
@@ -207,6 +213,7 @@ dependencies = [
  "libc",
  "memoffset",
  "parking_lot",
+ "portable-atomic",
  "pyo3-build-config",
  "pyo3-ffi",
  "pyo3-macros",
@@ -215,9 +222,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-build-config"
-version = "0.20.2"
+version = "0.20.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07426f0d8fe5a601f26293f300afd1a7b1ed5e78b2a705870c5f30893c5163be"
+checksum = "deaa5745de3f5231ce10517a1f5dd97d53e5a2fd77aa6b5842292085831d48d7"
 dependencies = [
  "once_cell",
  "target-lexicon",
@@ -225,9 +232,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-ffi"
-version = "0.20.2"
+version = "0.20.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dbb7dec17e17766b46bca4f1a4215a85006b4c2ecde122076c562dd058da6cf1"
+checksum = "62b42531d03e08d4ef1f6e85a2ed422eb678b8cd62b762e53891c05faf0d4afa"
 dependencies = [
  "libc",
  "pyo3-build-config",
@@ -246,9 +253,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-macros"
-version = "0.20.2"
+version = "0.20.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05f738b4e40d50b5711957f142878cfa0f28e054aa0ebdfc3fd137a843f74ed3"
+checksum = "7305c720fa01b8055ec95e484a6eca7a83c841267f0dd5280f0c8b8551d2c158"
 dependencies = [
  "proc-macro2",
  "pyo3-macros-backend",
@@ -258,12 +265,13 @@ dependencies = [
 
 [[package]]
 name = "pyo3-macros-backend"
-version = "0.20.2"
+version = "0.20.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fc910d4851847827daf9d6cdd4a823fbdaab5b8818325c5e97a86da79e8881f"
+checksum = "7c7e9b68bb9c3149c5b0cade5d07f953d6d125eb4337723c4ccdb665f1f96185"
 dependencies = [
  "heck",
  "proc-macro2",
+ "pyo3-build-config",
  "quote",
  "syn",
 ]
@@ -339,18 +347,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
 
 [[package]]
 name = "serde"
-version = "1.0.196"
+version = "1.0.197"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32"
+checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.196"
+version = "1.0.197"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67"
+checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -359,9 +367,9 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.113"
+version = "1.0.114"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79"
+checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0"
 dependencies = [
  "itoa",
  "ryu",
diff --git a/changelog.d/16840.misc b/changelog.d/16840.misc
new file mode 100644
index 0000000000..1175e6de71
--- /dev/null
+++ b/changelog.d/16840.misc
@@ -0,0 +1 @@
+Improve lock performance when many lock requests are all waiting for a single lock to be released.
diff --git a/changelog.d/16892.doc b/changelog.d/16892.doc
new file mode 100644
index 0000000000..dd82b49112
--- /dev/null
+++ b/changelog.d/16892.doc
@@ -0,0 +1 @@
+Add a query to force a refresh of a remote user's device list to the "Useful SQL for Admins" documentation page.
\ No newline at end of file
diff --git a/changelog.d/16919.misc b/changelog.d/16919.misc
new file mode 100644
index 0000000000..2c76f25379
--- /dev/null
+++ b/changelog.d/16919.misc
@@ -0,0 +1 @@
+Multi-worker-docker-container: disable log buffering.
diff --git a/changelog.d/16929.misc b/changelog.d/16929.misc
new file mode 100644
index 0000000000..9489784e4a
--- /dev/null
+++ b/changelog.d/16929.misc
@@ -0,0 +1,2 @@
+Refactor state delta calculation in `/sync` handler.
+
diff --git a/changelog.d/16949.bugfix b/changelog.d/16949.bugfix
new file mode 100644
index 0000000000..99ed435d75
--- /dev/null
+++ b/changelog.d/16949.bugfix
@@ -0,0 +1 @@
+Fix various long-standing bugs which could cause incorrect state to be returned from `/sync` in certain situations.
diff --git a/changelog.d/16965.doc b/changelog.d/16965.doc
new file mode 100644
index 0000000000..36f8093298
--- /dev/null
+++ b/changelog.d/16965.doc
@@ -0,0 +1 @@
+Minor grammatical corrections to the upgrade documentation.
diff --git a/changelog.d/16966.doc b/changelog.d/16966.doc
new file mode 100644
index 0000000000..06f4093aee
--- /dev/null
+++ b/changelog.d/16966.doc
@@ -0,0 +1 @@
+Fix the sort order for the documentation version picker, so that newer releases appear above older ones.
diff --git a/changelog.d/16986.misc b/changelog.d/16986.misc
new file mode 100644
index 0000000000..0a556ba8b4
--- /dev/null
+++ b/changelog.d/16986.misc
@@ -0,0 +1 @@
+Raise poetry-core version cap to 1.9.0.
diff --git a/docker/prefix-log b/docker/prefix-log
index 0e26a4f19d..32dddbbfd4 100755
--- a/docker/prefix-log
+++ b/docker/prefix-log
@@ -7,6 +7,9 @@
 #   prefix-log command [args...]
 #
 
-exec 1> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&1)
-exec 2> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&2)
+# '-W interactive' is a `mawk` extension which disables buffering on stdout and sets line-buffered reads on
+# stdin. The effect is that the output is flushed after each line, rather than being batched, which helps reduce
+# confusion due to interleaving of the output of the different processes.
+exec 1> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&1)
+exec 2> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&2)
 exec "$@"
diff --git a/docs/upgrade.md b/docs/upgrade.md
index 640fed3ae3..e7247676d1 100644
--- a/docs/upgrade.md
+++ b/docs/upgrade.md
@@ -88,11 +88,11 @@ process, for example:
     dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
     ```
 
-Generally Synapse database schemas are compatible across multiple versions, once
-a version of Synapse is deployed you may not be able to rollback automatically.
+Generally Synapse database schemas are compatible across multiple versions, but once
+a version of Synapse is deployed you may not be able to roll back automatically.
 The following table gives the version ranges and the earliest version they can
 be rolled back to. E.g. Synapse versions v1.58.0 through v1.61.1 can be rolled
-back safely to v1.57.0, but starting with v1.62.0 it is only safe to rollback to
+back safely to v1.57.0, but starting with v1.62.0 it is only safe to roll back to
 v1.61.0.
 
 <!-- REPLACE_WITH_SCHEMA_VERSIONS -->
diff --git a/docs/usage/administration/useful_sql_for_admins.md b/docs/usage/administration/useful_sql_for_admins.md
index 9f2cc9b957..41755cd3b6 100644
--- a/docs/usage/administration/useful_sql_for_admins.md
+++ b/docs/usage/administration/useful_sql_for_admins.md
@@ -205,3 +205,12 @@ SELECT user_id, device_id, user_agent, TO_TIMESTAMP(last_seen / 1000) AS "last_s
   FROM devices
   WHERE last_seen < DATE_PART('epoch', NOW() - INTERVAL '3 month') * 1000;
 ```
+
+## Clear the cache of a remote user's device list
+
+Forces the resync of a remote user's device list - useful if you have somehow cached a bad state, and the remote server
+will not send out a device list update.
+```sql
+INSERT INTO device_lists_remote_resync
+VALUES ('USER_ID', (EXTRACT(epoch FROM NOW()) * 1000)::BIGINT);
+```
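For illustration, the same resync could be triggered from Python with psycopg2 (a sketch only; the connection string and user ID are placeholders, and this assumes the PostgreSQL schema used by the query above):

```python
import time

import psycopg2

with psycopg2.connect("dbname=synapse") as conn, conn.cursor() as cur:
    # Mark the remote user's cached device list as needing a resync; the
    # second column is the current time in milliseconds.
    cur.execute(
        "INSERT INTO device_lists_remote_resync VALUES (%s, %s)",
        ("@remote_user:example.org", int(time.time() * 1000)),
    )
```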
diff --git a/docs/website_files/version-picker.js b/docs/website_files/version-picker.js
index b6f35f29c7..3174b5d0bc 100644
--- a/docs/website_files/version-picker.js
+++ b/docs/website_files/version-picker.js
@@ -100,10 +100,30 @@ function sortVersions(a, b) {
     if (a === 'develop' || a === 'latest') return -1;
     if (b === 'develop' || b === 'latest') return 1;
 
-    const versionA = (a.match(/v\d+(\.\d+)+/) || [])[0];
-    const versionB = (b.match(/v\d+(\.\d+)+/) || [])[0];
+    // If any of the versions do not conform to a semantic version string, they
+    // will be sorted behind a valid version.
+    const versionA = (a.match(/v(\d+(\.\d+)+)/) || [])[1]?.split('.') ?? '';
+    const versionB = (b.match(/v(\d+(\.\d+)+)/) || [])[1]?.split('.') ?? '';
 
-    return versionB.localeCompare(versionA);
+    for (let i = 0; i < Math.max(versionA.length, versionB.length); i++) {
+        if (versionB[i] === undefined) {
+            return -1;
+        }
+        if (versionA[i] === undefined) {
+            return 1;
+        }
+
+        const partA = parseInt(versionA[i], 10);
+        const partB = parseInt(versionB[i], 10);
+
+        if (partA > partB) {
+            return -1;
+        } else if (partB > partA) {
+            return 1;
+        }
+    }
+
+    return 0;
 }
 
 /**
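The old `versionB.localeCompare(versionA)` compared the raw strings, so a release like v1.100.0 would sort below v1.99.0. A rough Python sketch of the new part-by-part numeric comparison (helper names are illustrative):

```python
import re
from functools import cmp_to_key
from typing import List

def _parts(label: str) -> List[int]:
    m = re.search(r"v(\d+(?:\.\d+)+)", label)
    return [int(p) for p in m.group(1).split(".")] if m else []

def compare_versions(a: str, b: str) -> int:
    va, vb = _parts(a), _parts(b)
    for i in range(max(len(va), len(vb))):
        if i >= len(vb):  # b ran out of components: a is the longer label
            return -1
        if i >= len(va):
            return 1
        if va[i] != vb[i]:  # numeric comparison, newest first
            return -1 if va[i] > vb[i] else 1
    return 0  # equal, or both non-conforming

print(sorted(["v1.9", "v1.99.0", "v1.100.0"], key=cmp_to_key(compare_versions)))
# ['v1.100.0', 'v1.99.0', 'v1.9']
```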
diff --git a/poetry.lock b/poetry.lock
index 9257d2ccfa..b0ec52a2f0 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -169,29 +169,33 @@ lxml = ["lxml"]
 
 [[package]]
 name = "black"
-version = "23.10.1"
+version = "24.2.0"
 description = "The uncompromising code formatter."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"},
-    {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"},
-    {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"},
-    {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"},
-    {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"},
-    {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"},
-    {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"},
-    {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"},
-    {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"},
-    {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"},
-    {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"},
-    {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"},
-    {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"},
-    {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"},
-    {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"},
-    {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"},
-    {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"},
-    {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"},
+    {file = "black-24.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6981eae48b3b33399c8757036c7f5d48a535b962a7c2310d19361edeef64ce29"},
+    {file = "black-24.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d533d5e3259720fdbc1b37444491b024003e012c5173f7d06825a77508085430"},
+    {file = "black-24.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61a0391772490ddfb8a693c067df1ef5227257e72b0e4108482b8d41b5aee13f"},
+    {file = "black-24.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:992e451b04667116680cb88f63449267c13e1ad134f30087dec8527242e9862a"},
+    {file = "black-24.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:163baf4ef40e6897a2a9b83890e59141cc8c2a98f2dda5080dc15c00ee1e62cd"},
+    {file = "black-24.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e37c99f89929af50ffaf912454b3e3b47fd64109659026b678c091a4cd450fb2"},
+    {file = "black-24.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9de21bafcba9683853f6c96c2d515e364aee631b178eaa5145fc1c61a3cc92"},
+    {file = "black-24.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:9db528bccb9e8e20c08e716b3b09c6bdd64da0dd129b11e160bf082d4642ac23"},
+    {file = "black-24.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d84f29eb3ee44859052073b7636533ec995bd0f64e2fb43aeceefc70090e752b"},
+    {file = "black-24.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e08fb9a15c914b81dd734ddd7fb10513016e5ce7e6704bdd5e1251ceee51ac9"},
+    {file = "black-24.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810d445ae6069ce64030c78ff6127cd9cd178a9ac3361435708b907d8a04c693"},
+    {file = "black-24.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ba15742a13de85e9b8f3239c8f807723991fbfae24bad92d34a2b12e81904982"},
+    {file = "black-24.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e53a8c630f71db01b28cd9602a1ada68c937cbf2c333e6ed041390d6968faf4"},
+    {file = "black-24.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93601c2deb321b4bad8f95df408e3fb3943d85012dddb6121336b8e24a0d1218"},
+    {file = "black-24.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0057f800de6acc4407fe75bb147b0c2b5cbb7c3ed110d3e5999cd01184d53b0"},
+    {file = "black-24.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:faf2ee02e6612577ba0181f4347bcbcf591eb122f7841ae5ba233d12c39dcb4d"},
+    {file = "black-24.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:057c3dc602eaa6fdc451069bd027a1b2635028b575a6c3acfd63193ced20d9c8"},
+    {file = "black-24.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08654d0797e65f2423f850fc8e16a0ce50925f9337fb4a4a176a7aa4026e63f8"},
+    {file = "black-24.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca610d29415ee1a30a3f30fab7a8f4144e9d34c89a235d81292a1edb2b55f540"},
+    {file = "black-24.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:4dd76e9468d5536abd40ffbc7a247f83b2324f0c050556d9c371c2b9a9a95e31"},
+    {file = "black-24.2.0-py3-none-any.whl", hash = "sha256:e8a6ae970537e67830776488bca52000eaa37fa63b9988e8c487458d9cd5ace6"},
+    {file = "black-24.2.0.tar.gz", hash = "sha256:bce4f25c27c3435e4dace4815bcb2008b87e167e3bf4ee47ccdc5ce906eb4894"},
 ]
 
 [package.dependencies]
@@ -205,7 +209,7 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
 
 [package.extras]
 colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.7.4)"]
+d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
 jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
 uvloop = ["uvloop (>=0.15.2)"]
 
@@ -461,47 +465,56 @@ files = [
 
 [[package]]
 name = "cryptography"
-version = "41.0.7"
+version = "42.0.5"
 description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"},
-    {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"},
-    {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"},
-    {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"},
-    {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"},
-    {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"},
-    {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"},
-    {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"},
-    {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"},
-    {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"},
-    {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"},
-    {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"},
-    {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"},
-    {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"},
-    {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"},
-    {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"},
-    {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"},
-    {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"},
-    {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"},
-    {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"},
-    {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"},
-    {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"},
-    {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"},
-]
-
-[package.dependencies]
-cffi = ">=1.12"
+    {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"},
+    {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"},
+    {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"},
+    {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"},
+    {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"},
+    {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"},
+    {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"},
+    {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"},
+    {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"},
+    {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"},
+    {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"},
+    {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"},
+    {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"},
+    {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"},
+    {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"},
+    {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"},
+    {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"},
+    {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"},
+    {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"},
+    {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"},
+    {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"},
+    {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"},
+    {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"},
+    {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"},
+    {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"},
+    {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"},
+    {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"},
+    {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"},
+    {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"},
+    {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"},
+    {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"},
+    {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
 
 [package.extras]
 docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
-docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
+docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"]
 nox = ["nox"]
-pep8test = ["black", "check-sdist", "mypy", "ruff"]
+pep8test = ["check-sdist", "click", "mypy", "ruff"]
 sdist = ["build"]
 ssh = ["bcrypt (>=3.1.5)"]
-test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
+test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
 test-randomorder = ["pytest-randomly"]
 
 [[package]]
@@ -1459,38 +1472,38 @@ files = [
 
 [[package]]
 name = "mypy"
-version = "1.5.1"
+version = "1.8.0"
 description = "Optional static typing for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"},
-    {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"},
-    {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"},
-    {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"},
-    {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"},
-    {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"},
-    {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"},
-    {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"},
-    {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"},
-    {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"},
-    {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"},
-    {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"},
-    {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"},
-    {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"},
-    {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"},
-    {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"},
-    {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"},
-    {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"},
-    {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"},
-    {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"},
-    {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"},
-    {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"},
-    {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"},
-    {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"},
-    {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"},
-    {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"},
-    {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"},
+    {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"},
+    {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"},
+    {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"},
+    {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"},
+    {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"},
+    {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"},
+    {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"},
+    {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"},
+    {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"},
+    {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"},
+    {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"},
+    {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"},
+    {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"},
+    {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"},
+    {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"},
+    {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"},
+    {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"},
+    {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"},
+    {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"},
+    {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"},
+    {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"},
+    {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"},
+    {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"},
+    {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"},
+    {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"},
+    {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"},
+    {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"},
 ]
 
 [package.dependencies]
@@ -1501,6 +1514,7 @@ typing-extensions = ">=4.1.0"
 [package.extras]
 dmypy = ["psutil (>=4.0)"]
 install-types = ["pip"]
+mypyc = ["setuptools (>=50)"]
 reports = ["lxml"]
 
 [[package]]
@@ -2427,28 +2441,28 @@ files = [
 
 [[package]]
 name = "ruff"
-version = "0.1.14"
+version = "0.3.2"
 description = "An extremely fast Python linter and code formatter, written in Rust."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:96f76536df9b26622755c12ed8680f159817be2f725c17ed9305b472a757cdbb"},
-    {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab3f71f64498c7241123bb5a768544cf42821d2a537f894b22457a543d3ca7a9"},
-    {file = "ruff-0.1.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7060156ecc572b8f984fd20fd8b0fcb692dd5d837b7606e968334ab7ff0090ab"},
-    {file = "ruff-0.1.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a53d8e35313d7b67eb3db15a66c08434809107659226a90dcd7acb2afa55faea"},
-    {file = "ruff-0.1.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bea9be712b8f5b4ebed40e1949379cfb2a7d907f42921cf9ab3aae07e6fba9eb"},
-    {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2270504d629a0b064247983cbc495bed277f372fb9eaba41e5cf51f7ba705a6a"},
-    {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80258bb3b8909b1700610dfabef7876423eed1bc930fe177c71c414921898efa"},
-    {file = "ruff-0.1.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:653230dd00aaf449eb5ff25d10a6e03bc3006813e2cb99799e568f55482e5cae"},
-    {file = "ruff-0.1.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b3acc6c4e6928459ba9eb7459dd4f0c4bf266a053c863d72a44c33246bfdbf"},
-    {file = "ruff-0.1.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6b3dadc9522d0eccc060699a9816e8127b27addbb4697fc0c08611e4e6aeb8b5"},
-    {file = "ruff-0.1.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1c8eca1a47b4150dc0fbec7fe68fc91c695aed798532a18dbb1424e61e9b721f"},
-    {file = "ruff-0.1.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:62ce2ae46303ee896fc6811f63d6dabf8d9c389da0f3e3f2bce8bc7f15ef5488"},
-    {file = "ruff-0.1.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b2027dde79d217b211d725fc833e8965dc90a16d0d3213f1298f97465956661b"},
-    {file = "ruff-0.1.14-py3-none-win32.whl", hash = "sha256:722bafc299145575a63bbd6b5069cb643eaa62546a5b6398f82b3e4403329cab"},
-    {file = "ruff-0.1.14-py3-none-win_amd64.whl", hash = "sha256:e3d241aa61f92b0805a7082bd89a9990826448e4d0398f0e2bc8f05c75c63d99"},
-    {file = "ruff-0.1.14-py3-none-win_arm64.whl", hash = "sha256:269302b31ade4cde6cf6f9dd58ea593773a37ed3f7b97e793c8594b262466b67"},
-    {file = "ruff-0.1.14.tar.gz", hash = "sha256:ad3f8088b2dfd884820289a06ab718cde7d38b94972212cc4ba90d5fbc9955f3"},
+    {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77f2612752e25f730da7421ca5e3147b213dca4f9a0f7e0b534e9562c5441f01"},
+    {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9966b964b2dd1107797be9ca7195002b874424d1d5472097701ae8f43eadef5d"},
+    {file = "ruff-0.3.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b83d17ff166aa0659d1e1deaf9f2f14cbe387293a906de09bc4860717eb2e2da"},
+    {file = "ruff-0.3.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb875c6cc87b3703aeda85f01c9aebdce3d217aeaca3c2e52e38077383f7268a"},
+    {file = "ruff-0.3.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be75e468a6a86426430373d81c041b7605137a28f7014a72d2fc749e47f572aa"},
+    {file = "ruff-0.3.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:967978ac2d4506255e2f52afe70dda023fc602b283e97685c8447d036863a302"},
+    {file = "ruff-0.3.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1231eacd4510f73222940727ac927bc5d07667a86b0cbe822024dd00343e77e9"},
+    {file = "ruff-0.3.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c6d613b19e9a8021be2ee1d0e27710208d1603b56f47203d0abbde906929a9b"},
+    {file = "ruff-0.3.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8439338a6303585d27b66b4626cbde89bb3e50fa3cae86ce52c1db7449330a7"},
+    {file = "ruff-0.3.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:de8b480d8379620cbb5ea466a9e53bb467d2fb07c7eca54a4aa8576483c35d36"},
+    {file = "ruff-0.3.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b74c3de9103bd35df2bb05d8b2899bf2dbe4efda6474ea9681280648ec4d237d"},
+    {file = "ruff-0.3.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f380be9fc15a99765c9cf316b40b9da1f6ad2ab9639e551703e581a5e6da6745"},
+    {file = "ruff-0.3.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0ac06a3759c3ab9ef86bbeca665d31ad3aa9a4b1c17684aadb7e61c10baa0df4"},
+    {file = "ruff-0.3.2-py3-none-win32.whl", hash = "sha256:9bd640a8f7dd07a0b6901fcebccedadeb1a705a50350fb86b4003b805c81385a"},
+    {file = "ruff-0.3.2-py3-none-win_amd64.whl", hash = "sha256:0c1bdd9920cab5707c26c8b3bf33a064a4ca7842d91a99ec0634fec68f9f4037"},
+    {file = "ruff-0.3.2-py3-none-win_arm64.whl", hash = "sha256:5f65103b1d76e0d600cabd577b04179ff592064eaa451a70a81085930e907d0b"},
+    {file = "ruff-0.3.2.tar.gz", hash = "sha256:fa78ec9418eb1ca3db392811df3376b46471ae93792a81af2d1cbb0e5dcb5142"},
 ]
 
 [[package]]
@@ -3103,13 +3117,13 @@ files = [
 
 [[package]]
 name = "types-psycopg2"
-version = "2.9.21.16"
+version = "2.9.21.20240311"
 description = "Typing stubs for psycopg2"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "types-psycopg2-2.9.21.16.tar.gz", hash = "sha256:44a3ae748173bb637cff31654d6bd12de9ad0c7ad73afe737df6152830ed82ed"},
-    {file = "types_psycopg2-2.9.21.16-py3-none-any.whl", hash = "sha256:e2f24b651239ccfda320ab3457099af035cf37962c36c9fa26a4dc65991aebed"},
+    {file = "types-psycopg2-2.9.21.20240311.tar.gz", hash = "sha256:722945dffa6a729bebc660f14137f37edfcead5a2c15eb234212a7d017ee8072"},
+    {file = "types_psycopg2-2.9.21.20240311-py3-none-any.whl", hash = "sha256:2e137ae2b516ee0dbaab6f555086b6cfb723ba4389d67f551b0336adf4efcf1b"},
 ]
 
 [[package]]
@@ -3434,4 +3448,4 @@ user-search = ["pyicu"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.8.0"
-content-hash = "e4ca55af1dcb6b28b8064b7551008fd16f6cdfa9cb9bf90d18c6b47766b56ae6"
+content-hash = "b510fa05f4ea33194bec079f5d04ebb3f9ffbb5c1ea96a0341d57ba770ef81e6"
diff --git a/pyproject.toml b/pyproject.toml
index d0b119fab1..3bd5736755 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -321,7 +321,7 @@ all = [
 # This helps prevent merge conflicts when running a batch of dependabot updates.
 isort = ">=5.10.1"
 black = ">=22.7.0"
-ruff = "0.1.14"
+ruff = "0.3.2"
 # Type checking only works with the pydantic.v1 compat module from pydantic v2
 pydantic = "^2"
 
@@ -382,7 +382,7 @@ furo = ">=2022.12.7,<2025.0.0"
 # runtime errors caused by build system changes.
 # We are happy to raise these upper bounds upon request,
 # provided we check that it's safe to do so (i.e. that CI passes).
-requires = ["poetry-core>=1.1.0,<=1.8.1", "setuptools_rust>=1.3,<=1.8.1"]
+requires = ["poetry-core>=1.1.0,<=1.9.0", "setuptools_rust>=1.3,<=1.8.1"]
 build-backend = "poetry.core.masonry.api"
 
 
diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py
index 1dcc289df3..a533cad5ae 100755
--- a/synapse/_scripts/synapse_port_db.py
+++ b/synapse/_scripts/synapse_port_db.py
@@ -1040,10 +1040,10 @@ class Porter:
         return done, remaining + done
 
     async def _setup_state_group_id_seq(self) -> None:
-        curr_id: Optional[
-            int
-        ] = await self.sqlite_store.db_pool.simple_select_one_onecol(
-            table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
+        curr_id: Optional[int] = (
+            await self.sqlite_store.db_pool.simple_select_one_onecol(
+                table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
+            )
         )
 
         if not curr_id:
@@ -1132,13 +1132,13 @@ class Porter:
         )
 
     async def _setup_auth_chain_sequence(self) -> None:
-        curr_chain_id: Optional[
-            int
-        ] = await self.sqlite_store.db_pool.simple_select_one_onecol(
-            table="event_auth_chains",
-            keyvalues={},
-            retcol="MAX(chain_id)",
-            allow_none=True,
+        curr_chain_id: Optional[int] = (
+            await self.sqlite_store.db_pool.simple_select_one_onecol(
+                table="event_auth_chains",
+                keyvalues={},
+                retcol="MAX(chain_id)",
+                allow_none=True,
+            )
         )
 
         def r(txn: LoggingTransaction) -> None:
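Much of the Python churn below looks like mechanical reformatting from the black 23.10 → 24.2 upgrade recorded in poetry.lock above: black 24's stable style parenthesizes the long right-hand side of an annotated assignment instead of splitting the subscripted annotation, and collapses `...`-only stub bodies onto the `def` line. An illustrative before/after (not code from this commit):

```python
# black 23.x split the annotation:
#     curr_id: Optional[
#         int
#     ] = await store.simple_select_one_onecol(...)
#
# black 24.x wraps the value instead:
#     curr_id: Optional[int] = (
#         await store.simple_select_one_onecol(...)
#     )
```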
diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index f3d2c8073d..d25aff98ff 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -43,7 +43,6 @@ MAIN_TIMELINE: Final = "main"
 
 
 class Membership:
-
     """Represents the membership states of a user in a room."""
 
     INVITE: Final = "invite"
diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py
index 7ff8ad2d55..fbc1d58ecb 100644
--- a/synapse/api/room_versions.py
+++ b/synapse/api/room_versions.py
@@ -370,9 +370,11 @@ class RoomVersionCapability:
 
 MSC3244_CAPABILITIES = {
     cap.identifier: {
-        "preferred": cap.preferred_version.identifier
-        if cap.preferred_version is not None
-        else None,
+        "preferred": (
+            cap.preferred_version.identifier
+            if cap.preferred_version is not None
+            else None
+        ),
         "support": [
             v.identifier
             for v in KNOWN_ROOM_VERSIONS.values()
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index b241dbf627..8a545a86c1 100644
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -188,9 +188,9 @@ class SynapseHomeServer(HomeServer):
                     PasswordResetSubmitTokenResource,
                 )
 
-                resources[
-                    "/_synapse/client/password_reset/email/submit_token"
-                ] = PasswordResetSubmitTokenResource(self)
+                resources["/_synapse/client/password_reset/email/submit_token"] = (
+                    PasswordResetSubmitTokenResource(self)
+                )
 
         if name == "consent":
             from synapse.rest.consent.consent_resource import ConsentResource
diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py
index 34fa2bb655..19322471dc 100644
--- a/synapse/appservice/api.py
+++ b/synapse/appservice/api.py
@@ -362,16 +362,16 @@ class ApplicationServiceApi(SimpleHttpClient):
         # TODO: Update to stable prefixes once MSC3202 completes FCP merge
         if service.msc3202_transaction_extensions:
             if one_time_keys_count:
-                body[
-                    "org.matrix.msc3202.device_one_time_key_counts"
-                ] = one_time_keys_count
-                body[
-                    "org.matrix.msc3202.device_one_time_keys_count"
-                ] = one_time_keys_count
+                body["org.matrix.msc3202.device_one_time_key_counts"] = (
+                    one_time_keys_count
+                )
+                body["org.matrix.msc3202.device_one_time_keys_count"] = (
+                    one_time_keys_count
+                )
             if unused_fallback_keys:
-                body[
-                    "org.matrix.msc3202.device_unused_fallback_key_types"
-                ] = unused_fallback_keys
+                body["org.matrix.msc3202.device_unused_fallback_key_types"] = (
+                    unused_fallback_keys
+                )
             if device_list_summary:
                 body["org.matrix.msc3202.device_lists"] = {
                     "changed": list(device_list_summary.changed),
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index 3fe0f050cd..c7f3e6d35e 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -171,9 +171,9 @@ class RegistrationConfig(Config):
             refreshable_access_token_lifetime = self.parse_duration(
                 refreshable_access_token_lifetime
             )
-        self.refreshable_access_token_lifetime: Optional[
-            int
-        ] = refreshable_access_token_lifetime
+        self.refreshable_access_token_lifetime: Optional[int] = (
+            refreshable_access_token_lifetime
+        )
 
         if (
             self.session_lifetime is not None
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index 4655882b4b..1645470499 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -199,9 +199,9 @@ class ContentRepositoryConfig(Config):
                 provider_config["module"] == "file_system"
                 or provider_config["module"] == "synapse.rest.media.v1.storage_provider"
             ):
-                provider_config[
-                    "module"
-                ] = "synapse.media.storage_provider.FileStorageProviderBackend"
+                provider_config["module"] = (
+                    "synapse.media.storage_provider.FileStorageProviderBackend"
+                )
 
             provider_class, parsed_config = load_module(
                 provider_config, ("media_storage_providers", "<item %i>" % i)
diff --git a/synapse/event_auth.py b/synapse/event_auth.py
index c8b06f760e..f5abcde2db 100644
--- a/synapse/event_auth.py
+++ b/synapse/event_auth.py
@@ -88,8 +88,7 @@ class _EventSourceStore(Protocol):
         redact_behaviour: EventRedactBehaviour,
         get_prev_content: bool = False,
         allow_rejected: bool = False,
-    ) -> Dict[str, "EventBase"]:
-        ...
+    ) -> Dict[str, "EventBase"]: ...
 
 
 def validate_event_for_room_version(event: "EventBase") -> None:
diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py
index 7ec696c6c0..36e0f47e51 100644
--- a/synapse/events/__init__.py
+++ b/synapse/events/__init__.py
@@ -93,16 +93,14 @@ class DictProperty(Generic[T]):
         self,
         instance: Literal[None],
         owner: Optional[Type[_DictPropertyInstance]] = None,
-    ) -> "DictProperty":
-        ...
+    ) -> "DictProperty": ...
 
     @overload
     def __get__(
         self,
         instance: _DictPropertyInstance,
         owner: Optional[Type[_DictPropertyInstance]] = None,
-    ) -> T:
-        ...
+    ) -> T: ...
 
     def __get__(
         self,
@@ -161,16 +159,14 @@ class DefaultDictProperty(DictProperty, Generic[T]):
         self,
         instance: Literal[None],
         owner: Optional[Type[_DictPropertyInstance]] = None,
-    ) -> "DefaultDictProperty":
-        ...
+    ) -> "DefaultDictProperty": ...
 
     @overload
     def __get__(
         self,
         instance: _DictPropertyInstance,
         owner: Optional[Type[_DictPropertyInstance]] = None,
-    ) -> T:
-        ...
+    ) -> T: ...
 
     def __get__(
         self,
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index cc52d0d1e9..e0613d0dbc 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -612,9 +612,9 @@ class EventClientSerializer:
         serialized_aggregations = {}
 
         if event_aggregations.references:
-            serialized_aggregations[
-                RelationTypes.REFERENCE
-            ] = event_aggregations.references
+            serialized_aggregations[RelationTypes.REFERENCE] = (
+                event_aggregations.references
+            )
 
         if event_aggregations.replace:
             # Include information about it in the relations dict.
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index dc8cd5ec9a..65d3a661fe 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -169,9 +169,9 @@ class FederationServer(FederationBase):
 
         # We cache responses to state queries, as they take a while and often
         # come in waves.
-        self._state_resp_cache: ResponseCache[
-            Tuple[str, Optional[str]]
-        ] = ResponseCache(hs.get_clock(), "state_resp", timeout_ms=30000)
+        self._state_resp_cache: ResponseCache[Tuple[str, Optional[str]]] = (
+            ResponseCache(hs.get_clock(), "state_resp", timeout_ms=30000)
+        )
         self._state_ids_resp_cache: ResponseCache[Tuple[str, str]] = ResponseCache(
             hs.get_clock(), "state_ids_resp", timeout_ms=30000
         )
diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py
index e9a2386a5c..b5c9fcff7c 100644
--- a/synapse/federation/send_queue.py
+++ b/synapse/federation/send_queue.py
@@ -88,9 +88,9 @@ class FederationRemoteSendQueue(AbstractFederationSender):
         # Stores the destinations we need to explicitly send presence to about a
         # given user.
         # Stream position -> (user_id, destinations)
-        self.presence_destinations: SortedDict[
-            int, Tuple[str, Iterable[str]]
-        ] = SortedDict()
+        self.presence_destinations: SortedDict[int, Tuple[str, Iterable[str]]] = (
+            SortedDict()
+        )
 
         # (destination, key) -> EDU
         self.keyed_edu: Dict[Tuple[str, tuple], Edu] = {}
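
Why a SortedDict keyed by stream position helps here, as a sketch (sortedcontainers is the library Synapse uses): entries stay ordered by position, so everything at or below an acknowledged position can be located and evicted cheaply.

from sortedcontainers import SortedDict

presence_destinations = SortedDict()
presence_destinations[5] = ("@alice:example.org", ["one.example"])
presence_destinations[9] = ("@bob:example.org", ["two.example"])

# Evict everything up to (and including) stream position 7.
for pos in list(presence_destinations.irange(maximum=7)):
    del presence_destinations[pos]

assert list(presence_destinations) == [9]
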
diff --git a/synapse/handlers/account.py b/synapse/handlers/account.py
index 37cc3d3ff5..89e944bc17 100644
--- a/synapse/handlers/account.py
+++ b/synapse/handlers/account.py
@@ -118,10 +118,10 @@ class AccountHandler:
             }
 
             if self._use_account_validity_in_account_status:
-                status[
-                    "org.matrix.expired"
-                ] = await self._account_validity_handler.is_user_expired(
-                    user_id.to_string()
+                status["org.matrix.expired"] = (
+                    await self._account_validity_handler.is_user_expired(
+                        user_id.to_string()
+                    )
                 )
 
         return status
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index f233f1b034..a1fab99f6b 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -2185,7 +2185,7 @@ class PasswordAuthProvider:
                 # result is always the right type, but as it is 3rd party code it might not be
 
                 if not isinstance(result, tuple) or len(result) != 2:
-                    logger.warning(
+                    logger.warning(  # type: ignore[unreachable]
                         "Wrong type returned by module API callback %s: %s, expected"
                         " Optional[Tuple[str, Optional[Callable]]]",
                         callback,
@@ -2248,7 +2248,7 @@ class PasswordAuthProvider:
                 # result is always the right type, but as it is 3rd party code it might not be
 
                 if not isinstance(result, tuple) or len(result) != 2:
-                    logger.warning(
+                    logger.warning(  # type: ignore[unreachable]
                         "Wrong type returned by module API callback %s: %s, expected"
                         " Optional[Tuple[str, Optional[Callable]]]",
                         callback,
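
Why mypy needs the targeted ignore[unreachable] here, as a small sketch: the callback's annotation promises a 2-tuple, so with unreachable-code warnings enabled mypy treats the isinstance-failure branch as dead code, even though an untyped third-party module can return anything at runtime.

from typing import Callable, Optional, Tuple

def check_module_result(cb: Callable[[], Tuple[str, Optional[Callable]]]) -> None:
    result = cb()
    if not isinstance(result, tuple) or len(result) != 2:
        # Reachable in practice, but "unreachable" to mypy given the
        # annotation, hence a targeted ignore rather than a looser type.
        print("Wrong type returned by module API callback:", result)  # type: ignore[unreachable]
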
diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py
index 5f3dc30b63..ad2b0f5fcc 100644
--- a/synapse/handlers/directory.py
+++ b/synapse/handlers/directory.py
@@ -265,9 +265,9 @@ class DirectoryHandler:
     async def get_association(self, room_alias: RoomAlias) -> JsonDict:
         room_id = None
         if self.hs.is_mine(room_alias):
-            result: Optional[
-                RoomAliasMapping
-            ] = await self.get_association_from_room_alias(room_alias)
+            result: Optional[RoomAliasMapping] = (
+                await self.get_association_from_room_alias(room_alias)
+            )
 
             if result:
                 room_id = result.room_id
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 2b7aad5b58..299588e476 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -1001,11 +1001,11 @@ class FederationHandler:
                     )
 
                 if include_auth_user_id:
-                    event_content[
-                        EventContentFields.AUTHORISING_USER
-                    ] = await self._event_auth_handler.get_user_which_could_invite(
-                        room_id,
-                        state_ids,
+                    event_content[EventContentFields.AUTHORISING_USER] = (
+                        await self._event_auth_handler.get_user_which_could_invite(
+                            room_id,
+                            state_ids,
+                        )
                     )
 
         builder = self.event_builder_factory.for_room_version(
diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py
index 83f6a25981..c85deaed56 100644
--- a/synapse/handlers/federation_event.py
+++ b/synapse/handlers/federation_event.py
@@ -1367,9 +1367,9 @@ class FederationEventHandler:
             )
 
         if remote_event.is_state() and remote_event.rejected_reason is None:
-            state_map[
-                (remote_event.type, remote_event.state_key)
-            ] = remote_event.event_id
+            state_map[(remote_event.type, remote_event.state_key)] = (
+                remote_event.event_id
+            )
 
         return state_map
 
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 7e5bb97f2a..0ce6eeee15 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -1654,9 +1654,9 @@ class EventCreationHandler:
                     expiry_ms=60 * 60 * 1000,
                 )
 
-                self._external_cache_joined_hosts_updates[
-                    state_entry.state_group
-                ] = None
+                self._external_cache_joined_hosts_updates[state_entry.state_group] = (
+                    None
+                )
 
     async def _validate_canonical_alias(
         self,
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index 21d3c71d8e..37ee625f71 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -493,9 +493,9 @@ class WorkerPresenceHandler(BasePresenceHandler):
 
         # The number of ongoing syncs on this process, by (user ID, device ID).
         # Empty if _presence_enabled is false.
-        self._user_device_to_num_current_syncs: Dict[
-            Tuple[str, Optional[str]], int
-        ] = {}
+        self._user_device_to_num_current_syncs: Dict[Tuple[str, Optional[str]], int] = (
+            {}
+        )
 
         self.notifier = hs.get_notifier()
         self.instance_id = hs.get_instance_id()
@@ -818,9 +818,9 @@ class PresenceHandler(BasePresenceHandler):
 
         # Keeps track of the number of *ongoing* syncs on this process. While
         # this is non zero a user will never go offline.
-        self._user_device_to_num_current_syncs: Dict[
-            Tuple[str, Optional[str]], int
-        ] = {}
+        self._user_device_to_num_current_syncs: Dict[Tuple[str, Optional[str]], int] = (
+            {}
+        )
 
         # Keeps track of the number of *ongoing* syncs on other processes.
         #
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index 279d393a5a..e51e282a9f 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -320,9 +320,9 @@ class ProfileHandler:
             server_name = host
 
         if self._is_mine_server_name(server_name):
-            media_info: Optional[
-                Union[LocalMedia, RemoteMedia]
-            ] = await self.store.get_local_media(media_id)
+            media_info: Optional[Union[LocalMedia, RemoteMedia]] = (
+                await self.store.get_local_media(media_id)
+            )
         else:
             media_info = await self.store.get_cached_remote_media(server_name, media_id)
 
diff --git a/synapse/handlers/relations.py b/synapse/handlers/relations.py
index 828a4b4cbd..931ac0c813 100644
--- a/synapse/handlers/relations.py
+++ b/synapse/handlers/relations.py
@@ -188,13 +188,13 @@ class RelationsHandler:
         if include_original_event:
             # Do not bundle aggregations when retrieving the original event because
             # we want the content before relations are applied to it.
-            return_value[
-                "original_event"
-            ] = await self._event_serializer.serialize_event(
-                event,
-                now,
-                bundle_aggregations=None,
-                config=serialize_options,
+            return_value["original_event"] = (
+                await self._event_serializer.serialize_event(
+                    event,
+                    now,
+                    bundle_aggregations=None,
+                    config=serialize_options,
+                )
             )
 
         if next_token:
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 6b116dce8c..3278426ca3 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -538,10 +538,10 @@ class RoomCreationHandler:
         # deep-copy the power-levels event before we start modifying it
         # note that if frozen_dicts are enabled, `power_levels` will be a frozen
         # dict so we can't just copy.deepcopy it.
-        initial_state[
-            (EventTypes.PowerLevels, "")
-        ] = power_levels = copy_and_fixup_power_levels_contents(
-            initial_state[(EventTypes.PowerLevels, "")]
+        initial_state[(EventTypes.PowerLevels, "")] = power_levels = (
+            copy_and_fixup_power_levels_contents(
+                initial_state[(EventTypes.PowerLevels, "")]
+            )
         )
 
         # Resolve the minimum power level required to send any state event
@@ -1362,9 +1362,11 @@ class RoomCreationHandler:
         visibility = room_config.get("visibility", "private")
         preset_name = room_config.get(
             "preset",
-            RoomCreationPreset.PRIVATE_CHAT
-            if visibility == "private"
-            else RoomCreationPreset.PUBLIC_CHAT,
+            (
+                RoomCreationPreset.PRIVATE_CHAT
+                if visibility == "private"
+                else RoomCreationPreset.PUBLIC_CHAT
+            ),
         )
         try:
             preset_config = self._presets_dict[preset_name]
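
A small illustration of the frozen-dict issue called out above. MappingProxyType stands in for Synapse's frozen dicts, and thaw() is a hypothetical helper in the spirit of copy_and_fixup_power_levels_contents: mutating a frozen mapping raises, so a mutable copy must be rebuilt from plain dicts before the power levels can be edited.

from types import MappingProxyType

def thaw(mapping):
    # Recursively rebuild plain, mutable dicts.
    return {
        k: thaw(v) if isinstance(v, (dict, MappingProxyType)) else v
        for k, v in mapping.items()
    }

frozen = MappingProxyType({"users": MappingProxyType({"@admin:example.org": 100})})
power_levels = thaw(frozen)
power_levels["users"]["@mod:example.org"] = 50  # fine on the thawed copy
# frozen["users"]["@mod:example.org"] = 50 would raise TypeError
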
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index d238c40bcf..9e9f6cd062 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -1236,11 +1236,11 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         # If this is going to be a local join, additional information must
         # be included in the event content in order to efficiently validate
         # the event.
-        content[
-            EventContentFields.AUTHORISING_USER
-        ] = await self.event_auth_handler.get_user_which_could_invite(
-            room_id,
-            state_before_join,
+        content[EventContentFields.AUTHORISING_USER] = (
+            await self.event_auth_handler.get_user_which_could_invite(
+                room_id,
+                state_before_join,
+            )
         )
 
         return False, []
diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py
index 437cb5509c..8e39e76c97 100644
--- a/synapse/handlers/sso.py
+++ b/synapse/handlers/sso.py
@@ -150,7 +150,7 @@ class UserAttributes:
     display_name: Optional[str] = None
     picture: Optional[str] = None
     # mypy thinks these are incompatible for some reason.
-    emails: StrCollection = attr.Factory(list)  # type: ignore[assignment]
+    emails: StrCollection = attr.Factory(list)
 
 
 @attr.s(slots=True, auto_attribs=True)
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 9122a79b4c..0aedb37f16 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -1014,30 +1014,6 @@ class SyncHandler:
                     if event.is_state():
                         timeline_state[(event.type, event.state_key)] = event.event_id
 
-                if full_state:
-                    # always make sure we LL ourselves so we know we're in the room
-                    # (if we are) to fix https://github.com/vector-im/riot-web/issues/7209
-                    # We only need apply this on full state syncs given we disabled
-                    # LL for incr syncs in https://github.com/matrix-org/synapse/pull/3840.
-                    # We don't insert ourselves into `members_to_fetch`, because in some
-                    # rare cases (an empty event batch with a now_token after the user's
-                    # leave in a partial state room which another local user has
-                    # joined), the room state will be missing our membership and there
-                    # is no guarantee that our membership will be in the auth events of
-                    # timeline events when the room is partial stated.
-                    state_filter = StateFilter.from_lazy_load_member_list(
-                        members_to_fetch.union((sync_config.user.to_string(),))
-                    )
-                else:
-                    state_filter = StateFilter.from_lazy_load_member_list(
-                        members_to_fetch
-                    )
-
-                # We are happy to use partial state to compute the `/sync` response.
-                # Since partial state may not include the lazy-loaded memberships we
-                # require, we fix up the state response afterwards with memberships from
-                # auth events.
-                await_full_state = False
             else:
                 timeline_state = {
                     (event.type, event.state_key): event.event_id
@@ -1045,9 +1021,6 @@ class SyncHandler:
                     if event.is_state()
                 }
 
-                state_filter = StateFilter.all()
-                await_full_state = True
-
             # Now calculate the state to return in the sync response for the room.
             # This is more or less the change in state between the end of the previous
             # sync's timeline and the start of the current sync's timeline.
@@ -1057,131 +1030,28 @@ class SyncHandler:
             # whether the room is partial stated *before* fetching it.
             is_partial_state_room = await self.store.is_partial_state_room(room_id)
             if full_state:
-                if batch:
-                    state_at_timeline_end = (
-                        await self._state_storage_controller.get_state_ids_for_event(
-                            batch.events[-1].event_id,
-                            state_filter=state_filter,
-                            await_full_state=await_full_state,
-                        )
-                    )
-
-                    state_at_timeline_start = (
-                        await self._state_storage_controller.get_state_ids_for_event(
-                            batch.events[0].event_id,
-                            state_filter=state_filter,
-                            await_full_state=await_full_state,
-                        )
-                    )
-
-                else:
-                    state_at_timeline_end = await self.get_state_at(
-                        room_id,
-                        stream_position=now_token,
-                        state_filter=state_filter,
-                        await_full_state=await_full_state,
-                    )
-
-                    state_at_timeline_start = state_at_timeline_end
-
-                state_ids = _calculate_state(
-                    timeline_contains=timeline_state,
-                    timeline_start=state_at_timeline_start,
-                    timeline_end=state_at_timeline_end,
-                    previous_timeline_end={},
-                    lazy_load_members=lazy_load_members,
+                state_ids = await self._compute_state_delta_for_full_sync(
+                    room_id,
+                    sync_config.user,
+                    batch,
+                    now_token,
+                    members_to_fetch,
+                    timeline_state,
                 )
-            elif batch.limited:
-                if batch:
-                    state_at_timeline_start = (
-                        await self._state_storage_controller.get_state_ids_for_event(
-                            batch.events[0].event_id,
-                            state_filter=state_filter,
-                            await_full_state=await_full_state,
-                        )
-                    )
-                else:
-                    # We can get here if the user has ignored the senders of all
-                    # the recent events.
-                    state_at_timeline_start = await self.get_state_at(
-                        room_id,
-                        stream_position=now_token,
-                        state_filter=state_filter,
-                        await_full_state=await_full_state,
-                    )
-
-                # for now, we disable LL for gappy syncs - see
-                # https://github.com/vector-im/riot-web/issues/7211#issuecomment-419976346
-                # N.B. this slows down incr syncs as we are now processing way
-                # more state in the server than if we were LLing.
-                #
-                # We still have to filter timeline_start to LL entries (above) in order
-                # for _calculate_state's LL logic to work, as we have to include LL
-                # members for timeline senders in case they weren't loaded in the initial
-                # sync.  We do this by (counterintuitively) by filtering timeline_start
-                # members to just be ones which were timeline senders, which then ensures
-                # all of the rest get included in the state block (if we need to know
-                # about them).
-                state_filter = StateFilter.all()
-
+            else:
                 # If this is an initial sync then full_state should be set, and
                 # that case is handled above. We assert here to ensure that this
                 # is indeed the case.
                 assert since_token is not None
-                state_at_previous_sync = await self.get_state_at(
-                    room_id,
-                    stream_position=since_token,
-                    state_filter=state_filter,
-                    await_full_state=await_full_state,
-                )
 
-                if batch:
-                    state_at_timeline_end = (
-                        await self._state_storage_controller.get_state_ids_for_event(
-                            batch.events[-1].event_id,
-                            state_filter=state_filter,
-                            await_full_state=await_full_state,
-                        )
-                    )
-                else:
-                    # We can get here if the user has ignored the senders of all
-                    # the recent events.
-                    state_at_timeline_end = await self.get_state_at(
-                        room_id,
-                        stream_position=now_token,
-                        state_filter=state_filter,
-                        await_full_state=await_full_state,
-                    )
-
-                state_ids = _calculate_state(
-                    timeline_contains=timeline_state,
-                    timeline_start=state_at_timeline_start,
-                    timeline_end=state_at_timeline_end,
-                    previous_timeline_end=state_at_previous_sync,
-                    # we have to include LL members in case LL initial sync missed them
-                    lazy_load_members=lazy_load_members,
+                state_ids = await self._compute_state_delta_for_incremental_sync(
+                    room_id,
+                    batch,
+                    since_token,
+                    now_token,
+                    members_to_fetch,
+                    timeline_state,
                 )
-            else:
-                state_ids = {}
-                if lazy_load_members:
-                    if members_to_fetch and batch.events:
-                        # We're returning an incremental sync, with no
-                        # "gap" since the previous sync, so normally there would be
-                        # no state to return.
-                        # But we're lazy-loading, so the client might need some more
-                        # member events to understand the events in this timeline.
-                        # So we fish out all the member events corresponding to the
-                        # timeline here, and then dedupe any redundant ones below.
-
-                        state_ids = await self._state_storage_controller.get_state_ids_for_event(
-                            batch.events[0].event_id,
-                            # we only want members!
-                            state_filter=StateFilter.from_types(
-                                (EventTypes.Member, member)
-                                for member in members_to_fetch
-                            ),
-                            await_full_state=False,
-                        )
 
             # If we only have partial state for the room, `state_ids` may be missing the
             # memberships we wanted. We attempt to find some by digging through the auth
@@ -1245,6 +1115,227 @@ class SyncHandler:
             if e.type != EventTypes.Aliases  # until MSC2261 or alternative solution
         }
 
+    async def _compute_state_delta_for_full_sync(
+        self,
+        room_id: str,
+        syncing_user: UserID,
+        batch: TimelineBatch,
+        now_token: StreamToken,
+        members_to_fetch: Optional[Set[str]],
+        timeline_state: StateMap[str],
+    ) -> StateMap[str]:
+        """Calculate the state events to be included in a full sync response.
+
+        As with `_compute_state_delta_for_incremental_sync`, the result will include
+        the membership events for each member in `members_to_fetch`.
+
+        Args:
+            room_id: The room we are calculating for.
+            syncing_user: The user that is calling `/sync`.
+            batch: The timeline batch for the room that will be sent to the user.
+            now_token: Token of the end of the current batch.
+            members_to_fetch: If lazy-loading is enabled, the memberships needed for
+                events in the timeline. Otherwise, `None`.
+            timeline_state: The contribution to the room state from state events in
+                `batch`. Only contains the last event for any given state key.
+
+        Returns:
+            A map from (type, state_key) to event_id, for each event that we believe
+            should be included in the `state` part of the sync response.
+        """
+        if members_to_fetch is not None:
+            # Lazy-loading of membership events is enabled.
+            #
+            # Always make sure we load our own membership event so we know if
+            # we're in the room, to fix https://github.com/vector-im/riot-web/issues/7209.
+            #
+            # We only need to apply this on full state syncs given we disabled
+            # LL for incr syncs in https://github.com/matrix-org/synapse/pull/3840.
+            #
+            # We don't insert ourselves into `members_to_fetch`, because in some
+            # rare cases (an empty event batch with a now_token after the user's
+            # leave in a partial state room which another local user has
+            # joined), the room state will be missing our membership and there
+            # is no guarantee that our membership will be in the auth events of
+            # timeline events when the room is partial stated.
+            state_filter = StateFilter.from_lazy_load_member_list(
+                members_to_fetch.union((syncing_user.to_string(),))
+            )
+
+            # We are happy to use partial state to compute the `/sync` response.
+            # Since partial state may not include the lazy-loaded memberships we
+            # require, we fix up the state response afterwards with memberships from
+            # auth events.
+            await_full_state = False
+            lazy_load_members = True
+        else:
+            state_filter = StateFilter.all()
+            await_full_state = True
+            lazy_load_members = False
+
+        if batch:
+            state_at_timeline_end = (
+                await self._state_storage_controller.get_state_ids_for_event(
+                    batch.events[-1].event_id,
+                    state_filter=state_filter,
+                    await_full_state=await_full_state,
+                )
+            )
+
+            state_at_timeline_start = (
+                await self._state_storage_controller.get_state_ids_for_event(
+                    batch.events[0].event_id,
+                    state_filter=state_filter,
+                    await_full_state=await_full_state,
+                )
+            )
+        else:
+            state_at_timeline_end = await self.get_state_at(
+                room_id,
+                stream_position=now_token,
+                state_filter=state_filter,
+                await_full_state=await_full_state,
+            )
+
+            state_at_timeline_start = state_at_timeline_end
+
+        state_ids = _calculate_state(
+            timeline_contains=timeline_state,
+            timeline_start=state_at_timeline_start,
+            timeline_end=state_at_timeline_end,
+            previous_timeline_end={},
+            lazy_load_members=lazy_load_members,
+        )
+        return state_ids
+
+    async def _compute_state_delta_for_incremental_sync(
+        self,
+        room_id: str,
+        batch: TimelineBatch,
+        since_token: StreamToken,
+        now_token: StreamToken,
+        members_to_fetch: Optional[Set[str]],
+        timeline_state: StateMap[str],
+    ) -> StateMap[str]:
+        """Calculate the state events to be included in an incremental sync response.
+
+        If lazy-loading of membership events is enabled (as indicated by
+        `members_to_fetch` being not-`None`), the result will include the membership
+        events for each member in `members_to_fetch`. The caller
+        (`compute_state_delta`) is responsible for keeping track of which membership
+        events we have already sent to the client, and hence ripping them out.
+
+        Args:
+            room_id: The room we are calculating for.
+            batch: The timeline batch for the room that will be sent to the user.
+            since_token: Token of the end of the previous batch.
+            now_token: Token of the end of the current batch.
+            members_to_fetch: If lazy-loading is enabled, the memberships needed for
+                events in the timeline. Otherwise, `None`.
+            timeline_state: The contribution to the room state from state events in
+                `batch`. Only contains the last event for any given state key.
+
+        Returns:
+            A map from (type, state_key) to event_id, for each event that we believe
+            should be included in the `state` part of the sync response.
+        """
+        if members_to_fetch is not None:
+            # Lazy-loading is enabled. Only return the state that is needed.
+            state_filter = StateFilter.from_lazy_load_member_list(members_to_fetch)
+            await_full_state = False
+            lazy_load_members = True
+        else:
+            state_filter = StateFilter.all()
+            await_full_state = True
+            lazy_load_members = False
+
+        if batch.limited:
+            if batch:
+                state_at_timeline_start = (
+                    await self._state_storage_controller.get_state_ids_for_event(
+                        batch.events[0].event_id,
+                        state_filter=state_filter,
+                        await_full_state=await_full_state,
+                    )
+                )
+            else:
+                # We can get here if the user has ignored the senders of all
+                # the recent events.
+                state_at_timeline_start = await self.get_state_at(
+                    room_id,
+                    stream_position=now_token,
+                    state_filter=state_filter,
+                    await_full_state=await_full_state,
+                )
+
+            # for now, we disable LL for gappy syncs - see
+            # https://github.com/vector-im/riot-web/issues/7211#issuecomment-419976346
+            # N.B. this slows down incr syncs as we are now processing way
+            # more state in the server than if we were LLing.
+            #
+            # We still have to filter timeline_start to LL entries (above) in order
+            # for _calculate_state's LL logic to work, as we have to include LL
+            # members for timeline senders in case they weren't loaded in the initial
+            # sync.  We do this (counterintuitively) by filtering timeline_start
+            # members to just be ones which were timeline senders, which then ensures
+            # all of the rest get included in the state block (if we need to know
+            # about them).
+            state_filter = StateFilter.all()
+
+            state_at_previous_sync = await self.get_state_at(
+                room_id,
+                stream_position=since_token,
+                state_filter=state_filter,
+                await_full_state=await_full_state,
+            )
+
+            if batch:
+                state_at_timeline_end = (
+                    await self._state_storage_controller.get_state_ids_for_event(
+                        batch.events[-1].event_id,
+                        state_filter=state_filter,
+                        await_full_state=await_full_state,
+                    )
+                )
+            else:
+                # We can get here if the user has ignored the senders of all
+                # the recent events.
+                state_at_timeline_end = await self.get_state_at(
+                    room_id,
+                    stream_position=now_token,
+                    state_filter=state_filter,
+                    await_full_state=await_full_state,
+                )
+
+            state_ids = _calculate_state(
+                timeline_contains=timeline_state,
+                timeline_start=state_at_timeline_start,
+                timeline_end=state_at_timeline_end,
+                previous_timeline_end=state_at_previous_sync,
+                lazy_load_members=lazy_load_members,
+            )
+        else:
+            state_ids = {}
+            if lazy_load_members:
+                if members_to_fetch and batch.events:
+                    # We're returning an incremental sync, with no
+                    # "gap" since the previous sync, so normally there would be
+                    # no state to return.
+                    # But we're lazy-loading, so the client might need some more
+                    # member events to understand the events in this timeline.
+                    # So we fish out all the member events corresponding to the
+                    # timeline here. The caller will then dedupe any redundant ones.
+
+                    state_ids = await self._state_storage_controller.get_state_ids_for_event(
+                        batch.events[0].event_id,
+                        # we only want members!
+                        state_filter=StateFilter.from_types(
+                            (EventTypes.Member, member) for member in members_to_fetch
+                        ),
+                        await_full_state=False,
+                    )
+        return state_ids
+
     async def _find_missing_partial_state_memberships(
         self,
         room_id: str,
@@ -1333,9 +1424,9 @@ class SyncHandler:
                     and auth_event.state_key == member
                 ):
                     missing_members.discard(member)
-                    additional_state_ids[
-                        (EventTypes.Member, member)
-                    ] = auth_event.event_id
+                    additional_state_ids[(EventTypes.Member, member)] = (
+                        auth_event.event_id
+                    )
                     break
 
         if missing_members:
@@ -2746,7 +2837,7 @@ class SyncResultBuilder:
         if self.since_token:
             for joined_sync in self.joined:
                 it = itertools.chain(
-                    joined_sync.timeline.events, joined_sync.state.values()
+                    joined_sync.state.values(), joined_sync.timeline.events
                 )
                 for event in it:
                     if event.type == EventTypes.Member:
@@ -2758,13 +2849,20 @@ class SyncResultBuilder:
                             newly_joined_or_invited_or_knocked_users.add(
                                 event.state_key
                             )
+                            # If the user left and rejoined in the same batch, they
+                            # count as a newly-joined user, *not* a newly-left user.
+                            newly_left_users.discard(event.state_key)
                         else:
                             prev_content = event.unsigned.get("prev_content", {})
                             prev_membership = prev_content.get("membership", None)
                             if prev_membership == Membership.JOIN:
                                 newly_left_users.add(event.state_key)
+                            # If the user joined and left in the same batch, they
+                            # count as a newly-left user, not a newly-joined user.
+                            newly_joined_or_invited_or_knocked_users.discard(
+                                event.state_key
+                            )
 
-        newly_left_users -= newly_joined_or_invited_or_knocked_users
         return newly_joined_or_invited_or_knocked_users, newly_left_users
 
 
diff --git a/synapse/handlers/worker_lock.py b/synapse/handlers/worker_lock.py
index a870fd1124..7e578cf462 100644
--- a/synapse/handlers/worker_lock.py
+++ b/synapse/handlers/worker_lock.py
@@ -182,12 +182,15 @@ class WorkerLocksHandler:
         if not locks:
             return
 
-        def _wake_deferred(deferred: defer.Deferred) -> None:
-            if not deferred.called:
-                deferred.callback(None)
-
-        for lock in locks:
-            self._clock.call_later(0, _wake_deferred, lock.deferred)
+        def _wake_all_locks(
+            locks: Collection[Union[WaitingLock, WaitingMultiLock]]
+        ) -> None:
+            for lock in locks:
+                deferred = lock.deferred
+                if not deferred.called:
+                    deferred.callback(None)
+
+        self._clock.call_later(0, _wake_all_locks, locks)
 
     @wrap_as_background_process("_cleanup_locks")
     async def _cleanup_locks(self) -> None:
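
The refactor above replaces one delayed reactor call per waiting lock with a single delayed call that wakes the whole batch. A sketch of the same shape, using asyncio in place of the Twisted reactor:

import asyncio
from typing import List

def wake_all(waiters: List["asyncio.Future[None]"]) -> None:
    for fut in waiters:
        if not fut.done():
            fut.set_result(None)

async def main() -> None:
    loop = asyncio.get_running_loop()
    waiters = [loop.create_future() for _ in range(3)]
    # A single scheduled callback for the whole batch, not one per waiter.
    loop.call_later(0, wake_all, waiters)
    await asyncio.gather(*waiters)

asyncio.run(main())
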
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index 884ecdacdd..c73a589e6c 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -931,8 +931,7 @@ class MatrixFederationHttpClient:
         try_trailing_slash_on_400: bool = False,
         parser: Literal[None] = None,
         backoff_on_all_error_codes: bool = False,
-    ) -> JsonDict:
-        ...
+    ) -> JsonDict: ...
 
     @overload
     async def put_json(
@@ -949,8 +948,7 @@ class MatrixFederationHttpClient:
         try_trailing_slash_on_400: bool = False,
         parser: Optional[ByteParser[T]] = None,
         backoff_on_all_error_codes: bool = False,
-    ) -> T:
-        ...
+    ) -> T: ...
 
     async def put_json(
         self,
@@ -1140,8 +1138,7 @@ class MatrixFederationHttpClient:
         ignore_backoff: bool = False,
         try_trailing_slash_on_400: bool = False,
         parser: Literal[None] = None,
-    ) -> JsonDict:
-        ...
+    ) -> JsonDict: ...
 
     @overload
     async def get_json(
@@ -1154,8 +1151,7 @@ class MatrixFederationHttpClient:
         ignore_backoff: bool = ...,
         try_trailing_slash_on_400: bool = ...,
         parser: ByteParser[T] = ...,
-    ) -> T:
-        ...
+    ) -> T: ...
 
     async def get_json(
         self,
@@ -1236,8 +1232,7 @@ class MatrixFederationHttpClient:
         ignore_backoff: bool = False,
         try_trailing_slash_on_400: bool = False,
         parser: Literal[None] = None,
-    ) -> Tuple[JsonDict, Dict[bytes, List[bytes]]]:
-        ...
+    ) -> Tuple[JsonDict, Dict[bytes, List[bytes]]]: ...
 
     @overload
     async def get_json_with_headers(
@@ -1250,8 +1245,7 @@ class MatrixFederationHttpClient:
         ignore_backoff: bool = ...,
         try_trailing_slash_on_400: bool = ...,
         parser: ByteParser[T] = ...,
-    ) -> Tuple[T, Dict[bytes, List[bytes]]]:
-        ...
+    ) -> Tuple[T, Dict[bytes, List[bytes]]]: ...
 
     async def get_json_with_headers(
         self,
diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py
index b22eb727b1..b73d06f1d3 100644
--- a/synapse/http/servlet.py
+++ b/synapse/http/servlet.py
@@ -61,20 +61,17 @@ logger = logging.getLogger(__name__)
 
 
 @overload
-def parse_integer(request: Request, name: str, default: int) -> int:
-    ...
+def parse_integer(request: Request, name: str, default: int) -> int: ...
 
 
 @overload
-def parse_integer(request: Request, name: str, *, required: Literal[True]) -> int:
-    ...
+def parse_integer(request: Request, name: str, *, required: Literal[True]) -> int: ...
 
 
 @overload
 def parse_integer(
     request: Request, name: str, default: Optional[int] = None, required: bool = False
-) -> Optional[int]:
-    ...
+) -> Optional[int]: ...
 
 
 def parse_integer(
@@ -105,8 +102,7 @@ def parse_integer_from_args(
     args: Mapping[bytes, Sequence[bytes]],
     name: str,
     default: Optional[int] = None,
-) -> Optional[int]:
-    ...
+) -> Optional[int]: ...
 
 
 @overload
@@ -115,8 +111,7 @@ def parse_integer_from_args(
     name: str,
     *,
     required: Literal[True],
-) -> int:
-    ...
+) -> int: ...
 
 
 @overload
@@ -125,8 +120,7 @@ def parse_integer_from_args(
     name: str,
     default: Optional[int] = None,
     required: bool = False,
-) -> Optional[int]:
-    ...
+) -> Optional[int]: ...
 
 
 def parse_integer_from_args(
@@ -172,20 +166,17 @@ def parse_integer_from_args(
 
 
 @overload
-def parse_boolean(request: Request, name: str, default: bool) -> bool:
-    ...
+def parse_boolean(request: Request, name: str, default: bool) -> bool: ...
 
 
 @overload
-def parse_boolean(request: Request, name: str, *, required: Literal[True]) -> bool:
-    ...
+def parse_boolean(request: Request, name: str, *, required: Literal[True]) -> bool: ...
 
 
 @overload
 def parse_boolean(
     request: Request, name: str, default: Optional[bool] = None, required: bool = False
-) -> Optional[bool]:
-    ...
+) -> Optional[bool]: ...
 
 
 def parse_boolean(
@@ -216,8 +207,7 @@ def parse_boolean_from_args(
     args: Mapping[bytes, Sequence[bytes]],
     name: str,
     default: bool,
-) -> bool:
-    ...
+) -> bool: ...
 
 
 @overload
@@ -226,8 +216,7 @@ def parse_boolean_from_args(
     name: str,
     *,
     required: Literal[True],
-) -> bool:
-    ...
+) -> bool: ...
 
 
 @overload
@@ -236,8 +225,7 @@ def parse_boolean_from_args(
     name: str,
     default: Optional[bool] = None,
     required: bool = False,
-) -> Optional[bool]:
-    ...
+) -> Optional[bool]: ...
 
 
 def parse_boolean_from_args(
@@ -289,8 +277,7 @@ def parse_bytes_from_args(
     args: Mapping[bytes, Sequence[bytes]],
     name: str,
     default: Optional[bytes] = None,
-) -> Optional[bytes]:
-    ...
+) -> Optional[bytes]: ...
 
 
 @overload
@@ -300,8 +287,7 @@ def parse_bytes_from_args(
     default: Literal[None] = None,
     *,
     required: Literal[True],
-) -> bytes:
-    ...
+) -> bytes: ...
 
 
 @overload
@@ -310,8 +296,7 @@ def parse_bytes_from_args(
     name: str,
     default: Optional[bytes] = None,
     required: bool = False,
-) -> Optional[bytes]:
-    ...
+) -> Optional[bytes]: ...
 
 
 def parse_bytes_from_args(
@@ -355,8 +340,7 @@ def parse_string(
     *,
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> str:
-    ...
+) -> str: ...
 
 
 @overload
@@ -367,8 +351,7 @@ def parse_string(
     required: Literal[True],
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> str:
-    ...
+) -> str: ...
 
 
 @overload
@@ -380,8 +363,7 @@ def parse_string(
     required: bool = False,
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> Optional[str]:
-    ...
+) -> Optional[str]: ...
 
 
 def parse_string(
@@ -437,8 +419,7 @@ def parse_enum(
     name: str,
     E: Type[EnumT],
     default: EnumT,
-) -> EnumT:
-    ...
+) -> EnumT: ...
 
 
 @overload
@@ -448,8 +429,7 @@ def parse_enum(
     E: Type[EnumT],
     *,
     required: Literal[True],
-) -> EnumT:
-    ...
+) -> EnumT: ...
 
 
 def parse_enum(
@@ -526,8 +506,7 @@ def parse_strings_from_args(
     *,
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> Optional[List[str]]:
-    ...
+) -> Optional[List[str]]: ...
 
 
 @overload
@@ -538,8 +517,7 @@ def parse_strings_from_args(
     *,
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> List[str]:
-    ...
+) -> List[str]: ...
 
 
 @overload
@@ -550,8 +528,7 @@ def parse_strings_from_args(
     required: Literal[True],
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> List[str]:
-    ...
+) -> List[str]: ...
 
 
 @overload
@@ -563,8 +540,7 @@ def parse_strings_from_args(
     required: bool = False,
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> Optional[List[str]]:
-    ...
+) -> Optional[List[str]]: ...
 
 
 def parse_strings_from_args(
@@ -625,8 +601,7 @@ def parse_string_from_args(
     *,
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> Optional[str]:
-    ...
+) -> Optional[str]: ...
 
 
 @overload
@@ -638,8 +613,7 @@ def parse_string_from_args(
     required: Literal[True],
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> str:
-    ...
+) -> str: ...
 
 
 @overload
@@ -650,8 +624,7 @@ def parse_string_from_args(
     required: bool = False,
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> Optional[str]:
-    ...
+) -> Optional[str]: ...
 
 
 def parse_string_from_args(
@@ -704,22 +677,19 @@ def parse_string_from_args(
 
 
 @overload
-def parse_json_value_from_request(request: Request) -> JsonDict:
-    ...
+def parse_json_value_from_request(request: Request) -> JsonDict: ...
 
 
 @overload
 def parse_json_value_from_request(
     request: Request, allow_empty_body: Literal[False]
-) -> JsonDict:
-    ...
+) -> JsonDict: ...
 
 
 @overload
 def parse_json_value_from_request(
     request: Request, allow_empty_body: bool = False
-) -> Optional[JsonDict]:
-    ...
+) -> Optional[JsonDict]: ...
 
 
 def parse_json_value_from_request(
@@ -847,7 +817,6 @@ def assert_params_in_dict(body: JsonDict, required: StrCollection) -> None:
 
 
 class RestServlet:
-
     """A Synapse REST Servlet.
 
     An implementing class can either provide its own custom 'register' method,
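
The parse_* helpers in this file all share the overload shape shown above: passing a default, or required=True, yields a non-optional return type, while the general signature returns Optional. A minimal sketch (get_int is a hypothetical stand-in, not the real parse_integer):

from typing import Dict, Literal, Optional, overload

@overload
def get_int(args: Dict[str, str], name: str, default: int) -> int: ...

@overload
def get_int(args: Dict[str, str], name: str, *, required: Literal[True]) -> int: ...

@overload
def get_int(
    args: Dict[str, str], name: str, default: Optional[int] = None, required: bool = False
) -> Optional[int]: ...

def get_int(args, name, default=None, required=False):
    if name in args:
        return int(args[name])
    if required:
        raise KeyError(f"missing required parameter {name!r}")
    return default
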
diff --git a/synapse/logging/context.py b/synapse/logging/context.py
index 548d255b69..4650b60962 100644
--- a/synapse/logging/context.py
+++ b/synapse/logging/context.py
@@ -744,8 +744,7 @@ def preserve_fn(
 
 
 @overload
-def preserve_fn(f: Callable[P, R]) -> Callable[P, "defer.Deferred[R]"]:
-    ...
+def preserve_fn(f: Callable[P, R]) -> Callable[P, "defer.Deferred[R]"]: ...
 
 
 def preserve_fn(
@@ -774,15 +773,10 @@ def run_in_background(
 @overload
 def run_in_background(
     f: Callable[P, R], *args: P.args, **kwargs: P.kwargs
-) -> "defer.Deferred[R]":
-    ...
+) -> "defer.Deferred[R]": ...
 
 
-def run_in_background(  # type: ignore[misc]
-    # The `type: ignore[misc]` above suppresses
-    # "Overloaded function implementation does not accept all possible arguments of signature 1"
-    # "Overloaded function implementation does not accept all possible arguments of signature 2"
-    # which seems like a bug in mypy.
+def run_in_background(
     f: Union[
         Callable[P, R],
         Callable[P, Awaitable[R]],
diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py
index 78b9fffbfb..7a3c805cc5 100644
--- a/synapse/logging/opentracing.py
+++ b/synapse/logging/opentracing.py
@@ -388,15 +388,13 @@ def only_if_tracing(func: Callable[P, R]) -> Callable[P, Optional[R]]:
 @overload
 def ensure_active_span(
     message: str,
-) -> Callable[[Callable[P, R]], Callable[P, Optional[R]]]:
-    ...
+) -> Callable[[Callable[P, R]], Callable[P, Optional[R]]]: ...
 
 
 @overload
 def ensure_active_span(
     message: str, ret: T
-) -> Callable[[Callable[P, R]], Callable[P, Union[T, R]]]:
-    ...
+) -> Callable[[Callable[P, R]], Callable[P, Union[T, R]]]: ...
 
 
 def ensure_active_span(
diff --git a/synapse/media/media_repository.py b/synapse/media/media_repository.py
index 52859ed490..0e875132f6 100644
--- a/synapse/media/media_repository.py
+++ b/synapse/media/media_repository.py
@@ -1002,9 +1002,9 @@ class MediaRepository:
                     )
                     t_width = min(m_width, t_width)
                     t_height = min(m_height, t_height)
-                    thumbnails[
-                        (t_width, t_height, requirement.media_type)
-                    ] = requirement.method
+                    thumbnails[(t_width, t_height, requirement.media_type)] = (
+                        requirement.method
+                    )
 
             # Now we generate the thumbnails for each dimension, store it
             for (t_width, t_height, t_type), t_method in thumbnails.items():
diff --git a/synapse/metrics/jemalloc.py b/synapse/metrics/jemalloc.py
index 6b4c64f7a5..bd25985686 100644
--- a/synapse/metrics/jemalloc.py
+++ b/synapse/metrics/jemalloc.py
@@ -42,14 +42,12 @@ class JemallocStats:
     @overload
     def _mallctl(
         self, name: str, read: Literal[True] = True, write: Optional[int] = None
-    ) -> int:
-        ...
+    ) -> int: ...
 
     @overload
     def _mallctl(
         self, name: str, read: Literal[False], write: Optional[int] = None
-    ) -> None:
-        ...
+    ) -> None: ...
 
     def _mallctl(
         self, name: str, read: bool = True, write: Optional[int] = None
diff --git a/synapse/module_api/callbacks/spamchecker_callbacks.py b/synapse/module_api/callbacks/spamchecker_callbacks.py
index 6ec56a7f14..17079ff781 100644
--- a/synapse/module_api/callbacks/spamchecker_callbacks.py
+++ b/synapse/module_api/callbacks/spamchecker_callbacks.py
@@ -455,7 +455,7 @@ class SpamCheckerModuleApiCallbacks:
                     # mypy complains that we can't reach this code because of the
                     # return type in CHECK_EVENT_FOR_SPAM_CALLBACK, but we don't know
                     # for sure that the module actually returns it.
-                    logger.warning(
+                    logger.warning(  # type: ignore[unreachable]
                         "Module returned invalid value, rejecting message as spam"
                     )
                     res = "This message has been rejected as probable spam"
diff --git a/synapse/notifier.py b/synapse/notifier.py
index 62d954298c..e87333a80a 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -469,8 +469,7 @@ class Notifier:
         new_token: RoomStreamToken,
         users: Optional[Collection[Union[str, UserID]]] = None,
         rooms: Optional[StrCollection] = None,
-    ) -> None:
-        ...
+    ) -> None: ...
 
     @overload
     def on_new_event(
@@ -479,8 +478,7 @@ class Notifier:
         new_token: MultiWriterStreamToken,
         users: Optional[Collection[Union[str, UserID]]] = None,
         rooms: Optional[StrCollection] = None,
-    ) -> None:
-        ...
+    ) -> None: ...
 
     @overload
     def on_new_event(
@@ -497,8 +495,7 @@ class Notifier:
         new_token: int,
         users: Optional[Collection[Union[str, UserID]]] = None,
         rooms: Optional[StrCollection] = None,
-    ) -> None:
-        ...
+    ) -> None: ...
 
     def on_new_event(
         self,
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index b4bd88f308..f1ffc8115f 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -377,12 +377,14 @@ class Mailer:
             #
             # Note that many email clients will not render the unsubscribe link
             # unless DKIM, etc. is properly set up.
-            additional_headers={
-                "List-Unsubscribe-Post": "List-Unsubscribe=One-Click",
-                "List-Unsubscribe": f"<{unsubscribe_link}>",
-            }
-            if unsubscribe_link
-            else None,
+            additional_headers=(
+                {
+                    "List-Unsubscribe-Post": "List-Unsubscribe=One-Click",
+                    "List-Unsubscribe": f"<{unsubscribe_link}>",
+                }
+                if unsubscribe_link
+                else None
+            ),
         )
 
     async def _get_room_vars(
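
A hedged sketch of the two headers being attached above (the unsubscribe URL here is hypothetical; real links carry per-user tokens). RFC 8058 one-click unsubscription requires both the List-Unsubscribe target and the List-Unsubscribe-Post marker:

from email.message import EmailMessage

msg = EmailMessage()
unsubscribe_link = "https://example.org/unsubscribe?token=abc123"  # hypothetical
msg["List-Unsubscribe"] = f"<{unsubscribe_link}>"
# Tells one-click-capable clients to unsubscribe with a single POST.
msg["List-Unsubscribe-Post"] = "List-Unsubscribe=One-Click"
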
diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py
index a82ad49e01..9aa8d90bfe 100644
--- a/synapse/replication/http/_base.py
+++ b/synapse/replication/http/_base.py
@@ -259,9 +259,9 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
                     url_args.append(txn_id)
 
                 if cls.METHOD == "POST":
-                    request_func: Callable[
-                        ..., Awaitable[Any]
-                    ] = client.post_json_get_json
+                    request_func: Callable[..., Awaitable[Any]] = (
+                        client.post_json_get_json
+                    )
                 elif cls.METHOD == "PUT":
                     request_func = client.put_json
                 elif cls.METHOD == "GET":
diff --git a/synapse/replication/tcp/external_cache.py b/synapse/replication/tcp/external_cache.py
index ce47d8035c..a95771b5f6 100644
--- a/synapse/replication/tcp/external_cache.py
+++ b/synapse/replication/tcp/external_cache.py
@@ -70,9 +70,9 @@ class ExternalCache:
 
     def __init__(self, hs: "HomeServer"):
         if hs.config.redis.redis_enabled:
-            self._redis_connection: Optional[
-                "ConnectionHandler"
-            ] = hs.get_outbound_redis_connection()
+            self._redis_connection: Optional["ConnectionHandler"] = (
+                hs.get_outbound_redis_connection()
+            )
         else:
             self._redis_connection = None
 
diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py
index 07e0fb71f2..6da1d79168 100644
--- a/synapse/rest/admin/__init__.py
+++ b/synapse/rest/admin/__init__.py
@@ -237,10 +237,12 @@ class PurgeHistoryStatusRestServlet(RestServlet):
             raise NotFoundError("purge id '%s' not found" % purge_id)
 
         result: JsonDict = {
-            "status": purge_task.status
-            if purge_task.status == TaskStatus.COMPLETE
-            or purge_task.status == TaskStatus.FAILED
-            else "active",
+            "status": (
+                purge_task.status
+                if purge_task.status == TaskStatus.COMPLETE
+                or purge_task.status == TaskStatus.FAILED
+                else "active"
+            ),
         }
         if purge_task.error:
             result["error"] = purge_task.error
diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py
index a9645e4af7..4e34e46512 100644
--- a/synapse/rest/admin/users.py
+++ b/synapse/rest/admin/users.py
@@ -1184,12 +1184,14 @@ class RateLimitRestServlet(RestServlet):
             # convert `null` to `0` for consistency
             # both values do the same in the ratelimit handler
             ret = {
-                "messages_per_second": 0
-                if ratelimit.messages_per_second is None
-                else ratelimit.messages_per_second,
-                "burst_count": 0
-                if ratelimit.burst_count is None
-                else ratelimit.burst_count,
+                "messages_per_second": (
+                    0
+                    if ratelimit.messages_per_second is None
+                    else ratelimit.messages_per_second
+                ),
+                "burst_count": (
+                    0 if ratelimit.burst_count is None else ratelimit.burst_count
+                ),
             }
         else:
             ret = {}
diff --git a/synapse/rest/client/account_data.py b/synapse/rest/client/account_data.py
index 0cdc4cc4f7..12ffca984f 100644
--- a/synapse/rest/client/account_data.py
+++ b/synapse/rest/client/account_data.py
@@ -112,9 +112,9 @@ class AccountDataServlet(RestServlet):
             self._hs.config.experimental.msc4010_push_rules_account_data
             and account_data_type == AccountDataTypes.PUSH_RULES
         ):
-            account_data: Optional[
-                JsonMapping
-            ] = await self._push_rules_handler.push_rules_for_user(requester.user)
+            account_data: Optional[JsonMapping] = (
+                await self._push_rules_handler.push_rules_for_user(requester.user)
+            )
         else:
             account_data = await self.store.get_global_account_data_by_type_for_user(
                 user_id, account_data_type
diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py
index 3af2b7dfd9..2b103ca6a8 100644
--- a/synapse/rest/client/sync.py
+++ b/synapse/rest/client/sync.py
@@ -313,12 +313,12 @@ class SyncRestServlet(RestServlet):
 
         # https://github.com/matrix-org/matrix-doc/blob/54255851f642f84a4f1aaf7bc063eebe3d76752b/proposals/2732-olm-fallback-keys.md
         # states that this field should always be included, as long as the server supports the feature.
-        response[
-            "org.matrix.msc2732.device_unused_fallback_key_types"
-        ] = sync_result.device_unused_fallback_key_types
-        response[
-            "device_unused_fallback_key_types"
-        ] = sync_result.device_unused_fallback_key_types
+        response["org.matrix.msc2732.device_unused_fallback_key_types"] = (
+            sync_result.device_unused_fallback_key_types
+        )
+        response["device_unused_fallback_key_types"] = (
+            sync_result.device_unused_fallback_key_types
+        )
 
         if joined:
             response["rooms"][Membership.JOIN] = joined
@@ -543,9 +543,9 @@ class SyncRestServlet(RestServlet):
             if room.unread_thread_notifications:
                 result["unread_thread_notifications"] = room.unread_thread_notifications
                 if self._msc3773_enabled:
-                    result[
-                        "org.matrix.msc3773.unread_thread_notifications"
-                    ] = room.unread_thread_notifications
+                    result["org.matrix.msc3773.unread_thread_notifications"] = (
+                        room.unread_thread_notifications
+                    )
             result["summary"] = room.summary
             if self._msc2654_enabled:
                 result["org.matrix.msc2654.unread_count"] = room.unread_count
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index 6afe4a7bcc..dc7325fc57 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -191,10 +191,10 @@ class RemoteKey(RestServlet):
         server_keys: Dict[Tuple[str, str], Optional[FetchKeyResultForRemote]] = {}
         for server_name, key_ids in query.items():
             if key_ids:
-                results: Mapping[
-                    str, Optional[FetchKeyResultForRemote]
-                ] = await self.store.get_server_keys_json_for_remote(
-                    server_name, key_ids
+                results: Mapping[str, Optional[FetchKeyResultForRemote]] = (
+                    await self.store.get_server_keys_json_for_remote(
+                        server_name, key_ids
+                    )
                 )
             else:
                 results = await self.store.get_all_server_keys_json_for_remote(
diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py
index 015e49ab81..72b291889b 100644
--- a/synapse/state/__init__.py
+++ b/synapse/state/__init__.py
@@ -603,15 +603,15 @@ class StateResolutionHandler:
         self.resolve_linearizer = Linearizer(name="state_resolve_lock")
 
         # dict of set of event_ids -> _StateCacheEntry.
-        self._state_cache: ExpiringCache[
-            FrozenSet[int], _StateCacheEntry
-        ] = ExpiringCache(
-            cache_name="state_cache",
-            clock=self.clock,
-            max_len=100000,
-            expiry_ms=EVICTION_TIMEOUT_SECONDS * 1000,
-            iterable=True,
-            reset_expiry_on_get=True,
+        self._state_cache: ExpiringCache[FrozenSet[int], _StateCacheEntry] = (
+            ExpiringCache(
+                cache_name="state_cache",
+                clock=self.clock,
+                max_len=100000,
+                expiry_ms=EVICTION_TIMEOUT_SECONDS * 1000,
+                iterable=True,
+                reset_expiry_on_get=True,
+            )
         )
 
         #
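
Why the cache above is keyed on FrozenSet[int], as a tiny sketch: a frozenset of state-group ids is hashable and order-independent, so resolving the same combination of groups hits the cache no matter what order they were requested in.

from typing import Dict, FrozenSet, Set

_cache: Dict[FrozenSet[int], str] = {}

def resolve_state_groups(group_ids: Set[int]) -> str:
    key = frozenset(group_ids)
    if key not in _cache:
        # Stand-in for the expensive state-resolution work.
        _cache[key] = f"resolved:{sorted(group_ids)}"
    return _cache[key]

assert resolve_state_groups({2, 1}) is resolve_state_groups({1, 2})
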
diff --git a/synapse/state/v2.py b/synapse/state/v2.py
index 8de16db1d0..da926ad146 100644
--- a/synapse/state/v2.py
+++ b/synapse/state/v2.py
@@ -52,8 +52,7 @@ class Clock(Protocol):
     # This is usually synapse.util.Clock, but it's replaced with a FakeClock in tests.
     # We only ever sleep(0) though, so that other async functions can make forward
     # progress without waiting for stateres to complete.
-    def sleep(self, duration_ms: float) -> Awaitable[None]:
-        ...
+    def sleep(self, duration_ms: float) -> Awaitable[None]: ...
 
 
 class StateResolutionStore(Protocol):
@@ -61,13 +60,11 @@ class StateResolutionStore(Protocol):
     # TestStateResolutionStore in tests.
     def get_events(
         self, event_ids: StrCollection, allow_rejected: bool = False
-    ) -> Awaitable[Dict[str, EventBase]]:
-        ...
+    ) -> Awaitable[Dict[str, EventBase]]: ...
 
     def get_auth_chain_difference(
         self, room_id: str, state_sets: List[Set[str]]
-    ) -> Awaitable[Set[str]]:
-        ...
+    ) -> Awaitable[Set[str]]: ...
 
 
 # We want to yield to the reactor occasionally during state res when dealing
@@ -742,8 +739,7 @@ async def _get_event(
     event_map: Dict[str, EventBase],
     state_res_store: StateResolutionStore,
     allow_none: Literal[False] = False,
-) -> EventBase:
-    ...
+) -> EventBase: ...
 
 
 @overload
@@ -753,8 +749,7 @@ async def _get_event(
     event_map: Dict[str, EventBase],
     state_res_store: StateResolutionStore,
     allow_none: Literal[True],
-) -> Optional[EventBase]:
-    ...
+) -> Optional[EventBase]: ...
 
 
 async def _get_event(
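
The `Clock` protocol above is structural: anything with a matching `sleep` method satisfies it, which is what lets tests substitute a `FakeClock`. Since state resolution only ever calls `sleep(0)`, the await is purely a cooperative yield. A minimal sketch of the same shape using asyncio in place of Twisted (all names other than `Clock.sleep` are hypothetical):

    import asyncio
    from typing import Awaitable, Protocol

    class Clock(Protocol):
        def sleep(self, duration_ms: float) -> Awaitable[None]: ...

    class AsyncioClock:
        # Satisfies the protocol structurally; no inheritance required.
        def sleep(self, duration_ms: float) -> Awaitable[None]:
            return asyncio.sleep(duration_ms / 1000)

    async def busy_loop(clock: Clock) -> int:
        total = 0
        for i in range(1000):
            total += i
            if i % 100 == 0:
                await clock.sleep(0)  # let other coroutines make progress
        return total

    print(asyncio.run(busy_loop(AsyncioClock())))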
diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py
index 9df4edee38..f473294070 100644
--- a/synapse/storage/background_updates.py
+++ b/synapse/storage/background_updates.py
@@ -836,9 +836,9 @@ class BackgroundUpdater:
                 c.execute(sql)
 
         if isinstance(self.db_pool.engine, engines.PostgresEngine):
-            runner: Optional[
-                Callable[[LoggingDatabaseConnection], None]
-            ] = create_index_psql
+            runner: Optional[Callable[[LoggingDatabaseConnection], None]] = (
+                create_index_psql
+            )
         elif psql_only:
             runner = None
         else:
diff --git a/synapse/storage/controllers/persist_events.py b/synapse/storage/controllers/persist_events.py
index 69d5999c0a..84699a2ee1 100644
--- a/synapse/storage/controllers/persist_events.py
+++ b/synapse/storage/controllers/persist_events.py
@@ -773,9 +773,9 @@ class EventsPersistenceStorageController:
         )
 
         # Remove any events which are prev_events of any existing events.
-        existing_prevs: Collection[
-            str
-        ] = await self.persist_events_store._get_events_which_are_prevs(result)
+        existing_prevs: Collection[str] = (
+            await self.persist_events_store._get_events_which_are_prevs(result)
+        )
         result.difference_update(existing_prevs)
 
         # Finally handle the case where the new events have soft-failed prev
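
The `difference_update` above is the forward-extremity pruning step: any candidate event that some other event already lists in its `prev_events` is no longer an extremity. A toy illustration with hypothetical event IDs:

    # event_id -> prev_event_ids of a tiny DAG
    events = {"$A": [], "$B": ["$A"], "$C": ["$B"]}
    candidates = set(events)
    existing_prevs = {p for prevs in events.values() for p in prevs}
    candidates.difference_update(existing_prevs)
    assert candidates == {"$C"}  # only the event nothing points at remains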
diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index 8dc9080842..d9c85e411e 100644
--- a/synapse/storage/database.py
+++ b/synapse/storage/database.py
@@ -111,8 +111,7 @@ class _PoolConnection(Connection):
     A Connection from twisted.enterprise.adbapi.Connection.
     """
 
-    def reconnect(self) -> None:
-        ...
+    def reconnect(self) -> None: ...
 
 
 def make_pool(
@@ -914,9 +913,8 @@ class DatabasePool:
 
             try:
                 with opentracing.start_active_span(f"db.{desc}"):
-                    result = await self.runWithConnection(
+                    result: R = await self.runWithConnection(
-                        # mypy seems to have an issue with this, maybe a bug?
-                        self.new_transaction,  # type: ignore[arg-type]
+                        self.new_transaction,
                         desc,
                         after_callbacks,
                         async_after_callbacks,
@@ -935,7 +934,7 @@ class DatabasePool:
                     await async_callback(*async_args, **async_kwargs)
                 for after_callback, after_args, after_kwargs in after_callbacks:
                     after_callback(*after_args, **after_kwargs)
-                return cast(R, result)
+                return result
             except Exception:
                 for exception_callback, after_args, after_kwargs in exception_callbacks:
                     exception_callback(*after_args, **after_kwargs)
@@ -1603,8 +1602,7 @@ class DatabasePool:
         retcols: Collection[str],
         allow_none: Literal[False] = False,
         desc: str = "simple_select_one",
-    ) -> Tuple[Any, ...]:
-        ...
+    ) -> Tuple[Any, ...]: ...
 
     @overload
     async def simple_select_one(
@@ -1614,8 +1612,7 @@ class DatabasePool:
         retcols: Collection[str],
         allow_none: Literal[True] = True,
         desc: str = "simple_select_one",
-    ) -> Optional[Tuple[Any, ...]]:
-        ...
+    ) -> Optional[Tuple[Any, ...]]: ...
 
     async def simple_select_one(
         self,
@@ -1654,8 +1651,7 @@ class DatabasePool:
         retcol: str,
         allow_none: Literal[False] = False,
         desc: str = "simple_select_one_onecol",
-    ) -> Any:
-        ...
+    ) -> Any: ...
 
     @overload
     async def simple_select_one_onecol(
@@ -1665,8 +1661,7 @@ class DatabasePool:
         retcol: str,
         allow_none: Literal[True] = True,
         desc: str = "simple_select_one_onecol",
-    ) -> Optional[Any]:
-        ...
+    ) -> Optional[Any]: ...
 
     async def simple_select_one_onecol(
         self,
@@ -1706,8 +1701,7 @@ class DatabasePool:
         keyvalues: Dict[str, Any],
         retcol: str,
         allow_none: Literal[False] = False,
-    ) -> Any:
-        ...
+    ) -> Any: ...
 
     @overload
     @classmethod
@@ -1718,8 +1712,7 @@ class DatabasePool:
         keyvalues: Dict[str, Any],
         retcol: str,
         allow_none: Literal[True] = True,
-    ) -> Optional[Any]:
-        ...
+    ) -> Optional[Any]: ...
 
     @classmethod
     def simple_select_one_onecol_txn(
@@ -2501,8 +2494,7 @@ def make_tuple_in_list_sql_clause(
     database_engine: BaseDatabaseEngine,
     columns: Tuple[str, str],
     iterable: Collection[Tuple[Any, Any]],
-) -> Tuple[str, list]:
-    ...
+) -> Tuple[str, list]: ...
 
 
 def make_tuple_in_list_sql_clause(
diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py
index 3e011f3340..8dbcb3f5a0 100644
--- a/synapse/storage/databases/main/devices.py
+++ b/synapse/storage/databases/main/devices.py
@@ -1701,9 +1701,9 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
 
         # Map of (user_id, device_id) -> bool. If there is an entry, that
         # implies that the device exists.
-        self.device_id_exists_cache: LruCache[
-            Tuple[str, str], Literal[True]
-        ] = LruCache(cache_name="device_id_exists", max_size=10000)
+        self.device_id_exists_cache: LruCache[Tuple[str, str], Literal[True]] = (
+            LruCache(cache_name="device_id_exists", max_size=10000)
+        )
 
     async def store_device(
         self,
diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py
index c96371a0d3..b219ea70ee 100644
--- a/synapse/storage/databases/main/end_to_end_keys.py
+++ b/synapse/storage/databases/main/end_to_end_keys.py
@@ -256,8 +256,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
         self,
         query_list: Collection[Tuple[str, Optional[str]]],
         include_all_devices: Literal[False] = False,
-    ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]:
-        ...
+    ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: ...
 
     @overload
     async def get_e2e_device_keys_and_signatures(
@@ -265,8 +264,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
         query_list: Collection[Tuple[str, Optional[str]]],
         include_all_devices: bool = False,
         include_deleted_devices: Literal[False] = False,
-    ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]:
-        ...
+    ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: ...
 
     @overload
     async def get_e2e_device_keys_and_signatures(
@@ -274,8 +272,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
         query_list: Collection[Tuple[str, Optional[str]]],
         include_all_devices: Literal[True],
         include_deleted_devices: Literal[True],
-    ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]:
-        ...
+    ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]: ...
 
     @trace
     @cancellable
diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py
index d5942a10b2..a6fda3f43c 100644
--- a/synapse/storage/databases/main/events.py
+++ b/synapse/storage/databases/main/events.py
@@ -1292,9 +1292,9 @@ class PersistEventsStore:
         Returns:
             filtered list
         """
-        new_events_and_contexts: OrderedDict[
-            str, Tuple[EventBase, EventContext]
-        ] = OrderedDict()
+        new_events_and_contexts: OrderedDict[str, Tuple[EventBase, EventContext]] = (
+            OrderedDict()
+        )
         for event, context in events_and_contexts:
             prev_event_context = new_events_and_contexts.get(event.event_id)
             if prev_event_context:
diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
index 9c3775bb7c..81fccfbccb 100644
--- a/synapse/storage/databases/main/events_worker.py
+++ b/synapse/storage/databases/main/events_worker.py
@@ -263,13 +263,13 @@ class EventsWorkerStore(SQLBaseStore):
                 5 * 60 * 1000,
             )
 
-        self._get_event_cache: AsyncLruCache[
-            Tuple[str], EventCacheEntry
-        ] = AsyncLruCache(
-            cache_name="*getEvent*",
-            max_size=hs.config.caches.event_cache_size,
-            # `extra_index_cb` Returns a tuple as that is the key type
-            extra_index_cb=lambda _, v: (v.event.room_id,),
+        self._get_event_cache: AsyncLruCache[Tuple[str], EventCacheEntry] = (
+            AsyncLruCache(
+                cache_name="*getEvent*",
+                max_size=hs.config.caches.event_cache_size,
+                # `extra_index_cb` returns a tuple, as that is the key type
+                extra_index_cb=lambda _, v: (v.event.room_id,),
+            )
         )
 
         # Map from event ID to a deferred that will result in a map from event
@@ -459,8 +459,7 @@ class EventsWorkerStore(SQLBaseStore):
         allow_rejected: bool = ...,
         allow_none: Literal[False] = ...,
         check_room_id: Optional[str] = ...,
-    ) -> EventBase:
-        ...
+    ) -> EventBase: ...
 
     @overload
     async def get_event(
@@ -471,8 +470,7 @@ class EventsWorkerStore(SQLBaseStore):
         allow_rejected: bool = ...,
         allow_none: Literal[True] = ...,
         check_room_id: Optional[str] = ...,
-    ) -> Optional[EventBase]:
-        ...
+    ) -> Optional[EventBase]: ...
 
     @cancellable
     async def get_event(
@@ -800,9 +798,9 @@ class EventsWorkerStore(SQLBaseStore):
                 # to all the events we pulled from the DB (this will result in this
                 # function returning more events than requested, but that can happen
                 # already due to `_get_events_from_db`).
-                fetching_deferred: ObservableDeferred[
-                    Dict[str, EventCacheEntry]
-                ] = ObservableDeferred(defer.Deferred(), consumeErrors=True)
+                fetching_deferred: ObservableDeferred[Dict[str, EventCacheEntry]] = (
+                    ObservableDeferred(defer.Deferred(), consumeErrors=True)
+                )
                 for event_id in missing_events_ids:
                     self._current_event_fetches[event_id] = fetching_deferred
 
@@ -1871,14 +1869,11 @@ class EventsWorkerStore(SQLBaseStore):
                 " LIMIT ?"
             )
             txn.execute(sql, (-last_id, -current_id, instance_name, limit))
-            new_event_updates: List[
-                Tuple[int, Tuple[str, str, str, str, str, str]]
-            ] = []
+            new_event_updates: List[Tuple[int, Tuple[str, str, str, str, str, str]]] = (
+                []
+            )
             row: Tuple[int, str, str, str, str, str, str]
-            # Type safety: iterating over `txn` yields `Tuple`, i.e.
-            # `Tuple[Any, ...]` of arbitrary length. Mypy detects assigning a
-            # variadic tuple to a fixed length tuple and flags it up as an error.
-            for row in txn:  # type: ignore[assignment]
+            for row in txn:
                 new_event_updates.append((row[0], row[1:]))
 
             limited = False
@@ -1905,7 +1903,4 @@
-            # Type safety: iterating over `txn` yields `Tuple`, i.e.
-            # `Tuple[Any, ...]` of arbitrary length. Mypy detects assigning a
-            # variadic tuple to a fixed length tuple and flags it up as an error.
-            for row in txn:  # type: ignore[assignment]
+            for row in txn:
                 new_event_updates.append((row[0], row[1:]))
 
             if len(new_event_updates) >= limit:
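
The `fetching_deferred` above coalesces concurrent fetches: the first caller registers a deferred under each missing event ID, and later callers for the same ID wait on it instead of issuing another database query. A rough asyncio analogue of the idea (Synapse itself uses Twisted's `ObservableDeferred`; the rest of this sketch is hypothetical):

    import asyncio
    from typing import Dict

    _in_flight: Dict[str, "asyncio.Future[str]"] = {}

    async def fetch_event(event_id: str) -> str:
        existing = _in_flight.get(event_id)
        if existing is not None:
            return await existing  # piggy-back on the in-progress fetch
        fut: "asyncio.Future[str]" = asyncio.get_running_loop().create_future()
        _in_flight[event_id] = fut
        try:
            await asyncio.sleep(0)  # stand-in for the actual DB query
            result = f"event:{event_id}"
            fut.set_result(result)
            return result
        except Exception as e:
            fut.set_exception(e)
            raise
        finally:
            del _in_flight[event_id]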
diff --git a/synapse/storage/databases/main/lock.py b/synapse/storage/databases/main/lock.py
index 0794cc6d25..8277ad8c33 100644
--- a/synapse/storage/databases/main/lock.py
+++ b/synapse/storage/databases/main/lock.py
@@ -79,9 +79,9 @@ class LockStore(SQLBaseStore):
 
         # A map from `(lock_name, lock_key)` to a lock that we think we
         # currently hold.
-        self._live_lock_tokens: WeakValueDictionary[
-            Tuple[str, str], Lock
-        ] = WeakValueDictionary()
+        self._live_lock_tokens: WeakValueDictionary[Tuple[str, str], Lock] = (
+            WeakValueDictionary()
+        )
 
         # A map from `(lock_name, lock_key, token)` to a read/write lock that
         # we think we currently hold. For a given lock_name/lock_key, there can be
diff --git a/synapse/storage/databases/main/media_repository.py b/synapse/storage/databases/main/media_repository.py
index b5ed1bf9c8..6128332af8 100644
--- a/synapse/storage/databases/main/media_repository.py
+++ b/synapse/storage/databases/main/media_repository.py
@@ -158,9 +158,9 @@ class MediaRepositoryBackgroundUpdateStore(SQLBaseStore):
         )
 
         if hs.config.media.can_load_media_repo:
-            self.unused_expiration_time: Optional[
-                int
-            ] = hs.config.media.unused_expiration_time
+            self.unused_expiration_time: Optional[int] = (
+                hs.config.media.unused_expiration_time
+            )
         else:
             self.unused_expiration_time = None
 
diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py
index 8a426d2875..d513c42530 100644
--- a/synapse/storage/databases/main/receipts.py
+++ b/synapse/storage/databases/main/receipts.py
@@ -394,9 +394,9 @@ class ReceiptsWorkerStore(SQLBaseStore):
 
         content: JsonDict = {}
         for receipt_type, user_id, event_id, data in rows:
-            content.setdefault(event_id, {}).setdefault(receipt_type, {})[
-                user_id
-            ] = db_to_json(data)
+            content.setdefault(event_id, {}).setdefault(receipt_type, {})[user_id] = (
+                db_to_json(data)
+            )
 
         return [{"type": EduTypes.RECEIPT, "room_id": room_id, "content": content}]
 
@@ -483,9 +483,9 @@ class ReceiptsWorkerStore(SQLBaseStore):
             if user_id in receipt_type_dict:  # existing receipt
                 # is the existing receipt threaded and we are currently processing an unthreaded one?
                 if "thread_id" in receipt_type_dict[user_id] and not thread_id:
-                    receipt_type_dict[
-                        user_id
-                    ] = receipt_data  # replace with unthreaded one
+                    receipt_type_dict[user_id] = (
+                        receipt_data  # replace with unthreaded one
+                    )
             else:  # receipt does not exist, just set it
                 receipt_type_dict[user_id] = receipt_data
                 if thread_id:
diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py
index 3220d515d9..b2a67aff89 100644
--- a/synapse/storage/databases/main/state.py
+++ b/synapse/storage/databases/main/state.py
@@ -768,12 +768,10 @@ class StateMapWrapper(Dict[StateKey, str]):
         return super().__getitem__(key)
 
     @overload
-    def get(self, key: Tuple[str, str]) -> Optional[str]:
-        ...
+    def get(self, key: Tuple[str, str]) -> Optional[str]: ...
 
     @overload
-    def get(self, key: Tuple[str, str], default: Union[str, _T]) -> Union[str, _T]:
-        ...
+    def get(self, key: Tuple[str, str], default: Union[str, _T]) -> Union[str, _T]: ...
 
     def get(
         self, key: StateKey, default: Union[str, _T, None] = None
diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py
index 19041cc35b..7ab6003f61 100644
--- a/synapse/storage/databases/main/stream.py
+++ b/synapse/storage/databases/main/stream.py
@@ -988,8 +988,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
         txn: LoggingTransaction,
         event_id: str,
         allow_none: Literal[False] = False,
-    ) -> int:
-        ...
+    ) -> int: ...
 
     @overload
     def get_stream_id_for_event_txn(
@@ -997,8 +996,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
         txn: LoggingTransaction,
         event_id: str,
         allow_none: bool = False,
-    ) -> Optional[int]:
-        ...
+    ) -> Optional[int]: ...
 
     def get_stream_id_for_event_txn(
         self,
@@ -1476,12 +1474,12 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
             _EventDictReturn(event_id, topological_ordering, stream_ordering)
             for event_id, instance_name, topological_ordering, stream_ordering in txn
             if _filter_results(
-                lower_token=to_token
-                if direction == Direction.BACKWARDS
-                else from_token,
-                upper_token=from_token
-                if direction == Direction.BACKWARDS
-                else to_token,
+                lower_token=(
+                    to_token if direction == Direction.BACKWARDS else from_token
+                ),
+                upper_token=(
+                    from_token if direction == Direction.BACKWARDS else to_token
+                ),
                 instance_name=instance_name,
                 topological_ordering=topological_ordering,
                 stream_ordering=stream_ordering,
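
The wrapped conditionals above select the pagination bounds: when paginating backwards, the destination token becomes the lower bound and the starting token the upper one. The same selection in isolation (a hypothetical helper, with integers standing in for stream tokens):

    from enum import Enum
    from typing import Tuple

    class Direction(Enum):
        FORWARDS = "f"
        BACKWARDS = "b"

    def token_bounds(
        from_token: int, to_token: int, direction: Direction
    ) -> Tuple[int, int]:
        lower_token = to_token if direction == Direction.BACKWARDS else from_token
        upper_token = from_token if direction == Direction.BACKWARDS else to_token
        return lower_token, upper_token

    assert token_bounds(10, 5, Direction.BACKWARDS) == (5, 10)
    assert token_bounds(5, 10, Direction.FORWARDS) == (5, 10)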
diff --git a/synapse/storage/databases/main/task_scheduler.py b/synapse/storage/databases/main/task_scheduler.py
index 7b95616432..4956870b1a 100644
--- a/synapse/storage/databases/main/task_scheduler.py
+++ b/synapse/storage/databases/main/task_scheduler.py
@@ -136,12 +136,12 @@ class TaskSchedulerWorkerStore(SQLBaseStore):
                 "status": task.status,
                 "timestamp": task.timestamp,
                 "resource_id": task.resource_id,
-                "params": None
-                if task.params is None
-                else json_encoder.encode(task.params),
-                "result": None
-                if task.result is None
-                else json_encoder.encode(task.result),
+                "params": (
+                    None if task.params is None else json_encoder.encode(task.params)
+                ),
+                "result": (
+                    None if task.result is None else json_encoder.encode(task.result)
+                ),
                 "error": task.error,
             },
             desc="insert_scheduled_task",
diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py
index a1c4b8c6c3..0513e7dc06 100644
--- a/synapse/storage/databases/main/user_directory.py
+++ b/synapse/storage/databases/main/user_directory.py
@@ -745,9 +745,11 @@ class UserDirectoryBackgroundUpdateStore(StateDeltasStore):
                         p.user_id,
                         get_localpart_from_id(p.user_id),
                         get_domain_from_id(p.user_id),
-                        _filter_text_for_index(p.display_name)
-                        if p.display_name
-                        else None,
+                        (
+                            _filter_text_for_index(p.display_name)
+                            if p.display_name
+                            else None
+                        ),
                     )
                     for p in profiles
                 ],
diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py
index e64495ba8d..d4ac74c1ee 100644
--- a/synapse/storage/databases/state/store.py
+++ b/synapse/storage/databases/state/store.py
@@ -120,11 +120,11 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
             # TODO: this hasn't been tuned yet
             50000,
         )
-        self._state_group_members_cache: DictionaryCache[
-            int, StateKey, str
-        ] = DictionaryCache(
-            "*stateGroupMembersCache*",
-            500000,
+        self._state_group_members_cache: DictionaryCache[int, StateKey, str] = (
+            DictionaryCache(
+                "*stateGroupMembersCache*",
+                500000,
+            )
         )
 
         def get_max_state_group_txn(txn: Cursor) -> int:
diff --git a/synapse/storage/engines/_base.py b/synapse/storage/engines/_base.py
index 8c29236b59..ad222e7e2d 100644
--- a/synapse/storage/engines/_base.py
+++ b/synapse/storage/engines/_base.py
@@ -48,8 +48,7 @@ class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCM
 
     @property
     @abc.abstractmethod
-    def single_threaded(self) -> bool:
-        ...
+    def single_threaded(self) -> bool: ...
 
     @property
     @abc.abstractmethod
@@ -68,8 +67,7 @@ class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCM
     @abc.abstractmethod
     def check_database(
         self, db_conn: ConnectionType, allow_outdated_version: bool = False
-    ) -> None:
-        ...
+    ) -> None: ...
 
     @abc.abstractmethod
     def check_new_database(self, txn: CursorType) -> None:
@@ -79,27 +77,22 @@ class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCM
         ...
 
     @abc.abstractmethod
-    def convert_param_style(self, sql: str) -> str:
-        ...
+    def convert_param_style(self, sql: str) -> str: ...
 
     # This method would ideally take a plain ConnectionType, but it seems that
     # the Sqlite engine expects to use LoggingDatabaseConnection.cursor
     # instead of sqlite3.Connection.cursor: only the former takes a txn_name.
     @abc.abstractmethod
-    def on_new_connection(self, db_conn: "LoggingDatabaseConnection") -> None:
-        ...
+    def on_new_connection(self, db_conn: "LoggingDatabaseConnection") -> None: ...
 
     @abc.abstractmethod
-    def is_deadlock(self, error: Exception) -> bool:
-        ...
+    def is_deadlock(self, error: Exception) -> bool: ...
 
     @abc.abstractmethod
-    def is_connection_closed(self, conn: ConnectionType) -> bool:
-        ...
+    def is_connection_closed(self, conn: ConnectionType) -> bool: ...
 
     @abc.abstractmethod
-    def lock_table(self, txn: Cursor, table: str) -> None:
-        ...
+    def lock_table(self, txn: Cursor, table: str) -> None: ...
 
     @property
     @abc.abstractmethod
diff --git a/synapse/storage/types.py b/synapse/storage/types.py
index b4e0a8f576..74f60cc590 100644
--- a/synapse/storage/types.py
+++ b/synapse/storage/types.py
@@ -42,20 +42,17 @@ SQLQueryParameters = Union[Sequence[Any], Mapping[str, Any]]
 
 
 class Cursor(Protocol):
-    def execute(self, sql: str, parameters: SQLQueryParameters = ...) -> Any:
-        ...
+    def execute(self, sql: str, parameters: SQLQueryParameters = ...) -> Any: ...
 
-    def executemany(self, sql: str, parameters: Sequence[SQLQueryParameters]) -> Any:
-        ...
+    def executemany(
+        self, sql: str, parameters: Sequence[SQLQueryParameters]
+    ) -> Any: ...
 
-    def fetchone(self) -> Optional[Tuple]:
-        ...
+    def fetchone(self) -> Optional[Tuple]: ...
 
-    def fetchmany(self, size: Optional[int] = ...) -> List[Tuple]:
-        ...
+    def fetchmany(self, size: Optional[int] = ...) -> List[Tuple]: ...
 
-    def fetchall(self) -> List[Tuple]:
-        ...
+    def fetchall(self) -> List[Tuple]: ...
 
     @property
     def description(
@@ -70,36 +67,28 @@ class Cursor(Protocol):
     def rowcount(self) -> int:
         return 0
 
-    def __iter__(self) -> Iterator[Tuple]:
-        ...
+    def __iter__(self) -> Iterator[Tuple]: ...
 
-    def close(self) -> None:
-        ...
+    def close(self) -> None: ...
 
 
 class Connection(Protocol):
-    def cursor(self) -> Cursor:
-        ...
+    def cursor(self) -> Cursor: ...
 
-    def close(self) -> None:
-        ...
+    def close(self) -> None: ...
 
-    def commit(self) -> None:
-        ...
+    def commit(self) -> None: ...
 
-    def rollback(self) -> None:
-        ...
+    def rollback(self) -> None: ...
 
-    def __enter__(self) -> "Connection":
-        ...
+    def __enter__(self) -> "Connection": ...
 
     def __exit__(
         self,
         exc_type: Optional[Type[BaseException]],
         exc_value: Optional[BaseException],
         traceback: Optional[TracebackType],
-    ) -> Optional[bool]:
-        ...
+    ) -> Optional[bool]: ...
 
 
 class DBAPI2Module(Protocol):
@@ -129,24 +118,20 @@ class DBAPI2Module(Protocol):
     # explain why this is necessary for safety. TL;DR: we shouldn't be able to write
     # to `x`, only read from it. See also https://github.com/python/mypy/issues/6002 .
     @property
-    def Warning(self) -> Type[Exception]:
-        ...
+    def Warning(self) -> Type[Exception]: ...
 
     @property
-    def Error(self) -> Type[Exception]:
-        ...
+    def Error(self) -> Type[Exception]: ...
 
     # Errors are divided into `InterfaceError`s (something went wrong in the database
     # driver) and `DatabaseError`s (something went wrong in the database). These are
     # both subclasses of `Error`, but we can't currently express this in type
     # annotations due to https://github.com/python/mypy/issues/8397
     @property
-    def InterfaceError(self) -> Type[Exception]:
-        ...
+    def InterfaceError(self) -> Type[Exception]: ...
 
     @property
-    def DatabaseError(self) -> Type[Exception]:
-        ...
+    def DatabaseError(self) -> Type[Exception]: ...
 
     # Everything below is a subclass of `DatabaseError`.
 
@@ -155,8 +140,7 @@ class DBAPI2Module(Protocol):
     # - An invalid date time was provided.
     # - A string contained a null code point.
     @property
-    def DataError(self) -> Type[Exception]:
-        ...
+    def DataError(self) -> Type[Exception]: ...
 
     # Roughly: something went wrong in the database, but it's not within the application
     # programmer's control. Examples:
@@ -167,21 +151,18 @@ class DBAPI2Module(Protocol):
     # - The database ran out of resources, such as storage, memory, connections, etc.
     # - The database encountered an error from the operating system.
     @property
-    def OperationalError(self) -> Type[Exception]:
-        ...
+    def OperationalError(self) -> Type[Exception]: ...
 
     # Roughly: we've given the database data which breaks a rule we asked it to enforce.
     # Examples:
     # - Stop, criminal scum! You violated the foreign key constraint
     # - Also check constraints, non-null constraints, etc.
     @property
-    def IntegrityError(self) -> Type[Exception]:
-        ...
+    def IntegrityError(self) -> Type[Exception]: ...
 
     # Roughly: something went wrong within the database server itself.
     @property
-    def InternalError(self) -> Type[Exception]:
-        ...
+    def InternalError(self) -> Type[Exception]: ...
 
     # Roughly: the application did something silly that needs to be fixed. Examples:
     # - We don't have permissions to do something.
@@ -189,13 +170,11 @@ class DBAPI2Module(Protocol):
     # - We tried to use a reserved name.
     # - We referred to a column that doesn't exist.
     @property
-    def ProgrammingError(self) -> Type[Exception]:
-        ...
+    def ProgrammingError(self) -> Type[Exception]: ...
 
     # Roughly: we've tried to do something that this database doesn't support.
     @property
-    def NotSupportedError(self) -> Type[Exception]:
-        ...
+    def NotSupportedError(self) -> Type[Exception]: ...
 
     # We originally wrote
     # def connect(self, *args, **kwargs) -> Connection: ...
@@ -204,8 +183,7 @@ class DBAPI2Module(Protocol):
     # psycopg2.connect doesn't have a mandatory positional argument. Instead, we use
     # the following slightly unusual workaround.
     @property
-    def connect(self) -> Callable[..., Connection]:
-        ...
+    def connect(self) -> Callable[..., Connection]: ...
 
 
 __all__ = ["Cursor", "Connection", "DBAPI2Module"]
diff --git a/synapse/streams/events.py b/synapse/streams/events.py
index 7466488157..dd7401ac8e 100644
--- a/synapse/streams/events.py
+++ b/synapse/streams/events.py
@@ -57,12 +57,13 @@ class _EventSourcesInner:
 class EventSources:
     def __init__(self, hs: "HomeServer"):
         self.sources = _EventSourcesInner(
-            # mypy previously warned that attribute.type is `Optional`, but we know it's
+            # attribute.type is `Optional`, but we know it's
             # never `None` here since all the attributes of `_EventSourcesInner` are
             # annotated.
-            # As of the stubs in attrs 22.1.0, `attr.fields()` now returns Any,
-            # so the call to `attribute.type` is not checked.
-            *(attribute.type(hs) for attribute in attr.fields(_EventSourcesInner))
+            *(
+                attribute.type(hs)  # type: ignore[misc]
+                for attribute in attr.fields(_EventSourcesInner)
+            )
         )
         self.store = hs.get_datastores().main
         self._instance_name = hs.get_instance_name()
diff --git a/synapse/synapse_rust/events.pyi b/synapse/synapse_rust/events.pyi
index 423ede5969..69837617f5 100644
--- a/synapse/synapse_rust/events.pyi
+++ b/synapse/synapse_rust/events.pyi
@@ -56,7 +56,7 @@ class EventInternalMetadata:
 
         (Added in synapse 0.99.0, so may be unreliable for events received before that)
         """
-        ...
+
     def get_send_on_behalf_of(self) -> Optional[str]:
         """Whether this server should send the event on behalf of another server.
         This is used by the federation "send_join" API to forward the initial join
@@ -64,7 +64,7 @@ class EventInternalMetadata:
 
         returns a str with the name of the server this event is sent on behalf of.
         """
-        ...
+
     def need_to_check_redaction(self) -> bool:
         """Whether the redaction event needs to be rechecked when fetching
         from the database.
@@ -75,7 +75,7 @@ class EventInternalMetadata:
         If the sender of the redaction event is allowed to redact any event
         due to auth rules, then this will always return false.
         """
-        ...
+
     def is_soft_failed(self) -> bool:
         """Whether the event has been soft failed.
 
@@ -85,7 +85,7 @@ class EventInternalMetadata:
             2. They should not be added to the forward extremities (and
                therefore not to current state).
         """
-        ...
+
     def should_proactively_send(self) -> bool:
         """Whether the event, if ours, should be sent to other clients and
         servers.
@@ -93,14 +93,13 @@ class EventInternalMetadata:
         This is used for sending dummy events internally. Servers and clients
         can still explicitly fetch the event.
         """
-        ...
+
     def is_redacted(self) -> bool:
         """Whether the event has been redacted.
 
         This is used for efficiently checking whether an event has been
         marked as redacted without needing to make another database call.
         """
-        ...
+
     def is_notifiable(self) -> bool:
         """Whether this event can trigger a push notification"""
-        ...
diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py
index d3ee718375..a88982a04c 100644
--- a/synapse/types/__init__.py
+++ b/synapse/types/__init__.py
@@ -976,12 +976,12 @@ class StreamToken:
         return attr.evolve(self, **{key.value: new_value})
 
     @overload
-    def get_field(self, key: Literal[StreamKeyType.ROOM]) -> RoomStreamToken:
-        ...
+    def get_field(self, key: Literal[StreamKeyType.ROOM]) -> RoomStreamToken: ...
 
     @overload
-    def get_field(self, key: Literal[StreamKeyType.RECEIPT]) -> MultiWriterStreamToken:
-        ...
+    def get_field(
+        self, key: Literal[StreamKeyType.RECEIPT]
+    ) -> MultiWriterStreamToken: ...
 
     @overload
     def get_field(
@@ -995,14 +995,12 @@ class StreamToken:
             StreamKeyType.TYPING,
             StreamKeyType.UN_PARTIAL_STATED_ROOMS,
         ],
-    ) -> int:
-        ...
+    ) -> int: ...
 
     @overload
     def get_field(
         self, key: StreamKeyType
-    ) -> Union[int, RoomStreamToken, MultiWriterStreamToken]:
-        ...
+    ) -> Union[int, RoomStreamToken, MultiWriterStreamToken]: ...
 
     def get_field(
         self, key: StreamKeyType
diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py
index 914d4fd747..70139beef2 100644
--- a/synapse/util/async_helpers.py
+++ b/synapse/util/async_helpers.py
@@ -284,15 +284,7 @@ async def yieldable_gather_results(
     try:
         return await make_deferred_yieldable(
             defer.gatherResults(
-                # type-ignore: mypy reports two errors:
-                # error: Argument 1 to "run_in_background" has incompatible type
-                #     "Callable[[T, **P], Awaitable[R]]"; expected
-                #     "Callable[[T, **P], Awaitable[R]]"  [arg-type]
-                # error: Argument 2 to "run_in_background" has incompatible type
-                #     "T"; expected "[T, **P.args]"  [arg-type]
-                # The former looks like a mypy bug, and the latter looks like a
-                # false positive.
-                [run_in_background(func, item, *args, **kwargs) for item in iter],  # type: ignore[arg-type]
+                [run_in_background(func, item, *args, **kwargs) for item in iter],
                 consumeErrors=True,
             )
         )
@@ -338,7 +330,7 @@ async def yieldable_gather_results_delaying_cancellation(
         return await make_deferred_yieldable(
             delay_cancellation(
                 defer.gatherResults(
-                    [run_in_background(func, item, *args, **kwargs) for item in iter],  # type: ignore[arg-type]
+                    [run_in_background(func, item, *args, **kwargs) for item in iter],
                     consumeErrors=True,
                 )
             )
@@ -357,24 +349,21 @@ T4 = TypeVar("T4")
 @overload
 def gather_results(
     deferredList: Tuple[()], consumeErrors: bool = ...
-) -> "defer.Deferred[Tuple[()]]":
-    ...
+) -> "defer.Deferred[Tuple[()]]": ...
 
 
 @overload
 def gather_results(
     deferredList: Tuple["defer.Deferred[T1]"],
     consumeErrors: bool = ...,
-) -> "defer.Deferred[Tuple[T1]]":
-    ...
+) -> "defer.Deferred[Tuple[T1]]": ...
 
 
 @overload
 def gather_results(
     deferredList: Tuple["defer.Deferred[T1]", "defer.Deferred[T2]"],
     consumeErrors: bool = ...,
-) -> "defer.Deferred[Tuple[T1, T2]]":
-    ...
+) -> "defer.Deferred[Tuple[T1, T2]]": ...
 
 
 @overload
@@ -383,8 +372,7 @@ def gather_results(
         "defer.Deferred[T1]", "defer.Deferred[T2]", "defer.Deferred[T3]"
     ],
     consumeErrors: bool = ...,
-) -> "defer.Deferred[Tuple[T1, T2, T3]]":
-    ...
+) -> "defer.Deferred[Tuple[T1, T2, T3]]": ...
 
 
 @overload
@@ -396,8 +384,7 @@ def gather_results(
         "defer.Deferred[T4]",
     ],
     consumeErrors: bool = ...,
-) -> "defer.Deferred[Tuple[T1, T2, T3, T4]]":
-    ...
+) -> "defer.Deferred[Tuple[T1, T2, T3, T4]]": ...
 
 
 def gather_results(  # type: ignore[misc]
@@ -782,18 +769,15 @@ def stop_cancellation(deferred: "defer.Deferred[T]") -> "defer.Deferred[T]":
 
 
 @overload
-def delay_cancellation(awaitable: "defer.Deferred[T]") -> "defer.Deferred[T]":
-    ...
+def delay_cancellation(awaitable: "defer.Deferred[T]") -> "defer.Deferred[T]": ...
 
 
 @overload
-def delay_cancellation(awaitable: Coroutine[Any, Any, T]) -> "defer.Deferred[T]":
-    ...
+def delay_cancellation(awaitable: Coroutine[Any, Any, T]) -> "defer.Deferred[T]": ...
 
 
 @overload
-def delay_cancellation(awaitable: Awaitable[T]) -> Awaitable[T]:
-    ...
+def delay_cancellation(awaitable: Awaitable[T]) -> Awaitable[T]: ...
 
 
 def delay_cancellation(awaitable: Awaitable[T]) -> Awaitable[T]:
diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py
index 4245b7289c..1e6696332f 100644
--- a/synapse/util/caches/dictionary_cache.py
+++ b/synapse/util/caches/dictionary_cache.py
@@ -229,7 +229,7 @@ class DictionaryCache(Generic[KT, DKT, DV]):
         for dict_key in missing:
             # We explicitly add each dict key to the cache, so that cache hit
             # rates and LRU times for each key can be tracked separately.
-            value = entry.get(dict_key, _Sentinel.sentinel)  # type: ignore[arg-type]
+            value = entry.get(dict_key, _Sentinel.sentinel)
             self.cache[(key, dict_key)] = _PerKeyValue(value)
 
             if value is not _Sentinel.sentinel:
diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py
index a52ba59a34..8017c031ee 100644
--- a/synapse/util/caches/expiringcache.py
+++ b/synapse/util/caches/expiringcache.py
@@ -142,7 +142,7 @@ class ExpiringCache(Generic[KT, VT]):
             return default
 
         if self.iterable:
-            self.metrics.inc_evictions(EvictionReason.invalidation, len(value.value))  # type: ignore[arg-type]
+            self.metrics.inc_evictions(EvictionReason.invalidation, len(value.value))
         else:
             self.metrics.inc_evictions(EvictionReason.invalidation)
 
@@ -152,12 +152,10 @@ class ExpiringCache(Generic[KT, VT]):
         return key in self._cache
 
     @overload
-    def get(self, key: KT, default: Literal[None] = None) -> Optional[VT]:
-        ...
+    def get(self, key: KT, default: Literal[None] = None) -> Optional[VT]: ...
 
     @overload
-    def get(self, key: KT, default: T) -> Union[VT, T]:
-        ...
+    def get(self, key: KT, default: T) -> Union[VT, T]: ...
 
     def get(self, key: KT, default: Optional[T] = None) -> Union[VT, Optional[T]]:
         try:
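
The `get` overloads above follow a pattern used throughout this codebase: with no default the result is `Optional[VT]`, while passing a `default: T` yields `Union[VT, T]`, so callers that supply a sentinel never see `None` in the inferred type. A standalone sketch of the same shape (the class itself is hypothetical):

    from typing import Dict, Generic, Literal, Optional, TypeVar, Union, overload

    KT = TypeVar("KT")
    VT = TypeVar("VT")
    T = TypeVar("T")

    class SimpleCache(Generic[KT, VT]):
        def __init__(self) -> None:
            self._data: Dict[KT, VT] = {}

        @overload
        def get(self, key: KT, default: Literal[None] = None) -> Optional[VT]: ...

        @overload
        def get(self, key: KT, default: T) -> Union[VT, T]: ...

        def get(
            self, key: KT, default: Optional[T] = None
        ) -> Union[VT, Optional[T]]:
            return self._data.get(key, default)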
diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index a1b4f5b6a7..481a1a621e 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -580,8 +580,7 @@ class LruCache(Generic[KT, VT]):
             callbacks: Collection[Callable[[], None]] = ...,
             update_metrics: bool = ...,
             update_last_access: bool = ...,
-        ) -> Optional[VT]:
-            ...
+        ) -> Optional[VT]: ...
 
         @overload
         def cache_get(
@@ -590,8 +589,7 @@ class LruCache(Generic[KT, VT]):
             callbacks: Collection[Callable[[], None]] = ...,
             update_metrics: bool = ...,
             update_last_access: bool = ...,
-        ) -> Union[T, VT]:
-            ...
+        ) -> Union[T, VT]: ...
 
         @synchronized
         def cache_get(
@@ -634,16 +632,14 @@ class LruCache(Generic[KT, VT]):
             key: tuple,
             default: Literal[None] = None,
             update_metrics: bool = True,
-        ) -> Union[None, Iterable[Tuple[KT, VT]]]:
-            ...
+        ) -> Union[None, Iterable[Tuple[KT, VT]]]: ...
 
         @overload
         def cache_get_multi(
             key: tuple,
             default: T,
             update_metrics: bool = True,
-        ) -> Union[T, Iterable[Tuple[KT, VT]]]:
-            ...
+        ) -> Union[T, Iterable[Tuple[KT, VT]]]: ...
 
         @synchronized
         def cache_get_multi(
@@ -728,12 +724,10 @@ class LruCache(Generic[KT, VT]):
                 return value
 
         @overload
-        def cache_pop(key: KT, default: Literal[None] = None) -> Optional[VT]:
-            ...
+        def cache_pop(key: KT, default: Literal[None] = None) -> Optional[VT]: ...
 
         @overload
-        def cache_pop(key: KT, default: T) -> Union[T, VT]:
-            ...
+        def cache_pop(key: KT, default: T) -> Union[T, VT]: ...
 
         @synchronized
         def cache_pop(key: KT, default: Optional[T] = None) -> Union[None, T, VT]:
diff --git a/synapse/util/iterutils.py b/synapse/util/iterutils.py
index 082ad8cedb..b73f690b88 100644
--- a/synapse/util/iterutils.py
+++ b/synapse/util/iterutils.py
@@ -50,8 +50,7 @@ class _SelfSlice(Sized, Protocol):
     returned.
     """
 
-    def __getitem__(self: S, i: slice) -> S:
-        ...
+    def __getitem__(self: S, i: slice) -> S: ...
 
 
 def batch_iter(iterable: Iterable[T], size: int) -> Iterator[Tuple[T, ...]]:
diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py
index dc9bddb00d..8ead72bb7a 100644
--- a/synapse/util/ratelimitutils.py
+++ b/synapse/util/ratelimitutils.py
@@ -177,9 +177,9 @@ class FederationRateLimiter:
                 clock=clock, config=config, metrics_name=metrics_name
             )
 
-        self.ratelimiters: DefaultDict[
-            str, "_PerHostRatelimiter"
-        ] = collections.defaultdict(new_limiter)
+        self.ratelimiters: DefaultDict[str, "_PerHostRatelimiter"] = (
+            collections.defaultdict(new_limiter)
+        )
 
         with _rate_limiter_instances_lock:
             _rate_limiter_instances.add(self)
diff --git a/synapse/visibility.py b/synapse/visibility.py
index e58f649aaf..d1d478129f 100644
--- a/synapse/visibility.py
+++ b/synapse/visibility.py
@@ -129,9 +129,9 @@ async def filter_events_for_client(
         retention_policies: Dict[str, RetentionPolicy] = {}
 
         for room_id in room_ids:
-            retention_policies[
-                room_id
-            ] = await storage.main.get_retention_policy_for_room(room_id)
+            retention_policies[room_id] = (
+                await storage.main.get_retention_policy_for_room(room_id)
+            )
 
     def allowed(event: EventBase) -> Optional[EventBase]:
         return _check_client_allowed_to_see_event(
diff --git a/tests/handlers/test_worker_lock.py b/tests/handlers/test_worker_lock.py
index 3a4cf82094..6e9a15c8ee 100644
--- a/tests/handlers/test_worker_lock.py
+++ b/tests/handlers/test_worker_lock.py
@@ -27,6 +27,7 @@ from synapse.util import Clock
 
 from tests import unittest
 from tests.replication._base import BaseMultiWorkerStreamTestCase
+from tests.utils import test_timeout
 
 
 class WorkerLockTestCase(unittest.HomeserverTestCase):
@@ -50,6 +51,28 @@ class WorkerLockTestCase(unittest.HomeserverTestCase):
         self.get_success(d2)
         self.get_success(lock2.__aexit__(None, None, None))
 
+    def test_lock_contention(self) -> None:
+        """Test lock contention when a lot of locks wait on a single worker"""
+
+        # Takes around 0.5s on a 5+ year old laptop
+        with test_timeout(5):
+            nb_locks = 500
+            d = self._take_locks(nb_locks)
+            self.assertEqual(self.get_success(d), nb_locks)
+
+    async def _take_locks(self, nb_locks: int) -> int:
+        locks = [
+            self.hs.get_worker_locks_handler().acquire_lock("test_lock", "")
+            for _ in range(nb_locks)
+        ]
+
+        nb_locks_taken = 0
+        for lock in locks:
+            async with lock:
+                nb_locks_taken += 1
+
+        return nb_locks_taken
+
 
 class WorkerLockWorkersTestCase(BaseMultiWorkerStreamTestCase):
     def prepare(
diff --git a/tests/replication/_base.py b/tests/replication/_base.py
index d2220f8195..8437da1cdd 100644
--- a/tests/replication/_base.py
+++ b/tests/replication/_base.py
@@ -495,9 +495,9 @@ class FakeRedisPubSubServer:
     """A fake Redis server for pub/sub."""
 
     def __init__(self) -> None:
-        self._subscribers_by_channel: Dict[
-            bytes, Set["FakeRedisPubSubProtocol"]
-        ] = defaultdict(set)
+        self._subscribers_by_channel: Dict[bytes, Set["FakeRedisPubSubProtocol"]] = (
+            defaultdict(set)
+        )
 
     def add_subscriber(self, conn: "FakeRedisPubSubProtocol", channel: bytes) -> None:
         """A connection has called SUBSCRIBE"""
diff --git a/tests/rest/client/test_filter.py b/tests/rest/client/test_filter.py
index 0a894ad081..9cfc6b224f 100644
--- a/tests/rest/client/test_filter.py
+++ b/tests/rest/client/test_filter.py
@@ -72,7 +72,7 @@ class FilterTestCase(unittest.HomeserverTestCase):
 
     def test_add_filter_non_local_user(self) -> None:
         _is_mine = self.hs.is_mine
-        self.hs.is_mine = lambda target_user: False  # type: ignore[method-assign]
+        self.hs.is_mine = lambda target_user: False  # type: ignore[assignment]
         channel = self.make_request(
             "POST",
             "/_matrix/client/r0/user/%s/filter" % (self.user_id),
diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py
index b11a73e92b..d2f2ded487 100644
--- a/tests/rest/client/test_rooms.py
+++ b/tests/rest/client/test_rooms.py
@@ -1222,9 +1222,9 @@ class RoomJoinTestCase(RoomBase):
         """
 
         # Register a dummy callback. Make it allow all room joins for now.
-        return_value: Union[
-            Literal["NOT_SPAM"], Tuple[Codes, dict], Codes
-        ] = synapse.module_api.NOT_SPAM
+        return_value: Union[Literal["NOT_SPAM"], Tuple[Codes, dict], Codes] = (
+            synapse.module_api.NOT_SPAM
+        )
 
         async def user_may_join_room(
             userid: str,
@@ -1664,9 +1664,9 @@ class RoomMessagesTestCase(RoomBase):
         expected_fields: dict,
     ) -> None:
         class SpamCheck:
-            mock_return_value: Union[
-                str, bool, Codes, Tuple[Codes, JsonDict], bool
-            ] = "NOT_SPAM"
+            mock_return_value: Union[str, bool, Codes, Tuple[Codes, JsonDict]] = (
+                "NOT_SPAM"
+            )
             mock_content: Optional[JsonDict] = None
 
             async def check_event_for_spam(
diff --git a/tests/rest/client/utils.py b/tests/rest/client/utils.py
index 10cfe22d8e..daa68d78b9 100644
--- a/tests/rest/client/utils.py
+++ b/tests/rest/client/utils.py
@@ -87,8 +87,7 @@ class RestHelper:
         expect_code: Literal[200] = ...,
         extra_content: Optional[Dict] = ...,
         custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = ...,
-    ) -> str:
-        ...
+    ) -> str: ...
 
     @overload
     def create_room_as(
@@ -100,8 +99,7 @@ class RestHelper:
         expect_code: int = ...,
         extra_content: Optional[Dict] = ...,
         custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = ...,
-    ) -> Optional[str]:
-        ...
+    ) -> Optional[str]: ...
 
     def create_room_as(
         self,
diff --git a/tests/storage/test_cleanup_extrems.py b/tests/storage/test_cleanup_extrems.py
index 249c6b39f7..d5b9996284 100644
--- a/tests/storage/test_cleanup_extrems.py
+++ b/tests/storage/test_cleanup_extrems.py
@@ -337,15 +337,15 @@ class CleanupExtremDummyEventsTestCase(HomeserverTestCase):
         """Simple test to ensure that _expire_rooms_to_exclude_from_dummy_event_insertion()
         expires old entries correctly.
         """
-        self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion[
-            "1"
-        ] = 100000
-        self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion[
-            "2"
-        ] = 200000
-        self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion[
-            "3"
-        ] = 300000
+        self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion["1"] = (
+            100000
+        )
+        self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion["2"] = (
+            200000
+        )
+        self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion["3"] = (
+            300000
+        )
 
         self.event_creator_handler._expire_rooms_to_exclude_from_dummy_event_insertion()
         # All entries within time frame
diff --git a/tests/storage/test_room_search.py b/tests/storage/test_room_search.py
index 01c5324802..1eab89f140 100644
--- a/tests/storage/test_room_search.py
+++ b/tests/storage/test_room_search.py
@@ -328,9 +328,11 @@ class MessageSearchTest(HomeserverTestCase):
             self.assertEqual(
                 result["count"],
                 1 if expect_to_contain else 0,
-                f"expected '{query}' to match '{self.PHRASE}'"
-                if expect_to_contain
-                else f"'{query}' unexpectedly matched '{self.PHRASE}'",
+                (
+                    f"expected '{query}' to match '{self.PHRASE}'"
+                    if expect_to_contain
+                    else f"'{query}' unexpectedly matched '{self.PHRASE}'"
+                ),
             )
             self.assertEqual(
                 len(result["results"]),
@@ -346,9 +348,11 @@ class MessageSearchTest(HomeserverTestCase):
             self.assertEqual(
                 result["count"],
                 1 if expect_to_contain else 0,
-                f"expected '{query}' to match '{self.PHRASE}'"
-                if expect_to_contain
-                else f"'{query}' unexpectedly matched '{self.PHRASE}'",
+                (
+                    f"expected '{query}' to match '{self.PHRASE}'"
+                    if expect_to_contain
+                    else f"'{query}' unexpectedly matched '{self.PHRASE}'"
+                ),
             )
             self.assertEqual(
                 len(result["results"]),
diff --git a/tests/unittest.py b/tests/unittest.py
index 33c9a384ea..6fe0cd4a2d 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -109,8 +109,7 @@ class _TypedFailure(Generic[_ExcType], Protocol):
     """Extension to twisted.Failure, where the 'value' has a certain type."""
 
     @property
-    def value(self) -> _ExcType:
-        ...
+    def value(self) -> _ExcType: ...
 
 
 def around(target: TV) -> Callable[[Callable[Concatenate[S, P], R]], None]:
diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py
index d4268bc2e2..7cbb1007da 100644
--- a/tests/util/test_linearizer.py
+++ b/tests/util/test_linearizer.py
@@ -34,8 +34,7 @@ from tests import unittest
 
 
 class UnblockFunction(Protocol):
-    def __call__(self, pump_reactor: bool = True) -> None:
-        ...
+    def __call__(self, pump_reactor: bool = True) -> None: ...
 
 
 class LinearizerTestCase(unittest.TestCase):
diff --git a/tests/utils.py b/tests/utils.py
index b5dbd60a9c..9fd26ef348 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -21,7 +21,20 @@
 
 import atexit
 import os
-from typing import Any, Callable, Dict, List, Tuple, Type, TypeVar, Union, overload
+import signal
+from types import FrameType, TracebackType
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    List,
+    Optional,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    overload,
+)
 
 import attr
 from typing_extensions import Literal, ParamSpec
@@ -121,13 +134,11 @@ def setupdb() -> None:
 
 
 @overload
-def default_config(name: str, parse: Literal[False] = ...) -> Dict[str, object]:
-    ...
+def default_config(name: str, parse: Literal[False] = ...) -> Dict[str, object]: ...
 
 
 @overload
-def default_config(name: str, parse: Literal[True]) -> HomeServerConfig:
-    ...
+def default_config(name: str, parse: Literal[True]) -> HomeServerConfig: ...
 
 
 def default_config(
@@ -381,3 +392,30 @@ def checked_cast(type: Type[T], x: object) -> T:
     """
     assert isinstance(x, type)
     return x
+
+
+class TestTimeout(Exception):
+    pass
+
+
+class test_timeout:
+    def __init__(self, seconds: int, error_message: Optional[str] = None) -> None:
+        if error_message is None:
+            error_message = "test timed out after {}s.".format(seconds)
+        self.seconds = seconds
+        self.error_message = error_message
+
+    def handle_timeout(self, signum: int, frame: Optional[FrameType]) -> None:
+        raise TestTimeout(self.error_message)
+
+    def __enter__(self) -> None:
+        signal.signal(signal.SIGALRM, self.handle_timeout)
+        signal.alarm(self.seconds)
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
+        signal.alarm(0)
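
A usage note on `test_timeout`: it arms SIGALRM, so it only works on the main thread of Unix-like platforms, and `signal.alarm(0)` on exit cancels any pending alarm. A hedged sketch of how a test might use it:

    import time

    from tests.utils import TestTimeout, test_timeout

    try:
        with test_timeout(1):
            time.sleep(2)  # overruns the 1 second budget
    except TestTimeout as e:
        print(e)  # "test timed out after 1s."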