author     Brendan Abolivier <babolivier@matrix.org>  2022-03-02 17:52:39 +0000
committer  Brendan Abolivier <babolivier@matrix.org>  2022-03-02 17:52:39 +0000
commit     571d3bf002e9b54a03e940a86232f025d0f27b32 (patch)
tree       4254a149cd2bbe2ad2b1c4c7cffb1c357ef727da
parent     Changelog (diff)
parent     Remove unused mocks from `test_typing` (#12136) (diff)
download   synapse-571d3bf002e9b54a03e940a86232f025d0f27b32.tar.xz
Merge branch 'develop' of github.com:matrix-org/synapse into babolivier/sign_json_module
-rwxr-xr-x  .ci/scripts/test_export_data_command.sh  4
-rwxr-xr-x  .ci/scripts/test_synapse_port_db.sh  12
-rw-r--r--  .dockerignore  1
-rw-r--r--  CHANGES.md  99
-rw-r--r--  MANIFEST.in  1
-rw-r--r--  changelog.d/10870.misc  1
-rw-r--r--  changelog.d/11599.doc  1
-rw-r--r--  changelog.d/11608.misc  1
-rw-r--r--  changelog.d/11610.misc  1
-rw-r--r--  changelog.d/11617.feature  1
-rw-r--r--  changelog.d/11835.feature  1
-rw-r--r--  changelog.d/11865.removal  1
-rw-r--r--  changelog.d/11900.misc  1
-rw-r--r--  changelog.d/11972.misc  1
-rw-r--r--  changelog.d/11974.misc  1
-rw-r--r--  changelog.d/11984.misc  1
-rw-r--r--  changelog.d/11985.feature  1
-rw-r--r--  changelog.d/11991.misc  1
-rw-r--r--  changelog.d/11992.bugfix  1
-rw-r--r--  changelog.d/11994.misc  1
-rw-r--r--  changelog.d/11996.misc  1
-rw-r--r--  changelog.d/11997.docker  1
-rw-r--r--  changelog.d/11998.doc  1
-rw-r--r--  changelog.d/11999.bugfix  1
-rw-r--r--  changelog.d/12000.feature  1
-rw-r--r--  changelog.d/12001.feature  1
-rw-r--r--  changelog.d/12003.doc  1
-rw-r--r--  changelog.d/12004.doc  1
-rw-r--r--  changelog.d/12005.misc  1
-rw-r--r--  changelog.d/12008.removal  1
-rw-r--r--  changelog.d/12009.feature  1
-rw-r--r--  changelog.d/12011.misc  1
-rw-r--r--  changelog.d/12012.misc  1
-rw-r--r--  changelog.d/12013.misc  1
-rw-r--r--  changelog.d/12015.misc  1
-rw-r--r--  changelog.d/12016.misc  1
-rw-r--r--  changelog.d/12018.removal  1
-rw-r--r--  changelog.d/12019.misc  1
-rw-r--r--  changelog.d/12020.feature  1
-rw-r--r--  changelog.d/12021.feature  1
-rw-r--r--  changelog.d/12022.feature  1
-rw-r--r--  changelog.d/12024.bugfix  1
-rw-r--r--  changelog.d/12025.misc  1
-rw-r--r--  changelog.d/12030.misc  1
-rw-r--r--  changelog.d/12031.misc  1
-rw-r--r--  changelog.d/12033.misc  1
-rw-r--r--  changelog.d/12034.misc  1
-rw-r--r--  changelog.d/12037.bugfix  1
-rw-r--r--  changelog.d/12039.misc  1
-rw-r--r--  changelog.d/12041.misc  1
-rw-r--r--  changelog.d/12051.misc  1
-rw-r--r--  changelog.d/12052.misc  1
-rw-r--r--  changelog.d/12056.bugfix  1
-rw-r--r--  changelog.d/12058.feature  1
-rw-r--r--  changelog.d/12059.misc  1
-rw-r--r--  changelog.d/12060.misc  1
-rw-r--r--  changelog.d/12062.feature  1
-rw-r--r--  changelog.d/12063.misc  1
-rw-r--r--  changelog.d/12067.feature  1
-rw-r--r--  changelog.d/12068.misc  1
-rw-r--r--  changelog.d/12069.misc  1
-rw-r--r--  changelog.d/12070.misc  1
-rw-r--r--  changelog.d/12072.misc  1
-rw-r--r--  changelog.d/12073.removal  1
-rw-r--r--  changelog.d/12077.bugfix  1
-rw-r--r--  changelog.d/12084.misc  1
-rw-r--r--  changelog.d/12088.misc  1
-rw-r--r--  changelog.d/12089.bugfix  1
-rw-r--r--  changelog.d/12092.misc  1
-rw-r--r--  changelog.d/12094.misc  1
-rw-r--r--  changelog.d/12098.bugfix  1
-rw-r--r--  changelog.d/12099.misc  1
-rw-r--r--  changelog.d/12100.bugfix  1
-rw-r--r--  changelog.d/12105.bugfix  1
-rw-r--r--  changelog.d/12106.misc  1
-rw-r--r--  changelog.d/12108.misc (renamed from changelog.d/12066.misc)  0
-rw-r--r--  changelog.d/12109.misc  1
-rw-r--r--  changelog.d/12111.misc  1
-rw-r--r--  changelog.d/12112.docker  1
-rw-r--r--  changelog.d/12113.misc  1
-rw-r--r--  changelog.d/12118.misc  1
-rw-r--r--  changelog.d/12119.misc  1
-rw-r--r--  changelog.d/12128.misc  1
-rw-r--r--  changelog.d/12136.misc  1
-rw-r--r--  debian/changelog  6
-rw-r--r--  docker/Dockerfile  1
-rw-r--r--  docs/development/database_schema.md  6
-rw-r--r--  docs/other/running_synapse_on_single_board_computers.md  19
-rw-r--r--  docs/usage/administration/admin_api/README.md  2
-rw-r--r--  mypy.ini  10
-rwxr-xr-x  scripts-dev/generate_sample_config  10
-rwxr-xr-x  scripts-dev/lint.sh  7
-rwxr-xr-x  scripts-dev/make_full_schema.sh  6
-rwxr-xr-x  scripts/register_new_matrix_user  19
-rwxr-xr-x  scripts/synapse_review_recent_signups  19
-rwxr-xr-x  scripts/sync_room_to_group.pl  45
-rwxr-xr-x  setup.py  14
-rw-r--r--  snap/snapcraft.yaml  2
-rw-r--r--  synapse/__init__.py  2
-rwxr-xr-x  synapse/_scripts/export_signing_key.py (renamed from scripts/export_signing_key)  7
-rwxr-xr-x  synapse/_scripts/generate_config.py (renamed from scripts/generate_config)  7
-rwxr-xr-x  synapse/_scripts/generate_log_config.py (renamed from scripts/generate_log_config)  7
-rwxr-xr-x  synapse/_scripts/generate_signing_key.py (renamed from scripts/generate_signing_key.py)  7
-rwxr-xr-x  synapse/_scripts/hash_password.py (renamed from scripts/hash_password)  12
-rwxr-xr-x  synapse/_scripts/move_remote_media_to_new_store.py (renamed from scripts/move_remote_media_to_new_store.py)  2
-rwxr-xr-x  synapse/_scripts/synapse_port_db.py (renamed from scripts/synapse_port_db)  6
-rwxr-xr-x  synapse/_scripts/update_synapse_database.py (renamed from scripts/update_synapse_database)  0
-rw-r--r--  synapse/config/_base.py  2
-rw-r--r--  synapse/federation/federation_client.py  8
-rw-r--r--  synapse/handlers/room_summary.py  2
-rw-r--r--  synapse/storage/databases/main/state.py  27
-rw-r--r--  synapse/storage/databases/state/store.py  243
-rw-r--r--  tests/handlers/test_room_summary.py  5
-rw-r--r--  tests/rest/client/test_account.py  290
-rw-r--r--  tests/rest/client/test_filter.py  29
-rw-r--r--  tests/rest/client/test_relations.py  390
-rw-r--r--  tests/rest/client/test_report_event.py  25
-rw-r--r--  tests/rest/client/test_rooms.py  271
-rw-r--r--  tests/rest/client/test_third_party_rules.py  108
-rw-r--r--  tests/rest/client/test_typing.py  39
-rw-r--r--  tests/storage/databases/test_state_store.py  454
-rw-r--r--  tox.ini  8
122 files changed, 828 insertions, 1484 deletions
diff --git a/.ci/scripts/test_export_data_command.sh b/.ci/scripts/test_export_data_command.sh
index ab96387a0a..224cae9216 100755
--- a/.ci/scripts/test_export_data_command.sh
+++ b/.ci/scripts/test_export_data_command.sh
@@ -21,7 +21,7 @@ python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
 echo "--- Prepare test database"
 
 # Make sure the SQLite3 database is using the latest schema and has no pending background update.
-scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
 
 # Run the export-data command on the sqlite test database
 python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml  export-data @anon-20191002_181700-832:localhost:8800 \
@@ -41,7 +41,7 @@ fi
 
 # Port the SQLite database to postgres so we can check the command works against postgres
 echo "+++ Port SQLite3 database to postgres"
-scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
 
 # Run the export-data command on postgres database
 python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml  export-data @anon-20191002_181700-832:localhost:8800 \
diff --git a/.ci/scripts/test_synapse_port_db.sh b/.ci/scripts/test_synapse_port_db.sh
index 797904e64c..91bd966f32 100755
--- a/.ci/scripts/test_synapse_port_db.sh
+++ b/.ci/scripts/test_synapse_port_db.sh
@@ -25,17 +25,19 @@ python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
 echo "--- Prepare test database"
 
 # Make sure the SQLite3 database is using the latest schema and has no pending background update.
-scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
 
 # Create the PostgreSQL database.
 .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
 
 echo "+++ Run synapse_port_db against test database"
-coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
+# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
+synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
 
 # We should be able to run twice against the same database.
 echo "+++ Run synapse_port_db a second time"
-coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
 
 #####
 
@@ -46,7 +48,7 @@ echo "--- Prepare empty SQLite database"
 # we do this by deleting the sqlite db, and then doing the same again.
 rm .ci/test_db.db
 
-scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
 
 # re-create the PostgreSQL database.
 .ci/scripts/postgres_exec.py \
@@ -54,4 +56,4 @@ scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-b
   "CREATE DATABASE synapse"
 
 echo "+++ Run synapse_port_db against empty database"
-coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
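
Note on the TODO above: `coverage` cannot locate the console-script wrappers created by `pip install -e .`, so the CI script now calls `synapse_port_db` directly. Below is a hedged, hypothetical sketch (not part of this commit) of one way the ported script could still be measured, by starting coverage programmatically and calling the module-level `main()` that the entry point targets:

```python
# Hypothetical sketch only: run the ported synapse_port_db under coverage without
# going through the console-script wrapper. Assumes `coverage` is installed and
# synapse is available as an (editable) install.
import sys

import coverage
from synapse._scripts.synapse_port_db import main

cov = coverage.Coverage()
cov.start()
# main() reads its arguments from sys.argv, so mirror the CI invocation.
sys.argv = [
    "synapse_port_db",
    "--sqlite-database", ".ci/test_db.db",
    "--postgres-config", ".ci/postgres-config.yaml",
]
try:
    main()
finally:
    cov.stop()
    cov.save()
```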
diff --git a/.dockerignore b/.dockerignore
index f6c638b0a2..617f701597 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -3,7 +3,6 @@
 
 # things to include
 !docker
-!scripts
 !synapse
 !MANIFEST.in
 !README.rst
diff --git a/CHANGES.md b/CHANGES.md
index 81333097ae..0a87f5cd42 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,102 @@
+Synapse 1.54.0rc1 (2022-03-02)
+==============================
+
+Please note that this will be the last release of Synapse that is compatible with Mjolnir 1.3.1 and earlier.
+Administrators of servers which have the Mjolnir module installed are advised to upgrade Mjolnir to version 1.3.2 or later.
+
+
+Features
+--------
+
+- Add support for [MSC3202](https://github.com/matrix-org/matrix-doc/pull/3202): sending one-time key counts and fallback key usage states to Application Services. ([\#11617](https://github.com/matrix-org/synapse/issues/11617))
+- Improve the generated URL previews for some web pages. Contributed by @AndrewRyanChama. ([\#11985](https://github.com/matrix-org/synapse/issues/11985))
+- Track cache invalidations in Prometheus metrics, as already happens for cache eviction based on size or time. ([\#12000](https://github.com/matrix-org/synapse/issues/12000))
+- Implement experimental support for [MSC3720](https://github.com/matrix-org/matrix-doc/pull/3720) (account status endpoints). ([\#12001](https://github.com/matrix-org/synapse/issues/12001), [\#12067](https://github.com/matrix-org/synapse/issues/12067))
+- Enable modules to set a custom display name when registering a user. ([\#12009](https://github.com/matrix-org/synapse/issues/12009))
+- Advertise Matrix 1.1 and 1.2 support on `/_matrix/client/versions`. ([\#12020](https://github.com/matrix-org/synapse/issues/12020), [\#12022](https://github.com/matrix-org/synapse/issues/12022))
+- Support only the stable identifier for [MSC3069](https://github.com/matrix-org/matrix-doc/pull/3069)'s `is_guest` on `/_matrix/client/v3/account/whoami`. ([\#12021](https://github.com/matrix-org/synapse/issues/12021))
+- Use room version 9 as the default room version (per [MSC3589](https://github.com/matrix-org/matrix-doc/pull/3589)). ([\#12058](https://github.com/matrix-org/synapse/issues/12058))
+- Add module callbacks to react to user deactivation status changes (i.e. deactivations and reactivations) and profile updates. ([\#12062](https://github.com/matrix-org/synapse/issues/12062))
+
+
+Bugfixes
+--------
+
+- Fix a bug introduced in Synapse 1.48.0 where an edit of the latest event in a thread would not be properly applied to the thread summary. ([\#11992](https://github.com/matrix-org/synapse/issues/11992))
+- Fix long-standing bug where the `get_rooms_for_user` cache was not correctly invalidated for remote users when the server left a room. ([\#11999](https://github.com/matrix-org/synapse/issues/11999))
+- Fix a 500 error with Postgres when looking backwards with the [MSC3030](https://github.com/matrix-org/matrix-doc/pull/3030) `/timestamp_to_event?dir=b` endpoint. ([\#12024](https://github.com/matrix-org/synapse/issues/12024))
+- Properly fix a long-standing bug where wrong data could be inserted into the `event_search` table when using SQLite. This could block running `synapse_port_db` with an `argument of type 'int' is not iterable` error. This bug was partially fixed by a change in Synapse 1.44.0. ([\#12037](https://github.com/matrix-org/synapse/issues/12037))
+- Fix slow performance of `/logout` in some cases where refresh tokens are in use. The slowness existed since the initial implementation of refresh tokens in version 1.38.0. ([\#12056](https://github.com/matrix-org/synapse/issues/12056))
+- Fix a long-standing bug where Synapse would make additional failing requests over federation for missing data. ([\#12077](https://github.com/matrix-org/synapse/issues/12077))
+- Fix occasional `Unhandled error in Deferred` error message. ([\#12089](https://github.com/matrix-org/synapse/issues/12089))
+- Fix a bug introduced in Synapse 1.51.0 where incoming federation transactions containing at least one EDU would be dropped if debug logging was enabled for `synapse.8631_debug`. ([\#12098](https://github.com/matrix-org/synapse/issues/12098))
+- Fix a long-standing bug which could cause push notifications to malfunction if `use_frozen_dicts` was set in the configuration. ([\#12100](https://github.com/matrix-org/synapse/issues/12100))
+- Fix an extremely rare, long-standing bug in `ReadWriteLock` that would cause an error when a newly unblocked writer completes instantly. ([\#12105](https://github.com/matrix-org/synapse/issues/12105))
+- Make a `POST` to `/rooms/<room_id>/receipt/m.read/<event_id>` only trigger a push notification if the count of unread messages is different to the one in the last successfully sent push. This reduces server load and load on the receiving device. ([\#11835](https://github.com/matrix-org/synapse/issues/11835))
+
+
+Updates to the Docker image
+---------------------------
+
+- The Docker image no longer automatically creates a temporary volume at `/data`. This is not expected to affect normal usage. ([\#11997](https://github.com/matrix-org/synapse/issues/11997))
+- Use Python 3.9 in Docker images by default. ([\#12112](https://github.com/matrix-org/synapse/issues/12112))
+
+
+Improved Documentation
+----------------------
+
+- Document support for the `to_device`, `account_data`, `receipts`, and `presence` stream writers for workers. ([\#11599](https://github.com/matrix-org/synapse/issues/11599))
+- Explain the meaning of spam checker callbacks' return values. ([\#12003](https://github.com/matrix-org/synapse/issues/12003))
+- Clarify information about external Identity Provider IDs. ([\#12004](https://github.com/matrix-org/synapse/issues/12004))
+
+
+Deprecations and Removals
+-------------------------
+
+- Deprecate using `synctl` with the config option `synctl_cache_factor` and print a warning if a user still uses this option. ([\#11865](https://github.com/matrix-org/synapse/issues/11865))
+- Remove support for the legacy structured logging configuration (please see the [upgrade notes](https://matrix-org.github.io/synapse/develop/upgrade#legacy-structured-logging-configuration-removal) if you are using `structured: true` in the Synapse configuration). ([\#12008](https://github.com/matrix-org/synapse/issues/12008))
+- Drop support for [MSC3283](https://github.com/matrix-org/matrix-doc/pull/3283) unstable flags now that the stable flags are supported. ([\#12018](https://github.com/matrix-org/synapse/issues/12018))
+- Remove the unstable `/spaces` endpoint from [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946). ([\#12073](https://github.com/matrix-org/synapse/issues/12073))
+
+
+Internal Changes
+----------------
+
+- Make the `get_room_version` method use `get_room_version_id` to benefit from caching. ([\#11808](https://github.com/matrix-org/synapse/issues/11808))
+- Remove unnecessary condition on knock -> leave auth rule check. ([\#11900](https://github.com/matrix-org/synapse/issues/11900))
+- Add tests for device list changes between local users. ([\#11972](https://github.com/matrix-org/synapse/issues/11972))
+- Optimise calculating `device_list` changes in `/sync`. ([\#11974](https://github.com/matrix-org/synapse/issues/11974))
+- Add missing type hints to storage classes. ([\#11984](https://github.com/matrix-org/synapse/issues/11984))
+- Refactor the search code for improved readability. ([\#11991](https://github.com/matrix-org/synapse/issues/11991))
+- Move common deduplication code down into `_auth_and_persist_outliers`. ([\#11994](https://github.com/matrix-org/synapse/issues/11994))
+- Limit concurrent joins from application services. ([\#11996](https://github.com/matrix-org/synapse/issues/11996))
+- Preparation for faster-room-join work: when parsing the `send_join` response, get the `m.room.create` event from `state`, not `auth_chain`. ([\#12005](https://github.com/matrix-org/synapse/issues/12005), [\#12039](https://github.com/matrix-org/synapse/issues/12039))
+- Preparation for faster-room-join work: parse MSC3706 fields in send_join response. ([\#12011](https://github.com/matrix-org/synapse/issues/12011))
+- Preparation for faster-room-join work: persist information on which events and rooms have partial state to the database. ([\#12012](https://github.com/matrix-org/synapse/issues/12012))
+- Preparation for faster-room-join work: Support for calling `/federation/v1/state` on a remote server. ([\#12013](https://github.com/matrix-org/synapse/issues/12013))
+- Configure `tox` to use `venv` rather than `virtualenv`. ([\#12015](https://github.com/matrix-org/synapse/issues/12015))
+- Fix bug in `StateFilter.return_expanded()` and add some tests. ([\#12016](https://github.com/matrix-org/synapse/issues/12016))
+- Use Matrix v1.1 endpoints (`/_matrix/client/v3/auth/...`) in fallback auth HTML forms. ([\#12019](https://github.com/matrix-org/synapse/issues/12019))
+- Update the `olddeps` CI job to use an old version of `markupsafe`. ([\#12025](https://github.com/matrix-org/synapse/issues/12025))
+- Upgrade Mypy to version 0.931. ([\#12030](https://github.com/matrix-org/synapse/issues/12030))
+- Remove legacy `HomeServer.get_datastore()`. ([\#12031](https://github.com/matrix-org/synapse/issues/12031), [\#12070](https://github.com/matrix-org/synapse/issues/12070))
+- Minor typing fixes. ([\#12034](https://github.com/matrix-org/synapse/issues/12034), [\#12069](https://github.com/matrix-org/synapse/issues/12069))
+- After joining a room, create a dedicated logcontext to process the queued events. ([\#12041](https://github.com/matrix-org/synapse/issues/12041))
+- Tidy up GitHub Actions config which builds distributions for PyPI. ([\#12051](https://github.com/matrix-org/synapse/issues/12051))
+- Move configuration out of `setup.cfg`. ([\#12052](https://github.com/matrix-org/synapse/issues/12052), [\#12059](https://github.com/matrix-org/synapse/issues/12059))
+- Fix error message when a worker process fails to talk to another worker process. ([\#12060](https://github.com/matrix-org/synapse/issues/12060))
+- Fix using the `complement.sh` script without specifying a directory or a branch. Contributed by Nico on behalf of Famedly. ([\#12063](https://github.com/matrix-org/synapse/issues/12063))
+- Add type hints to `tests/rest/client`. ([\#12066](https://github.com/matrix-org/synapse/issues/12066), [\#12072](https://github.com/matrix-org/synapse/issues/12072), [\#12084](https://github.com/matrix-org/synapse/issues/12084), [\#12094](https://github.com/matrix-org/synapse/issues/12094))
+- Add some logging to `/sync` to try and track down #11916. ([\#12068](https://github.com/matrix-org/synapse/issues/12068))
+- Inspect application dependencies using `importlib.metadata` or its backport. ([\#12088](https://github.com/matrix-org/synapse/issues/12088))
+- Use `assertEqual` instead of the deprecated `assertEquals` in test code. ([\#12092](https://github.com/matrix-org/synapse/issues/12092))
+- Move experimental support for [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440) to `/versions`. ([\#12099](https://github.com/matrix-org/synapse/issues/12099))
+- Add `stop_cancellation` utility function to stop `Deferred`s from being cancelled. ([\#12106](https://github.com/matrix-org/synapse/issues/12106))
+- Improve exception handling for concurrent execution. ([\#12109](https://github.com/matrix-org/synapse/issues/12109))
+- Advertise support for Python 3.10 in packaging files. ([\#12111](https://github.com/matrix-org/synapse/issues/12111))
+- Move CI checks out of tox, to facilitate a move to using poetry. ([\#12119](https://github.com/matrix-org/synapse/issues/12119))
+
+
 Synapse 1.53.0 (2022-02-22)
 ===========================
 
diff --git a/MANIFEST.in b/MANIFEST.in
index 76d14eb642..7e903518e1 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -17,7 +17,6 @@ recursive-include synapse/storage *.txt
 recursive-include synapse/storage *.md
 
 recursive-include docs *
-recursive-include scripts *
 recursive-include scripts-dev *
 recursive-include synapse *.pyi
 recursive-include tests *.py
diff --git a/changelog.d/10870.misc b/changelog.d/10870.misc
deleted file mode 100644
index 3af049b969..0000000000
--- a/changelog.d/10870.misc
+++ /dev/null
@@ -1 +0,0 @@
-Deduplicate in-flight requests in `_get_state_for_groups`.
diff --git a/changelog.d/11599.doc b/changelog.d/11599.doc
deleted file mode 100644
index f07cfbef4e..0000000000
--- a/changelog.d/11599.doc
+++ /dev/null
@@ -1 +0,0 @@
-Document support for the `to_device`, `account_data`, `receipts`, and `presence` stream writers for workers.
diff --git a/changelog.d/11608.misc b/changelog.d/11608.misc
deleted file mode 100644
index 3af049b969..0000000000
--- a/changelog.d/11608.misc
+++ /dev/null
@@ -1 +0,0 @@
-Deduplicate in-flight requests in `_get_state_for_groups`.
diff --git a/changelog.d/11610.misc b/changelog.d/11610.misc
deleted file mode 100644
index 3af049b969..0000000000
--- a/changelog.d/11610.misc
+++ /dev/null
@@ -1 +0,0 @@
-Deduplicate in-flight requests in `_get_state_for_groups`.
diff --git a/changelog.d/11617.feature b/changelog.d/11617.feature
deleted file mode 100644
index cf03f00e7c..0000000000
--- a/changelog.d/11617.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add support for MSC3202: sending one-time key counts and fallback key usage states to Application Services.
\ No newline at end of file
diff --git a/changelog.d/11835.feature b/changelog.d/11835.feature
deleted file mode 100644
index 7cee39b08c..0000000000
--- a/changelog.d/11835.feature
+++ /dev/null
@@ -1 +0,0 @@
-Make a `POST` to `/rooms/<room_id>/receipt/m.read/<event_id>` only trigger a push notification if the count of unread messages is different to the one in the last successfully sent push.
diff --git a/changelog.d/11865.removal b/changelog.d/11865.removal
deleted file mode 100644
index 9fcabfc720..0000000000
--- a/changelog.d/11865.removal
+++ /dev/null
@@ -1 +0,0 @@
-Deprecate using `synctl` with the config option `synctl_cache_factor` and print a warning if a user still uses this option.
diff --git a/changelog.d/11900.misc b/changelog.d/11900.misc
deleted file mode 100644
index edd2852fd4..0000000000
--- a/changelog.d/11900.misc
+++ /dev/null
@@ -1 +0,0 @@
-Remove unnecessary condition on knock->leave auth rule check.
\ No newline at end of file
diff --git a/changelog.d/11972.misc b/changelog.d/11972.misc
deleted file mode 100644
index 29c38bfd82..0000000000
--- a/changelog.d/11972.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add tests for device list changes between local users.
\ No newline at end of file
diff --git a/changelog.d/11974.misc b/changelog.d/11974.misc
deleted file mode 100644
index 1debad2361..0000000000
--- a/changelog.d/11974.misc
+++ /dev/null
@@ -1 +0,0 @@
-Optimise calculating device_list changes in `/sync`.
diff --git a/changelog.d/11984.misc b/changelog.d/11984.misc
deleted file mode 100644
index 8e405b9226..0000000000
--- a/changelog.d/11984.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add missing type hints to storage classes.
\ No newline at end of file
diff --git a/changelog.d/11985.feature b/changelog.d/11985.feature
deleted file mode 100644
index 120d888a49..0000000000
--- a/changelog.d/11985.feature
+++ /dev/null
@@ -1 +0,0 @@
-Fetch images when previewing Twitter URLs. Contributed by @AndrewRyanChama.
diff --git a/changelog.d/11991.misc b/changelog.d/11991.misc
deleted file mode 100644
index 34a3b3a6b9..0000000000
--- a/changelog.d/11991.misc
+++ /dev/null
@@ -1 +0,0 @@
-Refactor the search code for improved readability.
diff --git a/changelog.d/11992.bugfix b/changelog.d/11992.bugfix
deleted file mode 100644
index f73c86bb25..0000000000
--- a/changelog.d/11992.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a bug introduced in Synapse v1.48.0 where an edit of the latest event in a thread would not be properly applied to the thread summary.
diff --git a/changelog.d/11994.misc b/changelog.d/11994.misc
deleted file mode 100644
index d64297dd78..0000000000
--- a/changelog.d/11994.misc
+++ /dev/null
@@ -1 +0,0 @@
-Move common deduplication code down into `_auth_and_persist_outliers`.
diff --git a/changelog.d/11996.misc b/changelog.d/11996.misc
deleted file mode 100644
index 6c675fd193..0000000000
--- a/changelog.d/11996.misc
+++ /dev/null
@@ -1 +0,0 @@
-Limit concurrent joins from applications services.
\ No newline at end of file
diff --git a/changelog.d/11997.docker b/changelog.d/11997.docker
deleted file mode 100644
index 1b3271457e..0000000000
--- a/changelog.d/11997.docker
+++ /dev/null
@@ -1 +0,0 @@
-The docker image no longer automatically creates a temporary volume at `/data`. This is not expected to affect normal usage.
diff --git a/changelog.d/11998.doc b/changelog.d/11998.doc
new file mode 100644
index 0000000000..33ab7b7880
--- /dev/null
+++ b/changelog.d/11998.doc
@@ -0,0 +1 @@
+Fix complexity checking config example in [Resource Constrained Devices](https://matrix-org.github.io/synapse/v1.54/other/running_synapse_on_single_board_computers.html) docs page.
\ No newline at end of file
diff --git a/changelog.d/11999.bugfix b/changelog.d/11999.bugfix
deleted file mode 100644
index fd84095900..0000000000
--- a/changelog.d/11999.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix long standing bug where `get_rooms_for_user` was not correctly invalidated for remote users when the server left a room.
diff --git a/changelog.d/12000.feature b/changelog.d/12000.feature
deleted file mode 100644
index 246cc87f0b..0000000000
--- a/changelog.d/12000.feature
+++ /dev/null
@@ -1 +0,0 @@
-Track cache invalidations in Prometheus metrics, as already happens for cache eviction based on size or time.
diff --git a/changelog.d/12001.feature b/changelog.d/12001.feature
deleted file mode 100644
index dc1153c49e..0000000000
--- a/changelog.d/12001.feature
+++ /dev/null
@@ -1 +0,0 @@
-Implement experimental support for [MSC3720](https://github.com/matrix-org/matrix-doc/pull/3720) (account status endpoints).
diff --git a/changelog.d/12003.doc b/changelog.d/12003.doc
deleted file mode 100644
index 1ac8163559..0000000000
--- a/changelog.d/12003.doc
+++ /dev/null
@@ -1 +0,0 @@
-Explain the meaning of spam checker callbacks' return values.
diff --git a/changelog.d/12004.doc b/changelog.d/12004.doc
deleted file mode 100644
index 0b4baef210..0000000000
--- a/changelog.d/12004.doc
+++ /dev/null
@@ -1 +0,0 @@
-Clarify information about external Identity Provider IDs.
diff --git a/changelog.d/12005.misc b/changelog.d/12005.misc
deleted file mode 100644
index 45e21dbe59..0000000000
--- a/changelog.d/12005.misc
+++ /dev/null
@@ -1 +0,0 @@
-Preparation for faster-room-join work: when parsing the `send_join` response, get the `m.room.create` event from `state`, not `auth_chain`.
diff --git a/changelog.d/12008.removal b/changelog.d/12008.removal
deleted file mode 100644
index 57599d9ee9..0000000000
--- a/changelog.d/12008.removal
+++ /dev/null
@@ -1 +0,0 @@
-Remove support for the legacy structured logging configuration (please see the the [upgrade notes](https://matrix-org.github.io/synapse/develop/upgrade#legacy-structured-logging-configuration-removal) if you are using `structured: true` in the Synapse configuration).
diff --git a/changelog.d/12009.feature b/changelog.d/12009.feature
deleted file mode 100644
index c8a531481e..0000000000
--- a/changelog.d/12009.feature
+++ /dev/null
@@ -1 +0,0 @@
-Enable modules to set a custom display name when registering a user.
diff --git a/changelog.d/12011.misc b/changelog.d/12011.misc
deleted file mode 100644
index 258b0e389f..0000000000
--- a/changelog.d/12011.misc
+++ /dev/null
@@ -1 +0,0 @@
-Preparation for faster-room-join work: parse msc3706 fields in send_join response.
diff --git a/changelog.d/12012.misc b/changelog.d/12012.misc
deleted file mode 100644
index a473f41e78..0000000000
--- a/changelog.d/12012.misc
+++ /dev/null
@@ -1 +0,0 @@
-Preparation for faster-room-join work: persist information on which events and rooms have partial state to the database.
diff --git a/changelog.d/12013.misc b/changelog.d/12013.misc
deleted file mode 100644
index c0fca8dccb..0000000000
--- a/changelog.d/12013.misc
+++ /dev/null
@@ -1 +0,0 @@
-Preparation for faster-room-join work: Support for calling `/federation/v1/state` on a remote server.
diff --git a/changelog.d/12015.misc b/changelog.d/12015.misc
deleted file mode 100644
index 3aa32ab4cf..0000000000
--- a/changelog.d/12015.misc
+++ /dev/null
@@ -1 +0,0 @@
-Configure `tox` to use `venv` rather than `virtualenv`.
diff --git a/changelog.d/12016.misc b/changelog.d/12016.misc
deleted file mode 100644
index 8856ef46a9..0000000000
--- a/changelog.d/12016.misc
+++ /dev/null
@@ -1 +0,0 @@
-Fix bug in `StateFilter.return_expanded()` and add some tests.
\ No newline at end of file
diff --git a/changelog.d/12018.removal b/changelog.d/12018.removal
deleted file mode 100644
index e940b62228..0000000000
--- a/changelog.d/12018.removal
+++ /dev/null
@@ -1 +0,0 @@
-Drop support for [MSC3283](https://github.com/matrix-org/matrix-doc/pull/3283) unstable flags now that the stable flags are supported.
diff --git a/changelog.d/12019.misc b/changelog.d/12019.misc
deleted file mode 100644
index b2186320ea..0000000000
--- a/changelog.d/12019.misc
+++ /dev/null
@@ -1 +0,0 @@
-Use Matrix v1.1 endpoints (`/_matrix/client/v3/auth/...`) in fallback auth HTML forms.
\ No newline at end of file
diff --git a/changelog.d/12020.feature b/changelog.d/12020.feature
deleted file mode 100644
index 1ac9d2060e..0000000000
--- a/changelog.d/12020.feature
+++ /dev/null
@@ -1 +0,0 @@
-Advertise Matrix 1.1 support on `/_matrix/client/versions`.
\ No newline at end of file
diff --git a/changelog.d/12021.feature b/changelog.d/12021.feature
deleted file mode 100644
index 01378df8ca..0000000000
--- a/changelog.d/12021.feature
+++ /dev/null
@@ -1 +0,0 @@
-Support only the stable identifier for [MSC3069](https://github.com/matrix-org/matrix-doc/pull/3069)'s `is_guest` on `/_matrix/client/v3/account/whoami`.
\ No newline at end of file
diff --git a/changelog.d/12022.feature b/changelog.d/12022.feature
deleted file mode 100644
index 188fb12570..0000000000
--- a/changelog.d/12022.feature
+++ /dev/null
@@ -1 +0,0 @@
-Advertise Matrix 1.2 support on `/_matrix/client/versions`.
\ No newline at end of file
diff --git a/changelog.d/12024.bugfix b/changelog.d/12024.bugfix
deleted file mode 100644
index 59bcdb93a5..0000000000
--- a/changelog.d/12024.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix 500 error with Postgres when looking backwards with the [MSC3030](https://github.com/matrix-org/matrix-doc/pull/3030) `/timestamp_to_event?dir=b` endpoint.
diff --git a/changelog.d/12025.misc b/changelog.d/12025.misc
deleted file mode 100644
index d9475a7718..0000000000
--- a/changelog.d/12025.misc
+++ /dev/null
@@ -1 +0,0 @@
-Update the `olddeps` CI job to use an old version of `markupsafe`.
diff --git a/changelog.d/12030.misc b/changelog.d/12030.misc
deleted file mode 100644
index 607ee97ce6..0000000000
--- a/changelog.d/12030.misc
+++ /dev/null
@@ -1 +0,0 @@
-Upgrade mypy to version 0.931.
diff --git a/changelog.d/12031.misc b/changelog.d/12031.misc
deleted file mode 100644
index d4bedc6b97..0000000000
--- a/changelog.d/12031.misc
+++ /dev/null
@@ -1 +0,0 @@
-Remove legacy `HomeServer.get_datastore()`.
diff --git a/changelog.d/12033.misc b/changelog.d/12033.misc
deleted file mode 100644
index 3af049b969..0000000000
--- a/changelog.d/12033.misc
+++ /dev/null
@@ -1 +0,0 @@
-Deduplicate in-flight requests in `_get_state_for_groups`.
diff --git a/changelog.d/12034.misc b/changelog.d/12034.misc
deleted file mode 100644
index 8374a63220..0000000000
--- a/changelog.d/12034.misc
+++ /dev/null
@@ -1 +0,0 @@
-Minor typing fixes.
diff --git a/changelog.d/12037.bugfix b/changelog.d/12037.bugfix
deleted file mode 100644
index 9295cb4dc0..0000000000
--- a/changelog.d/12037.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Properly fix a long-standing bug where wrong data could be inserted in the `event_search` table when using sqlite. This could block running `synapse_port_db` with an "argument of type 'int' is not iterable" error. This bug was partially fixed by a change in Synapse 1.44.0.
diff --git a/changelog.d/12039.misc b/changelog.d/12039.misc
deleted file mode 100644
index 45e21dbe59..0000000000
--- a/changelog.d/12039.misc
+++ /dev/null
@@ -1 +0,0 @@
-Preparation for faster-room-join work: when parsing the `send_join` response, get the `m.room.create` event from `state`, not `auth_chain`.
diff --git a/changelog.d/12041.misc b/changelog.d/12041.misc
deleted file mode 100644
index e56dc093de..0000000000
--- a/changelog.d/12041.misc
+++ /dev/null
@@ -1 +0,0 @@
-After joining a room, create a dedicated logcontext to process the queued events.
diff --git a/changelog.d/12051.misc b/changelog.d/12051.misc
deleted file mode 100644
index 9959191352..0000000000
--- a/changelog.d/12051.misc
+++ /dev/null
@@ -1 +0,0 @@
-Tidy up GitHub Actions config which builds distributions for PyPI.
\ No newline at end of file
diff --git a/changelog.d/12052.misc b/changelog.d/12052.misc
deleted file mode 100644
index 11755ae61b..0000000000
--- a/changelog.d/12052.misc
+++ /dev/null
@@ -1 +0,0 @@
-Move configuration out of `setup.cfg`.
diff --git a/changelog.d/12056.bugfix b/changelog.d/12056.bugfix
deleted file mode 100644
index 210e30c63f..0000000000
--- a/changelog.d/12056.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix slow performance of `/logout` in some cases where refresh tokens are in use. The slowness existed since the initial implementation of refresh tokens.
\ No newline at end of file
diff --git a/changelog.d/12058.feature b/changelog.d/12058.feature
deleted file mode 100644
index 7b71692229..0000000000
--- a/changelog.d/12058.feature
+++ /dev/null
@@ -1 +0,0 @@
-Use room version 9 as the default room version (per [MSC3589](https://github.com/matrix-org/matrix-doc/pull/3589)).
diff --git a/changelog.d/12059.misc b/changelog.d/12059.misc
deleted file mode 100644
index 9ba4759d99..0000000000
--- a/changelog.d/12059.misc
+++ /dev/null
@@ -1 +0,0 @@
-Move configuration out of `setup.cfg`.
\ No newline at end of file
diff --git a/changelog.d/12060.misc b/changelog.d/12060.misc
deleted file mode 100644
index d771e6a1b3..0000000000
--- a/changelog.d/12060.misc
+++ /dev/null
@@ -1 +0,0 @@
-Fix error message when a worker process fails to talk to another worker process.
diff --git a/changelog.d/12062.feature b/changelog.d/12062.feature
deleted file mode 100644
index 46a606709d..0000000000
--- a/changelog.d/12062.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add module callbacks to react to user deactivation status changes (i.e. deactivations and reactivations) and profile updates.
diff --git a/changelog.d/12063.misc b/changelog.d/12063.misc
deleted file mode 100644
index e48c5dd08b..0000000000
--- a/changelog.d/12063.misc
+++ /dev/null
@@ -1 +0,0 @@
-Fix using the complement.sh script without specifying a dir or a branch. Contributed by Nico on behalf of Famedly.
diff --git a/changelog.d/12067.feature b/changelog.d/12067.feature
deleted file mode 100644
index dc1153c49e..0000000000
--- a/changelog.d/12067.feature
+++ /dev/null
@@ -1 +0,0 @@
-Implement experimental support for [MSC3720](https://github.com/matrix-org/matrix-doc/pull/3720) (account status endpoints).
diff --git a/changelog.d/12068.misc b/changelog.d/12068.misc
deleted file mode 100644
index 72b211e4f5..0000000000
--- a/changelog.d/12068.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add some logging to `/sync` to try and track down #11916.
diff --git a/changelog.d/12069.misc b/changelog.d/12069.misc
deleted file mode 100644
index 8374a63220..0000000000
--- a/changelog.d/12069.misc
+++ /dev/null
@@ -1 +0,0 @@
-Minor typing fixes.
diff --git a/changelog.d/12070.misc b/changelog.d/12070.misc
deleted file mode 100644
index d4bedc6b97..0000000000
--- a/changelog.d/12070.misc
+++ /dev/null
@@ -1 +0,0 @@
-Remove legacy `HomeServer.get_datastore()`.
diff --git a/changelog.d/12072.misc b/changelog.d/12072.misc
deleted file mode 100644
index 0360dbd61e..0000000000
--- a/changelog.d/12072.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add type hints to `tests/rest/client`.
diff --git a/changelog.d/12073.removal b/changelog.d/12073.removal
deleted file mode 100644
index 1f39792712..0000000000
--- a/changelog.d/12073.removal
+++ /dev/null
@@ -1 +0,0 @@
-Remove the unstable `/spaces` endpoint from [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946).
diff --git a/changelog.d/12077.bugfix b/changelog.d/12077.bugfix
deleted file mode 100644
index 1bce82082d..0000000000
--- a/changelog.d/12077.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a long-standing bug where Synapse would make additional failing requests over federation for missing data.
diff --git a/changelog.d/12084.misc b/changelog.d/12084.misc
deleted file mode 100644
index 0360dbd61e..0000000000
--- a/changelog.d/12084.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add type hints to `tests/rest/client`.
diff --git a/changelog.d/12088.misc b/changelog.d/12088.misc
deleted file mode 100644
index ce4213650c..0000000000
--- a/changelog.d/12088.misc
+++ /dev/null
@@ -1 +0,0 @@
-Inspect application dependencies using `importlib.metadata` or its backport.
\ No newline at end of file
diff --git a/changelog.d/12089.bugfix b/changelog.d/12089.bugfix
deleted file mode 100644
index 27172c4828..0000000000
--- a/changelog.d/12089.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix occasional 'Unhandled error in Deferred' error message.
diff --git a/changelog.d/12092.misc b/changelog.d/12092.misc
deleted file mode 100644
index 62653d6f8d..0000000000
--- a/changelog.d/12092.misc
+++ /dev/null
@@ -1 +0,0 @@
-User `assertEqual` instead of the deprecated `assertEquals` in test code.
diff --git a/changelog.d/12094.misc b/changelog.d/12094.misc
deleted file mode 100644
index 0360dbd61e..0000000000
--- a/changelog.d/12094.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add type hints to `tests/rest/client`.
diff --git a/changelog.d/12098.bugfix b/changelog.d/12098.bugfix
deleted file mode 100644
index 6b696692e3..0000000000
--- a/changelog.d/12098.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a bug introduced in Synapse 1.51.0rc1 where incoming federation transactions containing at least one EDU would be dropped if debug logging was enabled for `synapse.8631_debug`.
\ No newline at end of file
diff --git a/changelog.d/12099.misc b/changelog.d/12099.misc
deleted file mode 100644
index 0553825dbc..0000000000
--- a/changelog.d/12099.misc
+++ /dev/null
@@ -1 +0,0 @@
-Move experimental support for [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440) to /versions.
diff --git a/changelog.d/12100.bugfix b/changelog.d/12100.bugfix
deleted file mode 100644
index 181095ad99..0000000000
--- a/changelog.d/12100.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a long-standing bug which could cause push notifications to malfunction if `use_frozen_dicts` was set in the configuration.
diff --git a/changelog.d/12105.bugfix b/changelog.d/12105.bugfix
deleted file mode 100644
index f42e63e01f..0000000000
--- a/changelog.d/12105.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix an extremely rare, long-standing bug in `ReadWriteLock` that would cause an error when a newly unblocked writer completes instantly.
diff --git a/changelog.d/12106.misc b/changelog.d/12106.misc
deleted file mode 100644
index d918e9e3b1..0000000000
--- a/changelog.d/12106.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add `stop_cancellation` utility function to stop `Deferred`s from being cancelled.
diff --git a/changelog.d/12066.misc b/changelog.d/12108.misc
index 0360dbd61e..0360dbd61e 100644
--- a/changelog.d/12066.misc
+++ b/changelog.d/12108.misc
diff --git a/changelog.d/12109.misc b/changelog.d/12109.misc
deleted file mode 100644
index 3295e49f43..0000000000
--- a/changelog.d/12109.misc
+++ /dev/null
@@ -1 +0,0 @@
-Improve exception handling for concurrent execution.
diff --git a/changelog.d/12111.misc b/changelog.d/12111.misc
deleted file mode 100644
index be84789c9d..0000000000
--- a/changelog.d/12111.misc
+++ /dev/null
@@ -1 +0,0 @@
-Advertise support for Python 3.10 in packaging files.
\ No newline at end of file
diff --git a/changelog.d/12112.docker b/changelog.d/12112.docker
deleted file mode 100644
index b9e630653d..0000000000
--- a/changelog.d/12112.docker
+++ /dev/null
@@ -1 +0,0 @@
-Use Python 3.9 in Docker images by default.
\ No newline at end of file
diff --git a/changelog.d/12113.misc b/changelog.d/12113.misc
new file mode 100644
index 0000000000..102e064053
--- /dev/null
+++ b/changelog.d/12113.misc
@@ -0,0 +1 @@
+Refactor the tests for event relations.
diff --git a/changelog.d/12118.misc b/changelog.d/12118.misc
new file mode 100644
index 0000000000..a2c397d907
--- /dev/null
+++ b/changelog.d/12118.misc
@@ -0,0 +1 @@
+Move scripts to Synapse package and expose as setuptools entry points.
diff --git a/changelog.d/12119.misc b/changelog.d/12119.misc
deleted file mode 100644
index f02d140f38..0000000000
--- a/changelog.d/12119.misc
+++ /dev/null
@@ -1 +0,0 @@
-Move CI checks out of tox, to facilitate a move to using poetry.
\ No newline at end of file
diff --git a/changelog.d/12128.misc b/changelog.d/12128.misc
new file mode 100644
index 0000000000..0570a8e327
--- /dev/null
+++ b/changelog.d/12128.misc
@@ -0,0 +1 @@
+Fix data validation to compare to lists, not sequences.
diff --git a/changelog.d/12136.misc b/changelog.d/12136.misc
new file mode 100644
index 0000000000..98b1c1c9d8
--- /dev/null
+++ b/changelog.d/12136.misc
@@ -0,0 +1 @@
+Remove unused mocks from `test_typing`.
\ No newline at end of file
diff --git a/debian/changelog b/debian/changelog
index 574930c085..df3db85b8e 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.54.0~rc1) stable; urgency=medium
+
+  * New synapse release 1.54.0~rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 02 Mar 2022 10:43:22 +0000
+
 matrix-synapse-py3 (1.53.0) stable; urgency=medium
 
   * New synapse release 1.53.0.
diff --git a/docker/Dockerfile b/docker/Dockerfile
index a8bb9b0e7f..327275a9ca 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -46,7 +46,6 @@ RUN \
     && rm -rf /var/lib/apt/lists/*
 
 # Copy just what we need to pip install
-COPY scripts /synapse/scripts/
 COPY MANIFEST.in README.rst setup.py synctl /synapse/
 COPY synapse/__init__.py /synapse/synapse/__init__.py
 COPY synapse/python_dependencies.py /synapse/synapse/python_dependencies.py
diff --git a/docs/development/database_schema.md b/docs/development/database_schema.md
index a767d3af9f..d996a7caa2 100644
--- a/docs/development/database_schema.md
+++ b/docs/development/database_schema.md
@@ -158,9 +158,9 @@ same as integers.
 There are three separate aspects to this:
 
  * Any new boolean column must be added to the `BOOLEAN_COLUMNS` list in
-   `scripts/synapse_port_db`. This tells the port script to cast the integer
-   value from SQLite to a boolean before writing the value to the postgres
-   database.
+   `synapse/_scripts/synapse_port_db.py`. This tells the port script to cast
+   the integer value from SQLite to a boolean before writing the value to the
+   postgres database.
 
  * Before SQLite 3.23, `TRUE` and `FALSE` were not recognised as constants by
    SQLite, and the `IS [NOT] TRUE`/`IS [NOT] FALSE` operators were not
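
For illustration, a hedged sketch of what the doc change above refers to: registering a hypothetical new boolean column in the `BOOLEAN_COLUMNS` mapping of `synapse/_scripts/synapse_port_db.py` (the `my_new_table`/`is_enabled` names are made up for the example):

```python
# Sketch of the BOOLEAN_COLUMNS mapping in synapse/_scripts/synapse_port_db.py:
# it maps table names to the columns the port script casts from SQLite integers
# (0/1) to Postgres booleans. The last entry is a hypothetical addition.
BOOLEAN_COLUMNS = {
    "events": ["processed", "outlier", "contains_url"],
    # ... existing entries elided ...
    "my_new_table": ["is_enabled"],  # hypothetical new boolean column
}
```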
diff --git a/docs/other/running_synapse_on_single_board_computers.md b/docs/other/running_synapse_on_single_board_computers.md
index ea14afa8b2..dcf96f0056 100644
--- a/docs/other/running_synapse_on_single_board_computers.md
+++ b/docs/other/running_synapse_on_single_board_computers.md
@@ -31,28 +31,29 @@ Anything that requires modifying the device list [#7721](https://github.com/matr
 Put the below in a new file at /etc/matrix-synapse/conf.d/sbc.yaml to override the defaults in homeserver.yaml.
 
 ```
-# Set to false to disable presence tracking on this homeserver.
+# Disable presence tracking, which is currently fairly resource intensive
+# More info: https://github.com/matrix-org/synapse/issues/9478
 use_presence: false
 
-# When this is enabled, the room "complexity" will be checked before a user
-# joins a new remote room. If it is above the complexity limit, the server will
-# disallow joining, or will instantly leave.
+# Set a small complexity limit, preventing users from joining large rooms
+# which may be resource-intensive to remain a part of.
+#
+# Note that this will not prevent users from joining smaller rooms that
+# eventually become complex.
 limit_remote_rooms:
-  # Uncomment to enable room complexity checking.
-  #enabled: true
+  enabled: true
   complexity: 3.0
 
 # Database configuration
 database:
+  # Use postgres for the best performance
   name: psycopg2
   args:
     user: matrix-synapse
-    # Generate a long, secure one with a password manager
+    # Generate a long, secure password using a password manager
     password: hunter2
     database: matrix-synapse
     host: localhost
-    cp_min: 5
-    cp_max: 10
 ```
 
 Currently the complexity is measured by [current_state_events / 500](https://github.com/matrix-org/synapse/blob/v1.20.1/synapse/storage/databases/main/events_worker.py#L986). You can find join times and your most complex rooms like this:
diff --git a/docs/usage/administration/admin_api/README.md b/docs/usage/administration/admin_api/README.md
index 2fca96f8be..3cbedc5dfa 100644
--- a/docs/usage/administration/admin_api/README.md
+++ b/docs/usage/administration/admin_api/README.md
@@ -12,7 +12,7 @@ UPDATE users SET admin = 1 WHERE name = '@foo:bar.com';
 ```
 
 A new server admin user can also be created using the `register_new_matrix_user`
-command. This is a script that is located in the `scripts/` directory, or possibly
+command. This is a script that is distributed as part of synapse. It is possibly
 already on your `$PATH` depending on how Synapse was installed.
 
 Finding your user's `access_token` is client-dependent, but will usually be shown in the client's settings.
diff --git a/mypy.ini b/mypy.ini
index 38ff787609..23ca4eaa5a 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -23,6 +23,10 @@ files =
 # https://docs.python.org/3/library/re.html#re.X
 exclude = (?x)
   ^(
+   |synapse/_scripts/export_signing_key.py
+   |synapse/_scripts/move_remote_media_to_new_store.py
+   |synapse/_scripts/synapse_port_db.py
+   |synapse/_scripts/update_synapse_database.py
    |synapse/storage/databases/__init__.py
    |synapse/storage/databases/main/__init__.py
    |synapse/storage/databases/main/cache.py
@@ -74,13 +78,7 @@ exclude = (?x)
    |tests/push/test_http.py
    |tests/push/test_presentable_names.py
    |tests/push/test_push_rule_evaluator.py
-   |tests/rest/client/test_account.py
-   |tests/rest/client/test_filter.py
-   |tests/rest/client/test_report_event.py
-   |tests/rest/client/test_rooms.py
-   |tests/rest/client/test_third_party_rules.py
    |tests/rest/client/test_transactions.py
-   |tests/rest/client/test_typing.py
    |tests/rest/key/v2/test_remote_key_resource.py
    |tests/rest/media/v1/test_base.py
    |tests/rest/media/v1/test_media_storage.py
diff --git a/scripts-dev/generate_sample_config b/scripts-dev/generate_sample_config
index 4cd1d1d5b8..185e277933 100755
--- a/scripts-dev/generate_sample_config
+++ b/scripts-dev/generate_sample_config
@@ -10,19 +10,19 @@ SAMPLE_CONFIG="docs/sample_config.yaml"
 SAMPLE_LOG_CONFIG="docs/sample_log_config.yaml"
 
 check() {
-    diff -u "$SAMPLE_LOG_CONFIG" <(./scripts/generate_log_config) >/dev/null || return 1
+    diff -u "$SAMPLE_LOG_CONFIG" <(synapse/_scripts/generate_log_config.py) >/dev/null || return 1
 }
 
 if [ "$1" == "--check" ]; then
-    diff -u "$SAMPLE_CONFIG" <(./scripts/generate_config --header-file docs/.sample_config_header.yaml) >/dev/null || {
+    diff -u "$SAMPLE_CONFIG" <(synapse/_scripts/generate_config.py --header-file docs/.sample_config_header.yaml) >/dev/null || {
         echo -e "\e[1m\e[31m$SAMPLE_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config\`.\e[0m" >&2
         exit 1
     }
-    diff -u "$SAMPLE_LOG_CONFIG" <(./scripts/generate_log_config) >/dev/null || {
+    diff -u "$SAMPLE_LOG_CONFIG" <(synapse/_scripts/generate_log_config.py) >/dev/null || {
         echo -e "\e[1m\e[31m$SAMPLE_LOG_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config\`.\e[0m" >&2
         exit 1
     }
 else
-    ./scripts/generate_config --header-file docs/.sample_config_header.yaml -o "$SAMPLE_CONFIG"
-    ./scripts/generate_log_config -o "$SAMPLE_LOG_CONFIG"
+    synapse/_scripts/generate_config.py --header-file docs/.sample_config_header.yaml -o "$SAMPLE_CONFIG"
+    synapse/_scripts/generate_log_config.py -o "$SAMPLE_LOG_CONFIG"
 fi
diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh
index b6554a73c1..df4d4934d0 100755
--- a/scripts-dev/lint.sh
+++ b/scripts-dev/lint.sh
@@ -84,13 +84,6 @@ else
       files=(
           "synapse" "docker" "tests"
           # annoyingly, black doesn't find these so we have to list them
-          "scripts/export_signing_key"
-          "scripts/generate_config"
-          "scripts/generate_log_config"
-          "scripts/hash_password"
-          "scripts/register_new_matrix_user"
-          "scripts/synapse_port_db"
-          "scripts/update_synapse_database"
           "scripts-dev"
           "scripts-dev/build_debian_packages"
           "scripts-dev/sign_json"
diff --git a/scripts-dev/make_full_schema.sh b/scripts-dev/make_full_schema.sh
index c3c90f4ec6..f0e22d4ca2 100755
--- a/scripts-dev/make_full_schema.sh
+++ b/scripts-dev/make_full_schema.sh
@@ -147,7 +147,7 @@ python -m synapse.app.homeserver --generate-keys -c "$SQLITE_CONFIG"
 
 # Make sure the SQLite3 database is using the latest schema and has no pending background update.
 echo "Running db background jobs..."
-scripts/update_synapse_database --database-config --run-background-updates "$SQLITE_CONFIG"
+synapse/_scripts/update_synapse_database.py --database-config --run-background-updates "$SQLITE_CONFIG"
 
 # Create the PostgreSQL database.
 echo "Creating postgres database..."
@@ -156,10 +156,10 @@ createdb --lc-collate=C --lc-ctype=C --template=template0 "$POSTGRES_DB_NAME"
 echo "Copying data from SQLite3 to Postgres with synapse_port_db..."
 if [ -z "$COVERAGE" ]; then
   # No coverage needed
-  scripts/synapse_port_db --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
+  synapse/_scripts/synapse_port_db.py --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
 else
   # Coverage desired
-  coverage run scripts/synapse_port_db --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
+  coverage run synapse/_scripts/synapse_port_db.py --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
 fi
 
 # Delete schema_version, applied_schema_deltas and applied_module_schemas tables
diff --git a/scripts/register_new_matrix_user b/scripts/register_new_matrix_user
deleted file mode 100755
index 00104b9d62..0000000000
--- a/scripts/register_new_matrix_user
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015, 2016 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from synapse._scripts.register_new_matrix_user import main
-
-if __name__ == "__main__":
-    main()
diff --git a/scripts/synapse_review_recent_signups b/scripts/synapse_review_recent_signups
deleted file mode 100755
index a36d46e14c..0000000000
--- a/scripts/synapse_review_recent_signups
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2021 The Matrix.org Foundation C.I.C.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from synapse._scripts.review_recent_signups import main
-
-if __name__ == "__main__":
-    main()
diff --git a/scripts/sync_room_to_group.pl b/scripts/sync_room_to_group.pl
deleted file mode 100755
index f0c2dfadfa..0000000000
--- a/scripts/sync_room_to_group.pl
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env perl
-
-use strict;
-use warnings;
-
-use JSON::XS;
-use LWP::UserAgent;
-use URI::Escape;
-
-if (@ARGV < 4) {
-    die "usage: $0 <homeserver url> <access_token> <room_id|room_alias> <group_id>\n";
-}
-
-my ($hs, $access_token, $room_id, $group_id) = @ARGV;
-my $ua = LWP::UserAgent->new();
-$ua->timeout(10);
-
-if ($room_id =~ /^#/) {
-    $room_id = uri_escape($room_id);
-    $room_id = decode_json($ua->get("${hs}/_matrix/client/r0/directory/room/${room_id}?access_token=${access_token}")->decoded_content)->{room_id};
-}
-
-my $room_users  = [ keys %{decode_json($ua->get("${hs}/_matrix/client/r0/rooms/${room_id}/joined_members?access_token=${access_token}")->decoded_content)->{joined}} ];
-my $group_users = [
-    (map { $_->{user_id} } @{decode_json($ua->get("${hs}/_matrix/client/unstable/groups/${group_id}/users?access_token=${access_token}" )->decoded_content)->{chunk}}),
-    (map { $_->{user_id} } @{decode_json($ua->get("${hs}/_matrix/client/unstable/groups/${group_id}/invited_users?access_token=${access_token}" )->decoded_content)->{chunk}}),
-];
-
-die "refusing to sync from empty room" unless (@$room_users);
-die "refusing to sync to empty group" unless (@$group_users);
-
-my $diff = {};
-foreach my $user (@$room_users) { $diff->{$user}++ }
-foreach my $user (@$group_users) { $diff->{$user}-- }
-
-foreach my $user (keys %$diff) {
-    if ($diff->{$user} == 1) {
-        warn "inviting $user";
-        print STDERR $ua->put("${hs}/_matrix/client/unstable/groups/${group_id}/admin/users/invite/${user}?access_token=${access_token}", Content=>'{}')->status_line."\n";
-    }
-    elsif ($diff->{$user} == -1) {
-        warn "removing $user";
-        print STDERR $ua->put("${hs}/_matrix/client/unstable/groups/${group_id}/admin/users/remove/${user}?access_token=${access_token}", Content=>'{}')->status_line."\n";
-    }
-}
diff --git a/setup.py b/setup.py
index 26f4650348..318df16766 100755
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import glob
 import os
 from typing import Any, Dict
 
@@ -153,8 +152,19 @@ setup(
     python_requires="~=3.7",
     entry_points={
         "console_scripts": [
+            # Application
             "synapse_homeserver = synapse.app.homeserver:main",
             "synapse_worker = synapse.app.generic_worker:main",
+            # Scripts
+            "export_signing_key = synapse._scripts.export_signing_key:main",
+            "generate_config = synapse._scripts.generate_config:main",
+            "generate_log_config = synapse._scripts.generate_log_config:main",
+            "generate_signing_key = synapse._scripts.generate_signing_key:main",
+            "hash_password = synapse._scripts.hash_password:main",
+            "register_new_matrix_user = synapse._scripts.register_new_matrix_user:main",
+            "synapse_port_db = synapse._scripts.synapse_port_db:main",
+            "synapse_review_recent_signups = synapse._scripts.review_recent_signups:main",
+            "update_synapse_database = synapse._scripts.update_synapse_database:main",
         ]
     },
     classifiers=[
@@ -167,6 +177,6 @@ setup(
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
     ],
-    scripts=["synctl"] + glob.glob("scripts/*"),
+    scripts=["synctl"],
     cmdclass={"test": TestCommand},
 )
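For context on the `console_scripts` block above: each entry maps a command name to a `module:function` reference, and setuptools generates a launcher executable that imports the module and calls that function, which is why the scripts below each gain a `main()`. A minimal sketch with a hypothetical package (not part of this patch):

    # setup.py for a hypothetical package "mypkg"; installing it puts a
    # "mypkg-hello" command on PATH that runs mypkg.cli:main, mirroring how
    # e.g. "synapse_port_db = synapse._scripts.synapse_port_db:main" is wired.
    from setuptools import find_packages, setup

    setup(
        name="mypkg",
        version="0.1.0",
        packages=find_packages(),
        entry_points={
            "console_scripts": [
                "mypkg-hello = mypkg.cli:main",
            ],
        },
    )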
diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml
index 9a01152c15..dd4c8478d5 100644
--- a/snap/snapcraft.yaml
+++ b/snap/snapcraft.yaml
@@ -20,7 +20,7 @@ apps:
   generate-config:
     command: generate_config
   generate-signing-key:
-    command: generate_signing_key.py
+    command: generate_signing_key
   register-new-matrix-user:
     command: register_new_matrix_user
     plugs: [network]
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 903f2e815d..b21e1ed0f3 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -47,7 +47,7 @@ try:
 except ImportError:
     pass
 
-__version__ = "1.53.0"
+__version__ = "1.54.0rc1"
 
 if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
     # We import here so that we don't have to install a bunch of deps when
diff --git a/scripts/export_signing_key b/synapse/_scripts/export_signing_key.py
index bf0139bd64..3d254348f1 100755
--- a/scripts/export_signing_key
+++ b/synapse/_scripts/export_signing_key.py
@@ -50,7 +50,7 @@ def format_for_config(public_key: nacl.signing.VerifyKey, expiry_ts: int):
     )
 
 
-if __name__ == "__main__":
+def main():
     parser = argparse.ArgumentParser()
 
     parser.add_argument(
@@ -85,7 +85,6 @@ if __name__ == "__main__":
         else format_plain
     )
 
-    keys = []
     for file in args.key_file:
         try:
             res = read_signing_keys(file)
@@ -98,3 +97,7 @@ if __name__ == "__main__":
             res = []
         for key in res:
             formatter(get_verify_key(key))
+
+
+if __name__ == "__main__":
+    main()
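The same mechanical change is applied to each of the scripts that follow: the body of the `if __name__ == "__main__":` block moves into a `main()` function, so the module works both as a directly executed script and as an entry-point target. Schematically (illustrative module only, not part of this patch):

    # Hypothetical synapse/_scripts/example.py following the pattern above.
    import argparse


    def main() -> None:
        parser = argparse.ArgumentParser(description="An example script")
        parser.add_argument("--name", default="world")
        args = parser.parse_args()
        print(f"hello {args.name}")


    if __name__ == "__main__":
        main()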
diff --git a/scripts/generate_config b/synapse/_scripts/generate_config.py
index 931b40c045..75fce20b12 100755
--- a/scripts/generate_config
+++ b/synapse/_scripts/generate_config.py
@@ -6,7 +6,8 @@ import sys
 
 from synapse.config.homeserver import HomeServerConfig
 
-if __name__ == "__main__":
+
+def main():
     parser = argparse.ArgumentParser()
     parser.add_argument(
         "--config-dir",
@@ -76,3 +77,7 @@ if __name__ == "__main__":
         shutil.copyfileobj(args.header_file, args.output_file)
 
     args.output_file.write(conf)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/generate_log_config b/synapse/_scripts/generate_log_config.py
index e72a0dafb7..82fc763140 100755
--- a/scripts/generate_log_config
+++ b/synapse/_scripts/generate_log_config.py
@@ -19,7 +19,8 @@ import sys
 
 from synapse.config.logger import DEFAULT_LOG_CONFIG
 
-if __name__ == "__main__":
+
+def main():
     parser = argparse.ArgumentParser()
 
     parser.add_argument(
@@ -42,3 +43,7 @@ if __name__ == "__main__":
     out = args.output_file
     out.write(DEFAULT_LOG_CONFIG.substitute(log_file=args.log_file))
     out.flush()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/generate_signing_key.py b/synapse/_scripts/generate_signing_key.py
index 07df25a809..bc26d25bfd 100755
--- a/scripts/generate_signing_key.py
+++ b/synapse/_scripts/generate_signing_key.py
@@ -19,7 +19,8 @@ from signedjson.key import generate_signing_key, write_signing_keys
 
 from synapse.util.stringutils import random_string
 
-if __name__ == "__main__":
+
+def main():
     parser = argparse.ArgumentParser()
 
     parser.add_argument(
@@ -34,3 +35,7 @@ if __name__ == "__main__":
     key_id = "a_" + random_string(4)
     key = (generate_signing_key(key_id),)
     write_signing_keys(args.output_file, key)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/hash_password b/synapse/_scripts/hash_password.py
index 1d6fb0d700..708640c7de 100755
--- a/scripts/hash_password
+++ b/synapse/_scripts/hash_password.py
@@ -8,9 +8,6 @@ import unicodedata
 import bcrypt
 import yaml
 
-bcrypt_rounds = 12
-password_pepper = ""
-
 
 def prompt_for_pass():
     password = getpass.getpass("Password: ")
@@ -26,7 +23,10 @@ def prompt_for_pass():
     return password
 
 
-if __name__ == "__main__":
+def main():
+    bcrypt_rounds = 12
+    password_pepper = ""
+
     parser = argparse.ArgumentParser(
         description=(
             "Calculate the hash of a new password, so that passwords can be reset"
@@ -77,3 +77,7 @@ if __name__ == "__main__":
     ).decode("ascii")
 
     print(hashed)
+
+
+if __name__ == "__main__":
+    main()
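Beyond the `main()` wrapper, the hash_password hunk simply moves the default `bcrypt_rounds` and `password_pepper` values inside `main()`. The underlying hashing call is ordinary bcrypt usage; a rough sketch (made-up password, empty pepper, and ignoring the NFKC normalisation the real script performs):

    import bcrypt

    bcrypt_rounds = 12
    password_pepper = ""  # the real script can pick this up from the homeserver config

    password = "correct horse battery staple"
    hashed = bcrypt.hashpw(
        (password + password_pepper).encode("utf8"),
        bcrypt.gensalt(bcrypt_rounds),
    ).decode("ascii")
    print(hashed)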
diff --git a/scripts/move_remote_media_to_new_store.py b/synapse/_scripts/move_remote_media_to_new_store.py
index 875aa4781f..9667d95dfe 100755
--- a/scripts/move_remote_media_to_new_store.py
+++ b/synapse/_scripts/move_remote_media_to_new_store.py
@@ -28,7 +28,7 @@ This can be extracted from postgres with::
 
 To use, pipe the above into::
 
-    PYTHON_PATH=. ./scripts/move_remote_media_to_new_store.py <source repo> <dest repo>
+    PYTHON_PATH=. synapse/_scripts/move_remote_media_to_new_store.py <source repo> <dest repo>
 """
 
 import argparse
diff --git a/scripts/synapse_port_db b/synapse/_scripts/synapse_port_db.py
index db354b3c8c..c38666da18 100755
--- a/scripts/synapse_port_db
+++ b/synapse/_scripts/synapse_port_db.py
@@ -1146,7 +1146,7 @@ class TerminalProgress(Progress):
 ##############################################
 
 
-if __name__ == "__main__":
+def main():
     parser = argparse.ArgumentParser(
         description="A script to port an existing synapse SQLite database to"
         " a new PostgreSQL database."
@@ -1251,3 +1251,7 @@ if __name__ == "__main__":
         sys.stderr.write(end_error)
 
         sys.exit(5)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/update_synapse_database b/synapse/_scripts/update_synapse_database.py
index f43676afaa..f43676afaa 100755
--- a/scripts/update_synapse_database
+++ b/synapse/_scripts/update_synapse_database.py
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index 1265738dc1..8e19e2fc26 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -383,7 +383,7 @@ class RootConfig:
         Build a default configuration file
 
         This is used when the user explicitly asks us to generate a config file
-        (eg with --generate_config).
+        (eg with --generate-config).
 
         Args:
             config_dir_path: The path where the config files are kept. Used to
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index 64e595e748..467275b98c 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -1428,7 +1428,7 @@ class FederationClient(FederationBase):
 
             # Validate children_state of the room.
             children_state = room.pop("children_state", [])
-            if not isinstance(children_state, Sequence):
+            if not isinstance(children_state, list):
                 raise InvalidResponseError("'room.children_state' must be a list")
             if any(not isinstance(e, dict) for e in children_state):
                 raise InvalidResponseError("Invalid event in 'children_state' list")
@@ -1440,14 +1440,14 @@ class FederationClient(FederationBase):
 
             # Validate the children rooms.
             children = res.get("children", [])
-            if not isinstance(children, Sequence):
+            if not isinstance(children, list):
                 raise InvalidResponseError("'children' must be a list")
             if any(not isinstance(r, dict) for r in children):
                 raise InvalidResponseError("Invalid room in 'children' list")
 
             # Validate the inaccessible children.
             inaccessible_children = res.get("inaccessible_children", [])
-            if not isinstance(inaccessible_children, Sequence):
+            if not isinstance(inaccessible_children, list):
                 raise InvalidResponseError("'inaccessible_children' must be a list")
             if any(not isinstance(r, str) for r in inaccessible_children):
                 raise InvalidResponseError(
@@ -1630,7 +1630,7 @@ def _validate_hierarchy_event(d: JsonDict) -> None:
         raise ValueError("Invalid event: 'content' must be a dict")
 
     via = content.get("via")
-    if not isinstance(via, Sequence):
+    if not isinstance(via, list):
         raise ValueError("Invalid event: 'via' must be a list")
     if any(not isinstance(v, str) for v in via):
         raise ValueError("Invalid event: 'via' must be a list of strings")
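On the `Sequence` -> `list` changes above: `str` (as well as `bytes` and tuples) also counts as a `Sequence`, so a bare string could previously satisfy a check whose error message demands a list. The stricter `list` check closes that gap, as a quick illustration shows:

    from collections.abc import Sequence

    via = "matrix.org"  # a single string rather than a list of server names

    print(isinstance(via, Sequence))  # True  -- the old check would accept this
    print(isinstance(via, list))      # False -- the new check rejects it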
diff --git a/synapse/handlers/room_summary.py b/synapse/handlers/room_summary.py
index 55c2cbdba8..3979cbba71 100644
--- a/synapse/handlers/room_summary.py
+++ b/synapse/handlers/room_summary.py
@@ -857,7 +857,7 @@ class _RoomEntry:
 
 def _has_valid_via(e: EventBase) -> bool:
     via = e.content.get("via")
-    if not via or not isinstance(via, Sequence):
+    if not via or not isinstance(via, list):
         return False
     for v in via:
         if not isinstance(v, str):
diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py
index 2fb3e65192..417aef1dbc 100644
--- a/synapse/storage/databases/main/state.py
+++ b/synapse/storage/databases/main/state.py
@@ -42,6 +42,16 @@ logger = logging.getLogger(__name__)
 MAX_STATE_DELTA_HOPS = 100
 
 
+def _retrieve_and_check_room_version(room_id: str, room_version_id: str) -> RoomVersion:
+    v = KNOWN_ROOM_VERSIONS.get(room_version_id)
+    if not v:
+        raise UnsupportedRoomVersionError(
+            "Room %s uses a room version %s which is no longer supported"
+            % (room_id, room_version_id)
+        )
+    return v
+
+
 # this inherits from EventsWorkerStore because it calls self.get_events
 class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
     """The parts of StateGroupStore that can be called from workers."""
@@ -62,11 +72,8 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
                 Typically this happens if support for the room's version has been
                 removed from Synapse.
         """
-        return await self.db_pool.runInteraction(
-            "get_room_version_txn",
-            self.get_room_version_txn,
-            room_id,
-        )
+        room_version_id = await self.get_room_version_id(room_id)
+        return _retrieve_and_check_room_version(room_id, room_version_id)
 
     def get_room_version_txn(
         self, txn: LoggingTransaction, room_id: str
@@ -82,15 +89,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
                 removed from Synapse.
         """
         room_version_id = self.get_room_version_id_txn(txn, room_id)
-        v = KNOWN_ROOM_VERSIONS.get(room_version_id)
-
-        if not v:
-            raise UnsupportedRoomVersionError(
-                "Room %s uses a room version %s which is no longer supported"
-                % (room_id, room_version_id)
-            )
-
-        return v
+        return _retrieve_and_check_room_version(room_id, room_version_id)
 
     @cached(max_entries=10000)
     async def get_room_version_id(self, room_id: str) -> str:
diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py
index dadf3d1e3a..7614d76ac6 100644
--- a/synapse/storage/databases/state/store.py
+++ b/synapse/storage/databases/state/store.py
@@ -13,24 +13,11 @@
 # limitations under the License.
 
 import logging
-from typing import (
-    TYPE_CHECKING,
-    Collection,
-    Dict,
-    Iterable,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-)
+from typing import TYPE_CHECKING, Collection, Dict, Iterable, List, Optional, Set, Tuple
 
 import attr
-from sortedcontainers import SortedDict
-
-from twisted.internet import defer
 
 from synapse.api.constants import EventTypes
-from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.database import (
     DatabasePool,
@@ -42,12 +29,6 @@ from synapse.storage.state import StateFilter
 from synapse.storage.types import Cursor
 from synapse.storage.util.sequence import build_sequence_generator
 from synapse.types import MutableStateMap, StateKey, StateMap
-from synapse.util import unwrapFirstError
-from synapse.util.async_helpers import (
-    AbstractObservableDeferred,
-    ObservableDeferred,
-    yieldable_gather_results,
-)
 from synapse.util.caches.descriptors import cached
 from synapse.util.caches.dictionary_cache import DictionaryCache
 
@@ -56,8 +37,8 @@ if TYPE_CHECKING:
 
 logger = logging.getLogger(__name__)
 
+
 MAX_STATE_DELTA_HOPS = 100
-MAX_INFLIGHT_REQUESTS_PER_GROUP = 5
 
 
 @attr.s(slots=True, frozen=True, auto_attribs=True)
@@ -73,24 +54,6 @@ class _GetStateGroupDelta:
         return len(self.delta_ids) if self.delta_ids else 0
 
 
-def state_filter_rough_priority_comparator(
-    state_filter: StateFilter,
-) -> Tuple[int, int]:
-    """
-    Returns a comparable value that roughly indicates the relative size of this
-    state filter compared to others.
-    'Larger' state filters should sort first when using ascending order, so
-    this is essentially the opposite of 'size'.
-    It should be treated as a rough guide only and should not be interpreted to
-    have any particular meaning. The representation may also change
-
-    The current implementation returns a tuple of the form:
-        * -1 for include_others, 0 otherwise
-        * -(number of entries in state_filter.types)
-    """
-    return -int(state_filter.include_others), -len(state_filter.types)
-
-
 class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
     """A data store for fetching/storing state groups."""
 
@@ -143,12 +106,6 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
             500000,
         )
 
-        # Current ongoing get_state_for_groups in-flight requests
-        # {group ID -> {StateFilter -> ObservableDeferred}}
-        self._state_group_inflight_requests: Dict[
-            int, SortedDict[StateFilter, AbstractObservableDeferred[StateMap[str]]]
-        ] = {}
-
         def get_max_state_group_txn(txn: Cursor) -> int:
             txn.execute("SELECT COALESCE(max(id), 0) FROM state_groups")
             return txn.fetchone()[0]  # type: ignore
@@ -200,7 +157,7 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
         )
 
     async def _get_state_groups_from_groups(
-        self, groups: Sequence[int], state_filter: StateFilter
+        self, groups: List[int], state_filter: StateFilter
     ) -> Dict[int, StateMap[str]]:
         """Returns the state groups for a given set of groups from the
         database, filtering on types of state events.
@@ -271,170 +228,6 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
 
         return state_filter.filter_state(state_dict_ids), not missing_types
 
-    def _get_state_for_group_gather_inflight_requests(
-        self, group: int, state_filter_left_over: StateFilter
-    ) -> Tuple[Sequence[AbstractObservableDeferred[StateMap[str]]], StateFilter]:
-        """
-        Attempts to gather in-flight requests and re-use them to retrieve state
-        for the given state group, filtered with the given state filter.
-
-        If there are more than MAX_INFLIGHT_REQUESTS_PER_GROUP in-flight requests,
-        and there *still* isn't enough information to complete the request by solely
-        reusing others, a full state filter will be requested to ensure that subsequent
-        requests can reuse this request.
-
-        Used as part of _get_state_for_group_using_inflight_cache.
-
-        Returns:
-            Tuple of two values:
-                A sequence of ObservableDeferreds to observe
-                A StateFilter representing what else needs to be requested to fulfill the request
-        """
-
-        inflight_requests = self._state_group_inflight_requests.get(group)
-        if inflight_requests is None:
-            # no requests for this group, need to retrieve it all ourselves
-            return (), state_filter_left_over
-
-        # The list of ongoing requests which will help narrow the current request.
-        reusable_requests = []
-
-        # Iterate over existing requests in roughly biggest-first order.
-        for request_state_filter in inflight_requests:
-            request_deferred = inflight_requests[request_state_filter]
-            new_state_filter_left_over = state_filter_left_over.approx_difference(
-                request_state_filter
-            )
-            if new_state_filter_left_over == state_filter_left_over:
-                # Reusing this request would not gain us anything, so don't bother.
-                continue
-
-            reusable_requests.append(request_deferred)
-            state_filter_left_over = new_state_filter_left_over
-            if state_filter_left_over == StateFilter.none():
-                # we have managed to collect enough of the in-flight requests
-                # to cover our StateFilter and give us the state we need.
-                break
-
-        if (
-            state_filter_left_over != StateFilter.none()
-            and len(inflight_requests) >= MAX_INFLIGHT_REQUESTS_PER_GROUP
-        ):
-            # There are too many requests for this group.
-            # To prevent even more from building up, we request the whole
-            # state filter to guarantee that we can be reused by any subsequent
-            # requests for this state group.
-            return (), StateFilter.all()
-
-        return reusable_requests, state_filter_left_over
-
-    async def _get_state_for_group_fire_request(
-        self, group: int, state_filter: StateFilter
-    ) -> StateMap[str]:
-        """
-        Fires off a request to get the state at a state group,
-        potentially filtering by type and/or state key.
-
-        This request will be tracked in the in-flight request cache and automatically
-        removed when it is finished.
-
-        Used as part of _get_state_for_group_using_inflight_cache.
-
-        Args:
-            group: ID of the state group for which we want to get state
-            state_filter: the state filter used to fetch state from the database
-        """
-        cache_sequence_nm = self._state_group_cache.sequence
-        cache_sequence_m = self._state_group_members_cache.sequence
-
-        # Help the cache hit ratio by expanding the filter a bit
-        db_state_filter = state_filter.return_expanded()
-
-        async def _the_request() -> StateMap[str]:
-            group_to_state_dict = await self._get_state_groups_from_groups(
-                (group,), state_filter=db_state_filter
-            )
-
-            # Now let's update the caches
-            self._insert_into_cache(
-                group_to_state_dict,
-                db_state_filter,
-                cache_seq_num_members=cache_sequence_m,
-                cache_seq_num_non_members=cache_sequence_nm,
-            )
-
-            # Remove ourselves from the in-flight cache
-            group_request_dict = self._state_group_inflight_requests[group]
-            del group_request_dict[db_state_filter]
-            if not group_request_dict:
-                # If there are no more requests in-flight for this group,
-                # clean up the cache by removing the empty dictionary
-                del self._state_group_inflight_requests[group]
-
-            return group_to_state_dict[group]
-
-        # We don't immediately await the result, so must use run_in_background
-        # But we DO await the result before the current log context (request)
-        # finishes, so don't need to run it as a background process.
-        request_deferred = run_in_background(_the_request)
-        observable_deferred = ObservableDeferred(request_deferred, consumeErrors=True)
-
-        # Insert the ObservableDeferred into the cache
-        group_request_dict = self._state_group_inflight_requests.setdefault(
-            group, SortedDict(state_filter_rough_priority_comparator)
-        )
-        group_request_dict[db_state_filter] = observable_deferred
-
-        return await make_deferred_yieldable(observable_deferred.observe())
-
-    async def _get_state_for_group_using_inflight_cache(
-        self, group: int, state_filter: StateFilter
-    ) -> MutableStateMap[str]:
-        """
-        Gets the state at a state group, potentially filtering by type and/or
-        state key.
-
-        1. Calls _get_state_for_group_gather_inflight_requests to gather any
-           ongoing requests which might overlap with the current request.
-        2. Fires a new request, using _get_state_for_group_fire_request,
-           for any state which cannot be gathered from ongoing requests.
-
-        Args:
-            group: ID of the state group for which we want to get state
-            state_filter: the state filter used to fetch state from the database
-        Returns:
-            state map
-        """
-
-        # first, figure out whether we can re-use any in-flight requests
-        # (and if so, what would be left over)
-        (
-            reusable_requests,
-            state_filter_left_over,
-        ) = self._get_state_for_group_gather_inflight_requests(group, state_filter)
-
-        if state_filter_left_over != StateFilter.none():
-            # Fetch remaining state
-            remaining = await self._get_state_for_group_fire_request(
-                group, state_filter_left_over
-            )
-            assembled_state: MutableStateMap[str] = dict(remaining)
-        else:
-            assembled_state = {}
-
-        gathered = await make_deferred_yieldable(
-            defer.gatherResults(
-                (r.observe() for r in reusable_requests), consumeErrors=True
-            )
-        ).addErrback(unwrapFirstError)
-
-        # assemble our result.
-        for result_piece in gathered:
-            assembled_state.update(result_piece)
-
-        # Filter out any state that may be more than what we asked for.
-        return state_filter.filter_state(assembled_state)
-
     async def _get_state_for_groups(
         self, groups: Iterable[int], state_filter: Optional[StateFilter] = None
     ) -> Dict[int, MutableStateMap[str]]:
@@ -476,17 +269,31 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
         if not incomplete_groups:
             return state
 
-        async def get_from_cache(group: int, state_filter: StateFilter) -> None:
-            state[group] = await self._get_state_for_group_using_inflight_cache(
-                group, state_filter
-            )
+        cache_sequence_nm = self._state_group_cache.sequence
+        cache_sequence_m = self._state_group_members_cache.sequence
 
-        await yieldable_gather_results(
-            get_from_cache,
-            incomplete_groups,
-            state_filter,
+        # Help the cache hit ratio by expanding the filter a bit
+        db_state_filter = state_filter.return_expanded()
+
+        group_to_state_dict = await self._get_state_groups_from_groups(
+            list(incomplete_groups), state_filter=db_state_filter
         )
 
+        # Now lets update the caches
+        self._insert_into_cache(
+            group_to_state_dict,
+            db_state_filter,
+            cache_seq_num_members=cache_sequence_m,
+            cache_seq_num_non_members=cache_sequence_nm,
+        )
+
+        # And finally update the result dict, by filtering out any extra
+        # stuff we pulled out of the database.
+        for group, group_state_dict in group_to_state_dict.items():
+            # We just replace any existing entries, as we will have loaded
+            # everything we need from the database anyway.
+            state[group] = state_filter.filter_state(group_state_dict)
+
         return state
 
     def _get_state_for_groups_using_cache(
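For readers skimming the large removal above: the deleted machinery deduplicated concurrent state-group lookups by letting new callers observe requests that were already in flight instead of issuing their own database queries. A stripped-down sketch of that general technique, using plain asyncio rather than Synapse's Deferred-based implementation:

    import asyncio
    from typing import Dict

    _inflight: Dict[int, "asyncio.Task[str]"] = {}

    async def fetch_from_db(group: int) -> str:
        await asyncio.sleep(0.1)  # stand-in for a database query
        return f"state for group {group}"

    async def get_state(group: int) -> str:
        task = _inflight.get(group)
        if task is None:
            # Nothing in flight for this group: start a request and record it
            # so concurrent callers share the result instead of re-querying.
            task = asyncio.ensure_future(fetch_from_db(group))
            _inflight[group] = task
            task.add_done_callback(lambda _: _inflight.pop(group, None))
        return await task

    async def main() -> None:
        # Three callers, but only two database fetches are started.
        print(await asyncio.gather(get_state(1), get_state(1), get_state(2)))

    asyncio.run(main())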
diff --git a/tests/handlers/test_room_summary.py b/tests/handlers/test_room_summary.py
index b33ff94a39..cff07a8973 100644
--- a/tests/handlers/test_room_summary.py
+++ b/tests/handlers/test_room_summary.py
@@ -658,7 +658,7 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase):
 
     def test_unknown_room_version(self):
         """
-        If an room with an unknown room version is encountered it should not cause
+        If a room with an unknown room version is encountered it should not cause
         the entire summary to skip.
         """
         # Poke the database and update the room version to an unknown one.
@@ -670,6 +670,9 @@ class SpaceSummaryTestCase(unittest.HomeserverTestCase):
                 desc="updated-room-version",
             )
         )
+        # Invalidate method so that it returns the currently updated version
+        # instead of the cached version.
+        self.hs.get_datastores().main.get_room_version_id.invalidate((self.room,))
 
         # The result should have only the space, along with a link from space -> room.
         expected = [(self.space, [self.room])]
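The added `invalidate` call is needed because `get_room_version_id` is wrapped in Synapse's `@cached` decorator (see the `synapse/storage/databases/main/state.py` hunk above), so updating the underlying database row directly does not refresh the value already held in memory. The shape of the problem, sketched with the standard library cache purely for illustration:

    from functools import lru_cache

    _db = {"!room:test": "9"}

    @lru_cache(maxsize=None)
    def get_room_version_id(room_id: str) -> str:
        return _db[room_id]

    print(get_room_version_id("!room:test"))    # "9", now cached in memory
    _db["!room:test"] = "unknown-room-version"  # poke the "database" directly
    print(get_room_version_id("!room:test"))    # still "9": the cache is stale
    get_room_version_id.cache_clear()           # analogous to .invalidate((room_id,))
    print(get_room_version_id("!room:test"))    # "unknown-room-version"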
diff --git a/tests/rest/client/test_account.py b/tests/rest/client/test_account.py
index 6c4462e74a..def836054d 100644
--- a/tests/rest/client/test_account.py
+++ b/tests/rest/client/test_account.py
@@ -15,11 +15,12 @@ import json
 import os
 import re
 from email.parser import Parser
-from typing import Dict, List, Optional
+from typing import Any, Dict, List, Optional, Union
 from unittest.mock import Mock
 
 import pkg_resources
 
+from twisted.internet.interfaces import IReactorTCP
 from twisted.test.proto_helpers import MemoryReactor
 
 import synapse.rest.admin
@@ -30,6 +31,7 @@ from synapse.rest import admin
 from synapse.rest.client import account, login, register, room
 from synapse.rest.synapse.client.password_reset import PasswordResetSubmitTokenResource
 from synapse.server import HomeServer
+from synapse.types import JsonDict
 from synapse.util import Clock
 
 from tests import unittest
@@ -46,7 +48,7 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):
         login.register_servlets,
     ]
 
-    def make_homeserver(self, reactor, clock):
+    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
         config = self.default_config()
 
         # Email config.
@@ -67,20 +69,27 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):
         hs = self.setup_test_homeserver(config=config)
 
         async def sendmail(
-            reactor, smtphost, smtpport, from_addr, to_addrs, msg, **kwargs
-        ):
-            self.email_attempts.append(msg)
-
-        self.email_attempts = []
+            reactor: IReactorTCP,
+            smtphost: str,
+            smtpport: int,
+            from_addr: str,
+            to_addr: str,
+            msg_bytes: bytes,
+            *args: Any,
+            **kwargs: Any,
+        ) -> None:
+            self.email_attempts.append(msg_bytes)
+
+        self.email_attempts: List[bytes] = []
         hs.get_send_email_handler()._sendmail = sendmail
 
         return hs
 
-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.store = hs.get_datastores().main
         self.submit_token_resource = PasswordResetSubmitTokenResource(hs)
 
-    def test_basic_password_reset(self):
+    def test_basic_password_reset(self) -> None:
         """Test basic password reset flow"""
         old_password = "monkey"
         new_password = "kangeroo"
@@ -118,7 +127,7 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):
         self.attempt_wrong_password_login("kermit", old_password)
 
     @override_config({"rc_3pid_validation": {"burst_count": 3}})
-    def test_ratelimit_by_email(self):
+    def test_ratelimit_by_email(self) -> None:
         """Test that we ratelimit /requestToken for the same email."""
         old_password = "monkey"
         new_password = "kangeroo"
@@ -139,7 +148,7 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):
             )
         )
 
-        def reset(ip):
+        def reset(ip: str) -> None:
             client_secret = "foobar"
             session_id = self._request_token(email, client_secret, ip)
 
@@ -166,7 +175,7 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):
 
         self.assertEqual(cm.exception.code, 429)
 
-    def test_basic_password_reset_canonicalise_email(self):
+    def test_basic_password_reset_canonicalise_email(self) -> None:
         """Test basic password reset flow
         Request password reset with different spelling
         """
@@ -206,7 +215,7 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):
         # Assert we can't log in with the old password
         self.attempt_wrong_password_login("kermit", old_password)
 
-    def test_cant_reset_password_without_clicking_link(self):
+    def test_cant_reset_password_without_clicking_link(self) -> None:
         """Test that we do actually need to click the link in the email"""
         old_password = "monkey"
         new_password = "kangeroo"
@@ -241,7 +250,7 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):
         # Assert we can't log in with the new password
         self.attempt_wrong_password_login("kermit", new_password)
 
-    def test_no_valid_token(self):
+    def test_no_valid_token(self) -> None:
         """Test that we do actually need to request a token and can't just
         make a session up.
         """
@@ -277,7 +286,7 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):
         self.attempt_wrong_password_login("kermit", new_password)
 
     @unittest.override_config({"request_token_inhibit_3pid_errors": True})
-    def test_password_reset_bad_email_inhibit_error(self):
+    def test_password_reset_bad_email_inhibit_error(self) -> None:
         """Test that triggering a password reset with an email address that isn't bound
         to an account doesn't leak the lack of binding for that address if configured
         that way.
@@ -292,7 +301,12 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):
 
         self.assertIsNotNone(session_id)
 
-    def _request_token(self, email, client_secret, ip="127.0.0.1"):
+    def _request_token(
+        self,
+        email: str,
+        client_secret: str,
+        ip: str = "127.0.0.1",
+    ) -> str:
         channel = self.make_request(
             "POST",
             b"account/password/email/requestToken",
@@ -309,7 +323,7 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):
 
         return channel.json_body["sid"]
 
-    def _validate_token(self, link):
+    def _validate_token(self, link: str) -> None:
         # Remove the host
         path = link.replace("https://example.com", "")
 
@@ -339,7 +353,7 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):
         )
         self.assertEqual(200, channel.code, channel.result)
 
-    def _get_link_from_email(self):
+    def _get_link_from_email(self) -> str:
         assert self.email_attempts, "No emails have been sent"
 
         raw_msg = self.email_attempts[-1].decode("UTF-8")
@@ -354,14 +368,19 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):
         if not text:
             self.fail("Could not find text portion of email to parse")
 
+        assert text is not None
         match = re.search(r"https://example.com\S+", text)
         assert match, "Could not find link in email"
 
         return match.group(0)
 
     def _reset_password(
-        self, new_password, session_id, client_secret, expected_code=200
-    ):
+        self,
+        new_password: str,
+        session_id: str,
+        client_secret: str,
+        expected_code: int = 200,
+    ) -> None:
         channel = self.make_request(
             "POST",
             b"account/password",
@@ -388,11 +407,11 @@ class DeactivateTestCase(unittest.HomeserverTestCase):
         room.register_servlets,
     ]
 
-    def make_homeserver(self, reactor, clock):
+    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
         self.hs = self.setup_test_homeserver()
         return self.hs
 
-    def test_deactivate_account(self):
+    def test_deactivate_account(self) -> None:
         user_id = self.register_user("kermit", "test")
         tok = self.login("kermit", "test")
 
@@ -407,7 +426,7 @@ class DeactivateTestCase(unittest.HomeserverTestCase):
         channel = self.make_request("GET", "account/whoami", access_token=tok)
         self.assertEqual(channel.code, 401)
 
-    def test_pending_invites(self):
+    def test_pending_invites(self) -> None:
         """Tests that deactivating a user rejects every pending invite for them."""
         store = self.hs.get_datastores().main
 
@@ -448,7 +467,7 @@ class DeactivateTestCase(unittest.HomeserverTestCase):
         self.assertEqual(len(memberships), 1, memberships)
         self.assertEqual(memberships[0].room_id, room_id, memberships)
 
-    def deactivate(self, user_id, tok):
+    def deactivate(self, user_id: str, tok: str) -> None:
         request_data = json.dumps(
             {
                 "auth": {
@@ -474,12 +493,12 @@ class WhoamiTestCase(unittest.HomeserverTestCase):
         register.register_servlets,
     ]
 
-    def default_config(self):
+    def default_config(self) -> Dict[str, Any]:
         config = super().default_config()
         config["allow_guest_access"] = True
         return config
 
-    def test_GET_whoami(self):
+    def test_GET_whoami(self) -> None:
         device_id = "wouldgohere"
         user_id = self.register_user("kermit", "test")
         tok = self.login("kermit", "test", device_id=device_id)
@@ -496,7 +515,7 @@ class WhoamiTestCase(unittest.HomeserverTestCase):
             },
         )
 
-    def test_GET_whoami_guests(self):
+    def test_GET_whoami_guests(self) -> None:
         channel = self.make_request(
             b"POST", b"/_matrix/client/r0/register?kind=guest", b"{}"
         )
@@ -516,7 +535,7 @@ class WhoamiTestCase(unittest.HomeserverTestCase):
             },
         )
 
-    def test_GET_whoami_appservices(self):
+    def test_GET_whoami_appservices(self) -> None:
         user_id = "@as:test"
         as_token = "i_am_an_app_service"
 
@@ -541,7 +560,7 @@ class WhoamiTestCase(unittest.HomeserverTestCase):
         )
         self.assertFalse(hasattr(whoami, "device_id"))
 
-    def _whoami(self, tok):
+    def _whoami(self, tok: str) -> JsonDict:
         channel = self.make_request("GET", "account/whoami", {}, access_token=tok)
         self.assertEqual(channel.code, 200)
         return channel.json_body
@@ -555,7 +574,7 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
         synapse.rest.admin.register_servlets_for_client_rest_resource,
     ]
 
-    def make_homeserver(self, reactor, clock):
+    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
         config = self.default_config()
 
         # Email config.
@@ -576,16 +595,23 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
         self.hs = self.setup_test_homeserver(config=config)
 
         async def sendmail(
-            reactor, smtphost, smtpport, from_addr, to_addrs, msg, **kwargs
-        ):
-            self.email_attempts.append(msg)
-
-        self.email_attempts = []
+            reactor: IReactorTCP,
+            smtphost: str,
+            smtpport: int,
+            from_addr: str,
+            to_addr: str,
+            msg_bytes: bytes,
+            *args: Any,
+            **kwargs: Any,
+        ) -> None:
+            self.email_attempts.append(msg_bytes)
+
+        self.email_attempts: List[bytes] = []
         self.hs.get_send_email_handler()._sendmail = sendmail
 
         return self.hs
 
-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.store = hs.get_datastores().main
 
         self.user_id = self.register_user("kermit", "test")
@@ -593,83 +619,73 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
         self.email = "test@example.com"
         self.url_3pid = b"account/3pid"
 
-    def test_add_valid_email(self):
-        self.get_success(self._add_email(self.email, self.email))
+    def test_add_valid_email(self) -> None:
+        self._add_email(self.email, self.email)
 
-    def test_add_valid_email_second_time(self):
-        self.get_success(self._add_email(self.email, self.email))
-        self.get_success(
-            self._request_token_invalid_email(
-                self.email,
-                expected_errcode=Codes.THREEPID_IN_USE,
-                expected_error="Email is already in use",
-            )
+    def test_add_valid_email_second_time(self) -> None:
+        self._add_email(self.email, self.email)
+        self._request_token_invalid_email(
+            self.email,
+            expected_errcode=Codes.THREEPID_IN_USE,
+            expected_error="Email is already in use",
         )
 
-    def test_add_valid_email_second_time_canonicalise(self):
-        self.get_success(self._add_email(self.email, self.email))
-        self.get_success(
-            self._request_token_invalid_email(
-                "TEST@EXAMPLE.COM",
-                expected_errcode=Codes.THREEPID_IN_USE,
-                expected_error="Email is already in use",
-            )
+    def test_add_valid_email_second_time_canonicalise(self) -> None:
+        self._add_email(self.email, self.email)
+        self._request_token_invalid_email(
+            "TEST@EXAMPLE.COM",
+            expected_errcode=Codes.THREEPID_IN_USE,
+            expected_error="Email is already in use",
         )
 
-    def test_add_email_no_at(self):
-        self.get_success(
-            self._request_token_invalid_email(
-                "address-without-at.bar",
-                expected_errcode=Codes.UNKNOWN,
-                expected_error="Unable to parse email address",
-            )
+    def test_add_email_no_at(self) -> None:
+        self._request_token_invalid_email(
+            "address-without-at.bar",
+            expected_errcode=Codes.UNKNOWN,
+            expected_error="Unable to parse email address",
         )
 
-    def test_add_email_two_at(self):
-        self.get_success(
-            self._request_token_invalid_email(
-                "foo@foo@test.bar",
-                expected_errcode=Codes.UNKNOWN,
-                expected_error="Unable to parse email address",
-            )
+    def test_add_email_two_at(self) -> None:
+        self._request_token_invalid_email(
+            "foo@foo@test.bar",
+            expected_errcode=Codes.UNKNOWN,
+            expected_error="Unable to parse email address",
         )
 
-    def test_add_email_bad_format(self):
-        self.get_success(
-            self._request_token_invalid_email(
-                "user@bad.example.net@good.example.com",
-                expected_errcode=Codes.UNKNOWN,
-                expected_error="Unable to parse email address",
-            )
+    def test_add_email_bad_format(self) -> None:
+        self._request_token_invalid_email(
+            "user@bad.example.net@good.example.com",
+            expected_errcode=Codes.UNKNOWN,
+            expected_error="Unable to parse email address",
         )
 
-    def test_add_email_domain_to_lower(self):
-        self.get_success(self._add_email("foo@TEST.BAR", "foo@test.bar"))
+    def test_add_email_domain_to_lower(self) -> None:
+        self._add_email("foo@TEST.BAR", "foo@test.bar")
 
-    def test_add_email_domain_with_umlaut(self):
-        self.get_success(self._add_email("foo@Öumlaut.com", "foo@öumlaut.com"))
+    def test_add_email_domain_with_umlaut(self) -> None:
+        self._add_email("foo@Öumlaut.com", "foo@öumlaut.com")
 
-    def test_add_email_address_casefold(self):
-        self.get_success(self._add_email("Strauß@Example.com", "strauss@example.com"))
+    def test_add_email_address_casefold(self) -> None:
+        self._add_email("Strauß@Example.com", "strauss@example.com")
 
-    def test_address_trim(self):
-        self.get_success(self._add_email(" foo@test.bar ", "foo@test.bar"))
+    def test_address_trim(self) -> None:
+        self._add_email(" foo@test.bar ", "foo@test.bar")
 
     @override_config({"rc_3pid_validation": {"burst_count": 3}})
-    def test_ratelimit_by_ip(self):
+    def test_ratelimit_by_ip(self) -> None:
         """Tests that adding emails is ratelimited by IP"""
 
         # We expect to be able to set three emails before getting ratelimited.
-        self.get_success(self._add_email("foo1@test.bar", "foo1@test.bar"))
-        self.get_success(self._add_email("foo2@test.bar", "foo2@test.bar"))
-        self.get_success(self._add_email("foo3@test.bar", "foo3@test.bar"))
+        self._add_email("foo1@test.bar", "foo1@test.bar")
+        self._add_email("foo2@test.bar", "foo2@test.bar")
+        self._add_email("foo3@test.bar", "foo3@test.bar")
 
         with self.assertRaises(HttpResponseException) as cm:
-            self.get_success(self._add_email("foo4@test.bar", "foo4@test.bar"))
+            self._add_email("foo4@test.bar", "foo4@test.bar")
 
         self.assertEqual(cm.exception.code, 429)
 
-    def test_add_email_if_disabled(self):
+    def test_add_email_if_disabled(self) -> None:
         """Test adding email to profile when doing so is disallowed"""
         self.hs.config.registration.enable_3pid_changes = False
 
@@ -695,7 +711,7 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
             },
             access_token=self.user_id_tok,
         )
-        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual(400, channel.code, msg=channel.result["body"])
         self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
 
         # Get user
@@ -705,10 +721,10 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
             access_token=self.user_id_tok,
         )
 
-        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual(200, channel.code, msg=channel.result["body"])
         self.assertFalse(channel.json_body["threepids"])
 
-    def test_delete_email(self):
+    def test_delete_email(self) -> None:
         """Test deleting an email from profile"""
         # Add a threepid
         self.get_success(
@@ -727,7 +743,7 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
             {"medium": "email", "address": self.email},
             access_token=self.user_id_tok,
         )
-        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual(200, channel.code, msg=channel.result["body"])
 
         # Get user
         channel = self.make_request(
@@ -736,10 +752,10 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
             access_token=self.user_id_tok,
         )
 
-        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual(200, channel.code, msg=channel.result["body"])
         self.assertFalse(channel.json_body["threepids"])
 
-    def test_delete_email_if_disabled(self):
+    def test_delete_email_if_disabled(self) -> None:
         """Test deleting an email from profile when disallowed"""
         self.hs.config.registration.enable_3pid_changes = False
 
@@ -761,7 +777,7 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
             access_token=self.user_id_tok,
         )
 
-        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual(400, channel.code, msg=channel.result["body"])
         self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
 
         # Get user
@@ -771,11 +787,11 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
             access_token=self.user_id_tok,
         )
 
-        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual(200, channel.code, msg=channel.result["body"])
         self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
         self.assertEqual(self.email, channel.json_body["threepids"][0]["address"])
 
-    def test_cant_add_email_without_clicking_link(self):
+    def test_cant_add_email_without_clicking_link(self) -> None:
         """Test that we do actually need to click the link in the email"""
         client_secret = "foobar"
         session_id = self._request_token(self.email, client_secret)
@@ -797,7 +813,7 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
             },
             access_token=self.user_id_tok,
         )
-        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual(400, channel.code, msg=channel.result["body"])
         self.assertEqual(Codes.THREEPID_AUTH_FAILED, channel.json_body["errcode"])
 
         # Get user
@@ -807,10 +823,10 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
             access_token=self.user_id_tok,
         )
 
-        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual(200, channel.code, msg=channel.result["body"])
         self.assertFalse(channel.json_body["threepids"])
 
-    def test_no_valid_token(self):
+    def test_no_valid_token(self) -> None:
         """Test that we do actually need to request a token and can't just
         make a session up.
         """
@@ -832,7 +848,7 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
             },
             access_token=self.user_id_tok,
         )
-        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual(400, channel.code, msg=channel.result["body"])
         self.assertEqual(Codes.THREEPID_AUTH_FAILED, channel.json_body["errcode"])
 
         # Get user
@@ -842,11 +858,11 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
             access_token=self.user_id_tok,
         )
 
-        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual(200, channel.code, msg=channel.result["body"])
         self.assertFalse(channel.json_body["threepids"])
 
     @override_config({"next_link_domain_whitelist": None})
-    def test_next_link(self):
+    def test_next_link(self) -> None:
         """Tests a valid next_link parameter value with no whitelist (good case)"""
         self._request_token(
             "something@example.com",
@@ -856,7 +872,7 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
         )
 
     @override_config({"next_link_domain_whitelist": None})
-    def test_next_link_exotic_protocol(self):
+    def test_next_link_exotic_protocol(self) -> None:
         """Tests using a esoteric protocol as a next_link parameter value.
         Someone may be hosting a client on IPFS etc.
         """
@@ -868,7 +884,7 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
         )
 
     @override_config({"next_link_domain_whitelist": None})
-    def test_next_link_file_uri(self):
+    def test_next_link_file_uri(self) -> None:
         """Tests next_link parameters cannot be file URI"""
         # Attempt to use a next_link value that points to the local disk
         self._request_token(
@@ -879,7 +895,7 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
         )
 
     @override_config({"next_link_domain_whitelist": ["example.com", "example.org"]})
-    def test_next_link_domain_whitelist(self):
+    def test_next_link_domain_whitelist(self) -> None:
         """Tests next_link parameters must fit the whitelist if provided"""
 
         # Ensure not providing a next_link parameter still works
@@ -912,7 +928,7 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
         )
 
     @override_config({"next_link_domain_whitelist": []})
-    def test_empty_next_link_domain_whitelist(self):
+    def test_empty_next_link_domain_whitelist(self) -> None:
         """Tests an empty next_lint_domain_whitelist value, meaning next_link is essentially
         disallowed
         """
@@ -962,28 +978,28 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
 
     def _request_token_invalid_email(
         self,
-        email,
-        expected_errcode,
-        expected_error,
-        client_secret="foobar",
-    ):
+        email: str,
+        expected_errcode: str,
+        expected_error: str,
+        client_secret: str = "foobar",
+    ) -> None:
         channel = self.make_request(
             "POST",
             b"account/3pid/email/requestToken",
             {"client_secret": client_secret, "email": email, "send_attempt": 1},
         )
-        self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual(400, channel.code, msg=channel.result["body"])
         self.assertEqual(expected_errcode, channel.json_body["errcode"])
         self.assertEqual(expected_error, channel.json_body["error"])
 
-    def _validate_token(self, link):
+    def _validate_token(self, link: str) -> None:
         # Remove the host
         path = link.replace("https://example.com", "")
 
         channel = self.make_request("GET", path, shorthand=False)
         self.assertEqual(200, channel.code, channel.result)
 
-    def _get_link_from_email(self):
+    def _get_link_from_email(self) -> str:
         assert self.email_attempts, "No emails have been sent"
 
         raw_msg = self.email_attempts[-1].decode("UTF-8")
@@ -998,12 +1014,13 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
         if not text:
             self.fail("Could not find text portion of email to parse")
 
+        assert text is not None
         match = re.search(r"https://example.com\S+", text)
         assert match, "Could not find link in email"
 
         return match.group(0)
 
-    def _add_email(self, request_email, expected_email):
+    def _add_email(self, request_email: str, expected_email: str) -> None:
         """Test adding an email to profile"""
         previous_email_attempts = len(self.email_attempts)
 
@@ -1030,7 +1047,7 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
             access_token=self.user_id_tok,
         )
 
-        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual(200, channel.code, msg=channel.result["body"])
 
         # Get user
         channel = self.make_request(
@@ -1039,7 +1056,7 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
             access_token=self.user_id_tok,
         )
 
-        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual(200, channel.code, msg=channel.result["body"])
         self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
 
         threepids = {threepid["address"] for threepid in channel.json_body["threepids"]}
@@ -1055,18 +1072,18 @@ class AccountStatusTestCase(unittest.HomeserverTestCase):
 
     url = "/_matrix/client/unstable/org.matrix.msc3720/account_status"
 
-    def make_homeserver(self, reactor, clock):
+    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
         config = self.default_config()
         config["experimental_features"] = {"msc3720_enabled": True}
 
         return self.setup_test_homeserver(config=config)
 
-    def prepare(self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.requester = self.register_user("requester", "password")
         self.requester_tok = self.login("requester", "password")
-        self.server_name = homeserver.config.server.server_name
+        self.server_name = hs.config.server.server_name
 
-    def test_missing_mxid(self):
+    def test_missing_mxid(self) -> None:
         """Tests that not providing any MXID raises an error."""
         self._test_status(
             users=None,
@@ -1074,7 +1091,7 @@ class AccountStatusTestCase(unittest.HomeserverTestCase):
             expected_errcode=Codes.MISSING_PARAM,
         )
 
-    def test_invalid_mxid(self):
+    def test_invalid_mxid(self) -> None:
         """Tests that providing an invalid MXID raises an error."""
         self._test_status(
             users=["bad:test"],
@@ -1082,7 +1099,7 @@ class AccountStatusTestCase(unittest.HomeserverTestCase):
             expected_errcode=Codes.INVALID_PARAM,
         )
 
-    def test_local_user_not_exists(self):
+    def test_local_user_not_exists(self) -> None:
         """Tests that the account status endpoints correctly reports that a user doesn't
         exist.
         """
@@ -1098,7 +1115,7 @@ class AccountStatusTestCase(unittest.HomeserverTestCase):
             expected_failures=[],
         )
 
-    def test_local_user_exists(self):
+    def test_local_user_exists(self) -> None:
         """Tests that the account status endpoint correctly reports that a user doesn't
         exist.
         """
@@ -1115,7 +1132,7 @@ class AccountStatusTestCase(unittest.HomeserverTestCase):
             expected_failures=[],
         )
 
-    def test_local_user_deactivated(self):
+    def test_local_user_deactivated(self) -> None:
         """Tests that the account status endpoint correctly reports a deactivated user."""
         user = self.register_user("someuser", "password")
         self.get_success(
@@ -1135,7 +1152,7 @@ class AccountStatusTestCase(unittest.HomeserverTestCase):
             expected_failures=[],
         )
 
-    def test_mixed_local_and_remote_users(self):
+    def test_mixed_local_and_remote_users(self) -> None:
         """Tests that if some users are remote the account status endpoint correctly
         merges the remote responses with the local result.
         """
@@ -1150,7 +1167,13 @@ class AccountStatusTestCase(unittest.HomeserverTestCase):
             "@bad:badremote",
         ]
 
-        async def post_json(destination, path, data, *a, **kwa):
+        async def post_json(
+            destination: str,
+            path: str,
+            data: Optional[JsonDict] = None,
+            *a: Any,
+            **kwa: Any,
+        ) -> Union[JsonDict, list]:
             if destination == "remote":
                 return {
                     "account_statuses": {
@@ -1160,9 +1183,7 @@ class AccountStatusTestCase(unittest.HomeserverTestCase):
                         },
                     }
                 }
-            if destination == "otherremote":
-                return {}
-            if destination == "badremote":
+            elif destination == "badremote":
                 # badremote tries to overwrite the status of a user that doesn't belong
                 # to it (i.e. users[1]) with false data, which Synapse is expected to
                 # ignore.
@@ -1176,6 +1197,9 @@ class AccountStatusTestCase(unittest.HomeserverTestCase):
                         },
                     }
                 }
+            # if destination == "otherremote"
+            else:
+                return {}
 
         # Register a mock that will return the expected result depending on the remote.
         self.hs.get_federation_http_client().post_json = Mock(side_effect=post_json)
@@ -1205,7 +1229,7 @@ class AccountStatusTestCase(unittest.HomeserverTestCase):
         expected_statuses: Optional[Dict[str, Dict[str, bool]]] = None,
         expected_failures: Optional[List[str]] = None,
         expected_errcode: Optional[str] = None,
-    ):
+    ) -> None:
         """Send a request to the account status endpoint and check that the response
         matches with what's expected.
 
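One detail of the test changes above worth noting: `Mock(side_effect=post_json)` routes every call through the typed stub while still recording the call for later assertions. In isolation the mechanism looks like this (toy names, synchronous for brevity):

    from unittest.mock import Mock

    def fake_post_json(destination: str, path: str) -> dict:
        return {"destination": destination, "path": path}

    post_json = Mock(side_effect=fake_post_json)

    print(post_json("remote", "/status"))  # the stub's return value is passed through
    post_json.assert_called_once_with("remote", "/status")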
diff --git a/tests/rest/client/test_filter.py b/tests/rest/client/test_filter.py
index 5c31a54421..823e8ab8c4 100644
--- a/tests/rest/client/test_filter.py
+++ b/tests/rest/client/test_filter.py
@@ -12,10 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from twisted.internet import defer
+from twisted.test.proto_helpers import MemoryReactor
 
 from synapse.api.errors import Codes
 from synapse.rest.client import filter
+from synapse.server import HomeServer
+from synapse.util import Clock
 
 from tests import unittest
 
@@ -30,11 +32,11 @@ class FilterTestCase(unittest.HomeserverTestCase):
     EXAMPLE_FILTER_JSON = b'{"room": {"timeline": {"types": ["m.room.message"]}}}'
     servlets = [filter.register_servlets]
 
-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.filtering = hs.get_filtering()
         self.store = hs.get_datastores().main
 
-    def test_add_filter(self):
+    def test_add_filter(self) -> None:
         channel = self.make_request(
             "POST",
             "/_matrix/client/r0/user/%s/filter" % (self.user_id),
@@ -43,11 +45,13 @@ class FilterTestCase(unittest.HomeserverTestCase):
 
         self.assertEqual(channel.result["code"], b"200")
         self.assertEqual(channel.json_body, {"filter_id": "0"})
-        filter = self.store.get_user_filter(user_localpart="apple", filter_id=0)
+        filter = self.get_success(
+            self.store.get_user_filter(user_localpart="apple", filter_id=0)
+        )
         self.pump()
-        self.assertEqual(filter.result, self.EXAMPLE_FILTER)
+        self.assertEqual(filter, self.EXAMPLE_FILTER)
 
-    def test_add_filter_for_other_user(self):
+    def test_add_filter_for_other_user(self) -> None:
         channel = self.make_request(
             "POST",
             "/_matrix/client/r0/user/%s/filter" % ("@watermelon:test"),
@@ -57,7 +61,7 @@ class FilterTestCase(unittest.HomeserverTestCase):
         self.assertEqual(channel.result["code"], b"403")
         self.assertEqual(channel.json_body["errcode"], Codes.FORBIDDEN)
 
-    def test_add_filter_non_local_user(self):
+    def test_add_filter_non_local_user(self) -> None:
         _is_mine = self.hs.is_mine
         self.hs.is_mine = lambda target_user: False
         channel = self.make_request(
@@ -70,14 +74,13 @@ class FilterTestCase(unittest.HomeserverTestCase):
         self.assertEqual(channel.result["code"], b"403")
         self.assertEqual(channel.json_body["errcode"], Codes.FORBIDDEN)
 
-    def test_get_filter(self):
-        filter_id = defer.ensureDeferred(
+    def test_get_filter(self) -> None:
+        filter_id = self.get_success(
             self.filtering.add_user_filter(
                 user_localpart="apple", user_filter=self.EXAMPLE_FILTER
             )
         )
         self.reactor.advance(1)
-        filter_id = filter_id.result
         channel = self.make_request(
             "GET", "/_matrix/client/r0/user/%s/filter/%s" % (self.user_id, filter_id)
         )
@@ -85,7 +88,7 @@ class FilterTestCase(unittest.HomeserverTestCase):
         self.assertEqual(channel.result["code"], b"200")
         self.assertEqual(channel.json_body, self.EXAMPLE_FILTER)
 
-    def test_get_filter_non_existant(self):
+    def test_get_filter_non_existant(self) -> None:
         channel = self.make_request(
             "GET", "/_matrix/client/r0/user/%s/filter/12382148321" % (self.user_id)
         )
@@ -95,7 +98,7 @@ class FilterTestCase(unittest.HomeserverTestCase):
 
     # Currently invalid params do not have an appropriate errcode
     # in errors.py
-    def test_get_filter_invalid_id(self):
+    def test_get_filter_invalid_id(self) -> None:
         channel = self.make_request(
             "GET", "/_matrix/client/r0/user/%s/filter/foobar" % (self.user_id)
         )
@@ -103,7 +106,7 @@ class FilterTestCase(unittest.HomeserverTestCase):
         self.assertEqual(channel.result["code"], b"400")
 
     # No ID also returns an invalid_id error
-    def test_get_filter_no_id(self):
+    def test_get_filter_no_id(self) -> None:
         channel = self.make_request(
             "GET", "/_matrix/client/r0/user/%s/filter/" % (self.user_id)
         )
diff --git a/tests/rest/client/test_relations.py b/tests/rest/client/test_relations.py
index c8db45719e..709f851a38 100644
--- a/tests/rest/client/test_relations.py
+++ b/tests/rest/client/test_relations.py
@@ -15,7 +15,7 @@
 
 import itertools
 import urllib.parse
-from typing import Dict, List, Optional, Tuple
+from typing import Any, Dict, List, Optional, Tuple
 from unittest.mock import patch
 
 from twisted.test.proto_helpers import MemoryReactor
@@ -34,7 +34,7 @@ from tests.test_utils import make_awaitable
 from tests.test_utils.event_injection import inject_event
 
 
-class RelationsTestCase(unittest.HomeserverTestCase):
+class BaseRelationsTestCase(unittest.HomeserverTestCase):
     servlets = [
         relations.register_servlets,
         room.register_servlets,
@@ -45,10 +45,9 @@ class RelationsTestCase(unittest.HomeserverTestCase):
     ]
     hijack_auth = False
 
-    def default_config(self) -> dict:
+    def default_config(self) -> Dict[str, Any]:
         # We need to enable msc1849 support for aggregations
         config = super().default_config()
-        config["experimental_msc1849_support_enabled"] = True
 
         # We enable frozen dicts as relations/edits change event contents, so we
         # want to test that we don't modify the events in the caches.
@@ -67,10 +66,62 @@ class RelationsTestCase(unittest.HomeserverTestCase):
         res = self.helper.send(self.room, body="Hi!", tok=self.user_token)
         self.parent_id = res["event_id"]
 
-    def test_send_relation(self) -> None:
-        """Tests that sending a relation using the new /send_relation works
-        creates the right shape of event.
+    def _create_user(self, localpart: str) -> Tuple[str, str]:
+        user_id = self.register_user(localpart, "abc123")
+        access_token = self.login(localpart, "abc123")
+
+        return user_id, access_token
+
+    def _send_relation(
+        self,
+        relation_type: str,
+        event_type: str,
+        key: Optional[str] = None,
+        content: Optional[dict] = None,
+        access_token: Optional[str] = None,
+        parent_id: Optional[str] = None,
+    ) -> FakeChannel:
+        """Helper function to send a relation pointing at `self.parent_id`
+
+        Args:
+            relation_type: One of `RelationTypes`
+            event_type: The type of the event to create
+            key: The aggregation key used for m.annotation relation type.
+            content: The content of the created event. Will be modified to configure
+                the m.relates_to key based on the other provided parameters.
+            access_token: The access token used to send the relation, defaults
+                to `self.user_token`
+            parent_id: The event_id this relation relates to. If None, then self.parent_id
+
+        Returns:
+            FakeChannel
         """
+        if not access_token:
+            access_token = self.user_token
+
+        original_id = parent_id if parent_id else self.parent_id
+
+        if content is None:
+            content = {}
+        content["m.relates_to"] = {
+            "event_id": original_id,
+            "rel_type": relation_type,
+        }
+        if key is not None:
+            content["m.relates_to"]["key"] = key
+
+        channel = self.make_request(
+            "POST",
+            f"/_matrix/client/v3/rooms/{self.room}/send/{event_type}",
+            content,
+            access_token=access_token,
+        )
+        return channel
+
+
+class RelationsTestCase(BaseRelationsTestCase):
+    def test_send_relation(self) -> None:
+        """Tests that sending a relation works."""
 
         channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", key="👍")
         self.assertEqual(200, channel.code, channel.json_body)
@@ -79,7 +130,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
 
         channel = self.make_request(
             "GET",
-            "/rooms/%s/event/%s" % (self.room, event_id),
+            f"/rooms/{self.room}/event/{event_id}",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -317,9 +368,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
 
         # Request /sync, limiting it such that only the latest event is returned
         # (and not the relation).
-        filter = urllib.parse.quote_plus(
-            '{"room": {"timeline": {"limit": 1}}}'.encode()
-        )
+        filter = urllib.parse.quote_plus(b'{"room": {"timeline": {"limit": 1}}}')
         channel = self.make_request(
             "GET", f"/sync?filter={filter}", access_token=self.user_token
         )
@@ -404,8 +453,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
 
             channel = self.make_request(
                 "GET",
-                "/_matrix/client/unstable/rooms/%s/aggregations/%s?limit=1%s"
-                % (self.room, self.parent_id, from_token),
+                f"/_matrix/client/unstable/rooms/{self.room}/aggregations/{self.parent_id}?limit=1{from_token}",
                 access_token=self.user_token,
             )
             self.assertEqual(200, channel.code, channel.json_body)
@@ -544,8 +592,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
 
         channel = self.make_request(
             "GET",
-            "/_matrix/client/unstable/rooms/%s/aggregations/%s"
-            % (self.room, self.parent_id),
+            f"/_matrix/client/unstable/rooms/{self.room}/aggregations/{self.parent_id}",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -560,47 +607,13 @@ class RelationsTestCase(unittest.HomeserverTestCase):
             },
         )
 
-    def test_aggregation_redactions(self) -> None:
-        """Test that annotations get correctly aggregated after a redaction."""
-
-        channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", "a")
-        self.assertEqual(200, channel.code, channel.json_body)
-        to_redact_event_id = channel.json_body["event_id"]
-
-        channel = self._send_relation(
-            RelationTypes.ANNOTATION, "m.reaction", "a", access_token=self.user2_token
-        )
-        self.assertEqual(200, channel.code, channel.json_body)
-
-        # Now lets redact one of the 'a' reactions
-        channel = self.make_request(
-            "POST",
-            "/_matrix/client/r0/rooms/%s/redact/%s" % (self.room, to_redact_event_id),
-            access_token=self.user_token,
-            content={},
-        )
-        self.assertEqual(200, channel.code, channel.json_body)
-
-        channel = self.make_request(
-            "GET",
-            "/_matrix/client/unstable/rooms/%s/aggregations/%s"
-            % (self.room, self.parent_id),
-            access_token=self.user_token,
-        )
-        self.assertEqual(200, channel.code, channel.json_body)
-
-        self.assertEqual(
-            channel.json_body,
-            {"chunk": [{"type": "m.reaction", "key": "a", "count": 1}]},
-        )
-
     def test_aggregation_must_be_annotation(self) -> None:
         """Test that aggregations must be annotations."""
 
         channel = self.make_request(
             "GET",
-            "/_matrix/client/unstable/rooms/%s/aggregations/%s/%s?limit=1"
-            % (self.room, self.parent_id, RelationTypes.REPLACE),
+            f"/_matrix/client/unstable/rooms/{self.room}/aggregations"
+            f"/{self.parent_id}/{RelationTypes.REPLACE}?limit=1",
             access_token=self.user_token,
         )
         self.assertEqual(400, channel.code, channel.json_body)
@@ -986,9 +999,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
 
         # Request sync, but limit the timeline so it becomes limited (and includes
         # bundled aggregations).
-        filter = urllib.parse.quote_plus(
-            '{"room": {"timeline": {"limit": 2}}}'.encode()
-        )
+        filter = urllib.parse.quote_plus(b'{"room": {"timeline": {"limit": 2}}}')
         channel = self.make_request(
             "GET", f"/sync?filter={filter}", access_token=self.user_token
         )
@@ -1053,7 +1064,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
 
         channel = self.make_request(
             "GET",
-            "/rooms/%s/event/%s" % (self.room, self.parent_id),
+            f"/rooms/{self.room}/event/{self.parent_id}",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -1096,7 +1107,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
 
         channel = self.make_request(
             "GET",
-            "/rooms/%s/event/%s" % (self.room, reply),
+            f"/rooms/{self.room}/event/{reply}",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -1198,7 +1209,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
         # Request the original event.
         channel = self.make_request(
             "GET",
-            "/rooms/%s/event/%s" % (self.room, self.parent_id),
+            f"/rooms/{self.room}/event/{self.parent_id}",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -1217,102 +1228,6 @@ class RelationsTestCase(unittest.HomeserverTestCase):
             {"event_id": edit_event_id, "sender": self.user_id}, m_replace_dict
         )
 
-    def test_relations_redaction_redacts_edits(self) -> None:
-        """Test that edits of an event are redacted when the original event
-        is redacted.
-        """
-        # Send a new event
-        res = self.helper.send(self.room, body="Heyo!", tok=self.user_token)
-        original_event_id = res["event_id"]
-
-        # Add a relation
-        channel = self._send_relation(
-            RelationTypes.REPLACE,
-            "m.room.message",
-            parent_id=original_event_id,
-            content={
-                "msgtype": "m.text",
-                "body": "Wibble",
-                "m.new_content": {"msgtype": "m.text", "body": "First edit"},
-            },
-        )
-        self.assertEqual(200, channel.code, channel.json_body)
-
-        # Check the relation is returned
-        channel = self.make_request(
-            "GET",
-            "/_matrix/client/unstable/rooms/%s/relations/%s/m.replace/m.room.message"
-            % (self.room, original_event_id),
-            access_token=self.user_token,
-        )
-        self.assertEqual(200, channel.code, channel.json_body)
-
-        self.assertIn("chunk", channel.json_body)
-        self.assertEqual(len(channel.json_body["chunk"]), 1)
-
-        # Redact the original event
-        channel = self.make_request(
-            "PUT",
-            "/rooms/%s/redact/%s/%s"
-            % (self.room, original_event_id, "test_relations_redaction_redacts_edits"),
-            access_token=self.user_token,
-            content="{}",
-        )
-        self.assertEqual(200, channel.code, channel.json_body)
-
-        # Try to check for remaining m.replace relations
-        channel = self.make_request(
-            "GET",
-            "/_matrix/client/unstable/rooms/%s/relations/%s/m.replace/m.room.message"
-            % (self.room, original_event_id),
-            access_token=self.user_token,
-        )
-        self.assertEqual(200, channel.code, channel.json_body)
-
-        # Check that no relations are returned
-        self.assertIn("chunk", channel.json_body)
-        self.assertEqual(channel.json_body["chunk"], [])
-
-    def test_aggregations_redaction_prevents_access_to_aggregations(self) -> None:
-        """Test that annotations of an event are redacted when the original event
-        is redacted.
-        """
-        # Send a new event
-        res = self.helper.send(self.room, body="Hello!", tok=self.user_token)
-        original_event_id = res["event_id"]
-
-        # Add a relation
-        channel = self._send_relation(
-            RelationTypes.ANNOTATION, "m.reaction", key="👍", parent_id=original_event_id
-        )
-        self.assertEqual(200, channel.code, channel.json_body)
-
-        # Redact the original
-        channel = self.make_request(
-            "PUT",
-            "/rooms/%s/redact/%s/%s"
-            % (
-                self.room,
-                original_event_id,
-                "test_aggregations_redaction_prevents_access_to_aggregations",
-            ),
-            access_token=self.user_token,
-            content="{}",
-        )
-        self.assertEqual(200, channel.code, channel.json_body)
-
-        # Check that aggregations returns zero
-        channel = self.make_request(
-            "GET",
-            "/_matrix/client/unstable/rooms/%s/aggregations/%s/m.annotation/m.reaction"
-            % (self.room, original_event_id),
-            access_token=self.user_token,
-        )
-        self.assertEqual(200, channel.code, channel.json_body)
-
-        self.assertIn("chunk", channel.json_body)
-        self.assertEqual(channel.json_body["chunk"], [])
-
     def test_unknown_relations(self) -> None:
         """Unknown relations should be accepted."""
         channel = self._send_relation("m.relation.test", "m.room.test")
@@ -1321,8 +1236,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
 
         channel = self.make_request(
             "GET",
-            "/_matrix/client/unstable/rooms/%s/relations/%s?limit=1"
-            % (self.room, self.parent_id),
+            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=1",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -1343,7 +1257,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
         # When bundling the unknown relation is not included.
         channel = self.make_request(
             "GET",
-            "/rooms/%s/event/%s" % (self.room, self.parent_id),
+            f"/rooms/{self.room}/event/{self.parent_id}",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -1352,8 +1266,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
         # But unknown relations can be directly queried.
         channel = self.make_request(
             "GET",
-            "/_matrix/client/unstable/rooms/%s/aggregations/%s?limit=1"
-            % (self.room, self.parent_id),
+            f"/_matrix/client/unstable/rooms/{self.room}/aggregations/{self.parent_id}?limit=1",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -1369,58 +1282,6 @@ class RelationsTestCase(unittest.HomeserverTestCase):
 
         raise AssertionError(f"Event {self.parent_id} not found in chunk")
 
-    def _send_relation(
-        self,
-        relation_type: str,
-        event_type: str,
-        key: Optional[str] = None,
-        content: Optional[dict] = None,
-        access_token: Optional[str] = None,
-        parent_id: Optional[str] = None,
-    ) -> FakeChannel:
-        """Helper function to send a relation pointing at `self.parent_id`
-
-        Args:
-            relation_type: One of `RelationTypes`
-            event_type: The type of the event to create
-            key: The aggregation key used for m.annotation relation type.
-            content: The content of the created event. Will be modified to configure
-                the m.relates_to key based on the other provided parameters.
-            access_token: The access token used to send the relation, defaults
-                to `self.user_token`
-            parent_id: The event_id this relation relates to. If None, then self.parent_id
-
-        Returns:
-            FakeChannel
-        """
-        if not access_token:
-            access_token = self.user_token
-
-        original_id = parent_id if parent_id else self.parent_id
-
-        if content is None:
-            content = {}
-        content["m.relates_to"] = {
-            "event_id": original_id,
-            "rel_type": relation_type,
-        }
-        if key is not None:
-            content["m.relates_to"]["key"] = key
-
-        channel = self.make_request(
-            "POST",
-            f"/_matrix/client/v3/rooms/{self.room}/send/{event_type}",
-            content,
-            access_token=access_token,
-        )
-        return channel
-
-    def _create_user(self, localpart: str) -> Tuple[str, str]:
-        user_id = self.register_user(localpart, "abc123")
-        access_token = self.login(localpart, "abc123")
-
-        return user_id, access_token
-
     def test_background_update(self) -> None:
         """Test the event_arbitrary_relations background update."""
         channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", key="👍")
@@ -1482,3 +1343,112 @@ class RelationsTestCase(unittest.HomeserverTestCase):
             [ev["event_id"] for ev in channel.json_body["chunk"]],
             [annotation_event_id_good, thread_event_id],
         )
+
+
+class RelationRedactionTestCase(BaseRelationsTestCase):
+    """Test the behaviour of relations when the parent or child event is redacted."""
+
+    def _redact(self, event_id: str) -> None:
+        channel = self.make_request(
+            "POST",
+            f"/_matrix/client/r0/rooms/{self.room}/redact/{event_id}",
+            access_token=self.user_token,
+            content={},
+        )
+        self.assertEqual(200, channel.code, channel.json_body)
+
+    def test_redact_relation_annotation(self) -> None:
+        """Test that annotations of an event are properly handled after the
+        annotation is redacted.
+        """
+        channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", "a")
+        self.assertEqual(200, channel.code, channel.json_body)
+        to_redact_event_id = channel.json_body["event_id"]
+
+        channel = self._send_relation(
+            RelationTypes.ANNOTATION, "m.reaction", "a", access_token=self.user2_token
+        )
+        self.assertEqual(200, channel.code, channel.json_body)
+
+        # Redact one of the reactions.
+        self._redact(to_redact_event_id)
+
+        # Ensure that the aggregations are correct.
+        channel = self.make_request(
+            "GET",
+            f"/_matrix/client/unstable/rooms/{self.room}/aggregations/{self.parent_id}",
+            access_token=self.user_token,
+        )
+        self.assertEqual(200, channel.code, channel.json_body)
+
+        self.assertEqual(
+            channel.json_body,
+            {"chunk": [{"type": "m.reaction", "key": "a", "count": 1}]},
+        )
+
+    def test_redact_relation_edit(self) -> None:
+        """Test that edits of an event are redacted when the original event
+        is redacted.
+        """
+        # Add a relation
+        channel = self._send_relation(
+            RelationTypes.REPLACE,
+            "m.room.message",
+            parent_id=self.parent_id,
+            content={
+                "msgtype": "m.text",
+                "body": "Wibble",
+                "m.new_content": {"msgtype": "m.text", "body": "First edit"},
+            },
+        )
+        self.assertEqual(200, channel.code, channel.json_body)
+
+        # Check the relation is returned
+        channel = self.make_request(
+            "GET",
+            f"/_matrix/client/unstable/rooms/{self.room}/relations"
+            f"/{self.parent_id}/m.replace/m.room.message",
+            access_token=self.user_token,
+        )
+        self.assertEqual(200, channel.code, channel.json_body)
+
+        self.assertIn("chunk", channel.json_body)
+        self.assertEqual(len(channel.json_body["chunk"]), 1)
+
+        # Redact the original event
+        self._redact(self.parent_id)
+
+        # Try to check for remaining m.replace relations
+        channel = self.make_request(
+            "GET",
+            f"/_matrix/client/unstable/rooms/{self.room}/relations"
+            f"/{self.parent_id}/m.replace/m.room.message",
+            access_token=self.user_token,
+        )
+        self.assertEqual(200, channel.code, channel.json_body)
+
+        # Check that no relations are returned
+        self.assertIn("chunk", channel.json_body)
+        self.assertEqual(channel.json_body["chunk"], [])
+
+    def test_redact_parent(self) -> None:
+        """Test that annotations of an event are redacted when the original event
+        is redacted.
+        """
+        # Add a relation
+        channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", key="👍")
+        self.assertEqual(200, channel.code, channel.json_body)
+
+        # Redact the original event.
+        self._redact(self.parent_id)
+
+        # Check that aggregations returns zero
+        channel = self.make_request(
+            "GET",
+            f"/_matrix/client/unstable/rooms/{self.room}/aggregations/{self.parent_id}/m.annotation/m.reaction",
+            access_token=self.user_token,
+        )
+        self.assertEqual(200, channel.code, channel.json_body)
+
+        self.assertIn("chunk", channel.json_body)
+        self.assertEqual(channel.json_body["chunk"], [])
diff --git a/tests/rest/client/test_report_event.py b/tests/rest/client/test_report_event.py
index ee6b0b9ebf..20a259fc43 100644
--- a/tests/rest/client/test_report_event.py
+++ b/tests/rest/client/test_report_event.py
@@ -14,8 +14,13 @@
 
 import json
 
+from twisted.test.proto_helpers import MemoryReactor
+
 import synapse.rest.admin
 from synapse.rest.client import login, report_event, room
+from synapse.server import HomeServer
+from synapse.types import JsonDict
+from synapse.util import Clock
 
 from tests import unittest
 
@@ -28,7 +33,7 @@ class ReportEventTestCase(unittest.HomeserverTestCase):
         report_event.register_servlets,
     ]
 
-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.admin_user = self.register_user("admin", "pass", admin=True)
         self.admin_user_tok = self.login("admin", "pass")
         self.other_user = self.register_user("user", "pass")
@@ -42,35 +47,35 @@ class ReportEventTestCase(unittest.HomeserverTestCase):
         self.event_id = resp["event_id"]
         self.report_path = f"rooms/{self.room_id}/report/{self.event_id}"
 
-    def test_reason_str_and_score_int(self):
+    def test_reason_str_and_score_int(self) -> None:
         data = {"reason": "this makes me sad", "score": -100}
         self._assert_status(200, data)
 
-    def test_no_reason(self):
+    def test_no_reason(self) -> None:
         data = {"score": 0}
         self._assert_status(200, data)
 
-    def test_no_score(self):
+    def test_no_score(self) -> None:
         data = {"reason": "this makes me sad"}
         self._assert_status(200, data)
 
-    def test_no_reason_and_no_score(self):
-        data = {}
+    def test_no_reason_and_no_score(self) -> None:
+        data: JsonDict = {}
         self._assert_status(200, data)
 
-    def test_reason_int_and_score_str(self):
+    def test_reason_int_and_score_str(self) -> None:
         data = {"reason": 10, "score": "string"}
         self._assert_status(400, data)
 
-    def test_reason_zero_and_score_blank(self):
+    def test_reason_zero_and_score_blank(self) -> None:
         data = {"reason": 0, "score": ""}
         self._assert_status(400, data)
 
-    def test_reason_and_score_null(self):
+    def test_reason_and_score_null(self) -> None:
         data = {"reason": None, "score": None}
         self._assert_status(400, data)
 
-    def _assert_status(self, response_status, data):
+    def _assert_status(self, response_status: int, data: JsonDict) -> None:
         channel = self.make_request(
             "POST",
             self.report_path,
diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py
index e0b11e7264..37866ee330 100644
--- a/tests/rest/client/test_rooms.py
+++ b/tests/rest/client/test_rooms.py
@@ -18,11 +18,12 @@
 """Tests REST events for /rooms paths."""
 
 import json
-from typing import Iterable, List
+from typing import Any, Dict, Iterable, List, Optional
 from unittest.mock import Mock, call
 from urllib import parse as urlparse
 
 from twisted.internet import defer
+from twisted.test.proto_helpers import MemoryReactor
 
 import synapse.rest.admin
 from synapse.api.constants import (
@@ -35,7 +36,9 @@ from synapse.api.errors import Codes, HttpResponseException
 from synapse.handlers.pagination import PurgeStatus
 from synapse.rest import admin
 from synapse.rest.client import account, directory, login, profile, room, sync
+from synapse.server import HomeServer
 from synapse.types import JsonDict, RoomAlias, UserID, create_requester
+from synapse.util import Clock
 from synapse.util.stringutils import random_string
 
 from tests import unittest
@@ -45,11 +48,11 @@ PATH_PREFIX = b"/_matrix/client/api/v1"
 
 
 class RoomBase(unittest.HomeserverTestCase):
-    rmcreator_id = None
+    rmcreator_id: Optional[str] = None
 
     servlets = [room.register_servlets, room.register_deprecated_servlets]
 
-    def make_homeserver(self, reactor, clock):
+    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
 
         self.hs = self.setup_test_homeserver(
             "red",
@@ -57,15 +60,15 @@ class RoomBase(unittest.HomeserverTestCase):
             federation_client=Mock(),
         )
 
-        self.hs.get_federation_handler = Mock()
+        self.hs.get_federation_handler = Mock()  # type: ignore[assignment]
         self.hs.get_federation_handler.return_value.maybe_backfill = Mock(
             return_value=make_awaitable(None)
         )
 
-        async def _insert_client_ip(*args, **kwargs):
+        async def _insert_client_ip(*args: Any, **kwargs: Any) -> None:
             return None
 
-        self.hs.get_datastores().main.insert_client_ip = _insert_client_ip
+        self.hs.get_datastores().main.insert_client_ip = _insert_client_ip  # type: ignore[assignment]
 
         return self.hs
 
@@ -76,7 +79,7 @@ class RoomPermissionsTestCase(RoomBase):
     user_id = "@sid1:red"
     rmcreator_id = "@notme:red"
 
-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
 
         self.helper.auth_user_id = self.rmcreator_id
         # create some rooms under the name rmcreator_id
@@ -108,12 +111,12 @@ class RoomPermissionsTestCase(RoomBase):
         # auth as user_id now
         self.helper.auth_user_id = self.user_id
 
-    def test_can_do_action(self):
+    def test_can_do_action(self) -> None:
         msg_content = b'{"msgtype":"m.text","body":"hello"}'
 
         seq = iter(range(100))
 
-        def send_msg_path():
+        def send_msg_path() -> str:
             return "/rooms/%s/send/m.room.message/mid%s" % (
                 self.created_rmid,
                 str(next(seq)),
@@ -148,7 +151,7 @@ class RoomPermissionsTestCase(RoomBase):
         channel = self.make_request("PUT", send_msg_path(), msg_content)
         self.assertEqual(403, channel.code, msg=channel.result["body"])
 
-    def test_topic_perms(self):
+    def test_topic_perms(self) -> None:
         topic_content = b'{"topic":"My Topic Name"}'
         topic_path = "/rooms/%s/state/m.room.topic" % self.created_rmid
 
@@ -214,14 +217,14 @@ class RoomPermissionsTestCase(RoomBase):
         self.assertEqual(403, channel.code, msg=channel.result["body"])
 
     def _test_get_membership(
-        self, room=None, members: Iterable = frozenset(), expect_code=None
-    ):
+        self, room: str, members: Iterable = frozenset(), expect_code: int = 200
+    ) -> None:
         for member in members:
             path = "/rooms/%s/state/m.room.member/%s" % (room, member)
             channel = self.make_request("GET", path)
             self.assertEqual(expect_code, channel.code)
 
-    def test_membership_basic_room_perms(self):
+    def test_membership_basic_room_perms(self) -> None:
         # === room does not exist ===
         room = self.uncreated_rmid
         # get membership of self, get membership of other, uncreated room
@@ -241,7 +244,7 @@ class RoomPermissionsTestCase(RoomBase):
             self.helper.join(room=room, user=usr, expect_code=404)
             self.helper.leave(room=room, user=usr, expect_code=404)
 
-    def test_membership_private_room_perms(self):
+    def test_membership_private_room_perms(self) -> None:
         room = self.created_rmid
         # get membership of self, get membership of other, private room + invite
         # expect all 403s
@@ -264,7 +267,7 @@ class RoomPermissionsTestCase(RoomBase):
             members=[self.user_id, self.rmcreator_id], room=room, expect_code=200
         )
 
-    def test_membership_public_room_perms(self):
+    def test_membership_public_room_perms(self) -> None:
         room = self.created_public_rmid
         # get membership of self, get membership of other, public room + invite
         # expect 403
@@ -287,7 +290,7 @@ class RoomPermissionsTestCase(RoomBase):
             members=[self.user_id, self.rmcreator_id], room=room, expect_code=200
         )
 
-    def test_invited_permissions(self):
+    def test_invited_permissions(self) -> None:
         room = self.created_rmid
         self.helper.invite(room=room, src=self.rmcreator_id, targ=self.user_id)
 
@@ -310,7 +313,7 @@ class RoomPermissionsTestCase(RoomBase):
             expect_code=403,
         )
 
-    def test_joined_permissions(self):
+    def test_joined_permissions(self) -> None:
         room = self.created_rmid
         self.helper.invite(room=room, src=self.rmcreator_id, targ=self.user_id)
         self.helper.join(room=room, user=self.user_id)
@@ -348,7 +351,7 @@ class RoomPermissionsTestCase(RoomBase):
         # set left of self, expect 200
         self.helper.leave(room=room, user=self.user_id)
 
-    def test_leave_permissions(self):
+    def test_leave_permissions(self) -> None:
         room = self.created_rmid
         self.helper.invite(room=room, src=self.rmcreator_id, targ=self.user_id)
         self.helper.join(room=room, user=self.user_id)
@@ -383,7 +386,7 @@ class RoomPermissionsTestCase(RoomBase):
         )
 
     # tests the "from banned" line from the table in https://spec.matrix.org/unstable/client-server-api/#mroommember
-    def test_member_event_from_ban(self):
+    def test_member_event_from_ban(self) -> None:
         room = self.created_rmid
         self.helper.invite(room=room, src=self.rmcreator_id, targ=self.user_id)
         self.helper.join(room=room, user=self.user_id)
@@ -475,21 +478,21 @@ class RoomsMemberListTestCase(RoomBase):
 
     user_id = "@sid1:red"
 
-    def test_get_member_list(self):
+    def test_get_member_list(self) -> None:
         room_id = self.helper.create_room_as(self.user_id)
         channel = self.make_request("GET", "/rooms/%s/members" % room_id)
         self.assertEqual(200, channel.code, msg=channel.result["body"])
 
-    def test_get_member_list_no_room(self):
+    def test_get_member_list_no_room(self) -> None:
         channel = self.make_request("GET", "/rooms/roomdoesnotexist/members")
         self.assertEqual(403, channel.code, msg=channel.result["body"])
 
-    def test_get_member_list_no_permission(self):
+    def test_get_member_list_no_permission(self) -> None:
         room_id = self.helper.create_room_as("@some_other_guy:red")
         channel = self.make_request("GET", "/rooms/%s/members" % room_id)
         self.assertEqual(403, channel.code, msg=channel.result["body"])
 
-    def test_get_member_list_no_permission_with_at_token(self):
+    def test_get_member_list_no_permission_with_at_token(self) -> None:
         """
         Tests that a stranger to the room cannot get the member list
         (in the case that they use an at token).
@@ -509,7 +512,7 @@ class RoomsMemberListTestCase(RoomBase):
         )
         self.assertEqual(403, channel.code, msg=channel.result["body"])
 
-    def test_get_member_list_no_permission_former_member(self):
+    def test_get_member_list_no_permission_former_member(self) -> None:
         """
         Tests that a former member of the room can not get the member list.
         """
@@ -529,7 +532,7 @@ class RoomsMemberListTestCase(RoomBase):
         channel = self.make_request("GET", "/rooms/%s/members" % room_id)
         self.assertEqual(403, channel.code, msg=channel.result["body"])
 
-    def test_get_member_list_no_permission_former_member_with_at_token(self):
+    def test_get_member_list_no_permission_former_member_with_at_token(self) -> None:
         """
         Tests that a former member of the room can not get the member list
         (in the case that they use an at token).
@@ -569,7 +572,7 @@ class RoomsMemberListTestCase(RoomBase):
         )
         self.assertEqual(403, channel.code, msg=channel.result["body"])
 
-    def test_get_member_list_mixed_memberships(self):
+    def test_get_member_list_mixed_memberships(self) -> None:
         room_creator = "@some_other_guy:red"
         room_id = self.helper.create_room_as(room_creator)
         room_path = "/rooms/%s/members" % room_id
@@ -594,26 +597,26 @@ class RoomsCreateTestCase(RoomBase):
 
     user_id = "@sid1:red"
 
-    def test_post_room_no_keys(self):
+    def test_post_room_no_keys(self) -> None:
         # POST with no config keys, expect new room id
         channel = self.make_request("POST", "/createRoom", "{}")
 
         self.assertEqual(200, channel.code, channel.result)
         self.assertTrue("room_id" in channel.json_body)
 
-    def test_post_room_visibility_key(self):
+    def test_post_room_visibility_key(self) -> None:
         # POST with visibility config key, expect new room id
         channel = self.make_request("POST", "/createRoom", b'{"visibility":"private"}')
         self.assertEqual(200, channel.code)
         self.assertTrue("room_id" in channel.json_body)
 
-    def test_post_room_custom_key(self):
+    def test_post_room_custom_key(self) -> None:
         # POST with custom config keys, expect new room id
         channel = self.make_request("POST", "/createRoom", b'{"custom":"stuff"}')
         self.assertEqual(200, channel.code)
         self.assertTrue("room_id" in channel.json_body)
 
-    def test_post_room_known_and_unknown_keys(self):
+    def test_post_room_known_and_unknown_keys(self) -> None:
         # POST with custom + known config keys, expect new room id
         channel = self.make_request(
             "POST", "/createRoom", b'{"visibility":"private","custom":"things"}'
@@ -621,7 +624,7 @@ class RoomsCreateTestCase(RoomBase):
         self.assertEqual(200, channel.code)
         self.assertTrue("room_id" in channel.json_body)
 
-    def test_post_room_invalid_content(self):
+    def test_post_room_invalid_content(self) -> None:
         # POST with invalid content / paths, expect 400
         channel = self.make_request("POST", "/createRoom", b'{"visibili')
         self.assertEqual(400, channel.code)
@@ -629,7 +632,7 @@ class RoomsCreateTestCase(RoomBase):
         channel = self.make_request("POST", "/createRoom", b'["hello"]')
         self.assertEqual(400, channel.code)
 
-    def test_post_room_invitees_invalid_mxid(self):
+    def test_post_room_invitees_invalid_mxid(self) -> None:
         # POST with invalid invitee, see https://github.com/matrix-org/synapse/issues/4088
         # Note the trailing space in the MXID here!
         channel = self.make_request(
@@ -638,7 +641,7 @@ class RoomsCreateTestCase(RoomBase):
         self.assertEqual(400, channel.code)
 
     @unittest.override_config({"rc_invites": {"per_room": {"burst_count": 3}}})
-    def test_post_room_invitees_ratelimit(self):
+    def test_post_room_invitees_ratelimit(self) -> None:
         """Test that invites sent when creating a room are ratelimited by a RateLimiter,
         which ratelimits them correctly, including by not limiting when the requester is
         exempt from ratelimiting.
@@ -674,7 +677,7 @@ class RoomsCreateTestCase(RoomBase):
         channel = self.make_request("POST", "/createRoom", content)
         self.assertEqual(200, channel.code)
 
-    def test_spam_checker_may_join_room(self):
+    def test_spam_checker_may_join_room(self) -> None:
         """Tests that the user_may_join_room spam checker callback is correctly bypassed
         when creating a new room.
         """
@@ -704,12 +707,12 @@ class RoomTopicTestCase(RoomBase):
 
     user_id = "@sid1:red"
 
-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         # create the room
         self.room_id = self.helper.create_room_as(self.user_id)
         self.path = "/rooms/%s/state/m.room.topic" % (self.room_id,)
 
-    def test_invalid_puts(self):
+    def test_invalid_puts(self) -> None:
         # missing keys or invalid json
         channel = self.make_request("PUT", self.path, "{}")
         self.assertEqual(400, channel.code, msg=channel.result["body"])
@@ -736,7 +739,7 @@ class RoomTopicTestCase(RoomBase):
         channel = self.make_request("PUT", self.path, content)
         self.assertEqual(400, channel.code, msg=channel.result["body"])
 
-    def test_rooms_topic(self):
+    def test_rooms_topic(self) -> None:
         # nothing should be there
         channel = self.make_request("GET", self.path)
         self.assertEqual(404, channel.code, msg=channel.result["body"])
@@ -751,7 +754,7 @@ class RoomTopicTestCase(RoomBase):
         self.assertEqual(200, channel.code, msg=channel.result["body"])
         self.assert_dict(json.loads(content), channel.json_body)
 
-    def test_rooms_topic_with_extra_keys(self):
+    def test_rooms_topic_with_extra_keys(self) -> None:
         # valid put with extra keys
         content = '{"topic":"Seasons","subtopic":"Summer"}'
         channel = self.make_request("PUT", self.path, content)
@@ -768,10 +771,10 @@ class RoomMemberStateTestCase(RoomBase):
 
     user_id = "@sid1:red"
 
-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.room_id = self.helper.create_room_as(self.user_id)
 
-    def test_invalid_puts(self):
+    def test_invalid_puts(self) -> None:
         path = "/rooms/%s/state/m.room.member/%s" % (self.room_id, self.user_id)
         # missing keys or invalid json
         channel = self.make_request("PUT", path, "{}")
@@ -801,7 +804,7 @@ class RoomMemberStateTestCase(RoomBase):
         channel = self.make_request("PUT", path, content.encode("ascii"))
         self.assertEqual(400, channel.code, msg=channel.result["body"])
 
-    def test_rooms_members_self(self):
+    def test_rooms_members_self(self) -> None:
         path = "/rooms/%s/state/m.room.member/%s" % (
             urlparse.quote(self.room_id),
             self.user_id,
@@ -812,13 +815,13 @@ class RoomMemberStateTestCase(RoomBase):
         channel = self.make_request("PUT", path, content.encode("ascii"))
         self.assertEqual(200, channel.code, msg=channel.result["body"])
 
-        channel = self.make_request("GET", path, None)
+        channel = self.make_request("GET", path, content=b"")
         self.assertEqual(200, channel.code, msg=channel.result["body"])
 
         expected_response = {"membership": Membership.JOIN}
         self.assertEqual(expected_response, channel.json_body)
 
-    def test_rooms_members_other(self):
+    def test_rooms_members_other(self) -> None:
         self.other_id = "@zzsid1:red"
         path = "/rooms/%s/state/m.room.member/%s" % (
             urlparse.quote(self.room_id),
@@ -830,11 +833,11 @@ class RoomMemberStateTestCase(RoomBase):
         channel = self.make_request("PUT", path, content)
         self.assertEqual(200, channel.code, msg=channel.result["body"])
 
-        channel = self.make_request("GET", path, None)
+        channel = self.make_request("GET", path, content=b"")
         self.assertEqual(200, channel.code, msg=channel.result["body"])
         self.assertEqual(json.loads(content), channel.json_body)
 
-    def test_rooms_members_other_custom_keys(self):
+    def test_rooms_members_other_custom_keys(self) -> None:
         self.other_id = "@zzsid1:red"
         path = "/rooms/%s/state/m.room.member/%s" % (
             urlparse.quote(self.room_id),
@@ -849,7 +852,7 @@ class RoomMemberStateTestCase(RoomBase):
         channel = self.make_request("PUT", path, content)
         self.assertEqual(200, channel.code, msg=channel.result["body"])
 
-        channel = self.make_request("GET", path, None)
+        channel = self.make_request("GET", path, content=b"")
         self.assertEqual(200, channel.code, msg=channel.result["body"])
         self.assertEqual(json.loads(content), channel.json_body)
 
@@ -866,7 +869,7 @@ class RoomInviteRatelimitTestCase(RoomBase):
     @unittest.override_config(
         {"rc_invites": {"per_room": {"per_second": 0.5, "burst_count": 3}}}
     )
-    def test_invites_by_rooms_ratelimit(self):
+    def test_invites_by_rooms_ratelimit(self) -> None:
         """Tests that invites in a room are actually rate-limited."""
         room_id = self.helper.create_room_as(self.user_id)
 
@@ -878,7 +881,7 @@ class RoomInviteRatelimitTestCase(RoomBase):
     @unittest.override_config(
         {"rc_invites": {"per_user": {"per_second": 0.5, "burst_count": 3}}}
     )
-    def test_invites_by_users_ratelimit(self):
+    def test_invites_by_users_ratelimit(self) -> None:
         """Tests that invites to a specific user are actually rate-limited."""
 
         for _ in range(3):
@@ -897,7 +900,7 @@ class RoomJoinTestCase(RoomBase):
         room.register_servlets,
     ]
 
-    def prepare(self, reactor, clock, homeserver):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.user1 = self.register_user("thomas", "hackme")
         self.tok1 = self.login("thomas", "hackme")
 
@@ -908,7 +911,7 @@ class RoomJoinTestCase(RoomBase):
         self.room2 = self.helper.create_room_as(room_creator=self.user1, tok=self.tok1)
         self.room3 = self.helper.create_room_as(room_creator=self.user1, tok=self.tok1)
 
-    def test_spam_checker_may_join_room(self):
+    def test_spam_checker_may_join_room(self) -> None:
         """Tests that the user_may_join_room spam checker callback is correctly called
         and blocks room joins when needed.
         """
@@ -975,8 +978,8 @@ class RoomJoinRatelimitTestCase(RoomBase):
         room.register_servlets,
     ]
 
-    def prepare(self, reactor, clock, homeserver):
-        super().prepare(reactor, clock, homeserver)
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
+        super().prepare(reactor, clock, hs)
         # profile changes expect that the user is actually registered
         user = UserID.from_string(self.user_id)
         self.get_success(self.register_user(user.localpart, "supersecretpassword"))
@@ -984,7 +987,7 @@ class RoomJoinRatelimitTestCase(RoomBase):
     @unittest.override_config(
         {"rc_joins": {"local": {"per_second": 0.5, "burst_count": 3}}}
     )
-    def test_join_local_ratelimit(self):
+    def test_join_local_ratelimit(self) -> None:
         """Tests that local joins are actually rate-limited."""
         for _ in range(3):
             self.helper.create_room_as(self.user_id)
@@ -994,7 +997,7 @@ class RoomJoinRatelimitTestCase(RoomBase):
     @unittest.override_config(
         {"rc_joins": {"local": {"per_second": 0.5, "burst_count": 3}}}
     )
-    def test_join_local_ratelimit_profile_change(self):
+    def test_join_local_ratelimit_profile_change(self) -> None:
         """Tests that sending a profile update into all of the user's joined rooms isn't
         rate-limited by the rate-limiter on joins."""
 
@@ -1031,7 +1034,7 @@ class RoomJoinRatelimitTestCase(RoomBase):
     @unittest.override_config(
         {"rc_joins": {"local": {"per_second": 0.5, "burst_count": 3}}}
     )
-    def test_join_local_ratelimit_idempotent(self):
+    def test_join_local_ratelimit_idempotent(self) -> None:
         """Tests that the room join endpoints remain idempotent despite rate-limiting
         on room joins."""
         room_id = self.helper.create_room_as(self.user_id)
@@ -1056,7 +1059,7 @@ class RoomJoinRatelimitTestCase(RoomBase):
             "autocreate_auto_join_rooms": True,
         },
     )
-    def test_autojoin_rooms(self):
+    def test_autojoin_rooms(self) -> None:
         user_id = self.register_user("testuser", "password")
 
         # Check that the new user successfully joined the four rooms
@@ -1071,10 +1074,10 @@ class RoomMessagesTestCase(RoomBase):
 
     user_id = "@sid1:red"
 
-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.room_id = self.helper.create_room_as(self.user_id)
 
-    def test_invalid_puts(self):
+    def test_invalid_puts(self) -> None:
         path = "/rooms/%s/send/m.room.message/mid1" % (urlparse.quote(self.room_id))
         # missing keys or invalid json
         channel = self.make_request("PUT", path, b"{}")
@@ -1095,7 +1098,7 @@ class RoomMessagesTestCase(RoomBase):
         channel = self.make_request("PUT", path, b"")
         self.assertEqual(400, channel.code, msg=channel.result["body"])
 
-    def test_rooms_messages_sent(self):
+    def test_rooms_messages_sent(self) -> None:
         path = "/rooms/%s/send/m.room.message/mid1" % (urlparse.quote(self.room_id))
 
         content = b'{"body":"test","msgtype":{"type":"a"}}'
@@ -1119,11 +1122,11 @@ class RoomInitialSyncTestCase(RoomBase):
 
     user_id = "@sid1:red"
 
-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         # create the room
         self.room_id = self.helper.create_room_as(self.user_id)
 
-    def test_initial_sync(self):
+    def test_initial_sync(self) -> None:
         channel = self.make_request("GET", "/rooms/%s/initialSync" % self.room_id)
         self.assertEqual(200, channel.code)
 
@@ -1131,7 +1134,7 @@ class RoomInitialSyncTestCase(RoomBase):
         self.assertEqual("join", channel.json_body["membership"])
 
         # Room state is easier to assert on if we unpack it into a dict
-        state = {}
+        state: JsonDict = {}
         for event in channel.json_body["state"]:
             if "state_key" not in event:
                 continue
@@ -1160,10 +1163,10 @@ class RoomMessageListTestCase(RoomBase):
 
     user_id = "@sid1:red"
 
-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.room_id = self.helper.create_room_as(self.user_id)
 
-    def test_topo_token_is_accepted(self):
+    def test_topo_token_is_accepted(self) -> None:
         token = "t1-0_0_0_0_0_0_0_0_0"
         channel = self.make_request(
             "GET", "/rooms/%s/messages?access_token=x&from=%s" % (self.room_id, token)
@@ -1174,7 +1177,7 @@ class RoomMessageListTestCase(RoomBase):
         self.assertTrue("chunk" in channel.json_body)
         self.assertTrue("end" in channel.json_body)
 
-    def test_stream_token_is_accepted_for_fwd_pagianation(self):
+    def test_stream_token_is_accepted_for_fwd_pagianation(self) -> None:
         token = "s0_0_0_0_0_0_0_0_0"
         channel = self.make_request(
             "GET", "/rooms/%s/messages?access_token=x&from=%s" % (self.room_id, token)
@@ -1185,7 +1188,7 @@ class RoomMessageListTestCase(RoomBase):
         self.assertTrue("chunk" in channel.json_body)
         self.assertTrue("end" in channel.json_body)
 
-    def test_room_messages_purge(self):
+    def test_room_messages_purge(self) -> None:
         store = self.hs.get_datastores().main
         pagination_handler = self.hs.get_pagination_handler()
 
@@ -1278,10 +1281,10 @@ class RoomSearchTestCase(unittest.HomeserverTestCase):
     user_id = True
     hijack_auth = False
 
-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
 
         # Register the user who does the searching
-        self.user_id = self.register_user("user", "pass")
+        self.user_id2 = self.register_user("user", "pass")
         self.access_token = self.login("user", "pass")
 
         # Register the user who sends the message
@@ -1289,12 +1292,12 @@ class RoomSearchTestCase(unittest.HomeserverTestCase):
         self.other_access_token = self.login("otheruser", "pass")
 
         # Create a room
-        self.room = self.helper.create_room_as(self.user_id, tok=self.access_token)
+        self.room = self.helper.create_room_as(self.user_id2, tok=self.access_token)
 
         # Invite the other person
         self.helper.invite(
             room=self.room,
-            src=self.user_id,
+            src=self.user_id2,
             tok=self.access_token,
             targ=self.other_user_id,
         )
@@ -1304,7 +1307,7 @@ class RoomSearchTestCase(unittest.HomeserverTestCase):
             room=self.room, user=self.other_user_id, tok=self.other_access_token
         )
 
-    def test_finds_message(self):
+    def test_finds_message(self) -> None:
         """
         The search functionality will search for content in messages if asked to
         do so.
@@ -1333,7 +1336,7 @@ class RoomSearchTestCase(unittest.HomeserverTestCase):
         # No context was requested, so we should get none.
         self.assertEqual(results["results"][0]["context"], {})
 
-    def test_include_context(self):
+    def test_include_context(self) -> None:
         """
         When event_context includes include_profile, profile information will be
         included in the search response.
@@ -1379,7 +1382,7 @@ class PublicRoomsRestrictedTestCase(unittest.HomeserverTestCase):
         login.register_servlets,
     ]
 
-    def make_homeserver(self, reactor, clock):
+    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
 
         self.url = b"/_matrix/client/r0/publicRooms"
 
@@ -1389,11 +1392,11 @@ class PublicRoomsRestrictedTestCase(unittest.HomeserverTestCase):
 
         return self.hs
 
-    def test_restricted_no_auth(self):
+    def test_restricted_no_auth(self) -> None:
         channel = self.make_request("GET", self.url)
         self.assertEqual(channel.code, 401, channel.result)
 
-    def test_restricted_auth(self):
+    def test_restricted_auth(self) -> None:
         self.register_user("user", "pass")
         tok = self.login("user", "pass")
 
@@ -1412,19 +1415,19 @@ class PublicRoomsTestRemoteSearchFallbackTestCase(unittest.HomeserverTestCase):
         login.register_servlets,
     ]
 
-    def make_homeserver(self, reactor, clock):
+    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
         return self.setup_test_homeserver(federation_client=Mock())
 
-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.register_user("user", "pass")
         self.token = self.login("user", "pass")
 
         self.federation_client = hs.get_federation_client()
 
-    def test_simple(self):
+    def test_simple(self) -> None:
         "Simple test for searching rooms over federation"
-        self.federation_client.get_public_rooms.side_effect = (
-            lambda *a, **k: defer.succeed({})
+        self.federation_client.get_public_rooms.side_effect = lambda *a, **k: defer.succeed(  # type: ignore[attr-defined]
+            {}
         )
 
         search_filter = {"generic_search_term": "foobar"}
@@ -1437,7 +1440,7 @@ class PublicRoomsTestRemoteSearchFallbackTestCase(unittest.HomeserverTestCase):
         )
         self.assertEqual(channel.code, 200, channel.result)
 
-        self.federation_client.get_public_rooms.assert_called_once_with(
+        self.federation_client.get_public_rooms.assert_called_once_with(  # type: ignore[attr-defined]
             "testserv",
             limit=100,
             since_token=None,
@@ -1446,12 +1449,12 @@ class PublicRoomsTestRemoteSearchFallbackTestCase(unittest.HomeserverTestCase):
             third_party_instance_id=None,
         )
 
-    def test_fallback(self):
+    def test_fallback(self) -> None:
         "Test that searching public rooms over federation falls back if it gets a 404"
 
         # The `get_public_rooms` should be called again if the first call fails
         # with a 404, when using search filters.
-        self.federation_client.get_public_rooms.side_effect = (
+        self.federation_client.get_public_rooms.side_effect = (  # type: ignore[attr-defined]
             HttpResponseException(404, "Not Found", b""),
             defer.succeed({}),
         )
@@ -1466,7 +1469,7 @@ class PublicRoomsTestRemoteSearchFallbackTestCase(unittest.HomeserverTestCase):
         )
         self.assertEqual(channel.code, 200, channel.result)
 
-        self.federation_client.get_public_rooms.assert_has_calls(
+        self.federation_client.get_public_rooms.assert_has_calls(  # type: ignore[attr-defined]
             [
                 call(
                     "testserv",
@@ -1497,14 +1500,14 @@ class PerRoomProfilesForbiddenTestCase(unittest.HomeserverTestCase):
         profile.register_servlets,
     ]
 
-    def make_homeserver(self, reactor, clock):
+    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
         config = self.default_config()
         config["allow_per_room_profiles"] = False
         self.hs = self.setup_test_homeserver(config=config)
 
         return self.hs
 
-    def prepare(self, reactor, clock, homeserver):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.user_id = self.register_user("test", "test")
         self.tok = self.login("test", "test")
 
@@ -1522,7 +1525,7 @@ class PerRoomProfilesForbiddenTestCase(unittest.HomeserverTestCase):
 
         self.room_id = self.helper.create_room_as(self.user_id, tok=self.tok)
 
-    def test_per_room_profile_forbidden(self):
+    def test_per_room_profile_forbidden(self) -> None:
         data = {"membership": "join", "displayname": "other test user"}
         request_data = json.dumps(data)
         channel = self.make_request(
@@ -1557,7 +1560,7 @@ class RoomMembershipReasonTestCase(unittest.HomeserverTestCase):
         login.register_servlets,
     ]
 
-    def prepare(self, reactor, clock, homeserver):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.creator = self.register_user("creator", "test")
         self.creator_tok = self.login("creator", "test")
 
@@ -1566,7 +1569,7 @@ class RoomMembershipReasonTestCase(unittest.HomeserverTestCase):
 
         self.room_id = self.helper.create_room_as(self.creator, tok=self.creator_tok)
 
-    def test_join_reason(self):
+    def test_join_reason(self) -> None:
         reason = "hello"
         channel = self.make_request(
             "POST",
@@ -1578,7 +1581,7 @@ class RoomMembershipReasonTestCase(unittest.HomeserverTestCase):
 
         self._check_for_reason(reason)
 
-    def test_leave_reason(self):
+    def test_leave_reason(self) -> None:
         self.helper.join(self.room_id, user=self.second_user_id, tok=self.second_tok)
 
         reason = "hello"
@@ -1592,7 +1595,7 @@ class RoomMembershipReasonTestCase(unittest.HomeserverTestCase):
 
         self._check_for_reason(reason)
 
-    def test_kick_reason(self):
+    def test_kick_reason(self) -> None:
         self.helper.join(self.room_id, user=self.second_user_id, tok=self.second_tok)
 
         reason = "hello"
@@ -1606,7 +1609,7 @@ class RoomMembershipReasonTestCase(unittest.HomeserverTestCase):
 
         self._check_for_reason(reason)
 
-    def test_ban_reason(self):
+    def test_ban_reason(self) -> None:
         self.helper.join(self.room_id, user=self.second_user_id, tok=self.second_tok)
 
         reason = "hello"
@@ -1620,7 +1623,7 @@ class RoomMembershipReasonTestCase(unittest.HomeserverTestCase):
 
         self._check_for_reason(reason)
 
-    def test_unban_reason(self):
+    def test_unban_reason(self) -> None:
         reason = "hello"
         channel = self.make_request(
             "POST",
@@ -1632,7 +1635,7 @@ class RoomMembershipReasonTestCase(unittest.HomeserverTestCase):
 
         self._check_for_reason(reason)
 
-    def test_invite_reason(self):
+    def test_invite_reason(self) -> None:
         reason = "hello"
         channel = self.make_request(
             "POST",
@@ -1644,7 +1647,7 @@ class RoomMembershipReasonTestCase(unittest.HomeserverTestCase):
 
         self._check_for_reason(reason)
 
-    def test_reject_invite_reason(self):
+    def test_reject_invite_reason(self) -> None:
         self.helper.invite(
             self.room_id,
             src=self.creator,
@@ -1663,7 +1666,7 @@ class RoomMembershipReasonTestCase(unittest.HomeserverTestCase):
 
         self._check_for_reason(reason)
 
-    def _check_for_reason(self, reason):
+    def _check_for_reason(self, reason: str) -> None:
         channel = self.make_request(
             "GET",
             "/_matrix/client/r0/rooms/{}/state/m.room.member/{}".format(
@@ -1704,12 +1707,12 @@ class LabelsTestCase(unittest.HomeserverTestCase):
         "org.matrix.not_labels": ["#notfun"],
     }
 
-    def prepare(self, reactor, clock, homeserver):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.user_id = self.register_user("test", "test")
         self.tok = self.login("test", "test")
         self.room_id = self.helper.create_room_as(self.user_id, tok=self.tok)
 
-    def test_context_filter_labels(self):
+    def test_context_filter_labels(self) -> None:
         """Test that we can filter by a label on a /context request."""
         event_id = self._send_labelled_messages_in_room()
 
@@ -1739,7 +1742,7 @@ class LabelsTestCase(unittest.HomeserverTestCase):
             events_after[0]["content"]["body"], "with right label", events_after[0]
         )
 
-    def test_context_filter_not_labels(self):
+    def test_context_filter_not_labels(self) -> None:
         """Test that we can filter by the absence of a label on a /context request."""
         event_id = self._send_labelled_messages_in_room()
 
@@ -1772,7 +1775,7 @@ class LabelsTestCase(unittest.HomeserverTestCase):
             events_after[1]["content"]["body"], "with two wrong labels", events_after[1]
         )
 
-    def test_context_filter_labels_not_labels(self):
+    def test_context_filter_labels_not_labels(self) -> None:
         """Test that we can filter by both a label and the absence of another label on a
         /context request.
         """
@@ -1801,7 +1804,7 @@ class LabelsTestCase(unittest.HomeserverTestCase):
             events_after[0]["content"]["body"], "with wrong label", events_after[0]
         )
 
-    def test_messages_filter_labels(self):
+    def test_messages_filter_labels(self) -> None:
         """Test that we can filter by a label on a /messages request."""
         self._send_labelled_messages_in_room()
 
@@ -1818,7 +1821,7 @@ class LabelsTestCase(unittest.HomeserverTestCase):
         self.assertEqual(events[0]["content"]["body"], "with right label", events[0])
         self.assertEqual(events[1]["content"]["body"], "with right label", events[1])
 
-    def test_messages_filter_not_labels(self):
+    def test_messages_filter_not_labels(self) -> None:
         """Test that we can filter by the absence of a label on a /messages request."""
         self._send_labelled_messages_in_room()
 
@@ -1839,7 +1842,7 @@ class LabelsTestCase(unittest.HomeserverTestCase):
             events[3]["content"]["body"], "with two wrong labels", events[3]
         )
 
-    def test_messages_filter_labels_not_labels(self):
+    def test_messages_filter_labels_not_labels(self) -> None:
         """Test that we can filter by both a label and the absence of another label on a
         /messages request.
         """
@@ -1862,7 +1865,7 @@ class LabelsTestCase(unittest.HomeserverTestCase):
         self.assertEqual(len(events), 1, [event["content"] for event in events])
         self.assertEqual(events[0]["content"]["body"], "with wrong label", events[0])
 
-    def test_search_filter_labels(self):
+    def test_search_filter_labels(self) -> None:
         """Test that we can filter by a label on a /search request."""
         request_data = json.dumps(
             {
@@ -1899,7 +1902,7 @@ class LabelsTestCase(unittest.HomeserverTestCase):
             results[1]["result"]["content"]["body"],
         )
 
-    def test_search_filter_not_labels(self):
+    def test_search_filter_not_labels(self) -> None:
         """Test that we can filter by the absence of a label on a /search request."""
         request_data = json.dumps(
             {
@@ -1946,7 +1949,7 @@ class LabelsTestCase(unittest.HomeserverTestCase):
             results[3]["result"]["content"]["body"],
         )
 
-    def test_search_filter_labels_not_labels(self):
+    def test_search_filter_labels_not_labels(self) -> None:
         """Test that we can filter by both a label and the absence of another label on a
         /search request.
         """
@@ -1980,7 +1983,7 @@ class LabelsTestCase(unittest.HomeserverTestCase):
             results[0]["result"]["content"]["body"],
         )
 
-    def _send_labelled_messages_in_room(self):
+    def _send_labelled_messages_in_room(self) -> str:
         """Sends several messages to a room with different labels (or without any) to test
         filtering by label.
         Returns:
@@ -2056,12 +2059,12 @@ class RelationsTestCase(unittest.HomeserverTestCase):
         login.register_servlets,
     ]
 
-    def default_config(self):
+    def default_config(self) -> Dict[str, Any]:
         config = super().default_config()
         config["experimental_features"] = {"msc3440_enabled": True}
         return config
 
-    def prepare(self, reactor, clock, homeserver):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.user_id = self.register_user("test", "test")
         self.tok = self.login("test", "test")
         self.room_id = self.helper.create_room_as(self.user_id, tok=self.tok)
@@ -2136,7 +2139,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
 
         return channel.json_body["chunk"]
 
-    def test_filter_relation_senders(self):
+    def test_filter_relation_senders(self) -> None:
         # Messages which second user reacted to.
         filter = {"io.element.relation_senders": [self.second_user_id]}
         chunk = self._filter_messages(filter)
@@ -2159,7 +2162,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
             [c["event_id"] for c in chunk], [self.event_id_1, self.event_id_2]
         )
 
-    def test_filter_relation_type(self):
+    def test_filter_relation_type(self) -> None:
         # Messages which have annotations.
         filter = {"io.element.relation_types": [RelationTypes.ANNOTATION]}
         chunk = self._filter_messages(filter)
@@ -2185,7 +2188,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
             [c["event_id"] for c in chunk], [self.event_id_1, self.event_id_2]
         )
 
-    def test_filter_relation_senders_and_type(self):
+    def test_filter_relation_senders_and_type(self) -> None:
         # Messages which second user reacted to.
         filter = {
             "io.element.relation_senders": [self.second_user_id],
@@ -2205,7 +2208,7 @@ class ContextTestCase(unittest.HomeserverTestCase):
         account.register_servlets,
     ]
 
-    def prepare(self, reactor, clock, homeserver):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.user_id = self.register_user("user", "password")
         self.tok = self.login("user", "password")
         self.room_id = self.helper.create_room_as(
@@ -2218,7 +2221,7 @@ class ContextTestCase(unittest.HomeserverTestCase):
         self.helper.invite(self.room_id, self.user_id, self.other_user_id, tok=self.tok)
         self.helper.join(self.room_id, self.other_user_id, tok=self.other_tok)
 
-    def test_erased_sender(self):
+    def test_erased_sender(self) -> None:
         """Test that an erasure request results in the requester's events being hidden
         from any new member of the room.
         """
@@ -2332,7 +2335,7 @@ class RoomAliasListTestCase(unittest.HomeserverTestCase):
         room.register_servlets,
     ]
 
-    def prepare(self, reactor, clock, homeserver):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.room_owner = self.register_user("room_owner", "test")
         self.room_owner_tok = self.login("room_owner", "test")
 
@@ -2340,17 +2343,17 @@ class RoomAliasListTestCase(unittest.HomeserverTestCase):
             self.room_owner, tok=self.room_owner_tok
         )
 
-    def test_no_aliases(self):
+    def test_no_aliases(self) -> None:
         res = self._get_aliases(self.room_owner_tok)
         self.assertEqual(res["aliases"], [])
 
-    def test_not_in_room(self):
+    def test_not_in_room(self) -> None:
         self.register_user("user", "test")
         user_tok = self.login("user", "test")
         res = self._get_aliases(user_tok, expected_code=403)
         self.assertEqual(res["errcode"], "M_FORBIDDEN")
 
-    def test_admin_user(self):
+    def test_admin_user(self) -> None:
         alias1 = self._random_alias()
         self._set_alias_via_directory(alias1)
 
@@ -2360,7 +2363,7 @@ class RoomAliasListTestCase(unittest.HomeserverTestCase):
         res = self._get_aliases(user_tok)
         self.assertEqual(res["aliases"], [alias1])
 
-    def test_with_aliases(self):
+    def test_with_aliases(self) -> None:
         alias1 = self._random_alias()
         alias2 = self._random_alias()
 
@@ -2370,7 +2373,7 @@ class RoomAliasListTestCase(unittest.HomeserverTestCase):
         res = self._get_aliases(self.room_owner_tok)
         self.assertEqual(set(res["aliases"]), {alias1, alias2})
 
-    def test_peekable_room(self):
+    def test_peekable_room(self) -> None:
         alias1 = self._random_alias()
         self._set_alias_via_directory(alias1)
 
@@ -2404,7 +2407,7 @@ class RoomAliasListTestCase(unittest.HomeserverTestCase):
     def _random_alias(self) -> str:
         return RoomAlias(random_string(5), self.hs.hostname).to_string()
 
-    def _set_alias_via_directory(self, alias: str, expected_code: int = 200):
+    def _set_alias_via_directory(self, alias: str, expected_code: int = 200) -> None:
         url = "/_matrix/client/r0/directory/room/" + alias
         data = {"room_id": self.room_id}
         request_data = json.dumps(data)
@@ -2423,7 +2426,7 @@ class RoomCanonicalAliasTestCase(unittest.HomeserverTestCase):
         room.register_servlets,
     ]
 
-    def prepare(self, reactor, clock, homeserver):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.room_owner = self.register_user("room_owner", "test")
         self.room_owner_tok = self.login("room_owner", "test")
 
@@ -2434,7 +2437,7 @@ class RoomCanonicalAliasTestCase(unittest.HomeserverTestCase):
         self.alias = "#alias:test"
         self._set_alias_via_directory(self.alias)
 
-    def _set_alias_via_directory(self, alias: str, expected_code: int = 200):
+    def _set_alias_via_directory(self, alias: str, expected_code: int = 200) -> None:
         url = "/_matrix/client/r0/directory/room/" + alias
         data = {"room_id": self.room_id}
         request_data = json.dumps(data)
@@ -2456,7 +2459,9 @@ class RoomCanonicalAliasTestCase(unittest.HomeserverTestCase):
         self.assertIsInstance(res, dict)
         return res
 
-    def _set_canonical_alias(self, content: str, expected_code: int = 200) -> JsonDict:
+    def _set_canonical_alias(
+        self, content: JsonDict, expected_code: int = 200
+    ) -> JsonDict:
         """Calls the endpoint under test. returns the json response object."""
         channel = self.make_request(
             "PUT",
@@ -2469,7 +2474,7 @@ class RoomCanonicalAliasTestCase(unittest.HomeserverTestCase):
         self.assertIsInstance(res, dict)
         return res
 
-    def test_canonical_alias(self):
+    def test_canonical_alias(self) -> None:
         """Test a basic alias message."""
         # There is no canonical alias to start with.
         self._get_canonical_alias(expected_code=404)
@@ -2488,7 +2493,7 @@ class RoomCanonicalAliasTestCase(unittest.HomeserverTestCase):
         res = self._get_canonical_alias()
         self.assertEqual(res, {})
 
-    def test_alt_aliases(self):
+    def test_alt_aliases(self) -> None:
         """Test a canonical alias message with alt_aliases."""
         # Create an alias.
         self._set_canonical_alias({"alt_aliases": [self.alias]})
@@ -2504,7 +2509,7 @@ class RoomCanonicalAliasTestCase(unittest.HomeserverTestCase):
         res = self._get_canonical_alias()
         self.assertEqual(res, {})
 
-    def test_alias_alt_aliases(self):
+    def test_alias_alt_aliases(self) -> None:
         """Test a canonical alias message with an alias and alt_aliases."""
         # Create an alias.
         self._set_canonical_alias({"alias": self.alias, "alt_aliases": [self.alias]})
@@ -2520,7 +2525,7 @@ class RoomCanonicalAliasTestCase(unittest.HomeserverTestCase):
         res = self._get_canonical_alias()
         self.assertEqual(res, {})
 
-    def test_partial_modify(self):
+    def test_partial_modify(self) -> None:
         """Test removing only the alt_aliases."""
         # Create an alias.
         self._set_canonical_alias({"alias": self.alias, "alt_aliases": [self.alias]})
@@ -2536,7 +2541,7 @@ class RoomCanonicalAliasTestCase(unittest.HomeserverTestCase):
         res = self._get_canonical_alias()
         self.assertEqual(res, {"alias": self.alias})
 
-    def test_add_alias(self):
+    def test_add_alias(self) -> None:
         """Test removing only the alt_aliases."""
         # Create an additional alias.
         second_alias = "#second:test"
@@ -2556,7 +2561,7 @@ class RoomCanonicalAliasTestCase(unittest.HomeserverTestCase):
             res, {"alias": self.alias, "alt_aliases": [self.alias, second_alias]}
         )
 
-    def test_bad_data(self):
+    def test_bad_data(self) -> None:
         """Invalid data for alt_aliases should cause errors."""
         self._set_canonical_alias({"alt_aliases": "@bad:test"}, expected_code=400)
         self._set_canonical_alias({"alt_aliases": None}, expected_code=400)
@@ -2566,7 +2571,7 @@ class RoomCanonicalAliasTestCase(unittest.HomeserverTestCase):
         self._set_canonical_alias({"alt_aliases": True}, expected_code=400)
         self._set_canonical_alias({"alt_aliases": {}}, expected_code=400)
 
-    def test_bad_alias(self):
+    def test_bad_alias(self) -> None:
         """An alias which does not point to the room raises a SynapseError."""
         self._set_canonical_alias({"alias": "@unknown:test"}, expected_code=400)
         self._set_canonical_alias({"alt_aliases": ["@unknown:test"]}, expected_code=400)
@@ -2580,13 +2585,13 @@ class ThreepidInviteTestCase(unittest.HomeserverTestCase):
         room.register_servlets,
     ]
 
-    def prepare(self, reactor, clock, homeserver):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.user_id = self.register_user("thomas", "hackme")
         self.tok = self.login("thomas", "hackme")
 
         self.room_id = self.helper.create_room_as(self.user_id, tok=self.tok)
 
-    def test_threepid_invite_spamcheck(self):
+    def test_threepid_invite_spamcheck(self) -> None:
         # Mock a few functions to prevent the test from failing due to failing to talk to
         # a remote IS. We keep the mock for _mock_make_and_store_3pid_invite around so we
         # can check its call_count later on during the test.
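
The hunks above apply a single typing pattern across tests/rest/client/test_rooms.py: the HomeserverTestCase hooks gain explicit MemoryReactor, Clock and HomeServer annotations, test methods are annotated as returning None, and attributes monkey-patched onto Mock objects (such as get_public_rooms.side_effect) are silenced with "# type: ignore[attr-defined]". As a rough illustration only (the class below is a made-up example following those conventions, not part of this diff), the annotated hooks look like:

    from twisted.test.proto_helpers import MemoryReactor

    from synapse.rest import admin
    from synapse.rest.client import login, room
    from synapse.server import HomeServer
    from synapse.util import Clock

    from tests import unittest


    class ExampleRoomTestCase(unittest.HomeserverTestCase):
        servlets = [
            admin.register_servlets,
            login.register_servlets,
            room.register_servlets,
        ]

        def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
            # Build the homeserver instance the tests run against.
            return self.setup_test_homeserver()

        def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
            # Per-test setup: register a user and keep the access token around.
            self.user_id = self.register_user("example", "password")
            self.tok = self.login("example", "password")

        def test_create_room(self) -> None:
            room_id = self.helper.create_room_as(self.user_id, tok=self.tok)
            self.assertTrue(room_id.startswith("!"))
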
diff --git a/tests/rest/client/test_third_party_rules.py b/tests/rest/client/test_third_party_rules.py
index bfc04785b7..58f1ea11b7 100644
--- a/tests/rest/client/test_third_party_rules.py
+++ b/tests/rest/client/test_third_party_rules.py
@@ -12,16 +12,22 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import threading
-from typing import TYPE_CHECKING, Dict, Optional, Tuple
+from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union
 from unittest.mock import Mock
 
+from twisted.test.proto_helpers import MemoryReactor
+
 from synapse.api.constants import EventTypes, LoginType, Membership
 from synapse.api.errors import SynapseError
+from synapse.api.room_versions import RoomVersion
 from synapse.events import EventBase
+from synapse.events.snapshot import EventContext
 from synapse.events.third_party_rules import load_legacy_third_party_event_rules
 from synapse.rest import admin
 from synapse.rest.client import account, login, profile, room
+from synapse.server import HomeServer
 from synapse.types import JsonDict, Requester, StateMap
+from synapse.util import Clock
 from synapse.util.frozenutils import unfreeze
 
 from tests import unittest
@@ -34,7 +40,7 @@ thread_local = threading.local()
 
 
 class LegacyThirdPartyRulesTestModule:
-    def __init__(self, config: Dict, module_api: "ModuleApi"):
+    def __init__(self, config: Dict, module_api: "ModuleApi") -> None:
         # keep a record of the "current" rules module, so that the test can patch
         # it if desired.
         thread_local.rules_module = self
@@ -42,32 +48,36 @@ class LegacyThirdPartyRulesTestModule:
 
     async def on_create_room(
         self, requester: Requester, config: dict, is_requester_admin: bool
-    ):
+    ) -> bool:
         return True
 
-    async def check_event_allowed(self, event: EventBase, state: StateMap[EventBase]):
+    async def check_event_allowed(
+        self, event: EventBase, state: StateMap[EventBase]
+    ) -> Union[bool, dict]:
         return True
 
     @staticmethod
-    def parse_config(config):
+    def parse_config(config: Dict[str, Any]) -> Dict[str, Any]:
         return config
 
 
 class LegacyDenyNewRooms(LegacyThirdPartyRulesTestModule):
-    def __init__(self, config: Dict, module_api: "ModuleApi"):
+    def __init__(self, config: Dict, module_api: "ModuleApi") -> None:
         super().__init__(config, module_api)
 
-    def on_create_room(
+    async def on_create_room(
         self, requester: Requester, config: dict, is_requester_admin: bool
-    ):
+    ) -> bool:
         return False
 
 
 class LegacyChangeEvents(LegacyThirdPartyRulesTestModule):
-    def __init__(self, config: Dict, module_api: "ModuleApi"):
+    def __init__(self, config: Dict, module_api: "ModuleApi") -> None:
         super().__init__(config, module_api)
 
-    async def check_event_allowed(self, event: EventBase, state: StateMap[EventBase]):
+    async def check_event_allowed(
+        self, event: EventBase, state: StateMap[EventBase]
+    ) -> JsonDict:
         d = event.get_dict()
         content = unfreeze(event.content)
         content["foo"] = "bar"
@@ -84,7 +94,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
         account.register_servlets,
     ]
 
-    def make_homeserver(self, reactor, clock):
+    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
         hs = self.setup_test_homeserver()
 
         load_legacy_third_party_event_rules(hs)
@@ -94,22 +104,30 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
         # Note that these checks are not relevant to this test case.
 
         # Have this homeserver auto-approve all event signature checking.
-        async def approve_all_signature_checking(_, pdu):
+        async def approve_all_signature_checking(
+            _: RoomVersion, pdu: EventBase
+        ) -> EventBase:
             return pdu
 
-        hs.get_federation_server()._check_sigs_and_hash = approve_all_signature_checking
+        hs.get_federation_server()._check_sigs_and_hash = approve_all_signature_checking  # type: ignore[assignment]
 
         # Have this homeserver skip event auth checks. This is necessary due to
         # event auth checks ensuring that events were signed by the sender's homeserver.
-        async def _check_event_auth(origin, event, context, *args, **kwargs):
+        async def _check_event_auth(
+            origin: str,
+            event: EventBase,
+            context: EventContext,
+            *args: Any,
+            **kwargs: Any,
+        ) -> EventContext:
             return context
 
-        hs.get_federation_event_handler()._check_event_auth = _check_event_auth
+        hs.get_federation_event_handler()._check_event_auth = _check_event_auth  # type: ignore[assignment]
 
         return hs
 
-    def prepare(self, reactor, clock, homeserver):
-        super().prepare(reactor, clock, homeserver)
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
+        super().prepare(reactor, clock, hs)
         # Create some users and a room to play with during the tests
         self.user_id = self.register_user("kermit", "monkey")
         self.invitee = self.register_user("invitee", "hackme")
@@ -121,13 +139,15 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
         except Exception:
             pass
 
-    def test_third_party_rules(self):
+    def test_third_party_rules(self) -> None:
         """Tests that a forbidden event is forbidden from being sent, but an allowed one
         can be sent.
         """
         # patch the rules module with a Mock which will return False for some event
         # types
-        async def check(ev, state):
+        async def check(
+            ev: EventBase, state: StateMap[EventBase]
+        ) -> Tuple[bool, Optional[JsonDict]]:
             return ev.type != "foo.bar.forbidden", None
 
         callback = Mock(spec=[], side_effect=check)
@@ -161,7 +181,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
         )
         self.assertEqual(channel.result["code"], b"403", channel.result)
 
-    def test_third_party_rules_workaround_synapse_errors_pass_through(self):
+    def test_third_party_rules_workaround_synapse_errors_pass_through(self) -> None:
         """
         Tests that the workaround introduced by https://github.com/matrix-org/synapse/pull/11042
         is functional: that SynapseErrors are passed through from check_event_allowed
@@ -172,7 +192,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
         """
 
         class NastyHackException(SynapseError):
-            def error_dict(self):
+            def error_dict(self) -> JsonDict:
                 """
                 This overrides SynapseError's `error_dict` to nastily inject
                 JSON into the error response.
@@ -182,7 +202,9 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
                 return result
 
         # add a callback that will raise our hacky exception
-        async def check(ev, state) -> Tuple[bool, Optional[JsonDict]]:
+        async def check(
+            ev: EventBase, state: StateMap[EventBase]
+        ) -> Tuple[bool, Optional[JsonDict]]:
             raise NastyHackException(429, "message")
 
         self.hs.get_third_party_event_rules()._check_event_allowed_callbacks = [check]
@@ -202,11 +224,13 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
             {"errcode": "M_UNKNOWN", "error": "message", "nasty": "very"},
         )
 
-    def test_cannot_modify_event(self):
+    def test_cannot_modify_event(self) -> None:
         """cannot accidentally modify an event before it is persisted"""
 
         # first patch the event checker so that it will try to modify the event
-        async def check(ev: EventBase, state):
+        async def check(
+            ev: EventBase, state: StateMap[EventBase]
+        ) -> Tuple[bool, Optional[JsonDict]]:
             ev.content = {"x": "y"}
             return True, None
 
@@ -223,10 +247,12 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
         # 500 Internal Server Error
         self.assertEqual(channel.code, 500, channel.result)
 
-    def test_modify_event(self):
+    def test_modify_event(self) -> None:
         """The module can return a modified version of the event"""
         # first patch the event checker so that it will modify the event
-        async def check(ev: EventBase, state):
+        async def check(
+            ev: EventBase, state: StateMap[EventBase]
+        ) -> Tuple[bool, Optional[JsonDict]]:
             d = ev.get_dict()
             d["content"] = {"x": "y"}
             return True, d
@@ -253,10 +279,12 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
         ev = channel.json_body
         self.assertEqual(ev["content"]["x"], "y")
 
-    def test_message_edit(self):
+    def test_message_edit(self) -> None:
         """Ensure that the module doesn't cause issues with edited messages."""
         # first patch the event checker so that it will modify the event
-        async def check(ev: EventBase, state):
+        async def check(
+            ev: EventBase, state: StateMap[EventBase]
+        ) -> Tuple[bool, Optional[JsonDict]]:
             d = ev.get_dict()
             d["content"] = {
                 "msgtype": "m.text",
@@ -315,7 +343,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
         ev = channel.json_body
         self.assertEqual(ev["content"]["body"], "EDITED BODY")
 
-    def test_send_event(self):
+    def test_send_event(self) -> None:
         """Tests that a module can send an event into a room via the module api"""
         content = {
             "msgtype": "m.text",
@@ -344,7 +372,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
             }
         }
     )
-    def test_legacy_check_event_allowed(self):
+    def test_legacy_check_event_allowed(self) -> None:
         """Tests that the wrapper for legacy check_event_allowed callbacks works
         correctly.
         """
@@ -379,13 +407,13 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
             }
         }
     )
-    def test_legacy_on_create_room(self):
+    def test_legacy_on_create_room(self) -> None:
         """Tests that the wrapper for legacy on_create_room callbacks works
         correctly.
         """
         self.helper.create_room_as(self.user_id, tok=self.tok, expect_code=403)
 
-    def test_sent_event_end_up_in_room_state(self):
+    def test_sent_event_end_up_in_room_state(self) -> None:
         """Tests that a state event sent by a module while processing another state event
         doesn't get dropped from the state of the room. This is to guard against a bug
         where Synapse has been observed doing so, see https://github.com/matrix-org/synapse/issues/10830
@@ -400,7 +428,9 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
         api = self.hs.get_module_api()
 
         # Define a callback that sends a custom event on power levels update.
-        async def test_fn(event: EventBase, state_events):
+        async def test_fn(
+            event: EventBase, state_events: StateMap[EventBase]
+        ) -> Tuple[bool, Optional[JsonDict]]:
             if event.is_state and event.type == EventTypes.PowerLevels:
                 await api.create_and_send_event_into_room(
                     {
@@ -436,7 +466,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
             self.assertEqual(channel.code, 200)
             self.assertEqual(channel.json_body["i"], i)
 
-    def test_on_new_event(self):
+    def test_on_new_event(self) -> None:
         """Test that the on_new_event callback is called on new events"""
         on_new_event = Mock(make_awaitable(None))
         self.hs.get_third_party_event_rules()._on_new_event_callbacks.append(
@@ -501,7 +531,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
 
         self.assertEqual(channel.code, 200, channel.result)
 
-    def _update_power_levels(self, event_default: int = 0):
+    def _update_power_levels(self, event_default: int = 0) -> None:
         """Updates the room's power levels.
 
         Args:
@@ -533,7 +563,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
             tok=self.tok,
         )
 
-    def test_on_profile_update(self):
+    def test_on_profile_update(self) -> None:
         """Tests that the on_profile_update module callback is correctly called on
         profile updates.
         """
@@ -592,7 +622,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
         self.assertEqual(profile_info.display_name, displayname)
         self.assertEqual(profile_info.avatar_url, avatar_url)
 
-    def test_on_profile_update_admin(self):
+    def test_on_profile_update_admin(self) -> None:
         """Tests that the on_profile_update module callback is correctly called on
         profile updates triggered by a server admin.
         """
@@ -634,7 +664,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
         self.assertEqual(profile_info.display_name, displayname)
         self.assertEqual(profile_info.avatar_url, avatar_url)
 
-    def test_on_user_deactivation_status_changed(self):
+    def test_on_user_deactivation_status_changed(self) -> None:
         """Tests that the on_user_deactivation_status_changed module callback is called
         correctly when processing a user's deactivation.
         """
@@ -691,7 +721,7 @@ class ThirdPartyRulesTestCase(unittest.FederatingHomeserverTestCase):
         args = profile_mock.call_args[0]
         self.assertTrue(args[3])
 
-    def test_on_user_deactivation_status_changed_admin(self):
+    def test_on_user_deactivation_status_changed_admin(self) -> None:
         """Tests that the on_user_deactivation_status_changed module callback is called
         correctly when processing a user's deactivation triggered by a server admin as
         well as a reactivation.
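
The ad-hoc check callbacks in this file are all annotated against the same check_event_allowed contract: an async callable that receives the event and the room's current state map and returns a pair of (allowed flag, optional replacement event dict). As a rough illustration only (the function name below is made up, not part of this diff), a fully typed callback looks like:

    from typing import Optional, Tuple

    from synapse.events import EventBase
    from synapse.types import JsonDict, StateMap


    async def allow_all_but_forbidden(
        ev: EventBase, state: StateMap[EventBase]
    ) -> Tuple[bool, Optional[JsonDict]]:
        # Approve everything except the custom "foo.bar.forbidden" type used by
        # test_third_party_rules, and return no replacement event dict.
        return ev.type != "foo.bar.forbidden", None

In the tests above, such a callback is installed via self.hs.get_third_party_event_rules()._check_event_allowed_callbacks, or wrapped in Mock(spec=[], side_effect=check) when its call count needs to be inspected.
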
diff --git a/tests/rest/client/test_typing.py b/tests/rest/client/test_typing.py
index 8b2da88e8a..d6da510773 100644
--- a/tests/rest/client/test_typing.py
+++ b/tests/rest/client/test_typing.py
@@ -15,10 +15,12 @@
 
 """Tests REST events for /rooms paths."""
 
-from unittest.mock import Mock
+from twisted.test.proto_helpers import MemoryReactor
 
 from synapse.rest.client import room
+from synapse.server import HomeServer
 from synapse.types import UserID
+from synapse.util import Clock
 
 from tests import unittest
 
@@ -33,40 +35,17 @@ class RoomTypingTestCase(unittest.HomeserverTestCase):
     user = UserID.from_string(user_id)
     servlets = [room.register_servlets]
 
-    def make_homeserver(self, reactor, clock):
-
-        hs = self.setup_test_homeserver(
-            "red",
-            federation_http_client=None,
-            federation_client=Mock(),
-        )
-
+    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
+        hs = self.setup_test_homeserver("red")
         self.event_source = hs.get_event_sources().sources.typing
-
-        hs.get_federation_handler = Mock()
-
-        async def get_user_by_access_token(token=None, allow_guest=False):
-            return {
-                "user": UserID.from_string(self.auth_user_id),
-                "token_id": 1,
-                "is_guest": False,
-            }
-
-        hs.get_auth().get_user_by_access_token = get_user_by_access_token
-
-        async def _insert_client_ip(*args, **kwargs):
-            return None
-
-        hs.get_datastores().main.insert_client_ip = _insert_client_ip
-
         return hs
 
-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.room_id = self.helper.create_room_as(self.user_id)
         # Need another user to make notifications actually work
         self.helper.join(self.room_id, user="@jim:red")
 
-    def test_set_typing(self):
+    def test_set_typing(self) -> None:
         channel = self.make_request(
             "PUT",
             "/rooms/%s/typing/%s" % (self.room_id, self.user_id),
@@ -95,7 +74,7 @@ class RoomTypingTestCase(unittest.HomeserverTestCase):
             ],
         )
 
-    def test_set_not_typing(self):
+    def test_set_not_typing(self) -> None:
         channel = self.make_request(
             "PUT",
             "/rooms/%s/typing/%s" % (self.room_id, self.user_id),
@@ -103,7 +82,7 @@ class RoomTypingTestCase(unittest.HomeserverTestCase):
         )
         self.assertEqual(200, channel.code)
 
-    def test_typing_timeout(self):
+    def test_typing_timeout(self) -> None:
         channel = self.make_request(
             "PUT",
             "/rooms/%s/typing/%s" % (self.room_id, self.user_id),
diff --git a/tests/storage/databases/test_state_store.py b/tests/storage/databases/test_state_store.py
deleted file mode 100644
index 2b484c95a9..0000000000
--- a/tests/storage/databases/test_state_store.py
+++ /dev/null
@@ -1,454 +0,0 @@
-# Copyright 2022 The Matrix.org Foundation C.I.C.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import typing
-from typing import Dict, List, Sequence, Tuple
-from unittest.mock import patch
-
-from parameterized import parameterized
-
-from twisted.internet.defer import Deferred, ensureDeferred
-from twisted.test.proto_helpers import MemoryReactor
-
-from synapse.api.constants import EventTypes
-from synapse.storage.databases.state.store import (
-    MAX_INFLIGHT_REQUESTS_PER_GROUP,
-    state_filter_rough_priority_comparator,
-)
-from synapse.storage.state import StateFilter
-from synapse.types import StateMap
-from synapse.util import Clock
-
-from tests.unittest import HomeserverTestCase
-
-if typing.TYPE_CHECKING:
-    from synapse.server import HomeServer
-
-# StateFilter for ALL non-m.room.member state events
-ALL_NON_MEMBERS_STATE_FILTER = StateFilter.freeze(
-    types={EventTypes.Member: set()},
-    include_others=True,
-)
-
-FAKE_STATE = {
-    (EventTypes.Member, "@alice:test"): "join",
-    (EventTypes.Member, "@bob:test"): "leave",
-    (EventTypes.Member, "@charlie:test"): "invite",
-    ("test.type", "a"): "AAA",
-    ("test.type", "b"): "BBB",
-    ("other.event.type", "state.key"): "123",
-}
-
-
-class StateGroupInflightCachingTestCase(HomeserverTestCase):
-    def prepare(
-        self, reactor: MemoryReactor, clock: Clock, homeserver: "HomeServer"
-    ) -> None:
-        self.state_storage = homeserver.get_storage().state
-        self.state_datastore = homeserver.get_datastores().state
-        # Patch out the `_get_state_groups_from_groups`.
-        # This is useful because it lets us pretend we have a slow database.
-        get_state_groups_patch = patch.object(
-            self.state_datastore,
-            "_get_state_groups_from_groups",
-            self._fake_get_state_groups_from_groups,
-        )
-        get_state_groups_patch.start()
-
-        self.addCleanup(get_state_groups_patch.stop)
-        self.get_state_group_calls: List[
-            Tuple[Tuple[int, ...], StateFilter, Deferred[Dict[int, StateMap[str]]]]
-        ] = []
-
-    def _fake_get_state_groups_from_groups(
-        self, groups: Sequence[int], state_filter: StateFilter
-    ) -> "Deferred[Dict[int, StateMap[str]]]":
-        d: Deferred[Dict[int, StateMap[str]]] = Deferred()
-        self.get_state_group_calls.append((tuple(groups), state_filter, d))
-        return d
-
-    def _complete_request_fake(
-        self,
-        groups: Tuple[int, ...],
-        state_filter: StateFilter,
-        d: "Deferred[Dict[int, StateMap[str]]]",
-    ) -> None:
-        """
-        Assemble a fake database response and complete the database request.
-        """
-
-        # Return a filtered copy of the fake state
-        d.callback({group: state_filter.filter_state(FAKE_STATE) for group in groups})
-
-    def test_duplicate_requests_deduplicated(self) -> None:
-        """
-        Tests that duplicate requests for state are deduplicated.
-
-        This test:
-        - requests some state (state group 42, 'all' state filter)
-        - requests it again, before the first request finishes
-        - checks to see that only one database query was made
-        - completes the database query
-        - checks that both requests see the same retrieved state
-        """
-        req1 = ensureDeferred(
-            self.state_datastore._get_state_for_group_using_inflight_cache(
-                42, StateFilter.all()
-            )
-        )
-        self.pump(by=0.1)
-
-        # This should have gone to the database
-        self.assertEqual(len(self.get_state_group_calls), 1)
-        self.assertFalse(req1.called)
-
-        req2 = ensureDeferred(
-            self.state_datastore._get_state_for_group_using_inflight_cache(
-                42, StateFilter.all()
-            )
-        )
-        self.pump(by=0.1)
-
-        # No more calls should have gone to the database
-        self.assertEqual(len(self.get_state_group_calls), 1)
-        self.assertFalse(req1.called)
-        self.assertFalse(req2.called)
-
-        groups, sf, d = self.get_state_group_calls[0]
-        self.assertEqual(groups, (42,))
-        self.assertEqual(sf, StateFilter.all())
-
-        # Now we can complete the request
-        self._complete_request_fake(groups, sf, d)
-
-        self.assertEqual(self.get_success(req1), FAKE_STATE)
-        self.assertEqual(self.get_success(req2), FAKE_STATE)
-
-    def test_smaller_request_deduplicated(self) -> None:
-        """
-        Tests that duplicate requests for state are deduplicated.
-
-        This test:
-        - requests some state (state group 42, 'all' state filter)
-        - requests a subset of that state, before the first request finishes
-        - checks to see that only one database query was made
-        - completes the database query
-        - checks that both requests see the correct retrieved state
-        """
-        req1 = ensureDeferred(
-            self.state_datastore._get_state_for_group_using_inflight_cache(
-                42, StateFilter.from_types((("test.type", None),))
-            )
-        )
-        self.pump(by=0.1)
-
-        # This should have gone to the database
-        self.assertEqual(len(self.get_state_group_calls), 1)
-        self.assertFalse(req1.called)
-
-        req2 = ensureDeferred(
-            self.state_datastore._get_state_for_group_using_inflight_cache(
-                42, StateFilter.from_types((("test.type", "b"),))
-            )
-        )
-        self.pump(by=0.1)
-
-        # No more calls should have gone to the database, because the second
-        # request was already in the in-flight cache!
-        self.assertEqual(len(self.get_state_group_calls), 1)
-        self.assertFalse(req1.called)
-        self.assertFalse(req2.called)
-
-        groups, sf, d = self.get_state_group_calls[0]
-        self.assertEqual(groups, (42,))
-        # The state filter is expanded internally for increased cache hit rate,
-        # so the database sees a wider state filter than requested.
-        self.assertEqual(sf, ALL_NON_MEMBERS_STATE_FILTER)
-
-        # Now we can complete the request
-        self._complete_request_fake(groups, sf, d)
-
-        self.assertEqual(
-            self.get_success(req1),
-            {("test.type", "a"): "AAA", ("test.type", "b"): "BBB"},
-        )
-        self.assertEqual(self.get_success(req2), {("test.type", "b"): "BBB"})
-
-    def test_partially_overlapping_request_deduplicated(self) -> None:
-        """
-        Tests that partially-overlapping requests are partially deduplicated.
-
-        This test:
-        - requests a single type of wildcard state
-          (This is internally expanded to be all non-member state)
-        - requests the entire state in parallel
-        - checks to see that two database queries were made, but that the second
-          one is only for member state.
-        - completes the database queries
-        - checks that both requests have the correct result.
-        """
-
-        req1 = ensureDeferred(
-            self.state_datastore._get_state_for_group_using_inflight_cache(
-                42, StateFilter.from_types((("test.type", None),))
-            )
-        )
-        self.pump(by=0.1)
-
-        # This should have gone to the database
-        self.assertEqual(len(self.get_state_group_calls), 1)
-        self.assertFalse(req1.called)
-
-        req2 = ensureDeferred(
-            self.state_datastore._get_state_for_group_using_inflight_cache(
-                42, StateFilter.all()
-            )
-        )
-        self.pump(by=0.1)
-
-        # Because it only partially overlaps, this also went to the database
-        self.assertEqual(len(self.get_state_group_calls), 2)
-        self.assertFalse(req1.called)
-        self.assertFalse(req2.called)
-
-        # First request:
-        groups, sf, d = self.get_state_group_calls[0]
-        self.assertEqual(groups, (42,))
-        # The state filter is expanded internally for increased cache hit rate,
-        # so the database sees a wider state filter than requested.
-        self.assertEqual(sf, ALL_NON_MEMBERS_STATE_FILTER)
-        self._complete_request_fake(groups, sf, d)
-
-        # Second request:
-        groups, sf, d = self.get_state_group_calls[1]
-        self.assertEqual(groups, (42,))
-        # The state filter is narrowed to only request membership state, because
-        # the remainder of the state is already being queried in the first request!
-        self.assertEqual(
-            sf, StateFilter.freeze({EventTypes.Member: None}, include_others=False)
-        )
-        self._complete_request_fake(groups, sf, d)
-
-        # Check the results are correct
-        self.assertEqual(
-            self.get_success(req1),
-            {("test.type", "a"): "AAA", ("test.type", "b"): "BBB"},
-        )
-        self.assertEqual(self.get_success(req2), FAKE_STATE)
-
-    def test_in_flight_requests_stop_being_in_flight(self) -> None:
-        """
-        Tests that in-flight request deduplication doesn't somehow 'hold on'
-        to completed requests: once they're done, they're taken out of the
-        in-flight cache.
-        """
-        req1 = ensureDeferred(
-            self.state_datastore._get_state_for_group_using_inflight_cache(
-                42, StateFilter.all()
-            )
-        )
-        self.pump(by=0.1)
-
-        # This should have gone to the database
-        self.assertEqual(len(self.get_state_group_calls), 1)
-        self.assertFalse(req1.called)
-
-        # Complete the request right away.
-        self._complete_request_fake(*self.get_state_group_calls[0])
-        self.assertTrue(req1.called)
-
-        # Send off another request
-        req2 = ensureDeferred(
-            self.state_datastore._get_state_for_group_using_inflight_cache(
-                42, StateFilter.all()
-            )
-        )
-        self.pump(by=0.1)
-
-        # It should have gone to the database again, because the previous request
-        # isn't in-flight and therefore isn't available for deduplication.
-        self.assertEqual(len(self.get_state_group_calls), 2)
-        self.assertFalse(req2.called)
-
-        # Complete the request right away.
-        self._complete_request_fake(*self.get_state_group_calls[1])
-        self.assertTrue(req2.called)
-        groups, sf, d = self.get_state_group_calls[0]
-
-        self.assertEqual(self.get_success(req1), FAKE_STATE)
-        self.assertEqual(self.get_success(req2), FAKE_STATE)
-
-    def test_inflight_requests_capped(self) -> None:
-        """
-        Tests that the number of in-flight requests is capped to 5.
-
-        - requests several pieces of state separately
-          (5 to hit the limit, 1 to 'shunt out', another that comes after the
-          group has been 'shunted out')
-        - checks to see that the torrent of requests is shunted out by
-          rewriting one of the filters as the 'all' state filter
-        - requests after that one do not cause any additional queries
-        """
-        # 5 at the time of writing.
-        CAP_COUNT = MAX_INFLIGHT_REQUESTS_PER_GROUP
-
-        reqs = []
-
-        # Request 7 different keys (1 to 7) of the `some.state` type.
-        for req_id in range(CAP_COUNT + 2):
-            reqs.append(
-                ensureDeferred(
-                    self.state_datastore._get_state_for_group_using_inflight_cache(
-                        42,
-                        StateFilter.freeze(
-                            {"some.state": {str(req_id + 1)}}, include_others=False
-                        ),
-                    )
-                )
-            )
-        self.pump(by=0.1)
-
-        # There should only be 6 calls to the database, not 7.
-        self.assertEqual(len(self.get_state_group_calls), CAP_COUNT + 1)
-
-        # Assert that the first 5 are exact requests for the individual pieces
-        # wanted
-        for req_id in range(CAP_COUNT):
-            groups, sf, d = self.get_state_group_calls[req_id]
-            self.assertEqual(
-                sf,
-                StateFilter.freeze(
-                    {"some.state": {str(req_id + 1)}}, include_others=False
-                ),
-            )
-
-        # The 6th request should be the 'all' state filter
-        groups, sf, d = self.get_state_group_calls[CAP_COUNT]
-        self.assertEqual(sf, StateFilter.all())
-
-        # Complete the queries and check which requests complete as a result
-        for req_id in range(CAP_COUNT):
-            # This request should not have been completed yet
-            self.assertFalse(reqs[req_id].called)
-
-            groups, sf, d = self.get_state_group_calls[req_id]
-            self._complete_request_fake(groups, sf, d)
-
-            # This should have only completed this one request
-            self.assertTrue(reqs[req_id].called)
-
-        # Now complete the final query; the last 2 requests should complete
-        # as a result
-        self.assertFalse(reqs[CAP_COUNT].called)
-        self.assertFalse(reqs[CAP_COUNT + 1].called)
-        groups, sf, d = self.get_state_group_calls[CAP_COUNT]
-        self._complete_request_fake(groups, sf, d)
-        self.assertTrue(reqs[CAP_COUNT].called)
-        self.assertTrue(reqs[CAP_COUNT + 1].called)
-
-    @parameterized.expand([(False,), (True,)])
-    def test_ordering_of_request_reuse(self, reverse: bool) -> None:
-        """
-        Tests that 'larger' in-flight requests are ordered first.
-
-        This is mostly a design decision in order to prevent a request from
-        hanging on to multiple queries when it would have been sufficient to
-        hang on to only one bigger query.
-
-        The 'size' of a state filter is a rough heuristic.
-
-        - requests two pieces of state, one 'larger' than the other, but each
-          spawning a query
-        - requests a third piece of state
-        - completes the larger of the first two queries
-        - checks that the third request gets completed (and doesn't needlessly
-          wait for the other query)
-
-        Parameters:
-            reverse: whether to reverse the order of the initial requests, to ensure
-                     that the effect doesn't depend on the order of request submission.
-        """
-
-        # We add in an extra state type to make sure that both requests spawn
-        # queries which are not optimised out.
-        state_filters = [
-            StateFilter.freeze(
-                {"state.type": {"A"}, "other.state.type": {"a"}}, include_others=False
-            ),
-            StateFilter.freeze(
-                {
-                    "state.type": None,
-                    "other.state.type": {"b"},
-                    # The current rough size comparator uses the number of state types
-                    # as an indicator of size.
-                    # To influence it to make this state filter bigger than the previous one,
-                    # we add another dummy state type.
-                    "extra.state.type": {"c"},
-                },
-                include_others=False,
-            ),
-        ]
-
-        if reverse:
-            # For fairness, we perform one test run with the list reversed.
-            state_filters.reverse()
-            smallest_state_filter_idx = 1
-            biggest_state_filter_idx = 0
-        else:
-            smallest_state_filter_idx = 0
-            biggest_state_filter_idx = 1
-
-        # This assertion is for our own sanity more than anything else.
-        self.assertLess(
-            state_filter_rough_priority_comparator(
-                state_filters[biggest_state_filter_idx]
-            ),
-            state_filter_rough_priority_comparator(
-                state_filters[smallest_state_filter_idx]
-            ),
-            "Test invalid: bigger state filter is not actually bigger.",
-        )
-
-        # Spawn the initial two requests
-        for state_filter in state_filters:
-            ensureDeferred(
-                self.state_datastore._get_state_for_group_using_inflight_cache(
-                    42,
-                    state_filter,
-                )
-            )
-
-        # Spawn a third request
-        req = ensureDeferred(
-            self.state_datastore._get_state_for_group_using_inflight_cache(
-                42,
-                StateFilter.freeze(
-                    {
-                        "state.type": {"A"},
-                    },
-                    include_others=False,
-                ),
-            )
-        )
-        self.pump(by=0.1)
-
-        self.assertFalse(req.called)
-
-        # Complete the largest request's query to make sure that the final request
-        # only waits for that one (and doesn't needlessly wait for both queries)
-        self._complete_request_fake(
-            *self.get_state_group_calls[biggest_state_filter_idx]
-        )
-
-        # That should have been sufficient to complete the third request
-        self.assertTrue(req.called)
diff --git a/tox.ini b/tox.ini
index 04b972e2c5..8d6aa7580b 100644
--- a/tox.ini
+++ b/tox.ini
@@ -38,15 +38,7 @@ lint_targets =
     setup.py
     synapse
     tests
-    scripts
     # annoyingly, black doesn't find these so we have to list them
-    scripts/export_signing_key
-    scripts/generate_config
-    scripts/generate_log_config
-    scripts/hash_password
-    scripts/register_new_matrix_user
-    scripts/synapse_port_db
-    scripts/update_synapse_database
     scripts-dev
     scripts-dev/build_debian_packages
     scripts-dev/sign_json