author     Andrew Morgan <andrew@amorgan.xyz>  2020-04-24 13:36:35 +0100
committer  Andrew Morgan <andrew@amorgan.xyz>  2020-04-24 13:36:35 +0100
commit     0ddaae83c3f59b9bca7460a493b7e930c10d6b20 (patch)
tree       2524f1948c0974489d281037b96ff4748d3187ba
parent     Merge remote-tracking branch 'origin/develop' into hawkowl/cache-config-witho... (diff)
parent     1.12.4 (diff)
Merge branch 'release-v1.12.4' of github.com:matrix-org/synapse into anoa/temp_working_cache_config
* 'release-v1.12.4' of github.com:matrix-org/synapse: (123 commits)
  1.12.4
  formatting for the changelog
  1.12.4rc1
  1.12.4rc1
  Do not treat display names as globs for push rules. (#7271)
  Query missing cross-signing keys on local sig upload (#7289)
  Fix changelog file
  Support GET account_data requests on a worker (#7311)
  Revert "Query missing cross-signing keys on local sig upload"
  Always send the user updates to their own device list (#7160)
  Query missing cross-signing keys on local sig upload
  Only register devices edu handler on the master process (#7255)
  tweak changelog
  1.12.3
  Fix the debian build in a better way. (#7212)
  Fix changelog wording
  1.12.2
  Pin Pillow>=4.3.0,<7.1.0 to fix dep issue
  1.12.1
  Note where bugs were introduced
  ...
-rwxr-xr-x .buildkite/scripts/test_old_deps.sh | 7
-rw-r--r-- CHANGES.md | 209
-rw-r--r-- INSTALL.md | 15
-rw-r--r-- changelog.d/6572.bugfix | 1
-rw-r--r-- changelog.d/6615.misc | 1
-rw-r--r-- changelog.d/6941.removal | 1
-rw-r--r-- changelog.d/6952.misc | 1
-rw-r--r-- changelog.d/6953.misc | 1
-rw-r--r-- changelog.d/6954.misc | 1
-rw-r--r-- changelog.d/6956.misc | 1
-rw-r--r-- changelog.d/6957.misc | 1
-rw-r--r-- changelog.d/6962.bugfix | 1
-rw-r--r-- changelog.d/6964.misc | 1
-rw-r--r-- changelog.d/6965.feature | 1
-rw-r--r-- changelog.d/6966.removal | 1
-rw-r--r-- changelog.d/6967.bugfix | 1
-rw-r--r-- changelog.d/6968.bugfix | 1
-rw-r--r-- changelog.d/6970.removal | 1
-rw-r--r-- changelog.d/6979.misc | 1
-rw-r--r-- changelog.d/6982.feature | 1
-rw-r--r-- changelog.d/6983.misc | 1
-rw-r--r-- changelog.d/6984.docker | 1
-rw-r--r-- changelog.d/6985.misc | 1
-rw-r--r-- changelog.d/6990.bugfix | 1
-rw-r--r-- changelog.d/6991.misc | 1
-rw-r--r-- contrib/docker/docker-compose.yml | 3
-rw-r--r-- contrib/grafana/README.md | 2
-rw-r--r-- contrib/grafana/synapse.json | 245
-rw-r--r-- debian/changelog | 41
-rwxr-xr-x debian/rules | 33
-rw-r--r-- docs/admin_api/user_admin_api.rst | 6
-rw-r--r-- docs/sample_config.yaml | 69
-rw-r--r-- docs/workers.md | 3
-rw-r--r-- snap/snapcraft.yaml | 21
-rw-r--r-- synapse/__init__.py | 2
-rw-r--r-- synapse/api/auth.py | 9
-rw-r--r-- synapse/api/errors.py | 1
-rw-r--r-- synapse/api/room_versions.py | 9
-rw-r--r-- synapse/app/_base.py | 1
-rw-r--r-- synapse/app/generic_worker.py | 52
-rw-r--r-- synapse/app/homeserver.py | 14
-rw-r--r-- synapse/config/_base.pyi | 2
-rw-r--r-- synapse/config/homeserver.py | 2
-rw-r--r-- synapse/config/saml2_config.py | 30
-rw-r--r-- synapse/config/sso.py | 92
-rw-r--r-- synapse/crypto/context_factory.py | 68
-rw-r--r-- synapse/crypto/event_signing.py | 2
-rw-r--r-- synapse/event_auth.py | 8
-rw-r--r-- synapse/events/__init__.py | 53
-rw-r--r-- synapse/events/utils.py | 26
-rw-r--r-- synapse/federation/federation_base.py | 110
-rw-r--r-- synapse/federation/federation_client.py | 90
-rw-r--r-- synapse/federation/federation_server.py | 51
-rw-r--r-- synapse/federation/transport/client.py | 49
-rw-r--r-- synapse/federation/transport/server.py | 12
-rw-r--r-- synapse/handlers/account_validity.py | 6
-rw-r--r-- synapse/handlers/auth.py | 265
-rw-r--r-- synapse/handlers/device.py | 14
-rw-r--r-- synapse/handlers/directory.py | 121
-rw-r--r-- synapse/handlers/e2e_keys.py | 166
-rw-r--r-- synapse/handlers/e2e_room_keys.py | 7
-rw-r--r-- synapse/handlers/message.py | 49
-rw-r--r-- synapse/handlers/room.py | 16
-rw-r--r-- synapse/handlers/saml_handler.py | 25
-rw-r--r-- synapse/handlers/set_password.py | 41
-rw-r--r-- synapse/handlers/sync.py | 7
-rw-r--r-- synapse/http/client.py | 3
-rw-r--r-- synapse/http/federation/matrix_federation_agent.py | 2
-rw-r--r-- synapse/logging/context.py | 122
-rw-r--r-- synapse/metrics/__init__.py | 12
-rw-r--r-- synapse/metrics/background_process_metrics.py | 5
-rw-r--r-- synapse/module_api/__init__.py | 19
-rw-r--r-- synapse/push/mailer.py | 4
-rw-r--r-- synapse/push/push_rule_evaluator.py | 69
-rw-r--r-- synapse/push/pusherpool.py | 43
-rw-r--r-- synapse/replication/http/federation.py | 13
-rw-r--r-- synapse/replication/http/send_event.py | 14
-rw-r--r-- synapse/res/templates/saml_error.html | 45
-rw-r--r-- synapse/res/templates/sso_redirect_confirm.html | 14
-rw-r--r-- synapse/rest/admin/users.py | 12
-rw-r--r-- synapse/rest/client/v1/login.py | 43
-rw-r--r-- synapse/rest/client/v1/room.py | 12
-rw-r--r-- synapse/rest/client/v2_alpha/account.py | 5
-rw-r--r-- synapse/rest/client/v2_alpha/account_data.py | 8
-rw-r--r-- synapse/rest/key/v2/remote_key_resource.py | 11
-rw-r--r-- synapse/rest/media/v1/_base.py | 25
-rw-r--r-- synapse/rest/saml2/response_resource.py | 18
-rw-r--r-- synapse/server.py | 6
-rw-r--r-- synapse/state/__init__.py | 28
-rw-r--r-- synapse/state/v2.py | 32
-rw-r--r-- synapse/storage/data_stores/main/devices.py | 24
-rw-r--r-- synapse/storage/data_stores/main/event_federation.py | 150
-rw-r--r-- synapse/storage/data_stores/main/event_push_actions.py | 34
-rw-r--r-- synapse/storage/data_stores/main/events.py | 10
-rw-r--r-- synapse/storage/data_stores/main/events_worker.py | 84
-rw-r--r-- synapse/storage/data_stores/main/monthly_active_users.py | 32
-rw-r--r-- synapse/storage/data_stores/main/registration.py | 16
-rw-r--r-- synapse/storage/data_stores/main/schema/delta/57/rooms_version_column_3.sql.postgres | 39
-rw-r--r-- synapse/storage/data_stores/main/schema/delta/57/rooms_version_column_3.sql.sqlite | 23
-rw-r--r-- synapse/storage/data_stores/main/state_deltas.py | 4
-rw-r--r-- synapse/storage/database.py | 153
-rw-r--r-- synapse/storage/engines/__init__.py | 28
-rw-r--r-- synapse/storage/engines/_base.py | 87
-rw-r--r-- synapse/storage/engines/postgres.py | 12
-rw-r--r-- synapse/storage/engines/sqlite.py | 16
-rw-r--r-- synapse/storage/types.py | 65
-rw-r--r-- synapse/types.py | 15
-rw-r--r-- synapse/visibility.py | 62
-rw-r--r-- tests/app/test_frontend_proxy.py | 5
-rw-r--r-- tests/config/test_tls.py | 29
-rw-r--r-- tests/events/test_utils.py | 35
-rw-r--r-- tests/handlers/test_directory.py | 190
-rw-r--r-- tests/http/federation/test_matrix_federation_agent.py | 6
-rw-r--r-- tests/push/test_push_rule_evaluator.py | 65
-rw-r--r-- tests/replication/slave/storage/test_events.py | 10
-rw-r--r-- tests/rest/admin/test_admin.py | 7
-rw-r--r-- tests/rest/admin/test_user.py | 218
-rw-r--r-- tests/rest/client/v1/test_directory.py | 41
-rw-r--r-- tests/rest/client/v1/test_login.py | 111
-rw-r--r-- tests/rest/client/v1/test_rooms.py | 160
-rw-r--r-- tests/state/test_v2.py | 13
-rw-r--r-- tests/storage/test_event_federation.py | 157
-rw-r--r-- tests/storage/test_monthly_active_users.py | 42
-rw-r--r-- tests/test_event_auth.py | 93
-rw-r--r-- tests/test_types.py | 2
-rw-r--r-- tox.ini | 12
126 files changed, 3644 insertions, 1047 deletions
diff --git a/.buildkite/scripts/test_old_deps.sh b/.buildkite/scripts/test_old_deps.sh
index dfd71b2511..cdb77b556c 100755
--- a/.buildkite/scripts/test_old_deps.sh
+++ b/.buildkite/scripts/test_old_deps.sh
@@ -6,12 +6,7 @@
 set -ex
 
 apt-get update
-apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev
-
-# workaround for https://github.com/jaraco/zipp/issues/40
-python3.5 -m pip install 'setuptools>=34.4.0'
-
-python3.5 -m pip install tox
+apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev tox
 
 export LANG="C.UTF-8"
 
diff --git a/CHANGES.md b/CHANGES.md
index ff681762cd..342768c8c8 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,212 @@
+Synapse 1.12.4 (2020-04-23)
+===========================
+
+No significant changes.
+
+
+Synapse 1.12.4rc1 (2020-04-22)
+==============================
+
+Features
+--------
+
+- Always send users their own device updates. ([\#7160](https://github.com/matrix-org/synapse/issues/7160))
+- Add support for handling GET requests for `account_data` on a worker. ([\#7311](https://github.com/matrix-org/synapse/issues/7311))
+
+
+Bugfixes
+--------
+
+- Fix a bug that prevented cross-signing with users on worker-mode synapses. ([\#7255](https://github.com/matrix-org/synapse/issues/7255))
+- Do not treat display names as globs in push rules. ([\#7271](https://github.com/matrix-org/synapse/issues/7271))
+- Fix a bug with cross-signing devices belonging to remote users who did not share a room with any user on the local homeserver. ([\#7289](https://github.com/matrix-org/synapse/issues/7289))
+
+
+Synapse 1.12.3 (2020-04-03)
+===========================
+
+- Remove the pin to Pillow 7.0 which was introduced in Synapse 1.12.2, and
+correctly fix the issue with building the Debian packages. ([\#7212](https://github.com/matrix-org/synapse/issues/7212))
+
+Synapse 1.12.2 (2020-04-02)
+===========================
+
+This release works around [an
+issue](https://github.com/matrix-org/synapse/issues/7208) with building the
+debian packages.
+
+No other significant changes since 1.12.1.
+
+
+Synapse 1.12.1 (2020-04-02)
+===========================
+
+No significant changes since 1.12.1rc1.
+
+
+Synapse 1.12.1rc1 (2020-03-31)
+==============================
+
+Bugfixes
+--------
+
+- Fix starting workers when federation sending not split out. ([\#7133](https://github.com/matrix-org/synapse/issues/7133)). Introduced in v1.12.0.
+- Avoid importing `sqlite3` when using the postgres backend. Contributed by David Vo. ([\#7155](https://github.com/matrix-org/synapse/issues/7155)). Introduced in v1.12.0rc1.
+- Fix a bug which could cause outbound federation traffic to stop working if a client uploaded an incorrect e2e device signature. ([\#7177](https://github.com/matrix-org/synapse/issues/7177)). Introduced in v1.11.0.
+
+
+Synapse 1.12.0 (2020-03-23)
+===========================
+
+No significant changes since 1.12.0rc1.
+
+Debian packages and Docker images are rebuilt using the latest versions of
+dependency libraries, including Twisted 20.3.0. **Please see security advisory
+below**.
+
+Security advisory
+-----------------
+
+Synapse may be vulnerable to request-smuggling attacks when it is used with a
+reverse proxy. The vulnerabilities are fixed in Twisted 20.3.0, and are
+described in
+[CVE-2020-10108](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10108)
+and
+[CVE-2020-10109](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10109).
+For a good introduction to this class of request-smuggling attacks, see
+https://portswigger.net/research/http-desync-attacks-request-smuggling-reborn.
+
+We are not aware of these vulnerabilities being exploited in the wild, and
+do not believe that they are exploitable with current versions of any reverse
+proxies. Nevertheless, we recommend that all Synapse administrators ensure that
+they have the latest version of the Twisted library so that their
+installation remains secure.
+
+* Administrators using the [`matrix.org` Docker
+  image](https://hub.docker.com/r/matrixdotorg/synapse/) or the [Debian/Ubuntu
+  packages from
+  `matrix.org`](https://github.com/matrix-org/synapse/blob/master/INSTALL.md#matrixorg-packages)
+  should ensure that they have version 1.12.0 installed: these images include
+  Twisted 20.3.0.
+* Administrators who have [installed Synapse from
+  source](https://github.com/matrix-org/synapse/blob/master/INSTALL.md#installing-from-source)
+  should upgrade Twisted within their virtualenv by running:
+  ```sh
+  <path_to_virtualenv>/bin/pip install 'Twisted>=20.3.0'
+  ```
+* Administrators who have installed Synapse from distribution packages should
+  consult the information from their distributions.
+
+The `matrix.org` Synapse instance was not vulnerable to these vulnerabilities.
+
+Advance notice of change to the default `git` branch for Synapse
+----------------------------------------------------------------
+
+Currently, the default `git` branch for Synapse is `master`, which tracks the
+latest release.
+
+After the release of Synapse 1.13.0, we intend to change this default to
+`develop`, which is the development tip. This is more consistent with common
+practice and modern `git` usage.
+
+Although we try to keep `develop` in a stable state, there may be occasions
+where regressions creep in. Developers and distributors who have scripts which
+run builds using the default branch of `Synapse` should therefore consider
+pinning their scripts to `master`.
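
For scripts that build Synapse, pinning can be as simple as naming the branch
explicitly instead of relying on the default. A minimal sketch (the clone URL
and destination directory are illustrative):

```python
import subprocess

# Clone with the branch named explicitly, so the upcoming change of the
# default branch from 'master' to 'develop' cannot silently affect builds.
subprocess.run(
    [
        "git", "clone",
        "--branch", "master",
        "https://github.com/matrix-org/synapse.git",
        "synapse-master",
    ],
    check=True,
)
```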
+
+
+Synapse 1.12.0rc1 (2020-03-19)
+==============================
+
+Features
+--------
+
+- Changes related to room alias management ([MSC2432](https://github.com/matrix-org/matrix-doc/pull/2432)):
+  - Publishing/removing a room from the room directory now requires the user to have a power level capable of modifying the canonical alias, instead of the room aliases. ([\#6965](https://github.com/matrix-org/synapse/issues/6965))
+  - Validate the `alt_aliases` property of canonical alias events. ([\#6971](https://github.com/matrix-org/synapse/issues/6971))
+  - Users with a power level sufficient to modify the canonical alias of a room can now delete room aliases. ([\#6986](https://github.com/matrix-org/synapse/issues/6986))
+  - Implement updated authorization rules and redaction rules for aliases events, from [MSC2261](https://github.com/matrix-org/matrix-doc/pull/2261) and [MSC2432](https://github.com/matrix-org/matrix-doc/pull/2432). ([\#7037](https://github.com/matrix-org/synapse/issues/7037))
+  - Stop sending m.room.aliases events during room creation and upgrade. ([\#6941](https://github.com/matrix-org/synapse/issues/6941))
+  - Synapse no longer uses room alias events to calculate room names for push notifications. ([\#6966](https://github.com/matrix-org/synapse/issues/6966))
+  - The room list endpoint no longer returns a list of aliases. ([\#6970](https://github.com/matrix-org/synapse/issues/6970))
+  - Remove special handling of aliases events from [MSC2260](https://github.com/matrix-org/matrix-doc/pull/2260) added in v1.10.0rc1. ([\#7034](https://github.com/matrix-org/synapse/issues/7034))
+- Expose the `synctl`, `hash_password` and `generate_config` commands in the snapcraft package. Contributed by @devec0. ([\#6315](https://github.com/matrix-org/synapse/issues/6315))
+- Check that server_name is correctly set before running database updates. ([\#6982](https://github.com/matrix-org/synapse/issues/6982))
+- Break down monthly active users by `appservice_id` and emit via Prometheus. ([\#7030](https://github.com/matrix-org/synapse/issues/7030))
+- Render a configurable and comprehensible error page if something goes wrong during the SAML2 authentication process. ([\#7058](https://github.com/matrix-org/synapse/issues/7058), [\#7067](https://github.com/matrix-org/synapse/issues/7067))
+- Add an optional parameter to control whether other sessions are logged out when a user's password is modified. ([\#7085](https://github.com/matrix-org/synapse/issues/7085))
+- Add prometheus metrics for the number of active pushers. ([\#7103](https://github.com/matrix-org/synapse/issues/7103), [\#7106](https://github.com/matrix-org/synapse/issues/7106))
+- Improve performance when making HTTPS requests to sygnal, sydent, etc, by sharing the SSL context object between connections. ([\#7094](https://github.com/matrix-org/synapse/issues/7094))
+
+
+Bugfixes
+--------
+
+- When a user's profile is updated via the admin API, also generate a displayname/avatar update for that user in each room. ([\#6572](https://github.com/matrix-org/synapse/issues/6572))
+- Fix a couple of bugs in email configuration handling. ([\#6962](https://github.com/matrix-org/synapse/issues/6962))
+- Fix an issue affecting worker-based deployments where replication would stop working, necessitating a full restart, after joining a large room. ([\#6967](https://github.com/matrix-org/synapse/issues/6967))
+- Fix `duplicate key` error which was logged when rejoining a room over federation. ([\#6968](https://github.com/matrix-org/synapse/issues/6968))
+- Prevent user from setting 'deactivated' to anything other than a bool on the v2 PUT /users Admin API. ([\#6990](https://github.com/matrix-org/synapse/issues/6990))
+- Fix py35-old CI by using native tox package. ([\#7018](https://github.com/matrix-org/synapse/issues/7018))
+- Fix a bug causing `org.matrix.dummy_event` to be included in responses from `/sync`. ([\#7035](https://github.com/matrix-org/synapse/issues/7035))
+- Fix a bug that renders UTF-8 text files incorrectly when loaded from media. Contributed by @TheStranjer. ([\#7044](https://github.com/matrix-org/synapse/issues/7044))
+- Fix a bug that would cause Synapse to respond with an error about event visibility if a client tried to request the state of a room at a given token. ([\#7066](https://github.com/matrix-org/synapse/issues/7066))
+- Repair a data-corruption issue which was introduced in Synapse 1.10, and fixed in Synapse 1.11, and which could cause `/sync` to return with 404 errors about missing events and unknown rooms. ([\#7070](https://github.com/matrix-org/synapse/issues/7070))
+- Fix a bug causing account validity renewal emails to be sent even if the feature is turned off in some cases. ([\#7074](https://github.com/matrix-org/synapse/issues/7074))
+
+
+Improved Documentation
+----------------------
+
+- Updated CentOS8 install instructions. Contributed by Richard Kellner. ([\#6925](https://github.com/matrix-org/synapse/issues/6925))
+- Fix `POSTGRES_INITDB_ARGS` in the `contrib/docker/docker-compose.yml` example docker-compose configuration. ([\#6984](https://github.com/matrix-org/synapse/issues/6984))
+- Change date in [INSTALL.md](./INSTALL.md#tls-certificates) for last date of getting TLS certificates to November 2019. ([\#7015](https://github.com/matrix-org/synapse/issues/7015))
+- Document that the fallback auth endpoints must be routed to the same worker node as the register endpoints. ([\#7048](https://github.com/matrix-org/synapse/issues/7048))
+
+
+Deprecations and Removals
+-------------------------
+
+- Remove the unused query_auth federation endpoint per [MSC2451](https://github.com/matrix-org/matrix-doc/pull/2451). ([\#7026](https://github.com/matrix-org/synapse/issues/7026))
+
+
+Internal Changes
+----------------
+
+- Add type hints to `logging/context.py`. ([\#6309](https://github.com/matrix-org/synapse/issues/6309))
+- Add some clarifications to `README.md` in the database schema directory. ([\#6615](https://github.com/matrix-org/synapse/issues/6615))
+- Refactoring work in preparation for changing the event redaction algorithm. ([\#6874](https://github.com/matrix-org/synapse/issues/6874), [\#6875](https://github.com/matrix-org/synapse/issues/6875), [\#6983](https://github.com/matrix-org/synapse/issues/6983), [\#7003](https://github.com/matrix-org/synapse/issues/7003))
+- Improve performance of v2 state resolution for large rooms. ([\#6952](https://github.com/matrix-org/synapse/issues/6952), [\#7095](https://github.com/matrix-org/synapse/issues/7095))
+- Reduce time spent doing GC, by freezing objects on startup. ([\#6953](https://github.com/matrix-org/synapse/issues/6953))
+- Minor performance fixes to `get_auth_chain_ids`. ([\#6954](https://github.com/matrix-org/synapse/issues/6954))
+- Don't record remote cross-signing keys in the `devices` table. ([\#6956](https://github.com/matrix-org/synapse/issues/6956))
+- Use flake8-comprehensions to enforce good hygiene of list/set/dict comprehensions. ([\#6957](https://github.com/matrix-org/synapse/issues/6957))
+- Merge worker apps together. ([\#6964](https://github.com/matrix-org/synapse/issues/6964), [\#7002](https://github.com/matrix-org/synapse/issues/7002), [\#7055](https://github.com/matrix-org/synapse/issues/7055), [\#7104](https://github.com/matrix-org/synapse/issues/7104))
+- Remove redundant `store_room` call from `FederationHandler._process_received_pdu`. ([\#6979](https://github.com/matrix-org/synapse/issues/6979))
+- Update warning for incorrect database collation/ctype to include link to documentation. ([\#6985](https://github.com/matrix-org/synapse/issues/6985))
+- Add some type annotations to the database storage classes. ([\#6987](https://github.com/matrix-org/synapse/issues/6987))
+- Port `synapse.handlers.presence` to async/await. ([\#6991](https://github.com/matrix-org/synapse/issues/6991), [\#7019](https://github.com/matrix-org/synapse/issues/7019))
+- Add some type annotations to the federation base & client classes. ([\#6995](https://github.com/matrix-org/synapse/issues/6995))
+- Port `synapse.rest.keys` to async/await. ([\#7020](https://github.com/matrix-org/synapse/issues/7020))
+- Add a type check to `is_verified` when processing room keys. ([\#7045](https://github.com/matrix-org/synapse/issues/7045))
+- Add type annotations and comments to the auth handler. ([\#7063](https://github.com/matrix-org/synapse/issues/7063))
+
+
+Synapse 1.11.1 (2020-03-03)
+===========================
+
+This release includes a security fix impacting installations using Single Sign-On (i.e. SAML2 or CAS) for authentication. Administrators of such installations are encouraged to upgrade as soon as possible.
+
+The release also includes fixes for a couple of other bugs.
+
+Bugfixes
+--------
+
+- Add a confirmation step to the SSO login flow before redirecting users to the redirect URL. ([b2bd54a2](https://github.com/matrix-org/synapse/commit/b2bd54a2e31d9a248f73fadb184ae9b4cbdb49f9), [65c73cdf](https://github.com/matrix-org/synapse/commit/65c73cdfec1876a9fec2fd2c3a74923cd146fe0b), [a0178df1](https://github.com/matrix-org/synapse/commit/a0178df10422a76fd403b82d2b2a4ed28a9a9d1e))
+- Fix setting a user as an admin via the admin API `PUT /_synapse/admin/v2/users/<user_id>`. Contributed by @dklimpel. ([\#6910](https://github.com/matrix-org/synapse/issues/6910))
+- Fix bug introduced in Synapse 1.11.0 which sometimes caused errors when joining rooms over federation, with `'coroutine' object has no attribute 'event_id'`. ([\#6996](https://github.com/matrix-org/synapse/issues/6996))
+
+
 Synapse 1.11.0 (2020-02-21)
 ===========================
 
diff --git a/INSTALL.md b/INSTALL.md
index aa5eb882bb..c0926ba590 100644
--- a/INSTALL.md
+++ b/INSTALL.md
@@ -124,12 +124,21 @@ sudo pacman -S base-devel python python-pip \
 
 #### CentOS/Fedora
 
-Installing prerequisites on CentOS 7 or Fedora 25:
+Installing prerequisites on CentOS 8 or Fedora > 26:
+
+```
+sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
+                 libwebp-devel tk-devel redhat-rpm-config \
+                 python3-virtualenv libffi-devel openssl-devel
+sudo dnf groupinstall "Development Tools"
+```
+
+Installing prerequisites on CentOS 7 or Fedora <= 25:
 
 ```
 sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
                  lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
-                 python-virtualenv libffi-devel openssl-devel
+                 python3-virtualenv libffi-devel openssl-devel
 sudo yum groupinstall "Development Tools"
 ```
 
@@ -418,7 +427,7 @@ so, you will need to edit `homeserver.yaml`, as follows:
   for having Synapse automatically provision and renew federation
   certificates through ACME can be found at [ACME.md](docs/ACME.md).
   Note that, as pointed out in that document, this feature will not
-  work with installs set up after November 2020. 
+  work with installs set up after November 2019. 
   
   If you are using your own certificate, be sure to use a `.pem` file that
   includes the full certificate chain including any intermediate certificates
diff --git a/changelog.d/6572.bugfix b/changelog.d/6572.bugfix
deleted file mode 100644
index 4f708f409f..0000000000
--- a/changelog.d/6572.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-When a user's profile is updated via the admin API, also generate a displayname/avatar update for that user in each room.
diff --git a/changelog.d/6615.misc b/changelog.d/6615.misc
deleted file mode 100644
index 9f93152565..0000000000
--- a/changelog.d/6615.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add some clarifications to `README.md` in the database schema directory.
diff --git a/changelog.d/6941.removal b/changelog.d/6941.removal
deleted file mode 100644
index 8573be84b3..0000000000
--- a/changelog.d/6941.removal
+++ /dev/null
@@ -1 +0,0 @@
-Stop sending m.room.aliases events during room creation and upgrade.
diff --git a/changelog.d/6952.misc b/changelog.d/6952.misc
deleted file mode 100644
index e26dc5cab8..0000000000
--- a/changelog.d/6952.misc
+++ /dev/null
@@ -1 +0,0 @@
-Improve perf of v2 state res for large rooms.
diff --git a/changelog.d/6953.misc b/changelog.d/6953.misc
deleted file mode 100644
index 0ab52041cf..0000000000
--- a/changelog.d/6953.misc
+++ /dev/null
@@ -1 +0,0 @@
-Reduce time spent doing GC by freezing objects on startup.
diff --git a/changelog.d/6954.misc b/changelog.d/6954.misc
deleted file mode 100644
index 8b84ce2f19..0000000000
--- a/changelog.d/6954.misc
+++ /dev/null
@@ -1 +0,0 @@
-Minor perf fixes to `get_auth_chain_ids`.
diff --git a/changelog.d/6956.misc b/changelog.d/6956.misc
deleted file mode 100644
index 5cb0894182..0000000000
--- a/changelog.d/6956.misc
+++ /dev/null
@@ -1 +0,0 @@
-Don't record remote cross-signing keys in the `devices` table.
diff --git a/changelog.d/6957.misc b/changelog.d/6957.misc
deleted file mode 100644
index 4f98030110..0000000000
--- a/changelog.d/6957.misc
+++ /dev/null
@@ -1 +0,0 @@
-Use flake8-comprehensions to enforce good hygiene of list/set/dict comprehensions.
diff --git a/changelog.d/6962.bugfix b/changelog.d/6962.bugfix
deleted file mode 100644
index 9f5229d400..0000000000
--- a/changelog.d/6962.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a couple of bugs in email configuration handling.
diff --git a/changelog.d/6964.misc b/changelog.d/6964.misc
deleted file mode 100644
index ec5c004bbe..0000000000
--- a/changelog.d/6964.misc
+++ /dev/null
@@ -1 +0,0 @@
-Merge worker apps together.
diff --git a/changelog.d/6965.feature b/changelog.d/6965.feature
deleted file mode 100644
index 6ad9956e40..0000000000
--- a/changelog.d/6965.feature
+++ /dev/null
@@ -1 +0,0 @@
-Publishing/removing a room from the room directory now requires the user to have a power level capable of modifying the canonical alias, instead of the room aliases.
diff --git a/changelog.d/6966.removal b/changelog.d/6966.removal
deleted file mode 100644
index 69673d9139..0000000000
--- a/changelog.d/6966.removal
+++ /dev/null
@@ -1 +0,0 @@
-Synapse no longer uses room alias events to calculate room names for email notifications.
diff --git a/changelog.d/6967.bugfix b/changelog.d/6967.bugfix
deleted file mode 100644
index b65f80cf1d..0000000000
--- a/changelog.d/6967.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix an issue affecting worker-based deployments where replication would stop working, necessitating a full restart, after joining a large room.
diff --git a/changelog.d/6968.bugfix b/changelog.d/6968.bugfix
deleted file mode 100644
index 9965bfc0c3..0000000000
--- a/changelog.d/6968.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix `duplicate key` error which was logged when rejoining a room over federation.
diff --git a/changelog.d/6970.removal b/changelog.d/6970.removal
deleted file mode 100644
index 89bd363b95..0000000000
--- a/changelog.d/6970.removal
+++ /dev/null
@@ -1 +0,0 @@
-The room list endpoint no longer returns a list of aliases.
diff --git a/changelog.d/6979.misc b/changelog.d/6979.misc
deleted file mode 100644
index c57b398c2f..0000000000
--- a/changelog.d/6979.misc
+++ /dev/null
@@ -1 +0,0 @@
-Remove redundant `store_room` call from `FederationHandler._process_received_pdu`.
diff --git a/changelog.d/6982.feature b/changelog.d/6982.feature
deleted file mode 100644
index 934cc5141a..0000000000
--- a/changelog.d/6982.feature
+++ /dev/null
@@ -1 +0,0 @@
-Check that server_name is correctly set before running database updates.
diff --git a/changelog.d/6983.misc b/changelog.d/6983.misc
deleted file mode 100644
index 08aa80bcd9..0000000000
--- a/changelog.d/6983.misc
+++ /dev/null
@@ -1 +0,0 @@
-Refactoring work in preparation for changing the event redaction algorithm.
diff --git a/changelog.d/6984.docker b/changelog.d/6984.docker
deleted file mode 100644
index 84a55e1267..0000000000
--- a/changelog.d/6984.docker
+++ /dev/null
@@ -1 +0,0 @@
-Fix `POSTGRES_INITDB_ARGS` in the `contrib/docker/docker-compose.yml` example docker-compose configuration.
diff --git a/changelog.d/6985.misc b/changelog.d/6985.misc
deleted file mode 100644
index ba367fa9af..0000000000
--- a/changelog.d/6985.misc
+++ /dev/null
@@ -1 +0,0 @@
-Update warning for incorrect database collation/ctype to include link to documentation.
diff --git a/changelog.d/6990.bugfix b/changelog.d/6990.bugfix
deleted file mode 100644
index 8c1c48f4d4..0000000000
--- a/changelog.d/6990.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Prevent user from setting 'deactivated' to anything other than a bool on the v2 PUT /users Admin API.
\ No newline at end of file
diff --git a/changelog.d/6991.misc b/changelog.d/6991.misc
deleted file mode 100644
index 5130f4e8af..0000000000
--- a/changelog.d/6991.misc
+++ /dev/null
@@ -1 +0,0 @@
-Port `synapse.handlers.presence` to async/await.
diff --git a/contrib/docker/docker-compose.yml b/contrib/docker/docker-compose.yml
index 453b305053..17354b6610 100644
--- a/contrib/docker/docker-compose.yml
+++ b/contrib/docker/docker-compose.yml
@@ -15,10 +15,9 @@ services:
     restart: unless-stopped
     # See the readme for a full documentation of the environment settings
     environment:
-      - SYNAPSE_CONFIG_PATH=/etc/homeserver.yaml
+      - SYNAPSE_CONFIG_PATH=/data/homeserver.yaml
     volumes:
       # You may either store all the files in a local folder
-      - ./matrix-config/homeserver.yaml:/etc/homeserver.yaml
       - ./files:/data
       # .. or you may split this between different storage points
       # - ./files:/data
diff --git a/contrib/grafana/README.md b/contrib/grafana/README.md
index 6a6cc0bed4..ca780d412e 100644
--- a/contrib/grafana/README.md
+++ b/contrib/grafana/README.md
@@ -1,6 +1,6 @@
 # Using the Synapse Grafana dashboard
 
 0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
-1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst
+1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md
 2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
 3. Set up additional recording rules
diff --git a/contrib/grafana/synapse.json b/contrib/grafana/synapse.json
index 5b1bfd1679..656a442597 100644
--- a/contrib/grafana/synapse.json
+++ b/contrib/grafana/synapse.json
@@ -18,7 +18,7 @@
   "gnetId": null,
   "graphTooltip": 0,
   "id": 1,
-  "iteration": 1561447718159,
+  "iteration": 1584612489167,
   "links": [
     {
       "asDropdown": true,
@@ -34,6 +34,7 @@
   "panels": [
     {
       "collapsed": false,
+      "datasource": null,
       "gridPos": {
         "h": 1,
         "w": 24,
@@ -52,12 +53,14 @@
       "dashes": false,
       "datasource": "$datasource",
       "fill": 1,
+      "fillGradient": 0,
       "gridPos": {
         "h": 9,
         "w": 12,
         "x": 0,
         "y": 1
       },
+      "hiddenSeries": false,
       "id": 75,
       "legend": {
         "avg": false,
@@ -72,7 +75,9 @@
       "linewidth": 1,
       "links": [],
       "nullPointMode": "null",
-      "options": {},
+      "options": {
+        "dataLinks": []
+      },
       "paceLength": 10,
       "percentage": false,
       "pointradius": 5,
@@ -151,6 +156,7 @@
       "editable": true,
       "error": false,
       "fill": 1,
+      "fillGradient": 0,
       "grid": {},
       "gridPos": {
         "h": 9,
@@ -158,6 +164,7 @@
         "x": 12,
         "y": 1
       },
+      "hiddenSeries": false,
       "id": 33,
       "legend": {
         "avg": false,
@@ -172,7 +179,9 @@
       "linewidth": 2,
       "links": [],
       "nullPointMode": "null",
-      "options": {},
+      "options": {
+        "dataLinks": []
+      },
       "paceLength": 10,
       "percentage": false,
       "pointradius": 5,
@@ -302,12 +311,14 @@
       "dashes": false,
       "datasource": "$datasource",
       "fill": 0,
+      "fillGradient": 0,
       "gridPos": {
         "h": 9,
         "w": 12,
         "x": 12,
         "y": 10
       },
+      "hiddenSeries": false,
       "id": 107,
       "legend": {
         "avg": false,
@@ -322,7 +333,9 @@
       "linewidth": 1,
       "links": [],
       "nullPointMode": "null",
-      "options": {},
+      "options": {
+        "dataLinks": []
+      },
       "paceLength": 10,
       "percentage": false,
       "pointradius": 5,
@@ -425,12 +438,14 @@
       "dashes": false,
       "datasource": "$datasource",
       "fill": 0,
+      "fillGradient": 0,
       "gridPos": {
         "h": 9,
         "w": 12,
         "x": 0,
         "y": 19
       },
+      "hiddenSeries": false,
       "id": 118,
       "legend": {
         "avg": false,
@@ -445,7 +460,9 @@
       "linewidth": 1,
       "links": [],
       "nullPointMode": "null",
-      "options": {},
+      "options": {
+        "dataLinks": []
+      },
       "paceLength": 10,
       "percentage": false,
       "pointradius": 5,
@@ -542,6 +559,7 @@
     },
     {
       "collapsed": true,
+      "datasource": null,
       "gridPos": {
         "h": 1,
         "w": 24,
@@ -1361,6 +1379,7 @@
     },
     {
       "collapsed": true,
+      "datasource": null,
       "gridPos": {
         "h": 1,
         "w": 24,
@@ -1732,6 +1751,7 @@
     },
     {
       "collapsed": true,
+      "datasource": null,
       "gridPos": {
         "h": 1,
         "w": 24,
@@ -2439,6 +2459,7 @@
     },
     {
       "collapsed": true,
+      "datasource": null,
       "gridPos": {
         "h": 1,
         "w": 24,
@@ -2635,6 +2656,7 @@
     },
     {
       "collapsed": true,
+      "datasource": null,
       "gridPos": {
         "h": 1,
         "w": 24,
@@ -2650,11 +2672,12 @@
           "dashes": false,
           "datasource": "$datasource",
           "fill": 1,
+          "fillGradient": 0,
           "gridPos": {
             "h": 9,
             "w": 12,
             "x": 0,
-            "y": 61
+            "y": 33
           },
           "id": 79,
           "legend": {
@@ -2670,6 +2693,9 @@
           "linewidth": 1,
           "links": [],
           "nullPointMode": "null",
+          "options": {
+            "dataLinks": []
+          },
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -2684,8 +2710,13 @@
               "expr": "sum(rate(synapse_federation_client_sent_transactions{instance=\"$instance\"}[$bucket_size]))",
               "format": "time_series",
               "intervalFactor": 1,
-              "legendFormat": "txn rate",
+              "legendFormat": "successful txn rate",
               "refId": "A"
+            },
+            {
+              "expr": "sum(rate(synapse_util_metrics_block_count{block_name=\"_send_new_transaction\",instance=\"$instance\"}[$bucket_size]) - ignoring (block_name) rate(synapse_federation_client_sent_transactions{instance=\"$instance\"}[$bucket_size]))",
+              "legendFormat": "failed txn rate",
+              "refId": "B"
             }
           ],
           "thresholds": [],
@@ -2736,11 +2767,12 @@
           "dashes": false,
           "datasource": "$datasource",
           "fill": 1,
+          "fillGradient": 0,
           "gridPos": {
             "h": 9,
             "w": 12,
             "x": 12,
-            "y": 61
+            "y": 33
           },
           "id": 83,
           "legend": {
@@ -2756,6 +2788,9 @@
           "linewidth": 1,
           "links": [],
           "nullPointMode": "null",
+          "options": {
+            "dataLinks": []
+          },
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -2829,11 +2864,12 @@
           "dashes": false,
           "datasource": "$datasource",
           "fill": 1,
+          "fillGradient": 0,
           "gridPos": {
             "h": 9,
             "w": 12,
             "x": 0,
-            "y": 70
+            "y": 42
           },
           "id": 109,
           "legend": {
@@ -2849,6 +2885,9 @@
           "linewidth": 1,
           "links": [],
           "nullPointMode": "null",
+          "options": {
+            "dataLinks": []
+          },
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -2923,11 +2962,12 @@
           "dashes": false,
           "datasource": "$datasource",
           "fill": 1,
+          "fillGradient": 0,
           "gridPos": {
             "h": 9,
             "w": 12,
             "x": 12,
-            "y": 70
+            "y": 42
           },
           "id": 111,
           "legend": {
@@ -2943,6 +2983,9 @@
           "linewidth": 1,
           "links": [],
           "nullPointMode": "null",
+          "options": {
+            "dataLinks": []
+          },
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -3009,6 +3052,7 @@
     },
     {
       "collapsed": true,
+      "datasource": null,
       "gridPos": {
         "h": 1,
         "w": 24,
@@ -3024,12 +3068,14 @@
           "dashes": false,
           "datasource": "$datasource",
           "fill": 1,
+          "fillGradient": 0,
           "gridPos": {
-            "h": 7,
+            "h": 8,
             "w": 12,
             "x": 0,
-            "y": 62
+            "y": 34
           },
+          "hiddenSeries": false,
           "id": 51,
           "legend": {
             "avg": false,
@@ -3044,6 +3090,9 @@
           "linewidth": 1,
           "links": [],
           "nullPointMode": "null",
+          "options": {
+            "dataLinks": []
+          },
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -3112,6 +3161,95 @@
             "align": false,
             "alignLevel": null
           }
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "$datasource",
+          "description": "",
+          "fill": 1,
+          "fillGradient": 0,
+          "gridPos": {
+            "h": 8,
+            "w": 12,
+            "x": 12,
+            "y": 34
+          },
+          "hiddenSeries": false,
+          "id": 134,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 1,
+          "nullPointMode": "null",
+          "options": {
+            "dataLinks": []
+          },
+          "percentage": false,
+          "pointradius": 2,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "expr": "topk(10,synapse_pushers{job=~\"$job\",index=~\"$index\", instance=\"$instance\"})",
+              "legendFormat": "{{kind}} {{app_id}}",
+              "refId": "A"
+            }
+          ],
+          "thresholds": [],
+          "timeFrom": null,
+          "timeRegions": [],
+          "timeShift": null,
+          "title": "Active pusher instances by app",
+          "tooltip": {
+            "shared": false,
+            "sort": 2,
+            "value_type": "individual"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ],
+          "yaxis": {
+            "align": false,
+            "alignLevel": null
+          }
         }
       ],
       "repeat": null,
@@ -3120,6 +3258,7 @@
     },
     {
       "collapsed": true,
+      "datasource": null,
       "gridPos": {
         "h": 1,
         "w": 24,
@@ -3523,6 +3662,7 @@
     },
     {
       "collapsed": true,
+      "datasource": null,
       "gridPos": {
         "h": 1,
         "w": 24,
@@ -3540,6 +3680,7 @@
           "editable": true,
           "error": false,
           "fill": 1,
+          "fillGradient": 0,
           "grid": {},
           "gridPos": {
             "h": 13,
@@ -3562,6 +3703,9 @@
           "linewidth": 2,
           "links": [],
           "nullPointMode": "null",
+          "options": {
+            "dataLinks": []
+          },
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -3630,6 +3774,7 @@
           "editable": true,
           "error": false,
           "fill": 1,
+          "fillGradient": 0,
           "grid": {},
           "gridPos": {
             "h": 13,
@@ -3652,6 +3797,9 @@
           "linewidth": 2,
           "links": [],
           "nullPointMode": "null",
+          "options": {
+            "dataLinks": []
+          },
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -3720,6 +3868,7 @@
           "editable": true,
           "error": false,
           "fill": 1,
+          "fillGradient": 0,
           "grid": {},
           "gridPos": {
             "h": 13,
@@ -3742,6 +3891,9 @@
           "linewidth": 2,
           "links": [],
           "nullPointMode": "null",
+          "options": {
+            "dataLinks": []
+          },
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -3810,6 +3962,7 @@
           "editable": true,
           "error": false,
           "fill": 1,
+          "fillGradient": 0,
           "grid": {},
           "gridPos": {
             "h": 13,
@@ -3832,6 +3985,9 @@
           "linewidth": 2,
           "links": [],
           "nullPointMode": "null",
+          "options": {
+            "dataLinks": []
+          },
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -3921,6 +4077,7 @@
           "linewidth": 2,
           "links": [],
           "nullPointMode": "null",
+          "options": {},
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -4010,6 +4167,7 @@
           "linewidth": 2,
           "links": [],
           "nullPointMode": "null",
+          "options": {},
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -4076,6 +4234,7 @@
     },
     {
       "collapsed": true,
+      "datasource": null,
       "gridPos": {
         "h": 1,
         "w": 24,
@@ -4540,6 +4699,7 @@
     },
     {
       "collapsed": true,
+      "datasource": null,
       "gridPos": {
         "h": 1,
         "w": 24,
@@ -5060,6 +5220,7 @@
     },
     {
       "collapsed": true,
+      "datasource": null,
       "gridPos": {
         "h": 1,
         "w": 24,
@@ -5079,7 +5240,7 @@
             "h": 7,
             "w": 12,
             "x": 0,
-            "y": 67
+            "y": 39
           },
           "id": 2,
           "legend": {
@@ -5095,6 +5256,7 @@
           "linewidth": 1,
           "links": [],
           "nullPointMode": "null",
+          "options": {},
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -5198,7 +5360,7 @@
             "h": 7,
             "w": 12,
             "x": 12,
-            "y": 67
+            "y": 39
           },
           "id": 41,
           "legend": {
@@ -5214,6 +5376,7 @@
           "linewidth": 1,
           "links": [],
           "nullPointMode": "null",
+          "options": {},
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -5286,7 +5449,7 @@
             "h": 7,
             "w": 12,
             "x": 0,
-            "y": 74
+            "y": 46
           },
           "id": 42,
           "legend": {
@@ -5302,6 +5465,7 @@
           "linewidth": 1,
           "links": [],
           "nullPointMode": "null",
+          "options": {},
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -5373,7 +5537,7 @@
             "h": 7,
             "w": 12,
             "x": 12,
-            "y": 74
+            "y": 46
           },
           "id": 43,
           "legend": {
@@ -5389,6 +5553,7 @@
           "linewidth": 1,
           "links": [],
           "nullPointMode": "null",
+          "options": {},
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -5460,7 +5625,7 @@
             "h": 7,
             "w": 12,
             "x": 0,
-            "y": 81
+            "y": 53
           },
           "id": 113,
           "legend": {
@@ -5476,6 +5641,7 @@
           "linewidth": 1,
           "links": [],
           "nullPointMode": "null",
+          "options": {},
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -5546,7 +5712,7 @@
             "h": 7,
             "w": 12,
             "x": 12,
-            "y": 81
+            "y": 53
           },
           "id": 115,
           "legend": {
@@ -5562,6 +5728,7 @@
           "linewidth": 1,
           "links": [],
           "nullPointMode": "null",
+          "options": {},
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -5573,7 +5740,7 @@
           "steppedLine": false,
           "targets": [
             {
-              "expr": "rate(synapse_replication_tcp_protocol_close_reason{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
+              "expr": "rate(synapse_replication_tcp_protocol_close_reason{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
               "format": "time_series",
               "intervalFactor": 1,
               "legendFormat": "{{job}}-{{index}} {{reason_type}}",
@@ -5628,6 +5795,7 @@
     },
     {
       "collapsed": true,
+      "datasource": null,
       "gridPos": {
         "h": 1,
         "w": 24,
@@ -5643,11 +5811,12 @@
           "dashes": false,
           "datasource": "$datasource",
           "fill": 1,
+          "fillGradient": 0,
           "gridPos": {
             "h": 9,
             "w": 12,
             "x": 0,
-            "y": 13
+            "y": 40
           },
           "id": 67,
           "legend": {
@@ -5663,7 +5832,9 @@
           "linewidth": 1,
           "links": [],
           "nullPointMode": "connected",
-          "options": {},
+          "options": {
+            "dataLinks": []
+          },
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -5679,7 +5850,7 @@
               "format": "time_series",
               "interval": "",
               "intervalFactor": 1,
-              "legendFormat": "{{job}}-{{index}} ",
+              "legendFormat": "{{job}}-{{index}} {{name}}",
               "refId": "A"
             }
           ],
@@ -5731,11 +5902,12 @@
           "dashes": false,
           "datasource": "$datasource",
           "fill": 1,
+          "fillGradient": 0,
           "gridPos": {
             "h": 9,
             "w": 12,
             "x": 12,
-            "y": 13
+            "y": 40
           },
           "id": 71,
           "legend": {
@@ -5751,7 +5923,9 @@
           "linewidth": 1,
           "links": [],
           "nullPointMode": "connected",
-          "options": {},
+          "options": {
+            "dataLinks": []
+          },
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -5819,11 +5993,12 @@
           "dashes": false,
           "datasource": "$datasource",
           "fill": 1,
+          "fillGradient": 0,
           "gridPos": {
             "h": 9,
             "w": 12,
             "x": 0,
-            "y": 22
+            "y": 49
           },
           "id": 121,
           "interval": "",
@@ -5840,7 +6015,9 @@
           "linewidth": 1,
           "links": [],
           "nullPointMode": "connected",
-          "options": {},
+          "options": {
+            "dataLinks": []
+          },
           "paceLength": 10,
           "percentage": false,
           "pointradius": 5,
@@ -5909,6 +6086,7 @@
     },
     {
       "collapsed": true,
+      "datasource": null,
       "gridPos": {
         "h": 1,
         "w": 24,
@@ -6607,7 +6785,7 @@
     }
   ],
   "refresh": "5m",
-  "schemaVersion": 18,
+  "schemaVersion": 22,
   "style": "dark",
   "tags": [
     "matrix"
@@ -6616,7 +6794,7 @@
     "list": [
       {
         "current": {
-          "tags": [],
+          "selected": true,
           "text": "Prometheus",
           "value": "Prometheus"
         },
@@ -6638,6 +6816,7 @@
         "auto_count": 100,
         "auto_min": "30s",
         "current": {
+          "selected": false,
           "text": "auto",
           "value": "$__auto_interval_bucket_size"
         },
@@ -6719,9 +6898,9 @@
         "allFormat": "regex wildcard",
         "allValue": "",
         "current": {
-          "text": "All",
+          "text": "synapse",
           "value": [
-            "$__all"
+            "synapse"
           ]
         },
         "datasource": "$datasource",
@@ -6751,7 +6930,9 @@
         "allValue": ".*",
         "current": {
           "text": "All",
-          "value": "$__all"
+          "value": [
+            "$__all"
+          ]
         },
         "datasource": "$datasource",
         "definition": "",
@@ -6810,5 +6991,5 @@
   "timezone": "",
   "title": "Synapse",
   "uid": "000000012",
-  "version": 10
+  "version": 19
 }
\ No newline at end of file
diff --git a/debian/changelog b/debian/changelog
index fbb44cb94b..8eaca85232 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,44 @@
+matrix-synapse-py3 (1.12.4) stable; urgency=medium
+
+  * New synapse release 1.12.4.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 23 Apr 2020 10:58:14 -0400
+
+matrix-synapse-py3 (1.12.3) stable; urgency=medium
+
+  [ Richard van der Hoff ]
+  * Update the Debian build scripts to handle the new installation paths
+   for the support libraries introduced by Pillow 7.1.1.
+
+  [ Synapse Packaging team ]
+  * New synapse release 1.12.3.
+
+ -- Synapse Packaging team <packages@matrix.org>  Fri, 03 Apr 2020 10:55:03 +0100
+
+matrix-synapse-py3 (1.12.2) stable; urgency=medium
+
+  * New synapse release 1.12.2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Mon, 02 Apr 2020 19:02:17 +0000
+
+matrix-synapse-py3 (1.12.1) stable; urgency=medium
+
+  * New synapse release 1.12.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Mon, 02 Apr 2020 11:30:47 +0000
+
+matrix-synapse-py3 (1.12.0) stable; urgency=medium
+
+  * New synapse release 1.12.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Mon, 23 Mar 2020 12:13:03 +0000
+
+matrix-synapse-py3 (1.11.1) stable; urgency=medium
+
+  * New synapse release 1.11.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 03 Mar 2020 15:01:22 +0000
+
 matrix-synapse-py3 (1.11.0) stable; urgency=medium
 
   * New synapse release 1.11.0.
diff --git a/debian/rules b/debian/rules
index a4d2ce2ba4..c744060a57 100755
--- a/debian/rules
+++ b/debian/rules
@@ -15,17 +15,38 @@ override_dh_installinit:
 # we don't really want to strip the symbols from our object files.
 override_dh_strip:
 
+# dh_shlibdeps calls dpkg-shlibdeps, which finds all the binary files
+# (executables and shared libs) in the package, and looks for the shared
+# libraries that they depend on. It then adds a dependency on the package that
+# contains that library to the package.
+#
+# We make two modifications to that process...
+#
 override_dh_shlibdeps:
-        # make the postgres package's dependencies a recommendation
-        # rather than a hard dependency.
+        # Firstly, postgres is not a hard dependency for us, so we want to make
+        # the things that psycopg2 depends on (such as libpq) be
+        # recommendations rather than hard dependencies. We do so by
+        # running dpkg-shlibdeps manually on psycopg2's libs.
+        #
 	find debian/$(PACKAGE_NAME)/ -path '*/site-packages/psycopg2/*.so' | \
 	    xargs dpkg-shlibdeps -Tdebian/$(PACKAGE_NAME).substvars \
 	        -pshlibs1 -dRecommends
 
-        # all the other dependencies can be normal 'Depends' requirements,
-        # except for PIL's, which is self-contained and which confuses
-        # dpkg-shlibdeps.
-	dh_shlibdeps -X site-packages/PIL/.libs -X site-packages/psycopg2
+        # secondly, we exclude PIL's libraries from the process. They are known
+        # to be self-contained, but they have interdependencies and
+        # dpkg-shlibdeps doesn't know how to resolve them.
+        #
+        # As of Pillow 7.1.0, these libraries are in
+        # site-packages/Pillow.libs. Previously, they were in
+        # site-packages/PIL/.libs.
+        #
+        # (we also need to exclude psycopg2, of course, since we've already
+        # dealt with that.)
+        #
+	dh_shlibdeps \
+	    -X site-packages/PIL/.libs \
+	    -X site-packages/Pillow.libs \
+	    -X site-packages/psycopg2
 
 override_dh_virtualenv:
 	./debian/build_virtualenv
diff --git a/docs/admin_api/user_admin_api.rst b/docs/admin_api/user_admin_api.rst
index 6b02d963e6..9ce10119ff 100644
--- a/docs/admin_api/user_admin_api.rst
+++ b/docs/admin_api/user_admin_api.rst
@@ -38,6 +38,7 @@ The parameter ``threepids`` is optional.
 The parameter ``avatar_url`` is optional.
 The parameter ``admin`` is optional and defaults to 'false'.
 The parameter ``deactivated`` is optional and defaults to 'false'.
+The parameter ``password`` is optional. If provided, the user's password is updated and all devices are logged out.
 If the user already exists then optional parameters default to the current value.
 
 List Accounts
@@ -168,11 +169,14 @@ with a body of:
 .. code:: json
 
    {
-       "new_password": "<secret>"
+       "new_password": "<secret>",
+       "logout_devices": true,
    }
 
 including an ``access_token`` of a server admin.
 
+The parameter ``new_password`` is required.
+The parameter ``logout_devices`` is optional and defaults to ``true``.
 
 Get whether a user is a server administrator or not
 ===================================================
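
The password-reset endpoint above takes a plain JSON body and a server admin's
access token. A minimal sketch of calling it from Python, assuming the
`/_synapse/admin/v1/reset_password/<user_id>` route and the `requests` library
(the homeserver URL, token and user ID are placeholders):

```python
import requests

HOMESERVER = "https://homeserver.example.com"  # placeholder
ADMIN_TOKEN = "<admin_access_token>"           # placeholder
USER_ID = "@alice:example.com"                 # placeholder

# logout_devices defaults to true; pass False to keep existing
# sessions logged in after the password change.
resp = requests.post(
    f"{HOMESERVER}/_synapse/admin/v1/reset_password/{USER_ID}",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={"new_password": "<secret>", "logout_devices": True},
)
resp.raise_for_status()
```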
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml
index 54cbe840d5..2ff0dd05a2 100644
--- a/docs/sample_config.yaml
+++ b/docs/sample_config.yaml
@@ -1347,6 +1347,25 @@ saml2_config:
   #
   #grandfathered_mxid_source_attribute: upn
 
+  # Directory in which Synapse will try to find the template files below.
+  # If not set, default templates from within the Synapse package will be used.
+  #
+  # DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
+  # If you *do* uncomment it, you will need to make sure that all the templates
+  # below are in the directory.
+  #
+  # Synapse will look for the following templates in this directory:
+  #
+  # * HTML page to display to users if something goes wrong during the
+  #   authentication process: 'saml_error.html'.
+  #
+  #   This template doesn't currently need any variable to render.
+  #
+  # You can see the default templates at:
+  # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
+  #
+  #template_dir: "res/templates"
+
 
 
 # Enable CAS for registration and login.
@@ -1360,6 +1379,56 @@ saml2_config:
 #   #    name: value
 
 
+# Additional settings to use with single-sign on systems such as SAML2 and CAS.
+#
+sso:
+    # A list of client URLs which are whitelisted so that the user does not
+    # have to confirm giving access to their account to the URL. Any client
+    # whose URL starts with an entry in the following list will not be subject
+    # to an additional confirmation step after the SSO login is completed.
+    #
+    # WARNING: An entry such as "https://my.client" is insecure, because it
+    # will also match "https://my.client.evil.site", exposing your users to
+    # phishing attacks from evil.site. To avoid this, include a slash after the
+    # hostname: "https://my.client/".
+    #
+    # By default, this list is empty.
+    #
+    #client_whitelist:
+    #  - https://riot.im/develop
+    #  - https://my.custom.client/
+
+    # Directory in which Synapse will try to find the template files below.
+    # If not set, default templates from within the Synapse package will be used.
+    #
+    # DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
+    # If you *do* uncomment it, you will need to make sure that all the templates
+    # below are in the directory.
+    #
+    # Synapse will look for the following templates in this directory:
+    #
+    # * HTML page for a confirmation step before redirecting back to the client
+    #   with the login token: 'sso_redirect_confirm.html'.
+    #
+    #   When rendering, this template is given three variables:
+    #     * redirect_url: the URL the user is about to be redirected to. Needs
+    #                     manual escaping (see
+    #                     https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+    #
+    #     * display_url: the same as `redirect_url`, but with the query
+    #                    parameters stripped. The intention is to have a
+    #                    human-readable URL to show to users, not to use it as
+    #                    the final address to redirect to. Needs manual escaping
+    #                    (see https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+    #
+    #     * server_name: the homeserver's name.
+    #
+    # You can see the default templates at:
+    # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
+    #
+    #template_dir: "res/templates"
+
+
 # The JWT needs to contain a globally unique "sub" (subject) claim.
 #
 #jwt_config:
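The whitelist entries are plain URL prefixes: as the auth handler changes
later in this diff show, the configured list is cast to a tuple and checked
with str.startswith. A minimal sketch of that check (names and URLs are
illustrative):

    # Sketch of the prefix matching applied to SSO client URLs.
    safe = ("https://my.custom.client/",)
    unsafe = ("https://my.client",)  # no trailing slash

    def is_whitelisted(url: str, whitelist: tuple) -> bool:
        # str.startswith accepts a tuple of prefixes, which is why the
        # config list is cast to a tuple before use.
        return url.startswith(whitelist)

    assert is_whitelisted("https://my.custom.client/?loginToken=x", safe)
    # Demonstrates the warning above: the entry without a trailing slash
    # also matches a lookalike domain.
    assert is_whitelisted("https://my.client.evil.site/", unsafe)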
diff --git a/docs/workers.md b/docs/workers.md
index 0d84a58958..cb3b9f8e68 100644
--- a/docs/workers.md
+++ b/docs/workers.md
@@ -268,11 +268,14 @@ Additionally, the following REST endpoints can be handled for GET requests:
 
     ^/_matrix/client/(api/v1|r0|unstable)/pushrules/.*$
     ^/_matrix/client/(api/v1|r0|unstable)/groups/.*$
+    ^/_matrix/client/(api/v1|r0|unstable)/user/[^/]*/account_data/
+    ^/_matrix/client/(api/v1|r0|unstable)/user/[^/]*/rooms/[^/]*/account_data/
 
 Additionally, the following REST endpoints can be handled, but all requests must
 be routed to the same instance:
 
     ^/_matrix/client/(r0|unstable)/register$
+    ^/_matrix/client/(r0|unstable)/auth/.*/fallback/web$
 
Pagination requests can also be handled, but all requests for a given room
must be routed to the same instance. Additionally, care must be taken to
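A quick way to sanity-check which request paths the two new account_data
patterns capture (a standalone sketch; the regexes are copied verbatim from
the list above, and they apply to GET requests only):

    # Sketch: matching request paths against the new worker patterns.
    import re

    PATTERNS = [
        re.compile(r"^/_matrix/client/(api/v1|r0|unstable)/user/[^/]*/account_data/"),
        re.compile(r"^/_matrix/client/(api/v1|r0|unstable)/user/[^/]*/rooms/[^/]*/account_data/"),
    ]

    def routable_to_worker(path: str) -> bool:
        return any(p.match(path) for p in PATTERNS)

    assert routable_to_worker(
        "/_matrix/client/r0/user/@alice:example.com/account_data/m.direct"
    )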
diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml
index 9e644e8567..6b62b79114 100644
--- a/snap/snapcraft.yaml
+++ b/snap/snapcraft.yaml
@@ -1,20 +1,31 @@
 name: matrix-synapse
 base: core18
-version: git 
+version: git
 summary: Reference Matrix homeserver
 description: |
   Synapse is the reference Matrix homeserver.
   Matrix is a federated and decentralised instant messaging and VoIP system.
 
-grade: stable 
-confinement: strict 
+grade: stable
+confinement: strict
 
 apps:
-  matrix-synapse: 
+  matrix-synapse:
     command: synctl --no-daemonize start $SNAP_COMMON/homeserver.yaml
     stop-command: synctl -c $SNAP_COMMON stop
     plugs: [network-bind, network]
-    daemon: simple 
+    daemon: simple
+  hash-password:
+    command: hash_password
+  generate-config:
+    command: generate_config
+  generate-signing-key:
+    command: generate_signing_key.py
+  register-new-matrix-user:
+    command: register_new_matrix_user
+    plugs: [network]
+  synctl:
+    command: synctl
 parts:
   matrix-synapse:
     source: .
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 3406ce634f..d8d340f426 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -36,7 +36,7 @@ try:
 except ImportError:
     pass
 
-__version__ = "1.11.0"
+__version__ = "1.12.4"
 
 if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
     # We import here so that we don't have to install a bunch of deps when
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index fc172ba854..b46cf93105 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -539,7 +539,7 @@ class Auth(object):
 
     @defer.inlineCallbacks
     def check_can_change_room_list(self, room_id: str, user: UserID):
-        """Check if the user is allowed to edit the room's entry in the
+        """Determine whether the user is allowed to edit the room's entry in the
         published room list.
 
         Args:
@@ -570,12 +570,7 @@ class Auth(object):
         )
         user_level = event_auth.get_user_power_level(user_id, auth_events)
 
-        if user_level < send_level:
-            raise AuthError(
-                403,
-                "This server requires you to be a moderator in the room to"
-                " edit its room list entry",
-            )
+        return user_level >= send_level
 
     @staticmethod
     def has_access_token(request):
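With this change the method reports permission as a boolean rather than
raising, so each caller decides how to surface a failure. A hedged sketch of
the resulting calling pattern (the function name is illustrative, not from
this diff):

    # Sketch: a caller now raises its own AuthError on a False return.
    from twisted.internet import defer

    from synapse.api.errors import AuthError

    @defer.inlineCallbacks
    def edit_published_room_list(auth, room_id, user):
        allowed = yield auth.check_can_change_room_list(room_id, user)
        if not allowed:
            raise AuthError(403, "You cannot edit this room's room list entry")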
diff --git a/synapse/api/errors.py b/synapse/api/errors.py
index 0c20601600..616942b057 100644
--- a/synapse/api/errors.py
+++ b/synapse/api/errors.py
@@ -66,6 +66,7 @@ class Codes(object):
     EXPIRED_ACCOUNT = "ORG_MATRIX_EXPIRED_ACCOUNT"
     INVALID_SIGNATURE = "M_INVALID_SIGNATURE"
     USER_DEACTIVATED = "M_USER_DEACTIVATED"
+    BAD_ALIAS = "M_BAD_ALIAS"
 
 
 class CodeMessageException(RuntimeError):
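A sketch of how a handler might raise the new error code (illustrative; the
message text is not from this diff):

    from synapse.api.errors import Codes, SynapseError

    def reject_bad_alias(alias: str):
        # Clients receive errcode M_BAD_ALIAS in the error body.
        raise SynapseError(
            400, "Alias %s does not point at this room" % (alias,), Codes.BAD_ALIAS
        )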
diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py
index cf7ee60d3a..871179749a 100644
--- a/synapse/api/room_versions.py
+++ b/synapse/api/room_versions.py
@@ -57,7 +57,7 @@ class RoomVersion(object):
     state_res = attr.ib()  # int; one of the StateResolutionVersions
     enforce_key_validity = attr.ib()  # bool
 
-    # bool: before MSC2260, anyone was allowed to send an aliases event
+    # bool: before MSC2261/MSC2432, m.room.aliases had special auth rules and redaction rules
     special_case_aliases_auth = attr.ib(type=bool, default=False)
 
 
@@ -102,12 +102,13 @@ class RoomVersions(object):
         enforce_key_validity=True,
         special_case_aliases_auth=True,
     )
-    MSC2260_DEV = RoomVersion(
-        "org.matrix.msc2260",
+    MSC2432_DEV = RoomVersion(
+        "org.matrix.msc2432",
         RoomDisposition.UNSTABLE,
         EventFormatVersions.V3,
         StateResolutionVersions.V2,
         enforce_key_validity=True,
+        special_case_aliases_auth=False,
     )
 
 
@@ -119,6 +120,6 @@ KNOWN_ROOM_VERSIONS = {
         RoomVersions.V3,
         RoomVersions.V4,
         RoomVersions.V5,
-        RoomVersions.MSC2260_DEV,
+        RoomVersions.MSC2432_DEV,
     )
 }  # type: Dict[str, RoomVersion]
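A small sketch of how the renamed room version and its capability flag are
consulted (this mirrors the event_auth.py change later in this diff):

    from synapse.api.room_versions import KNOWN_ROOM_VERSIONS

    room_version_obj = KNOWN_ROOM_VERSIONS["org.matrix.msc2432"]
    # MSC2432 rooms drop the special auth/redaction rules for m.room.aliases.
    assert room_version_obj.special_case_aliases_auth is False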
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 9ffd23c6df..4d84f4595a 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -276,6 +276,7 @@ def start(hs, listeners=None):
         # It is now safe to start your Synapse.
         hs.start_listening(listeners)
         hs.get_datastore().db.start_profiling()
+        hs.get_pusherpool().start()
 
         setup_sentry(hs)
         setup_sdnotify(hs)
diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py
index 30efd39092..66be6ea2ec 100644
--- a/synapse/app/generic_worker.py
+++ b/synapse/app/generic_worker.py
@@ -98,6 +98,10 @@ from synapse.rest.client.v1.voip import VoipRestServlet
 from synapse.rest.client.v2_alpha import groups, sync, user_directory
 from synapse.rest.client.v2_alpha._base import client_patterns
 from synapse.rest.client.v2_alpha.account import ThreepidRestServlet
+from synapse.rest.client.v2_alpha.account_data import (
+    AccountDataServlet,
+    RoomAccountDataServlet,
+)
 from synapse.rest.client.v2_alpha.keys import KeyChangesServlet, KeyQueryServlet
 from synapse.rest.client.v2_alpha.register import RegisterRestServlet
 from synapse.rest.client.versions import VersionsRestServlet
@@ -475,6 +479,8 @@ class GenericWorkerServer(HomeServer):
                     ProfileDisplaynameRestServlet(self).register(resource)
                     ProfileRestServlet(self).register(resource)
                     KeyUploadServlet(self).register(resource)
+                    AccountDataServlet(self).register(resource)
+                    RoomAccountDataServlet(self).register(resource)
 
                     sync.register_servlets(self, resource)
                     events.register_servlets(self, resource)
@@ -494,20 +500,26 @@ class GenericWorkerServer(HomeServer):
                 elif name == "federation":
                     resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
                 elif name == "media":
-                    media_repo = self.get_media_repository_resource()
-
-                    # We need to serve the admin servlets for media on the
-                    # worker.
-                    admin_resource = JsonResource(self, canonical_json=False)
-                    register_servlets_for_media_repo(self, admin_resource)
-
-                    resources.update(
-                        {
-                            MEDIA_PREFIX: media_repo,
-                            LEGACY_MEDIA_PREFIX: media_repo,
-                            "/_synapse/admin": admin_resource,
-                        }
-                    )
+                    if self.config.can_load_media_repo:
+                        media_repo = self.get_media_repository_resource()
+
+                        # We need to serve the admin servlets for media on the
+                        # worker.
+                        admin_resource = JsonResource(self, canonical_json=False)
+                        register_servlets_for_media_repo(self, admin_resource)
+
+                        resources.update(
+                            {
+                                MEDIA_PREFIX: media_repo,
+                                LEGACY_MEDIA_PREFIX: media_repo,
+                                "/_synapse/admin": admin_resource,
+                            }
+                        )
+                    else:
+                        logger.warning(
+                            "A 'media' listener is configured but the media"
+                            " repository is disabled. Ignoring."
+                        )
 
                 if name == "openid" and "federation" not in res["names"]:
                     # Only load the openid resource separately if federation resource
@@ -854,6 +866,9 @@ def start(config_options):
 
         # Force the appservice to start since they will be disabled in the main config
         config.notify_appservices = True
+    else:
+        # For other worker types we force this to off.
+        config.notify_appservices = False
 
     if config.worker_app == "synapse.app.pusher":
         if config.start_pushers:
@@ -867,6 +882,9 @@ def start(config_options):
 
         # Force the pushers to start since they will be disabled in the main config
         config.start_pushers = True
+    else:
+        # For other worker types we force this to off.
+        config.start_pushers = False
 
     if config.worker_app == "synapse.app.user_dir":
         if config.update_user_directory:
@@ -880,6 +898,9 @@ def start(config_options):
 
        # Force the user directory to start since it will be disabled in the main config
         config.update_user_directory = True
+    else:
+        # For other worker types we force this to off.
+        config.update_user_directory = False
 
     if config.worker_app == "synapse.app.federation_sender":
         if config.send_federation:
@@ -893,6 +914,9 @@ def start(config_options):
 
        # Force the federation sender to start since it will be disabled in the main config
         config.send_federation = True
+    else:
+        # For other worker types we force this to off.
+        config.send_federation = False
 
     synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts
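Each of these blocks follows the same pattern: the feature is forced on for
the one worker type that owns it and, new in this change, forced off for
every other worker type, so a stray `start_pushers: true` in a shared config
can no longer start pushers on an unrelated worker. A distilled sketch of
the pattern (names simplified):

    # Sketch of the per-worker feature toggle pattern used above.
    def resolve_feature(worker_app: str, owner_app: str) -> bool:
        # The owning worker always runs its feature; every other worker
        # type has it forced off regardless of the shared config value.
        return worker_app == owner_app

    assert resolve_feature("synapse.app.pusher", "synapse.app.pusher")
    assert not resolve_feature("synapse.app.user_dir", "synapse.app.pusher")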
 
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 2b114551a6..80144cd711 100644
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -297,6 +297,11 @@ class SynapseHomeServer(HomeServer):
 
 # Gauges to expose monthly active user control metrics
 current_mau_gauge = Gauge("synapse_admin_mau:current", "Current MAU")
+current_mau_by_service_gauge = Gauge(
+    "synapse_admin_mau_current_mau_by_service",
+    "Current MAU by service",
+    ["app_service"],
+)
 max_mau_gauge = Gauge("synapse_admin_mau:max", "MAU Limit")
 registered_reserved_users_mau_gauge = Gauge(
     "synapse_admin_mau:registered_reserved_users",
@@ -402,7 +407,6 @@ def setup(config_options):
 
             _base.start(hs, config.listeners)
 
-            hs.get_pusherpool().start()
             hs.get_datastore().db.updates.start_doing_background_updates()
         except Exception:
             # Print the exception and bail out.
@@ -584,12 +588,20 @@ def run(hs):
     @defer.inlineCallbacks
     def generate_monthly_active_users():
         current_mau_count = 0
+        current_mau_count_by_service = {}
         reserved_users = ()
         store = hs.get_datastore()
         if hs.config.limit_usage_by_mau or hs.config.mau_stats_only:
             current_mau_count = yield store.get_monthly_active_count()
+            current_mau_count_by_service = (
+                yield store.get_monthly_active_count_by_service()
+            )
             reserved_users = yield store.get_registered_reserved_users()
         current_mau_gauge.set(float(current_mau_count))
+
+        for app_service, count in current_mau_count_by_service.items():
+            current_mau_by_service_gauge.labels(app_service).set(float(count))
+
         registered_reserved_users_mau_gauge.set(float(len(reserved_users)))
         max_mau_gauge.set(float(hs.config.max_mau_value))
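The new gauge is labelled by appservice, which is what lets the loop above
export one time series per service via .labels(...).set(...). A standalone
sketch of that prometheus_client pattern (metric name and data are
illustrative):

    # Sketch: a labelled Gauge exports one time series per label value.
    from prometheus_client import Gauge

    mau_by_service = Gauge(
        "example_mau_by_service", "Current MAU by service", ["app_service"]
    )

    counts = {"native": 120, "my_bridge": 30}  # illustrative counts
    for app_service, count in counts.items():
        mau_by_service.labels(app_service).set(float(count))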
 
diff --git a/synapse/config/_base.pyi b/synapse/config/_base.pyi
index 86bc965ee4..3053fc9d27 100644
--- a/synapse/config/_base.pyi
+++ b/synapse/config/_base.pyi
@@ -24,6 +24,7 @@ from synapse.config import (
     server,
     server_notices_config,
     spam_checker,
+    sso,
     stats,
     third_party_event_rules,
     tls,
@@ -57,6 +58,7 @@ class RootConfig:
     key: key.KeyConfig
     saml2: saml2_config.SAML2Config
     cas: cas.CasConfig
+    sso: sso.SSOConfig
     jwt: jwt_config.JWTConfig
     password: password.PasswordConfig
     email: emailconfig.EmailConfig
diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py
index 0372d19836..a55cf81e68 100644
--- a/synapse/config/homeserver.py
+++ b/synapse/config/homeserver.py
@@ -39,6 +39,7 @@ from .saml2_config import SAML2Config
 from .server import ServerConfig
 from .server_notices_config import ServerNoticesConfig
 from .spam_checker import SpamCheckerConfig
+from .sso import SSOConfig
 from .stats import StatsConfig
 from .third_party_event_rules import ThirdPartyRulesConfig
 from .tls import TlsConfig
@@ -66,6 +67,7 @@ class HomeServerConfig(RootConfig):
         KeyConfig,
         SAML2Config,
         CasConfig,
+        SSOConfig,
         JWTConfig,
         PasswordConfig,
         EmailConfig,
diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py
index 423c158b11..8fe64d90f8 100644
--- a/synapse/config/saml2_config.py
+++ b/synapse/config/saml2_config.py
@@ -15,6 +15,9 @@
 # limitations under the License.
 
 import logging
+import os
+
+import pkg_resources
 
 from synapse.python_dependencies import DependencyException, check_requirements
 from synapse.util.module_loader import load_module, load_python_module
@@ -160,6 +163,14 @@ class SAML2Config(Config):
             saml2_config.get("saml_session_lifetime", "5m")
         )
 
+        template_dir = saml2_config.get("template_dir")
+        if not template_dir:
+            template_dir = pkg_resources.resource_filename("synapse", "res/templates",)
+
+        self.saml2_error_html_content = self.read_file(
+            os.path.join(template_dir, "saml_error.html"), "saml2_config.saml_error",
+        )
+
     def _default_saml_config_dict(
         self, required_attributes: set, optional_attributes: set
     ):
@@ -325,6 +336,25 @@ class SAML2Config(Config):
           # The default is 'uid'.
           #
           #grandfathered_mxid_source_attribute: upn
+
+          # Directory in which Synapse will try to find the template files below.
+          # If not set, default templates from within the Synapse package will be used.
+          #
+          # DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
+          # If you *do* uncomment it, you will need to make sure that all the templates
+          # below are in the directory.
+          #
+          # Synapse will look for the following templates in this directory:
+          #
+          # * HTML page to display to users if something goes wrong during the
+          #   authentication process: 'saml_error.html'.
+          #
+          #   This template doesn't currently need any variables to render.
+          #
+          # You can see the default templates at:
+          # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
+          #
+          #template_dir: "res/templates"
         """ % {
             "config_dir_path": config_dir_path
         }
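Both this class and the new SSOConfig below resolve template_dir the same
way: an explicit setting wins, otherwise the templates shipped inside the
synapse package are used. A minimal sketch of that fallback (the helper name
is illustrative):

    # Sketch: resolve a template directory with a packaged default.
    import pkg_resources

    def resolve_template_dir(configured_dir=None) -> str:
        if configured_dir:
            return configured_dir
        # Falls back to synapse/res/templates inside the installed package.
        return pkg_resources.resource_filename("synapse", "res/templates")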
diff --git a/synapse/config/sso.py b/synapse/config/sso.py
new file mode 100644
index 0000000000..95762689bc
--- /dev/null
+++ b/synapse/config/sso.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from typing import Any, Dict
+
+import pkg_resources
+
+from ._base import Config
+
+
+class SSOConfig(Config):
+    """SSO Configuration
+    """
+
+    section = "sso"
+
+    def read_config(self, config, **kwargs):
+        sso_config = config.get("sso") or {}  # type: Dict[str, Any]
+
+        # Pick a template directory in order of:
+        # * The sso-specific template_dir
+        # * /path/to/synapse/install/res/templates
+        template_dir = sso_config.get("template_dir")
+        if not template_dir:
+            template_dir = pkg_resources.resource_filename("synapse", "res/templates",)
+
+        self.sso_redirect_confirm_template_dir = template_dir
+
+        self.sso_client_whitelist = sso_config.get("client_whitelist") or []
+
+    def generate_config_section(self, **kwargs):
+        return """\
+        # Additional settings to use with single sign-on systems such as SAML2 and CAS.
+        #
+        sso:
+            # A list of client URLs which are whitelisted so that the user does not
+            # have to confirm giving access to their account to the URL. Any client
+            # whose URL starts with an entry in the following list will not be subject
+            # to an additional confirmation step after the SSO login is completed.
+            #
+            # WARNING: An entry such as "https://my.client" is insecure, because it
+            # will also match "https://my.client.evil.site", exposing your users to
+            # phishing attacks from evil.site. To avoid this, include a slash after the
+            # hostname: "https://my.client/".
+            #
+            # By default, this list is empty.
+            #
+            #client_whitelist:
+            #  - https://riot.im/develop
+            #  - https://my.custom.client/
+
+            # Directory in which Synapse will try to find the template files below.
+            # If not set, default templates from within the Synapse package will be used.
+            #
+            # DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
+            # If you *do* uncomment it, you will need to make sure that all the templates
+            # below are in the directory.
+            #
+            # Synapse will look for the following templates in this directory:
+            #
+            # * HTML page for a confirmation step before redirecting back to the client
+            #   with the login token: 'sso_redirect_confirm.html'.
+            #
+            #   When rendering, this template is given three variables:
+            #     * redirect_url: the URL the user is about to be redirected to. Needs
+            #                     manual escaping (see
+            #                     https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+            #
+            #     * display_url: the same as `redirect_url`, but with the query
+            #                    parameters stripped. The intention is to have a
+            #                    human-readable URL to show to users, not to use it as
+            #                    the final address to redirect to. Needs manual escaping
+            #                    (see https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+            #
+            #     * server_name: the homeserver's name.
+            #
+            # You can see the default templates at:
+            # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
+            #
+            #template_dir: "res/templates"
+        """
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py
index e93f0b3705..a5a2a7815d 100644
--- a/synapse/crypto/context_factory.py
+++ b/synapse/crypto/context_factory.py
@@ -75,7 +75,7 @@ class ServerContextFactory(ContextFactory):
 
 
 @implementer(IPolicyForHTTPS)
-class ClientTLSOptionsFactory(object):
+class FederationPolicyForHTTPS(object):
     """Factory for Twisted SSLClientConnectionCreators that are used to make connections
     to remote servers for federation.
 
@@ -103,15 +103,15 @@ class ClientTLSOptionsFactory(object):
         # let us do).
         minTLS = _TLS_VERSION_MAP[config.federation_client_minimum_tls_version]
 
-        self._verify_ssl = CertificateOptions(
+        _verify_ssl = CertificateOptions(
             trustRoot=trust_root, insecurelyLowerMinimumTo=minTLS
         )
-        self._verify_ssl_context = self._verify_ssl.getContext()
-        self._verify_ssl_context.set_info_callback(self._context_info_cb)
+        self._verify_ssl_context = _verify_ssl.getContext()
+        self._verify_ssl_context.set_info_callback(_context_info_cb)
 
-        self._no_verify_ssl = CertificateOptions(insecurelyLowerMinimumTo=minTLS)
-        self._no_verify_ssl_context = self._no_verify_ssl.getContext()
-        self._no_verify_ssl_context.set_info_callback(self._context_info_cb)
+        _no_verify_ssl = CertificateOptions(insecurelyLowerMinimumTo=minTLS)
+        self._no_verify_ssl_context = _no_verify_ssl.getContext()
+        self._no_verify_ssl_context.set_info_callback(_context_info_cb)
 
     def get_options(self, host: bytes):
 
@@ -136,23 +136,6 @@ class ClientTLSOptionsFactory(object):
 
         return SSLClientConnectionCreator(host, ssl_context, should_verify)
 
-    @staticmethod
-    def _context_info_cb(ssl_connection, where, ret):
-        """The 'information callback' for our openssl context object."""
-        # we assume that the app_data on the connection object has been set to
-        # a TLSMemoryBIOProtocol object. (This is done by SSLClientConnectionCreator)
-        tls_protocol = ssl_connection.get_app_data()
-        try:
-            # ... we further assume that SSLClientConnectionCreator has set the
-            # '_synapse_tls_verifier' attribute to a ConnectionVerifier object.
-            tls_protocol._synapse_tls_verifier.verify_context_info_cb(
-                ssl_connection, where
-            )
-        except:  # noqa: E722, taken from the twisted implementation
-            logger.exception("Error during info_callback")
-            f = Failure()
-            tls_protocol.failVerification(f)
-
     def creatorForNetloc(self, hostname, port):
         """Implements the IPolicyForHTTPS interace so that this can be passed
         directly to agents.
@@ -160,6 +143,43 @@ class ClientTLSOptionsFactory(object):
         return self.get_options(hostname)
 
 
+@implementer(IPolicyForHTTPS)
+class RegularPolicyForHTTPS(object):
+    """Factory for Twisted SSLClientConnectionCreators that are used to make connections
+    to remote servers, for purposes other than federation.
+
+    Always uses the same OpenSSL context object, which uses the default OpenSSL CA
+    trust root.
+    """
+
+    def __init__(self):
+        trust_root = platformTrust()
+        self._ssl_context = CertificateOptions(trustRoot=trust_root).getContext()
+        self._ssl_context.set_info_callback(_context_info_cb)
+
+    def creatorForNetloc(self, hostname, port):
+        return SSLClientConnectionCreator(hostname, self._ssl_context, True)
+
+
+def _context_info_cb(ssl_connection, where, ret):
+    """The 'information callback' for our openssl context objects.
+
+    Note: Once this is set as the info callback on a Context object, the Context should
+    only be used with the SSLClientConnectionCreator.
+    """
+    # we assume that the app_data on the connection object has been set to
+    # a TLSMemoryBIOProtocol object. (This is done by SSLClientConnectionCreator)
+    tls_protocol = ssl_connection.get_app_data()
+    try:
+        # ... we further assume that SSLClientConnectionCreator has set the
+        # '_synapse_tls_verifier' attribute to a ConnectionVerifier object.
+        tls_protocol._synapse_tls_verifier.verify_context_info_cb(ssl_connection, where)
+    except:  # noqa: E722, taken from the twisted implementation
+        logger.exception("Error during info_callback")
+        f = Failure()
+        tls_protocol.failVerification(f)
+
+
 @implementer(IOpenSSLClientConnectionCreator)
 class SSLClientConnectionCreator(object):
     """Creates openssl connection objects for client connections.
diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py
index 5f733c1cf5..0422c43fab 100644
--- a/synapse/crypto/event_signing.py
+++ b/synapse/crypto/event_signing.py
@@ -140,7 +140,7 @@ def compute_event_signature(
     Returns:
         a dictionary in the same format of an event's signatures field.
     """
-    redact_json = prune_event_dict(event_dict)
+    redact_json = prune_event_dict(room_version, event_dict)
     redact_json.pop("age_ts", None)
     redact_json.pop("unsigned", None)
     if logger.isEnabledFor(logging.DEBUG):
diff --git a/synapse/event_auth.py b/synapse/event_auth.py
index 472f165044..46beb5334f 100644
--- a/synapse/event_auth.py
+++ b/synapse/event_auth.py
@@ -137,7 +137,7 @@ def check(
             raise AuthError(403, "This room has been marked as unfederatable.")
 
     # 4. If type is m.room.aliases
-    if event.type == EventTypes.Aliases:
+    if event.type == EventTypes.Aliases and room_version_obj.special_case_aliases_auth:
         # 4a. If event has no state_key, reject
         if not event.is_state():
             raise AuthError(403, "Alias event must be a state event")
@@ -152,10 +152,8 @@ def check(
             )
 
         # 4c. Otherwise, allow.
-        # This is removed by https://github.com/matrix-org/matrix-doc/pull/2260
-        if room_version_obj.special_case_aliases_auth:
-            logger.debug("Allowing! %s", event)
-            return
+        logger.debug("Allowing! %s", event)
+        return
 
     if logger.isEnabledFor(logging.DEBUG):
         logger.debug("Auth events: %s", [a.event_id for a in auth_events.values()])
diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py
index 7307116556..533ba327f5 100644
--- a/synapse/events/__init__.py
+++ b/synapse/events/__init__.py
@@ -15,9 +15,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import abc
 import os
 from distutils.util import strtobool
-from typing import Optional, Type
+from typing import Dict, Optional, Type
 
 import six
 
@@ -199,15 +200,25 @@ class _EventInternalMetadata(object):
         return self._dict.get("redacted", False)
 
 
-class EventBase(object):
+class EventBase(metaclass=abc.ABCMeta):
+    @property
+    @abc.abstractmethod
+    def format_version(self) -> int:
+        """The EventFormatVersion implemented by this event"""
+        ...
+
     def __init__(
         self,
-        event_dict,
-        signatures={},
-        unsigned={},
-        internal_metadata_dict={},
-        rejected_reason=None,
+        event_dict: JsonDict,
+        room_version: RoomVersion,
+        signatures: Dict[str, Dict[str, str]],
+        unsigned: JsonDict,
+        internal_metadata_dict: JsonDict,
+        rejected_reason: Optional[str],
     ):
+        assert room_version.event_format == self.format_version
+
+        self.room_version = room_version
         self.signatures = signatures
         self.unsigned = unsigned
         self.rejected_reason = rejected_reason
@@ -303,7 +314,13 @@ class EventBase(object):
 class FrozenEvent(EventBase):
     format_version = EventFormatVersions.V1  # All events of this type are V1
 
-    def __init__(self, event_dict, internal_metadata_dict={}, rejected_reason=None):
+    def __init__(
+        self,
+        event_dict: JsonDict,
+        room_version: RoomVersion,
+        internal_metadata_dict: JsonDict = {},
+        rejected_reason: Optional[str] = None,
+    ):
         event_dict = dict(event_dict)
 
         # Signatures is a dict of dicts, and this is faster than doing a
@@ -326,8 +343,9 @@ class FrozenEvent(EventBase):
 
         self._event_id = event_dict["event_id"]
 
-        super(FrozenEvent, self).__init__(
+        super().__init__(
             frozen_dict,
+            room_version=room_version,
             signatures=signatures,
             unsigned=unsigned,
             internal_metadata_dict=internal_metadata_dict,
@@ -352,7 +370,13 @@ class FrozenEvent(EventBase):
 class FrozenEventV2(EventBase):
     format_version = EventFormatVersions.V2  # All events of this type are V2
 
-    def __init__(self, event_dict, internal_metadata_dict={}, rejected_reason=None):
+    def __init__(
+        self,
+        event_dict: JsonDict,
+        room_version: RoomVersion,
+        internal_metadata_dict: JsonDict = {},
+        rejected_reason: Optional[str] = None,
+    ):
         event_dict = dict(event_dict)
 
         # Signatures is a dict of dicts, and this is faster than doing a
@@ -377,8 +401,9 @@ class FrozenEventV2(EventBase):
 
         self._event_id = None
 
-        super(FrozenEventV2, self).__init__(
+        super().__init__(
             frozen_dict,
+            room_version=room_version,
             signatures=signatures,
             unsigned=unsigned,
             internal_metadata_dict=internal_metadata_dict,
@@ -445,7 +470,7 @@ class FrozenEventV3(FrozenEventV2):
         return self._event_id
 
 
-def event_type_from_format_version(format_version: int) -> Type[EventBase]:
+def _event_type_from_format_version(format_version: int) -> Type[EventBase]:
     """Returns the python type to use to construct an Event object for the
     given event format version.
 
@@ -474,5 +499,5 @@ def make_event_from_dict(
     rejected_reason: Optional[str] = None,
 ) -> EventBase:
     """Construct an EventBase from the given event dict"""
-    event_type = event_type_from_format_version(room_version.event_format)
-    return event_type(event_dict, internal_metadata_dict, rejected_reason)
+    event_type = _event_type_from_format_version(room_version.event_format)
+    return event_type(event_dict, room_version, internal_metadata_dict, rejected_reason)
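With event_type_from_format_version now private, make_event_from_dict is the
one public constructor, and it threads the RoomVersion into the event. A
hedged usage sketch (the event dict is a skeleton, not a fully valid event):

    # Sketch: constructing an event object for a given room version.
    from synapse.api.room_versions import RoomVersions
    from synapse.events import make_event_from_dict

    event_dict = {
        "type": "m.room.message",
        "room_id": "!room:example.com",
        "sender": "@alice:example.com",
        "content": {"body": "hi", "msgtype": "m.text"},
    }
    event = make_event_from_dict(event_dict, RoomVersions.V5)
    # The event now remembers its room version, e.g. for redaction rules.
    assert event.room_version is RoomVersions.V5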
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index f70f5032fb..b75b097e5e 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -23,6 +23,7 @@ from frozendict import frozendict
 from twisted.internet import defer
 
 from synapse.api.constants import EventTypes, RelationTypes
+from synapse.api.room_versions import RoomVersion
 from synapse.util.async_helpers import yieldable_gather_results
 
 from . import EventBase
@@ -35,26 +36,20 @@ from . import EventBase
 SPLIT_FIELD_REGEX = re.compile(r"(?<!\\)\.")
 
 
-def prune_event(event):
+def prune_event(event: EventBase) -> EventBase:
     """ Returns a pruned version of the given event, which removes all keys we
     don't know about or think could potentially be dodgy.
 
     This is used when we "redact" an event. We want to remove all fields that
     the user has specified, but we do want to keep necessary information like
     type, state_key etc.
-
-    Args:
-        event (FrozenEvent)
-
-    Returns:
-        FrozenEvent
     """
-    pruned_event_dict = prune_event_dict(event.get_dict())
+    pruned_event_dict = prune_event_dict(event.room_version, event.get_dict())
 
-    from . import event_type_from_format_version
+    from . import make_event_from_dict
 
-    pruned_event = event_type_from_format_version(event.format_version)(
-        pruned_event_dict, event.internal_metadata.get_dict()
+    pruned_event = make_event_from_dict(
+        pruned_event_dict, event.room_version, event.internal_metadata.get_dict()
     )
 
     # Mark the event as redacted
@@ -63,15 +58,12 @@ def prune_event(event):
     return pruned_event
 
 
-def prune_event_dict(event_dict):
+def prune_event_dict(room_version: RoomVersion, event_dict: dict) -> dict:
     """Redacts the event_dict in the same way as `prune_event`, except it
     operates on dicts rather than event objects
 
-    Args:
-        event_dict (dict)
-
     Returns:
-        dict: A copy of the pruned event dict
+        A copy of the pruned event dict
     """
 
     allowed_keys = [
@@ -118,7 +110,7 @@ def prune_event_dict(event_dict):
             "kick",
             "redact",
         )
-    elif event_type == EventTypes.Aliases:
+    elif event_type == EventTypes.Aliases and room_version.special_case_aliases_auth:
         add_fields("aliases")
     elif event_type == EventTypes.RoomHistoryVisibility:
         add_fields("history_visibility")
diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py
index eea64c1c9f..5c991e5412 100644
--- a/synapse/federation/federation_base.py
+++ b/synapse/federation/federation_base.py
@@ -15,11 +15,13 @@
 # limitations under the License.
 import logging
 from collections import namedtuple
+from typing import Iterable, List
 
 import six
 
 from twisted.internet import defer
-from twisted.internet.defer import DeferredList
+from twisted.internet.defer import Deferred, DeferredList
+from twisted.python.failure import Failure
 
 from synapse.api.constants import MAX_DEPTH, EventTypes, Membership
 from synapse.api.errors import Codes, SynapseError
@@ -29,6 +31,7 @@ from synapse.api.room_versions import (
     RoomVersion,
 )
 from synapse.crypto.event_signing import check_event_content_hash
+from synapse.crypto.keyring import Keyring
 from synapse.events import EventBase, make_event_from_dict
 from synapse.events.utils import prune_event
 from synapse.http.servlet import assert_params_in_dict
@@ -36,10 +39,8 @@ from synapse.logging.context import (
     LoggingContext,
     PreserveLoggingContext,
     make_deferred_yieldable,
-    preserve_fn,
 )
 from synapse.types import JsonDict, get_domain_from_id
-from synapse.util import unwrapFirstError
 
 logger = logging.getLogger(__name__)
 
@@ -54,92 +55,23 @@ class FederationBase(object):
         self.store = hs.get_datastore()
         self._clock = hs.get_clock()
 
-    @defer.inlineCallbacks
-    def _check_sigs_and_hash_and_fetch(
-        self, origin, pdus, room_version, outlier=False, include_none=False
-    ):
-        """Takes a list of PDUs and checks the signatures and hashs of each
-        one. If a PDU fails its signature check then we check if we have it in
-        the database and if not then request if from the originating server of
-        that PDU.
-
-        If a PDU fails its content hash check then it is redacted.
-
-        The given list of PDUs are not modified, instead the function returns
-        a new list.
-
-        Args:
-            origin (str)
-            pdu (list)
-            room_version (str)
-            outlier (bool): Whether the events are outliers or not
-            include_none (str): Whether to include None in the returned list
-                for events that have failed their checks
-
-        Returns:
-            Deferred : A list of PDUs that have valid signatures and hashes.
-        """
-        deferreds = self._check_sigs_and_hashes(room_version, pdus)
-
-        @defer.inlineCallbacks
-        def handle_check_result(pdu, deferred):
-            try:
-                res = yield make_deferred_yieldable(deferred)
-            except SynapseError:
-                res = None
-
-            if not res:
-                # Check local db.
-                res = yield self.store.get_event(
-                    pdu.event_id, allow_rejected=True, allow_none=True
-                )
-
-            if not res and pdu.origin != origin:
-                try:
-                    res = yield self.get_pdu(
-                        destinations=[pdu.origin],
-                        event_id=pdu.event_id,
-                        room_version=room_version,
-                        outlier=outlier,
-                        timeout=10000,
-                    )
-                except SynapseError:
-                    pass
-
-            if not res:
-                logger.warning(
-                    "Failed to find copy of %s with valid signature", pdu.event_id
-                )
-
-            return res
-
-        handle = preserve_fn(handle_check_result)
-        deferreds2 = [handle(pdu, deferred) for pdu, deferred in zip(pdus, deferreds)]
-
-        valid_pdus = yield make_deferred_yieldable(
-            defer.gatherResults(deferreds2, consumeErrors=True)
-        ).addErrback(unwrapFirstError)
-
-        if include_none:
-            return valid_pdus
-        else:
-            return [p for p in valid_pdus if p]
-
-    def _check_sigs_and_hash(self, room_version, pdu):
+    def _check_sigs_and_hash(self, room_version: str, pdu: EventBase) -> Deferred:
         return make_deferred_yieldable(
             self._check_sigs_and_hashes(room_version, [pdu])[0]
         )
 
-    def _check_sigs_and_hashes(self, room_version, pdus):
+    def _check_sigs_and_hashes(
+        self, room_version: str, pdus: List[EventBase]
+    ) -> List[Deferred]:
         """Checks that each of the received events is correctly signed by the
         sending server.
 
         Args:
-            room_version (str): The room version of the PDUs
-            pdus (list[FrozenEvent]): the events to be checked
+            room_version: The room version of the PDUs
+            pdus: the events to be checked
 
         Returns:
-            list[Deferred]: for each input event, a deferred which:
+            For each input event, a deferred which:
               * returns the original event if the checks pass
               * returns a redacted version of the event (if the signature
                 matched but the hash did not)
@@ -150,7 +82,7 @@ class FederationBase(object):
 
         ctx = LoggingContext.current_context()
 
-        def callback(_, pdu):
+        def callback(_, pdu: EventBase):
             with PreserveLoggingContext(ctx):
                 if not check_event_content_hash(pdu):
                     # let's try to distinguish between failures because the event was
@@ -187,7 +119,7 @@ class FederationBase(object):
 
                 return pdu
 
-        def errback(failure, pdu):
+        def errback(failure: Failure, pdu: EventBase):
             failure.trap(SynapseError)
             with PreserveLoggingContext(ctx):
                 logger.warning(
@@ -213,16 +145,18 @@ class PduToCheckSig(
     pass
 
 
-def _check_sigs_on_pdus(keyring, room_version, pdus):
+def _check_sigs_on_pdus(
+    keyring: Keyring, room_version: str, pdus: Iterable[EventBase]
+) -> List[Deferred]:
     """Check that the given events are correctly signed
 
     Args:
-        keyring (synapse.crypto.Keyring): keyring object to do the checks
-        room_version (str): the room version of the PDUs
-        pdus (Collection[EventBase]): the events to be checked
+        keyring: keyring object to do the checks
+        room_version: the room version of the PDUs
+        pdus: the events to be checked
 
     Returns:
-        List[Deferred]: a Deferred for each event in pdus, which will either succeed if
+        A Deferred for each event in pdus, which will either succeed if
            the signatures are valid, or fail (with a SynapseError) if not.
     """
 
@@ -327,7 +261,7 @@ def _check_sigs_on_pdus(keyring, room_version, pdus):
     return [_flatten_deferred_list(p.deferreds) for p in pdus_to_check]
 
 
-def _flatten_deferred_list(deferreds):
+def _flatten_deferred_list(deferreds: List[Deferred]) -> Deferred:
     """Given a list of deferreds, either return the single deferred,
     combine into a DeferredList, or return an already resolved deferred.
     """
@@ -339,7 +273,7 @@ def _flatten_deferred_list(deferreds):
         return defer.succeed(None)
 
 
-def _is_invite_via_3pid(event):
+def _is_invite_via_3pid(event: EventBase) -> bool:
     return (
         event.type == EventTypes.Member
         and event.membership == Membership.INVITE
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index 4870e39652..8c6b839478 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -33,6 +33,7 @@ from typing import (
 from prometheus_client import Counter
 
 from twisted.internet import defer
+from twisted.internet.defer import Deferred
 
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import (
@@ -51,7 +52,7 @@ from synapse.api.room_versions import (
 )
 from synapse.events import EventBase, builder
 from synapse.federation.federation_base import FederationBase, event_from_pdu_json
-from synapse.logging.context import make_deferred_yieldable
+from synapse.logging.context import make_deferred_yieldable, preserve_fn
 from synapse.logging.utils import log_function
 from synapse.types import JsonDict
 from synapse.util import unwrapFirstError
@@ -187,7 +188,7 @@ class FederationClient(FederationBase):
 
     async def backfill(
         self, dest: str, room_id: str, limit: int, extremities: Iterable[str]
-    ) -> List[EventBase]:
+    ) -> Optional[List[EventBase]]:
         """Requests some more historic PDUs for the given room from the
         given destination server.
 
@@ -199,9 +200,9 @@ class FederationClient(FederationBase):
         """
         logger.debug("backfill extrem=%s", extremities)
 
-        # If there are no extremeties then we've (probably) reached the start.
+        # If there are no extremities then we've (probably) reached the start.
         if not extremities:
-            return
+            return None
 
         transaction_data = await self.transport_layer.backfill(
             dest, room_id, extremities, limit
@@ -284,7 +285,7 @@ class FederationClient(FederationBase):
                 pdu_list = [
                     event_from_pdu_json(p, room_version, outlier=outlier)
                     for p in transaction_data["pdus"]
-                ]
+                ]  # type: List[EventBase]
 
                 if pdu_list and pdu_list[0]:
                     pdu = pdu_list[0]
@@ -345,6 +346,83 @@ class FederationClient(FederationBase):
 
         return state_event_ids, auth_event_ids
 
+    async def _check_sigs_and_hash_and_fetch(
+        self,
+        origin: str,
+        pdus: List[EventBase],
+        room_version: str,
+        outlier: bool = False,
+        include_none: bool = False,
+    ) -> List[EventBase]:
+        """Takes a list of PDUs and checks the signatures and hashs of each
+        one. If a PDU fails its signature check then we check if we have it in
+        the database and if not then request if from the originating server of
+        that PDU.
+
+        If a PDU fails its content hash check then it is redacted.
+
+        The given list of PDUs are not modified, instead the function returns
+        a new list.
+
+        Args:
+            origin
+            pdus
+            room_version
+            outlier: Whether the events are outliers or not
+            include_none: Whether to include None in the returned list
+                for events that have failed their checks
+
+        Returns:
+            A list of PDUs that have valid signatures and hashes.
+        """
+        deferreds = self._check_sigs_and_hashes(room_version, pdus)
+
+        @defer.inlineCallbacks
+        def handle_check_result(pdu: EventBase, deferred: Deferred):
+            try:
+                res = yield make_deferred_yieldable(deferred)
+            except SynapseError:
+                res = None
+
+            if not res:
+                # Check local db.
+                res = yield self.store.get_event(
+                    pdu.event_id, allow_rejected=True, allow_none=True
+                )
+
+            if not res and pdu.origin != origin:
+                try:
+                    res = yield defer.ensureDeferred(
+                        self.get_pdu(
+                            destinations=[pdu.origin],
+                            event_id=pdu.event_id,
+                            room_version=room_version,  # type: ignore
+                            outlier=outlier,
+                            timeout=10000,
+                        )
+                    )
+                except SynapseError:
+                    pass
+
+            if not res:
+                logger.warning(
+                    "Failed to find copy of %s with valid signature", pdu.event_id
+                )
+
+            return res
+
+        handle = preserve_fn(handle_check_result)
+        deferreds2 = [handle(pdu, deferred) for pdu, deferred in zip(pdus, deferreds)]
+
+        valid_pdus = await make_deferred_yieldable(
+            defer.gatherResults(deferreds2, consumeErrors=True)
+        ).addErrback(unwrapFirstError)
+
+        if include_none:
+            return valid_pdus
+        else:
+            return [p for p in valid_pdus if p]
+
     async def get_event_auth(self, destination, room_id, event_id):
         res = await self.transport_layer.get_event_auth(destination, room_id, event_id)
 
@@ -615,7 +693,7 @@ class FederationClient(FederationBase):
             ]
             if auth_chain_create_events != [create_event.event_id]:
                 raise InvalidResponseError(
-                    "Unexpected create event(s) in auth chain"
+                    "Unexpected create event(s) in auth chain: %s"
                     % (auth_chain_create_events,)
                 )
 
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index 7f9da49326..275b9c99d7 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -470,57 +470,6 @@ class FederationServer(FederationBase):
             res = {"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus]}
         return 200, res
 
-    async def on_query_auth_request(self, origin, content, room_id, event_id):
-        """
-        Content is a dict with keys::
-            auth_chain (list): A list of events that give the auth chain.
-            missing (list): A list of event_ids indicating what the other
-              side (`origin`) think we're missing.
-            rejects (dict): A mapping from event_id to a 2-tuple of reason
-              string and a proof (or None) of why the event was rejected.
-              The keys of this dict give the list of events the `origin` has
-              rejected.
-
-        Args:
-            origin (str)
-            content (dict)
-            event_id (str)
-
-        Returns:
-            Deferred: Results in `dict` with the same format as `content`
-        """
-        with (await self._server_linearizer.queue((origin, room_id))):
-            origin_host, _ = parse_server_name(origin)
-            await self.check_server_matches_acl(origin_host, room_id)
-
-            room_version = await self.store.get_room_version(room_id)
-
-            auth_chain = [
-                event_from_pdu_json(e, room_version) for e in content["auth_chain"]
-            ]
-
-            signed_auth = await self._check_sigs_and_hash_and_fetch(
-                origin, auth_chain, outlier=True, room_version=room_version.identifier
-            )
-
-            ret = await self.handler.on_query_auth(
-                origin,
-                event_id,
-                room_id,
-                signed_auth,
-                content.get("rejects", []),
-                content.get("missing", []),
-            )
-
-            time_now = self._clock.time_msec()
-            send_content = {
-                "auth_chain": [e.get_pdu_json(time_now) for e in ret["auth_chain"]],
-                "rejects": ret.get("rejects", []),
-                "missing": ret.get("missing", []),
-            }
-
-        return 200, send_content
-
     @log_function
     def on_query_client_keys(self, origin, content):
         return self.on_query_request("client_keys", content)
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index dc563538de..383e3fdc8b 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -399,20 +399,30 @@ class TransportLayerClient(object):
             {
               "device_keys": {
                 "<user_id>": ["<device_id>"]
-            } }
+              }
+            }
 
         Response:
             {
               "device_keys": {
                 "<user_id>": {
                   "<device_id>": {...}
-            } } }
+                }
+              },
+              "master_key": {
+                "<user_id>": {...}
+                }
+              },
+              "self_signing_key": {
+                "<user_id>": {...}
+              }
+            }
 
         Args:
             destination(str): The server to query.
             query_content(dict): The user ids to query.
         Returns:
-            A dict containg the device keys.
+            A dict containing device and cross-signing keys.
         """
         path = _create_v1_path("/user/keys/query")
 
@@ -429,14 +439,30 @@ class TransportLayerClient(object):
         Response:
             {
               "stream_id": "...",
-              "devices": [ { ... } ]
+              "devices": [ { ... } ],
+              "master_key": {
+                "user_id": "<user_id>",
+                "usage": [...],
+                "keys": {...},
+                "signatures": {
+                  "<user_id>": {...}
+                }
+              },
+              "self_signing_key": {
+                "user_id": "<user_id>",
+                "usage": [...],
+                "keys": {...},
+                "signatures": {
+                  "<user_id>": {...}
+                }
+              }
             }
 
         Args:
             destination(str): The server to query.
             query_content(dict): The user ids to query.
         Returns:
-            A dict containg the device keys.
+            A dict containing device and cross-signing keys.
         """
         path = _create_v1_path("/user/devices/%s", user_id)
 
@@ -454,8 +480,10 @@ class TransportLayerClient(object):
             {
               "one_time_keys": {
                 "<user_id>": {
-                    "<device_id>": "<algorithm>"
-            } } }
+                  "<device_id>": "<algorithm>"
+                }
+              }
+            }
 
         Response:
             {
@@ -463,13 +491,16 @@ class TransportLayerClient(object):
                 "<user_id>": {
                   "<device_id>": {
                     "<algorithm>:<key_id>": "<key_base64>"
-            } } } }
+                  }
+                }
+              }
+            }
 
         Args:
             destination(str): The server to query.
             query_content(dict): The user ids to query.
         Returns:
-            A dict containg the one-time keys.
+            A dict containing the one-time keys.
         """
 
         path = _create_v1_path("/user/keys/claim")
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 92a9ae2320..af4595498c 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -643,17 +643,6 @@ class FederationClientKeysClaimServlet(BaseFederationServlet):
         return 200, response
 
 
-class FederationQueryAuthServlet(BaseFederationServlet):
-    PATH = "/query_auth/(?P<context>[^/]*)/(?P<event_id>[^/]*)"
-
-    async def on_POST(self, origin, content, query, context, event_id):
-        new_content = await self.handler.on_query_auth_request(
-            origin, content, context, event_id
-        )
-
-        return 200, new_content
-
-
 class FederationGetMissingEventsServlet(BaseFederationServlet):
     # TODO(paul): Why does this path alone end with "/?" optional?
     PATH = "/get_missing_events/(?P<room_id>[^/]*)/?"
@@ -1412,7 +1401,6 @@ FEDERATION_SERVLET_CLASSES = (
     FederationV2SendLeaveServlet,
     FederationV1InviteServlet,
     FederationV2InviteServlet,
-    FederationQueryAuthServlet,
     FederationGetMissingEventsServlet,
     FederationEventAuthServlet,
     FederationClientKeysQueryServlet,
diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py
index 829f52eca1..590135d19c 100644
--- a/synapse/handlers/account_validity.py
+++ b/synapse/handlers/account_validity.py
@@ -44,7 +44,11 @@ class AccountValidityHandler(object):
 
         self._account_validity = self.hs.config.account_validity
 
-        if self._account_validity.renew_by_email_enabled and load_jinja2_templates:
+        if (
+            self._account_validity.enabled
+            and self._account_validity.renew_by_email_enabled
+            and load_jinja2_templates
+        ):
             # Don't do email-specific configuration if renewal by email is disabled.
             try:
                 app_name = self.hs.config.email_app_name
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 48a88d3c2a..7860f9625e 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -17,9 +17,11 @@
 import logging
 import time
 import unicodedata
+import urllib.parse
+from typing import Any, Dict, Iterable, List, Optional
 
 import attr
-import bcrypt
+import bcrypt  # type: ignore[import]
 import pymacaroons
 
 from twisted.internet import defer
@@ -38,9 +40,12 @@ from synapse.api.errors import (
 from synapse.api.ratelimiting import Ratelimiter
 from synapse.handlers.ui_auth import INTERACTIVE_AUTH_CHECKERS
 from synapse.handlers.ui_auth.checkers import UserInteractiveAuthChecker
+from synapse.http.server import finish_request
+from synapse.http.site import SynapseRequest
 from synapse.logging.context import defer_to_thread
 from synapse.module_api import ModuleApi
-from synapse.types import UserID
+from synapse.push.mailer import load_jinja2_templates
+from synapse.types import Requester, UserID
 from synapse.util.caches.expiringcache import ExpiringCache
 
 from ._base import BaseHandler
@@ -58,11 +63,11 @@ class AuthHandler(BaseHandler):
         """
         super(AuthHandler, self).__init__(hs)
 
-        self.checkers = {}  # type: dict[str, UserInteractiveAuthChecker]
+        self.checkers = {}  # type: Dict[str, UserInteractiveAuthChecker]
         for auth_checker_class in INTERACTIVE_AUTH_CHECKERS:
             inst = auth_checker_class(hs)
             if inst.is_enabled():
-                self.checkers[inst.AUTH_TYPE] = inst
+                self.checkers[inst.AUTH_TYPE] = inst  # type: ignore
 
         self.bcrypt_rounds = hs.config.bcrypt_rounds
 
@@ -108,8 +113,20 @@ class AuthHandler(BaseHandler):
 
         self._clock = self.hs.get_clock()
 
+        # Load the SSO redirect confirmation page HTML template
+        self._sso_redirect_confirm_template = load_jinja2_templates(
+            hs.config.sso_redirect_confirm_template_dir, ["sso_redirect_confirm.html"],
+        )[0]
+
+        self._server_name = hs.config.server_name
+
+        # cast to tuple for use with str.startswith
+        self._whitelisted_sso_clients = tuple(hs.config.sso_client_whitelist)
+
     @defer.inlineCallbacks
-    def validate_user_via_ui_auth(self, requester, request_body, clientip):
+    def validate_user_via_ui_auth(
+        self, requester: Requester, request_body: Dict[str, Any], clientip: str
+    ):
         """
         Checks that the user is who they claim to be, via a UI auth.
 
@@ -118,11 +135,11 @@ class AuthHandler(BaseHandler):
         that it isn't stolen by re-authenticating them.
 
         Args:
-            requester (Requester): The user, as given by the access token
+            requester: The user, as given by the access token
 
-            request_body (dict): The body of the request sent by the client
+            request_body: The body of the request sent by the client
 
-            clientip (str): The IP address of the client.
+            clientip: The IP address of the client.
 
         Returns:
             defer.Deferred[dict]: the parameters for this request (which may
@@ -193,7 +210,9 @@ class AuthHandler(BaseHandler):
         return self.checkers.keys()
 
     @defer.inlineCallbacks
-    def check_auth(self, flows, clientdict, clientip):
+    def check_auth(
+        self, flows: List[List[str]], clientdict: Dict[str, Any], clientip: str
+    ):
         """
         Takes a dictionary sent by the client in the login / registration
         protocol and handles the User-Interactive Auth flow.
@@ -208,14 +227,14 @@ class AuthHandler(BaseHandler):
         decorator.
 
         Args:
-            flows (list): A list of login flows. Each flow is an ordered list of
-                          strings representing auth-types. At least one full
-                          flow must be completed in order for auth to be successful.
+            flows: A list of login flows. Each flow is an ordered list of
+                   strings representing auth-types. At least one full
+                   flow must be completed in order for auth to be successful.
 
             clientdict: The dictionary from the client root level, not the
                         'auth' key: this method prompts for auth if none is sent.
 
-            clientip (str): The IP address of the client.
+            clientip: The IP address of the client.
 
         Returns:
             defer.Deferred[dict, dict, str]: a deferred tuple of
@@ -235,7 +254,7 @@ class AuthHandler(BaseHandler):
         """
 
         authdict = None
-        sid = None
+        sid = None  # type: Optional[str]
         if clientdict and "auth" in clientdict:
             authdict = clientdict["auth"]
             del clientdict["auth"]
@@ -268,9 +287,9 @@ class AuthHandler(BaseHandler):
         creds = session["creds"]
 
         # check auth type currently being presented
-        errordict = {}
+        errordict = {}  # type: Dict[str, Any]
         if "type" in authdict:
-            login_type = authdict["type"]
+            login_type = authdict["type"]  # type: str
             try:
                 result = yield self._check_auth_dict(authdict, clientip)
                 if result:
@@ -311,7 +330,7 @@ class AuthHandler(BaseHandler):
         raise InteractiveAuthIncompleteError(ret)
 
     @defer.inlineCallbacks
-    def add_oob_auth(self, stagetype, authdict, clientip):
+    def add_oob_auth(self, stagetype: str, authdict: Dict[str, Any], clientip: str):
         """
         Adds the result of out-of-band authentication into an existing auth
         session. Currently used for adding the result of fallback auth.
@@ -333,7 +352,7 @@ class AuthHandler(BaseHandler):
             return True
         return False
 
-    def get_session_id(self, clientdict):
+    def get_session_id(self, clientdict: Dict[str, Any]) -> Optional[str]:
         """
         Gets the session ID for a client given the client dictionary
 
@@ -341,7 +360,7 @@ class AuthHandler(BaseHandler):
             clientdict: The dictionary sent by the client in the request
 
         Returns:
-            str|None: The string session ID the client sent. If the client did
+            The string session ID the client sent. If the client did
                 not send a session ID, returns None.
         """
         sid = None
@@ -351,40 +370,42 @@ class AuthHandler(BaseHandler):
                 sid = authdict["session"]
         return sid
 
-    def set_session_data(self, session_id, key, value):
+    def set_session_data(self, session_id: str, key: str, value: Any) -> None:
         """
         Store a key-value pair into the sessions data associated with this
         request. This data is stored server-side and cannot be modified by
         the client.
 
         Args:
-            session_id (string): The ID of this session as returned from check_auth
-            key (string): The key to store the data under
-            value (any): The data to store
+            session_id: The ID of this session as returned from check_auth
+            key: The key to store the data under
+            value: The data to store
         """
         sess = self._get_session_info(session_id)
         sess.setdefault("serverdict", {})[key] = value
         self._save_session(sess)
 
-    def get_session_data(self, session_id, key, default=None):
+    def get_session_data(
+        self, session_id: str, key: str, default: Optional[Any] = None
+    ) -> Any:
         """
         Retrieve data stored with set_session_data
 
         Args:
-            session_id (string): The ID of this session as returned from check_auth
-            key (string): The key to store the data under
-            default (any): Value to return if the key has not been set
+            session_id: The ID of this session as returned from check_auth
+            key: The key to store the data under
+            default: Value to return if the key has not been set
         """
         sess = self._get_session_info(session_id)
         return sess.setdefault("serverdict", {}).get(key, default)
 
     @defer.inlineCallbacks
-    def _check_auth_dict(self, authdict, clientip):
+    def _check_auth_dict(self, authdict: Dict[str, Any], clientip: str):
         """Attempt to validate the auth dict provided by a client
 
         Args:
-            authdict (object): auth dict provided by the client
-            clientip (str): IP address of the client
+            authdict: auth dict provided by the client
+            clientip: IP address of the client
 
         Returns:
             Deferred: result of the stage verification.
@@ -410,10 +431,10 @@ class AuthHandler(BaseHandler):
         (canonical_id, callback) = yield self.validate_login(user_id, authdict)
         return canonical_id
 
-    def _get_params_recaptcha(self):
+    def _get_params_recaptcha(self) -> dict:
         return {"public_key": self.hs.config.recaptcha_public_key}
 
-    def _get_params_terms(self):
+    def _get_params_terms(self) -> dict:
         return {
             "policies": {
                 "privacy_policy": {
@@ -430,7 +451,9 @@ class AuthHandler(BaseHandler):
             }
         }
 
-    def _auth_dict_for_flows(self, flows, session):
+    def _auth_dict_for_flows(
+        self, flows: List[List[str]], session: Dict[str, Any]
+    ) -> Dict[str, Any]:
         public_flows = []
         for f in flows:
             public_flows.append(f)
@@ -440,7 +463,7 @@ class AuthHandler(BaseHandler):
             LoginType.TERMS: self._get_params_terms,
         }
 
-        params = {}
+        params = {}  # type: Dict[str, Any]
 
         for f in public_flows:
             for stage in f:
@@ -453,7 +476,13 @@ class AuthHandler(BaseHandler):
             "params": params,
         }
 
-    def _get_session_info(self, session_id):
+    def _get_session_info(self, session_id: Optional[str]) -> dict:
+        """
+        Gets or creates a session given a session ID.
+
+        The session can be used to track data across multiple requests, e.g. for
+        interactive authentication.
+        """
         if session_id not in self.sessions:
             session_id = None
 
@@ -466,7 +495,9 @@ class AuthHandler(BaseHandler):
         return self.sessions[session_id]
 
     @defer.inlineCallbacks
-    def get_access_token_for_user_id(self, user_id, device_id, valid_until_ms):
+    def get_access_token_for_user_id(
+        self, user_id: str, device_id: Optional[str], valid_until_ms: Optional[int]
+    ):
         """
         Creates a new access token for the user with the given user ID.
 
@@ -476,11 +507,11 @@ class AuthHandler(BaseHandler):
         The device will be recorded in the table if it is not there already.
 
         Args:
-            user_id (str): canonical User ID
-            device_id (str|None): the device ID to associate with the tokens.
+            user_id: canonical User ID
+            device_id: the device ID to associate with the tokens.
                None to leave the tokens unassociated with a device (deprecated:
                we should always have a device ID)
-            valid_until_ms (int|None): when the token is valid until. None for
+            valid_until_ms: when the token is valid until. None for
                 no expiry.
         Returns:
               The access token for the user's session.
@@ -515,13 +546,13 @@ class AuthHandler(BaseHandler):
         return access_token
 
     @defer.inlineCallbacks
-    def check_user_exists(self, user_id):
+    def check_user_exists(self, user_id: str):
         """
         Checks to see if a user with the given id exists. Will check case
         insensitively, but return None if there are multiple inexact matches.
 
         Args:
-            (unicode|bytes) user_id: complete @user:id
+            user_id: complete @user:id
 
         Returns:
             defer.Deferred: (unicode) canonical_user_id, or None if zero or
@@ -536,7 +567,7 @@ class AuthHandler(BaseHandler):
         return None
 
     @defer.inlineCallbacks
-    def _find_user_id_and_pwd_hash(self, user_id):
+    def _find_user_id_and_pwd_hash(self, user_id: str):
         """Checks to see if a user with the given id exists. Will check case
         insensitively, but will return None if there are multiple inexact
         matches.
@@ -566,7 +597,7 @@ class AuthHandler(BaseHandler):
             )
         return result
 
-    def get_supported_login_types(self):
+    def get_supported_login_types(self) -> Iterable[str]:
         """Get a the login types supported for the /login API
 
         By default this is just 'm.login.password' (unless password_enabled is
@@ -574,20 +605,20 @@ class AuthHandler(BaseHandler):
         other login types.
 
         Returns:
-            Iterable[str]: login types
+            login types
         """
         return self._supported_login_types
 
     @defer.inlineCallbacks
-    def validate_login(self, username, login_submission):
+    def validate_login(self, username: str, login_submission: Dict[str, Any]):
         """Authenticates the user for the /login API
 
         Also used by the user-interactive auth flow to validate
         m.login.password auth types.
 
         Args:
-            username (str): username supplied by the user
-            login_submission (dict): the whole of the login submission
+            username: username supplied by the user
+            login_submission: the whole of the login submission
                 (including 'type' and other relevant fields)
         Returns:
             Deferred[str, func]: canonical user id, and optional callback
@@ -675,13 +706,13 @@ class AuthHandler(BaseHandler):
         raise LoginError(403, "Invalid password", errcode=Codes.FORBIDDEN)
 
     @defer.inlineCallbacks
-    def check_password_provider_3pid(self, medium, address, password):
+    def check_password_provider_3pid(self, medium: str, address: str, password: str):
         """Check if a password provider is able to validate a thirdparty login
 
         Args:
-            medium (str): The medium of the 3pid (ex. email).
-            address (str): The address of the 3pid (ex. jdoe@example.com).
-            password (str): The password of the user.
+            medium: The medium of the 3pid (ex. email).
+            address: The address of the 3pid (ex. jdoe@example.com).
+            password: The password of the user.
 
         Returns:
             Deferred[(str|None, func|None)]: A tuple of `(user_id,
@@ -709,15 +740,15 @@ class AuthHandler(BaseHandler):
         return None, None
 
     @defer.inlineCallbacks
-    def _check_local_password(self, user_id, password):
+    def _check_local_password(self, user_id: str, password: str):
         """Authenticate a user against the local password database.
 
         user_id is checked case insensitively, but will return None if there are
         multiple inexact matches.
 
         Args:
-            user_id (unicode): complete @user:id
-            password (unicode): the provided password
+            user_id: complete @user:id
+            password: the provided password
         Returns:
             Deferred[unicode] the canonical_user_id, or Deferred[None] if
                 unknown user/bad password
@@ -740,7 +771,7 @@ class AuthHandler(BaseHandler):
         return user_id
 
     @defer.inlineCallbacks
-    def validate_short_term_login_token_and_get_user_id(self, login_token):
+    def validate_short_term_login_token_and_get_user_id(self, login_token: str):
         auth_api = self.hs.get_auth()
         user_id = None
         try:
@@ -754,11 +785,11 @@ class AuthHandler(BaseHandler):
         return user_id
 
     @defer.inlineCallbacks
-    def delete_access_token(self, access_token):
+    def delete_access_token(self, access_token: str):
         """Invalidate a single access token
 
         Args:
-            access_token (str): access token to be deleted
+            access_token: access token to be deleted
 
         Returns:
             Deferred
@@ -783,15 +814,17 @@ class AuthHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def delete_access_tokens_for_user(
-        self, user_id, except_token_id=None, device_id=None
+        self,
+        user_id: str,
+        except_token_id: Optional[str] = None,
+        device_id: Optional[str] = None,
     ):
         """Invalidate access tokens belonging to a user
 
         Args:
-            user_id (str):  ID of user the tokens belong to
-            except_token_id (str|None): access_token ID which should *not* be
-                deleted
-            device_id (str|None):  ID of device the tokens are associated with.
+            user_id:  ID of user the tokens belong to
+            except_token_id: access_token ID which should *not* be deleted
+            device_id:  ID of device the tokens are associated with.
                 If None, tokens associated with any device (or no device) will
                 be deleted
         Returns:
@@ -815,7 +848,7 @@ class AuthHandler(BaseHandler):
         )
 
     @defer.inlineCallbacks
-    def add_threepid(self, user_id, medium, address, validated_at):
+    def add_threepid(self, user_id: str, medium: str, address: str, validated_at: int):
         # check if medium has a valid value
         if medium not in ["email", "msisdn"]:
             raise SynapseError(
@@ -841,19 +874,20 @@ class AuthHandler(BaseHandler):
         )
 
     @defer.inlineCallbacks
-    def delete_threepid(self, user_id, medium, address, id_server=None):
+    def delete_threepid(
+        self, user_id: str, medium: str, address: str, id_server: Optional[str] = None
+    ):
         """Attempts to unbind the 3pid on the identity servers and deletes it
         from the local database.
 
         Args:
-            user_id (str)
-            medium (str)
-            address (str)
-            id_server (str|None): Use the given identity server when unbinding
+            user_id: ID of user to remove the 3pid from.
+            medium: The medium of the 3pid being removed: "email" or "msisdn".
+            address: The 3pid address to remove.
+            id_server: Use the given identity server when unbinding
                 any threepids. If None then will attempt to unbind using the
                 identity server specified when binding (if known).
 
-
         Returns:
             Deferred[bool]: Returns True if successfully unbound the 3pid on
             the identity server, False if identity server doesn't support the
@@ -872,17 +906,18 @@ class AuthHandler(BaseHandler):
         yield self.store.user_delete_threepid(user_id, medium, address)
         return result
 
-    def _save_session(self, session):
+    def _save_session(self, session: Dict[str, Any]) -> None:
+        """Update the last used time on the session to now and add it back to the session store."""
         # TODO: Persistent storage
         logger.debug("Saving session %s", session)
         session["last_used"] = self.hs.get_clock().time_msec()
         self.sessions[session["id"]] = session
 
-    def hash(self, password):
+    def hash(self, password: str):
         """Computes a secure hash of password.
 
         Args:
-            password (unicode): Password to hash.
+            password: Password to hash.
 
         Returns:
             Deferred(unicode): Hashed password.
@@ -899,12 +934,12 @@ class AuthHandler(BaseHandler):
 
         return defer_to_thread(self.hs.get_reactor(), _do_hash)
 
-    def validate_hash(self, password, stored_hash):
+    def validate_hash(self, password: str, stored_hash: bytes):
         """Validates that self.hash(password) == stored_hash.
 
         Args:
-            password (unicode): Password to hash.
-            stored_hash (bytes): Expected hash value.
+            password: Password to hash.
+            stored_hash: Expected hash value.
 
         Returns:
             Deferred(bool): Whether self.hash(password) == stored_hash.
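
As an aside on the two helpers above: `hash()` and `validate_hash()` just push bcrypt work onto a thread. A minimal sketch of the underlying primitive, assuming NFKC normalisation and no configured password pepper (Synapse's real helper also mixes in `hs.config.password_pepper`):

```python
import unicodedata

import bcrypt

# Normalise, then hash; the rounds value plays the role of
# hs.config.bcrypt_rounds (12 is a common default).
password = unicodedata.normalize("NFKC", "s3cret-pa55word")
stored_hash = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt(rounds=12))

# validate_hash() ultimately reduces to a checkpw comparison like this:
assert bcrypt.checkpw(password.encode("utf-8"), stored_hash)
```
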
@@ -927,13 +962,74 @@ class AuthHandler(BaseHandler):
         else:
             return defer.succeed(False)
 
+    def complete_sso_login(
+        self,
+        registered_user_id: str,
+        request: SynapseRequest,
+        client_redirect_url: str,
+    ):
+        """Having figured out a mxid for this user, complete the HTTP request
+
+        Args:
+            registered_user_id: The registered user ID to complete SSO login for.
+            request: The request to complete.
+            client_redirect_url: The URL to which to redirect the user at the end of the
+                process.
+        """
+        # Create a login token
+        login_token = self.macaroon_gen.generate_short_term_login_token(
+            registered_user_id
+        )
+
+        # Append the login token to the original redirect URL (i.e. with its query
+        # parameters kept intact) to build the URL to which the template needs to
+        # redirect the users once they have clicked on the confirmation link.
+        redirect_url = self.add_query_param_to_url(
+            client_redirect_url, "loginToken", login_token
+        )
+
+        # if the client is whitelisted, we can redirect straight to it
+        if client_redirect_url.startswith(self._whitelisted_sso_clients):
+            request.redirect(redirect_url)
+            finish_request(request)
+            return
+
+        # Otherwise, serve the redirect confirmation page.
+
+        # Remove the query parameters from the redirect URL to get a shorter version of
+        # it. This shortened URL is only shown to the user in the template as a
+        # human-readable hint; it is not the URL we actually redirect to.
+        redirect_url_no_params = client_redirect_url.split("?")[0]
+
+        html = self._sso_redirect_confirm_template.render(
+            display_url=redirect_url_no_params,
+            redirect_url=redirect_url,
+            server_name=self._server_name,
+        ).encode("utf-8")
+
+        request.setResponseCode(200)
+        request.setHeader(b"Content-Type", b"text/html; charset=utf-8")
+        request.setHeader(b"Content-Length", b"%d" % (len(html),))
+        request.write(html)
+        finish_request(request)
+
+    @staticmethod
+    def add_query_param_to_url(url: str, param_name: str, param: Any):
+        url_parts = list(urllib.parse.urlparse(url))
+        query = dict(urllib.parse.parse_qsl(url_parts[4]))
+        query.update({param_name: param})
+        url_parts[4] = urllib.parse.urlencode(query)
+        return urllib.parse.urlunparse(url_parts)
+
 
 @attr.s
 class MacaroonGenerator(object):
 
     hs = attr.ib()
 
-    def generate_access_token(self, user_id, extra_caveats=None):
+    def generate_access_token(
+        self, user_id: str, extra_caveats: Optional[List[str]] = None
+    ) -> str:
         extra_caveats = extra_caveats or []
         macaroon = self._generate_base_macaroon(user_id)
         macaroon.add_first_party_caveat("type = access")
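
A quick usage sketch of the static helper and the whitelist check introduced above (URLs and token are illustrative):

```python
from synapse.handlers.auth import AuthHandler

# Append loginToken while keeping the existing query parameters intact:
client_redirect_url = "https://client.example.com/?homeserver=hs1"
redirect_url = AuthHandler.add_query_param_to_url(
    client_redirect_url, "loginToken", "MDAxY"
)
# -> "https://client.example.com/?homeserver=hs1&loginToken=MDAxY"

# sso_client_whitelist entries are plain URL prefixes; casting the list to a
# tuple (as in __init__ above) lets a single str.startswith call test them all:
whitelisted = ("https://client.example.com/",)
assert client_redirect_url.startswith(whitelisted)
```
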
@@ -946,16 +1042,9 @@ class MacaroonGenerator(object):
             macaroon.add_first_party_caveat(caveat)
         return macaroon.serialize()
 
-    def generate_short_term_login_token(self, user_id, duration_in_ms=(2 * 60 * 1000)):
-        """
-
-        Args:
-            user_id (unicode):
-            duration_in_ms (int):
-
-        Returns:
-            unicode
-        """
+    def generate_short_term_login_token(
+        self, user_id: str, duration_in_ms: int = (2 * 60 * 1000)
+    ) -> str:
         macaroon = self._generate_base_macaroon(user_id)
         macaroon.add_first_party_caveat("type = login")
         now = self.hs.get_clock().time_msec()
@@ -963,12 +1052,12 @@ class MacaroonGenerator(object):
         macaroon.add_first_party_caveat("time < %d" % (expiry,))
         return macaroon.serialize()
 
-    def generate_delete_pusher_token(self, user_id):
+    def generate_delete_pusher_token(self, user_id: str) -> str:
         macaroon = self._generate_base_macaroon(user_id)
         macaroon.add_first_party_caveat("type = delete_pusher")
         return macaroon.serialize()
 
-    def _generate_base_macaroon(self, user_id):
+    def _generate_base_macaroon(self, user_id: str) -> pymacaroons.Macaroon:
         macaroon = pymacaroons.Macaroon(
             location=self.hs.config.server_name,
             identifier="key",
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index a514c30714..993499f446 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -125,8 +125,14 @@ class DeviceWorkerHandler(BaseHandler):
         users_who_share_room = yield self.store.get_users_who_share_room_with_user(
             user_id
         )
+
+        tracked_users = set(users_who_share_room)
+
+        # Always tell the user about their own devices
+        tracked_users.add(user_id)
+
         changed = yield self.store.get_users_whose_devices_changed(
-            from_token.device_list_key, users_who_share_room
+            from_token.device_list_key, tracked_users
         )
 
         # Then work out if any users have since joined
@@ -456,7 +462,11 @@ class DeviceHandler(DeviceWorkerHandler):
 
         room_ids = yield self.store.get_rooms_for_user(user_id)
 
-        yield self.notifier.on_new_event("device_list_key", position, rooms=room_ids)
+        # specify the user ID too since the user should always get their own device list
+        # updates, even if they aren't in any rooms.
+        yield self.notifier.on_new_event(
+            "device_list_key", position, users=[user_id], rooms=room_ids
+        )
 
         if hosts:
             logger.info(
diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py
index 0b23ca919a..1d842c369b 100644
--- a/synapse/handlers/directory.py
+++ b/synapse/handlers/directory.py
@@ -13,11 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-import collections
 import logging
 import string
-from typing import List
+from typing import Iterable, List, Optional
 
 from twisted.internet import defer
 
@@ -30,6 +28,7 @@ from synapse.api.errors import (
     StoreError,
     SynapseError,
 )
+from synapse.appservice import ApplicationService
 from synapse.types import Requester, RoomAlias, UserID, get_domain_from_id
 
 from ._base import BaseHandler
@@ -57,7 +56,13 @@ class DirectoryHandler(BaseHandler):
         self.spam_checker = hs.get_spam_checker()
 
     @defer.inlineCallbacks
-    def _create_association(self, room_alias, room_id, servers=None, creator=None):
+    def _create_association(
+        self,
+        room_alias: RoomAlias,
+        room_id: str,
+        servers: Optional[Iterable[str]] = None,
+        creator: Optional[str] = None,
+    ):
         # general association creation for both human users and app services
 
         for wchar in string.whitespace:
@@ -83,17 +88,21 @@ class DirectoryHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def create_association(
-        self, requester, room_alias, room_id, servers=None, check_membership=True,
+        self,
+        requester: Requester,
+        room_alias: RoomAlias,
+        room_id: str,
+        servers: Optional[List[str]] = None,
+        check_membership: bool = True,
     ):
         """Attempt to create a new alias
 
         Args:
-            requester (Requester)
-            room_alias (RoomAlias)
-            room_id (str)
-            servers (list[str]|None): List of servers that others servers
-                should try and join via
-            check_membership (bool): Whether to check if the user is in the room
+            requester
+            room_alias
+            room_id
+            servers: Iterable of servers that other servers should try to join via
+            check_membership: Whether to check if the user is in the room
                 before the alias can be set (if the server's config requires it).
 
         Returns:
@@ -147,15 +156,15 @@ class DirectoryHandler(BaseHandler):
         yield self._create_association(room_alias, room_id, servers, creator=user_id)
 
     @defer.inlineCallbacks
-    def delete_association(self, requester, room_alias):
+    def delete_association(self, requester: Requester, room_alias: RoomAlias):
         """Remove an alias from the directory
 
         (this is only meant for human users; AS users should call
         delete_appservice_association)
 
         Args:
-            requester (Requester):
-            room_alias (RoomAlias):
+            requester
+            room_alias
 
         Returns:
             Deferred[unicode]: room id that the alias used to point to
@@ -191,16 +200,16 @@ class DirectoryHandler(BaseHandler):
         room_id = yield self._delete_association(room_alias)
 
         try:
-            yield self._update_canonical_alias(
-                requester, requester.user.to_string(), room_id, room_alias
-            )
+            yield self._update_canonical_alias(requester, user_id, room_id, room_alias)
         except AuthError as e:
             logger.info("Failed to update alias events: %s", e)
 
         return room_id
 
     @defer.inlineCallbacks
-    def delete_appservice_association(self, service, room_alias):
+    def delete_appservice_association(
+        self, service: ApplicationService, room_alias: RoomAlias
+    ):
         if not service.is_interested_in_alias(room_alias.to_string()):
             raise SynapseError(
                 400,
@@ -210,7 +219,7 @@ class DirectoryHandler(BaseHandler):
         yield self._delete_association(room_alias)
 
     @defer.inlineCallbacks
-    def _delete_association(self, room_alias):
+    def _delete_association(self, room_alias: RoomAlias):
         if not self.hs.is_mine(room_alias):
             raise SynapseError(400, "Room alias must be local")
 
@@ -219,7 +228,7 @@ class DirectoryHandler(BaseHandler):
         return room_id
 
     @defer.inlineCallbacks
-    def get_association(self, room_alias):
+    def get_association(self, room_alias: RoomAlias):
         room_id = None
         if self.hs.is_mine(room_alias):
             result = yield self.get_association_from_room_alias(room_alias)
@@ -284,7 +293,9 @@ class DirectoryHandler(BaseHandler):
             )
 
     @defer.inlineCallbacks
-    def _update_canonical_alias(self, requester, user_id, room_id, room_alias):
+    def _update_canonical_alias(
+        self, requester: Requester, user_id: str, room_id: str, room_alias: RoomAlias
+    ):
         """
         Send an updated canonical alias event if the removed alias was set as
         the canonical alias or listed in the alt_aliases field.
@@ -307,15 +318,17 @@ class DirectoryHandler(BaseHandler):
             send_update = True
             content.pop("alias", "")
 
-        # Filter alt_aliases for the removed alias.
-        alt_aliases = content.pop("alt_aliases", None)
-        # If the aliases are not a list (or not found) do not attempt to modify
-        # the list.
-        if isinstance(alt_aliases, collections.Sequence):
+        # Filter the alt_aliases property for the removed alias. Note that the
+        # value is not modified if alt_aliases is of an unexpected form.
+        alt_aliases = content.get("alt_aliases")
+        if isinstance(alt_aliases, (list, tuple)) and alias_str in alt_aliases:
             send_update = True
             alt_aliases = [alias for alias in alt_aliases if alias != alias_str]
+
             if alt_aliases:
                 content["alt_aliases"] = alt_aliases
+            else:
+                del content["alt_aliases"]
 
         if send_update:
             yield self.event_creation_handler.create_and_send_nonmember_event(
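
Lifted out of the handler, the alt_aliases filtering above reduces to this sketch (event content illustrative):

```python
content = {
    "alias": "#main:example.com",
    "alt_aliases": ["#old:example.com", "#keep:example.com"],
}
alias_str = "#old:example.com"  # the alias being deleted

alt_aliases = content.get("alt_aliases")
if isinstance(alt_aliases, (list, tuple)) and alias_str in alt_aliases:
    remaining = [alias for alias in alt_aliases if alias != alias_str]
    if remaining:
        content["alt_aliases"] = remaining  # ["#keep:example.com"]
    else:
        del content["alt_aliases"]  # drop the key entirely when empty
```
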
@@ -331,7 +344,7 @@ class DirectoryHandler(BaseHandler):
             )
 
     @defer.inlineCallbacks
-    def get_association_from_room_alias(self, room_alias):
+    def get_association_from_room_alias(self, room_alias: RoomAlias):
         result = yield self.store.get_association_from_room_alias(room_alias)
         if not result:
             # Query AS to see if it exists
@@ -339,7 +352,7 @@ class DirectoryHandler(BaseHandler):
             result = yield as_handler.query_room_alias_exists(room_alias)
         return result
 
-    def can_modify_alias(self, alias, user_id=None):
+    def can_modify_alias(self, alias: RoomAlias, user_id: Optional[str] = None):
         # Any application service "interested" in an alias they are regexing on
         # can modify the alias.
         # Users can only modify the alias if ALL the interested services have
@@ -360,22 +373,42 @@ class DirectoryHandler(BaseHandler):
         return defer.succeed(True)
 
     @defer.inlineCallbacks
-    def _user_can_delete_alias(self, alias, user_id):
+    def _user_can_delete_alias(self, alias: RoomAlias, user_id: str):
+        """Determine whether a user can delete an alias.
+
+        One of the following must be true:
+
+        1. The user created the alias.
+        2. The user is a server administrator.
+        3. The user has a power-level sufficient to send a canonical alias event
+           for the current room.
+
+        """
         creator = yield self.store.get_room_alias_creator(alias.to_string())
 
         if creator is not None and creator == user_id:
             return True
 
-        is_admin = yield self.auth.is_server_admin(UserID.from_string(user_id))
-        return is_admin
+        # Resolve the alias to the corresponding room.
+        room_mapping = yield self.get_association(alias)
+        room_id = room_mapping["room_id"]
+        if not room_id:
+            return False
+
+        res = yield self.auth.check_can_change_room_list(
+            room_id, UserID.from_string(user_id)
+        )
+        return res
 
     @defer.inlineCallbacks
-    def edit_published_room_list(self, requester, room_id, visibility):
+    def edit_published_room_list(
+        self, requester: Requester, room_id: str, visibility: str
+    ):
         """Edit the entry of the room in the published room list.
 
         requester
-        room_id (str)
-        visibility (str): "public" or "private"
+        room_id
+        visibility: "public" or "private"
         """
         user_id = requester.user.to_string()
 
@@ -400,7 +433,15 @@ class DirectoryHandler(BaseHandler):
         if room is None:
             raise SynapseError(400, "Unknown room")
 
-        yield self.auth.check_can_change_room_list(room_id, requester.user)
+        can_change_room_list = yield self.auth.check_can_change_room_list(
+            room_id, requester.user
+        )
+        if not can_change_room_list:
+            raise AuthError(
+                403,
+                "This server requires you to be a moderator in the room to"
+                " edit its room list entry",
+            )
 
         making_public = visibility == "public"
         if making_public:
@@ -421,16 +462,16 @@ class DirectoryHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def edit_published_appservice_room_list(
-        self, appservice_id, network_id, room_id, visibility
+        self, appservice_id: str, network_id: str, room_id: str, visibility: str
     ):
         """Add or remove a room from the appservice/network specific public
         room list.
 
         Args:
-            appservice_id (str): ID of the appservice that owns the list
-            network_id (str): The ID of the network the list is associated with
-            room_id (str)
-            visibility (str): either "public" or "private"
+            appservice_id: ID of the appservice that owns the list
+            network_id: The ID of the network the list is associated with
+            room_id
+            visibility: either "public" or "private"
         """
         if visibility not in ["public", "private"]:
             raise SynapseError(400, "Invalid visibility setting")
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index 95a9d71f41..8f1bc0323c 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -54,19 +54,23 @@ class E2eKeysHandler(object):
 
         self._edu_updater = SigningKeyEduUpdater(hs, self)
 
+        federation_registry = hs.get_federation_registry()
+
         self._is_master = hs.config.worker_app is None
         if not self._is_master:
             self._user_device_resync_client = ReplicationUserDevicesResyncRestServlet.make_client(
                 hs
             )
+        else:
+            # Only register this edu handler on master as it requires writing
+            # device updates to the db
+            #
+            # FIXME: switch to m.signing_key_update when MSC1756 is merged into the spec
+            federation_registry.register_edu_handler(
+                "org.matrix.signing_key_update",
+                self._edu_updater.incoming_signing_key_update,
+            )
 
-        federation_registry = hs.get_federation_registry()
-
-        # FIXME: switch to m.signing_key_update when MSC1756 is merged into the spec
-        federation_registry.register_edu_handler(
-            "org.matrix.signing_key_update",
-            self._edu_updater.incoming_signing_key_update,
-        )
         # doesn't really work as part of the generic query API, because the
         # query request requires an object POST, but we abuse the
         # "query handler" interface.
@@ -170,8 +174,8 @@ class E2eKeysHandler(object):
             """This is called when we are querying the device list of a user on
             a remote homeserver and their device list is not in the device list
             cache. If we share a room with this user and we're not querying for
-            specific user we will update the cache
-            with their device list."""
+            a specific user, we will update the cache with their device list.
+            """
 
             destination_query = remote_queries_not_in_cache[destination]
 
@@ -957,13 +961,19 @@ class E2eKeysHandler(object):
         return signature_list, failures
 
     @defer.inlineCallbacks
-    def _get_e2e_cross_signing_verify_key(self, user_id, key_type, from_user_id=None):
-        """Fetch the cross-signing public key from storage and interpret it.
+    def _get_e2e_cross_signing_verify_key(
+        self, user_id: str, key_type: str, from_user_id: Optional[str] = None
+    ):
+        """Fetch locally or remotely query for a cross-signing public key.
+
+        First, attempt to fetch the cross-signing public key from storage.
+        If that fails, query the keys from the homeserver they belong to
+        and update our local copy.
 
         Args:
-            user_id (str): the user whose key should be fetched
-            key_type (str): the type of key to fetch
-            from_user_id (str): the user that we are fetching the keys for.
+            user_id: the user whose key should be fetched
+            key_type: the type of key to fetch
+            from_user_id: the user that we are fetching the keys for.
                 This affects what signatures are fetched.
 
         Returns:
@@ -972,16 +982,140 @@ class E2eKeysHandler(object):
 
         Raises:
             NotFoundError: if the key is not found
+            SynapseError: if `user_id` is invalid
         """
+        user = UserID.from_string(user_id)
         key = yield self.store.get_e2e_cross_signing_key(
             user_id, key_type, from_user_id
         )
+
+        if key:
+            # We found a copy of this key in our database. Decode and return it
+            key_id, verify_key = get_verify_key_from_cross_signing_key(key)
+            return key, key_id, verify_key
+
+        # If we couldn't find the key locally, and we're looking for keys of
+        # another user then attempt to fetch the missing key from the remote
+        # user's server.
+        #
+        # We may run into this edge case when a user tries to cross-sign a
+        # remote user but does not yet share any rooms with them, in which
+        # case we would not have their key list yet. We instead fetch the key,
+        # store it and notify clients of the new, associated device IDs.
+        if self.is_mine(user) or key_type not in ["master", "self_signing"]:
+            # Note that master and self_signing keys are the only cross-signing keys we
+            # can request over federation
+            raise NotFoundError("No %s key found for %s" % (key_type, user_id))
+
+        (
+            key,
+            key_id,
+            verify_key,
+        ) = yield self._retrieve_cross_signing_keys_for_remote_user(user, key_type)
+
         if key is None:
-            logger.debug("no %s key found for %s", key_type, user_id)
             raise NotFoundError("No %s key found for %s" % (key_type, user_id))
-        key_id, verify_key = get_verify_key_from_cross_signing_key(key)
+
         return key, key_id, verify_key
 
+    @defer.inlineCallbacks
+    def _retrieve_cross_signing_keys_for_remote_user(
+        self, user: UserID, desired_key_type: str,
+    ):
+        """Queries cross-signing keys for a remote user and saves them to the database
+
+        Only the key specified by `desired_key_type` will be returned, though all
+        retrieved keys will be saved regardless.
+
+        Args:
+            user: The user to query remote keys for
+            desired_key_type: The type of key to return: one of "master" or "self_signing".
+
+        Returns:
+            Deferred[Tuple[Optional[Dict], Optional[str], Optional[VerifyKey]]]: A tuple
+            of the retrieved key content, the key's ID and the matching VerifyKey.
+            If the key cannot be retrieved, all values in the tuple will instead be None.
+        """
+        try:
+            remote_result = yield self.federation.query_user_devices(
+                user.domain, user.to_string()
+            )
+        except Exception as e:
+            logger.warning(
+                "Unable to query %s for cross-signing keys of user %s: %s %s",
+                user.domain,
+                user.to_string(),
+                type(e),
+                e,
+            )
+            return None, None, None
+
+        # Process each of the retrieved cross-signing keys
+        desired_key = None
+        desired_key_id = None
+        desired_verify_key = None
+        retrieved_device_ids = []
+        for key_type in ["master", "self_signing"]:
+            key_content = remote_result.get(key_type + "_key")
+            if not key_content:
+                continue
+
+            # Ensure these keys belong to the correct user
+            if "user_id" not in key_content:
+                logger.warning(
+                    "Invalid %s key retrieved, missing user_id field: %s",
+                    key_type,
+                    key_content,
+                )
+                continue
+            if user.to_string() != key_content["user_id"]:
+                logger.warning(
+                    "Found %s key of user %s when querying for keys of user %s",
+                    key_type,
+                    key_content["user_id"],
+                    user.to_string(),
+                )
+                continue
+
+            # Validate the key contents
+            try:
+                # verify_key is a VerifyKey from signedjson, which uses
+                # .version to denote the portion of the key ID after the
+                # algorithm and colon, which is the device ID
+                key_id, verify_key = get_verify_key_from_cross_signing_key(key_content)
+            except ValueError as e:
+                logger.warning(
+                    "Invalid %s key retrieved: %s - %s %s",
+                    key_type,
+                    key_content,
+                    type(e),
+                    e,
+                )
+                continue
+
+            # Note down the device ID attached to this key
+            retrieved_device_ids.append(verify_key.version)
+
+            # If this is the desired key type, save it and its ID/VerifyKey
+            if key_type == desired_key_type:
+                desired_key = key_content
+                desired_verify_key = verify_key
+                desired_key_id = key_id
+
+            # At the same time, store this key in the db for subsequent queries
+            yield self.store.set_e2e_cross_signing_key(
+                user.to_string(), key_type, key_content
+            )
+
+        # Notify clients that new devices for this user have been discovered
+        if retrieved_device_ids:
+            # XXX is this necessary?
+            yield self.device_handler.notify_device_update(
+                user.to_string(), retrieved_device_ids
+            )
+
+        return desired_key, desired_key_id, desired_verify_key
+
 
 def _check_cross_signing_key(key, user_id, key_type, signing_key=None):
     """Check a cross-signing key uploaded by a user.  Performs some basic sanity
diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py
index f1b4424a02..9abaf13b8f 100644
--- a/synapse/handlers/e2e_room_keys.py
+++ b/synapse/handlers/e2e_room_keys.py
@@ -207,6 +207,13 @@ class E2eRoomKeysHandler(object):
             changed = False  # if anything has changed, we need to update the etag
             for room_id, room in iteritems(room_keys["rooms"]):
                 for session_id, room_key in iteritems(room["sessions"]):
+                    if not isinstance(room_key["is_verified"], bool):
+                        msg = (
+                            "is_verified must be a boolean in keys for session %s in"
+                            "room %s" % (session_id, room_id)
+                        )
+                        raise SynapseError(400, msg, Codes.INVALID_PARAM)
+
                     log_kv(
                         {
                             "message": "Trying to upload room key",
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index a0103addd3..b743fc2dcc 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -160,7 +160,7 @@ class MessageHandler(object):
                 raise NotFoundError("Can't find event for token %s" % (at_token,))
 
             visible_events = yield filter_events_for_client(
-                self.storage, user_id, last_events, apply_retention_policies=False
+                self.storage, user_id, last_events, filter_send_to_client=False
             )
 
             event = last_events[0]
@@ -888,19 +888,60 @@ class EventCreationHandler(object):
         yield self.base_handler.maybe_kick_guest_users(event, context)
 
         if event.type == EventTypes.CanonicalAlias:
-            # Check the alias is acually valid (at this time at least)
+            # Validate a newly added alias or newly added alt_aliases.
+
+            original_alias = None
+            original_alt_aliases = set()
+
+            original_event_id = event.unsigned.get("replaces_state")
+            if original_event_id:
+                original_event = yield self.store.get_event(original_event_id)
+
+                if original_event:
+                    original_alias = original_event.content.get("alias", None)
+                    original_alt_aliases = original_event.content.get("alt_aliases", [])
+
+            # Check the alias is currently valid (if it has changed).
             room_alias_str = event.content.get("alias", None)
-            if room_alias_str:
+            directory_handler = self.hs.get_handlers().directory_handler
+            if room_alias_str and room_alias_str != original_alias:
                 room_alias = RoomAlias.from_string(room_alias_str)
-                directory_handler = self.hs.get_handlers().directory_handler
                 mapping = yield directory_handler.get_association(room_alias)
 
                 if mapping["room_id"] != event.room_id:
                     raise SynapseError(
                         400,
                         "Room alias %s does not point to the room" % (room_alias_str,),
+                        Codes.BAD_ALIAS,
                     )
 
+            # Check that alt_aliases is the proper form.
+            alt_aliases = event.content.get("alt_aliases", [])
+            if not isinstance(alt_aliases, (list, tuple)):
+                raise SynapseError(
+                    400, "The alt_aliases property must be a list.", Codes.INVALID_PARAM
+                )
+
+            # If the old version of alt_aliases is of an unknown form,
+            # completely replace it.
+            if not isinstance(original_alt_aliases, (list, tuple)):
+                original_alt_aliases = []
+
+            # Check that each alias is currently valid.
+            new_alt_aliases = set(alt_aliases) - set(original_alt_aliases)
+            if new_alt_aliases:
+                for alias_str in new_alt_aliases:
+                    room_alias = RoomAlias.from_string(alias_str)
+                    mapping = yield directory_handler.get_association(room_alias)
+
+                    if mapping["room_id"] != event.room_id:
+                        raise SynapseError(
+                            400,
+                            "Room alias %s does not point to the room"
+                            % (alias_str,),
+                            Codes.BAD_ALIAS,
+                        )
+
         federation_handler = self.hs.get_handlers().federation_handler
 
         if event.type == EventTypes.Member:
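
The "only validate newly added aliases" step is a set difference; a small sketch with illustrative aliases:

```python
original_alt_aliases = ["#a:example.com"]           # from the replaced event
alt_aliases = ["#a:example.com", "#b:example.com"]  # from the new event

# Only the newly added aliases need a (potentially remote) directory lookup:
new_alt_aliases = set(alt_aliases) - set(original_alt_aliases)
assert new_alt_aliases == {"#b:example.com"}
```
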
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 8ee870f0bb..f580ab2e9f 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -292,16 +292,6 @@ class RoomCreationHandler(BaseHandler):
             except AuthError as e:
                 logger.warning("Unable to update PLs in old room: %s", e)
 
-        new_pl_content = copy_power_levels_contents(old_room_pl_state.content)
-
-        # pre-msc2260 rooms may not have the right setting for aliases. If no other
-        # value is set, set it now.
-        events_default = new_pl_content.get("events_default", 0)
-        new_pl_content.setdefault("events", {}).setdefault(
-            EventTypes.Aliases, events_default
-        )
-
-        logger.debug("Setting correct PLs in new room to %s", new_pl_content)
         yield self.event_creation_handler.create_and_send_nonmember_event(
             requester,
             {
@@ -309,7 +299,7 @@ class RoomCreationHandler(BaseHandler):
                 "state_key": "",
                 "room_id": new_room_id,
                 "sender": requester.user.to_string(),
-                "content": new_pl_content,
+                "content": old_room_pl_state.content,
             },
             ratelimit=False,
         )
@@ -814,10 +804,6 @@ class RoomCreationHandler(BaseHandler):
                     EventTypes.RoomHistoryVisibility: 100,
                     EventTypes.CanonicalAlias: 50,
                     EventTypes.RoomAvatar: 50,
-                    # MSC2260: Allow everybody to send alias events by default
-                    # This will be reudundant on pre-MSC2260 rooms, since the
-                    # aliases event is special-cased.
-                    EventTypes.Aliases: 0,
                     EventTypes.Tombstone: 100,
                     EventTypes.ServerACL: 100,
                 },
diff --git a/synapse/handlers/saml_handler.py b/synapse/handlers/saml_handler.py
index 7f411b53b9..72c109981b 100644
--- a/synapse/handlers/saml_handler.py
+++ b/synapse/handlers/saml_handler.py
@@ -23,9 +23,9 @@ from saml2.client import Saml2Client
 
 from synapse.api.errors import SynapseError
 from synapse.config import ConfigError
+from synapse.http.server import finish_request
 from synapse.http.servlet import parse_string
 from synapse.module_api import ModuleApi
-from synapse.rest.client.v1.login import SSOAuthHandler
 from synapse.types import (
     UserID,
     map_username_to_mxid_localpart,
@@ -48,7 +48,7 @@ class Saml2SessionData:
 class SamlHandler:
     def __init__(self, hs):
         self._saml_client = Saml2Client(hs.config.saml2_sp_config)
-        self._sso_auth_handler = SSOAuthHandler(hs)
+        self._auth_handler = hs.get_auth_handler()
         self._registration_handler = hs.get_registration_handler()
 
         self._clock = hs.get_clock()
@@ -74,6 +74,8 @@ class SamlHandler:
         # a lock on the mappings
         self._mapping_lock = Linearizer(name="saml_mapping", clock=self._clock)
 
+        self._error_html_content = hs.config.saml2_error_html_content
+
     def handle_redirect_request(self, client_redirect_url):
         """Handle an incoming request to /login/sso/redirect
 
@@ -115,8 +117,23 @@ class SamlHandler:
         # the dict.
         self.expire_sessions()
 
-        user_id = await self._map_saml_response_to_user(resp_bytes, relay_state)
-        self._sso_auth_handler.complete_sso_login(user_id, request, relay_state)
+        try:
+            user_id = await self._map_saml_response_to_user(resp_bytes, relay_state)
+        except Exception as e:
+            # If decoding the response or mapping it to a user failed, then log the
+            # error and tell the user that something went wrong.
+            logger.error("Error mapping SAML2 response to user: %s", e)
+
+            request.setResponseCode(400)
+            request.setHeader(b"Content-Type", b"text/html; charset=utf-8")
+            request.setHeader(
+                b"Content-Length", b"%d" % (len(self._error_html_content),)
+            )
+            request.write(self._error_html_content.encode("utf8"))
+            finish_request(request)
+            return
+
+        self._auth_handler.complete_sso_login(user_id, request, relay_state)
 
     async def _map_saml_response_to_user(self, resp_bytes, client_redirect_url):
         try:
diff --git a/synapse/handlers/set_password.py b/synapse/handlers/set_password.py
index d90c9e0108..12657ca698 100644
--- a/synapse/handlers/set_password.py
+++ b/synapse/handlers/set_password.py
@@ -13,10 +13,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
+from typing import Optional
 
 from twisted.internet import defer
 
 from synapse.api.errors import Codes, StoreError, SynapseError
+from synapse.types import Requester
 
 from ._base import BaseHandler
 
@@ -32,14 +34,17 @@ class SetPasswordHandler(BaseHandler):
         self._device_handler = hs.get_device_handler()
 
     @defer.inlineCallbacks
-    def set_password(self, user_id, newpassword, requester=None):
+    def set_password(
+        self,
+        user_id: str,
+        new_password: str,
+        logout_devices: bool,
+        requester: Optional[Requester] = None,
+    ):
         if not self.hs.config.password_localdb_enabled:
             raise SynapseError(403, "Password change disabled", errcode=Codes.FORBIDDEN)
 
-        password_hash = yield self._auth_handler.hash(newpassword)
-
-        except_device_id = requester.device_id if requester else None
-        except_access_token_id = requester.access_token_id if requester else None
+        password_hash = yield self._auth_handler.hash(new_password)
 
         try:
             yield self.store.user_set_password_hash(user_id, password_hash)
@@ -48,14 +53,18 @@ class SetPasswordHandler(BaseHandler):
                 raise SynapseError(404, "Unknown user", Codes.NOT_FOUND)
             raise e
 
-        # we want to log out all of the user's other sessions. First delete
-        # all his other devices.
-        yield self._device_handler.delete_all_devices_for_user(
-            user_id, except_device_id=except_device_id
-        )
-
-        # and now delete any access tokens which weren't associated with
-        # devices (or were associated with this device).
-        yield self._auth_handler.delete_access_tokens_for_user(
-            user_id, except_token_id=except_access_token_id
-        )
+        # Optionally, log out all of the user's other sessions.
+        if logout_devices:
+            except_device_id = requester.device_id if requester else None
+            except_access_token_id = requester.access_token_id if requester else None
+
+            # First delete all of their other devices.
+            yield self._device_handler.delete_all_devices_for_user(
+                user_id, except_device_id=except_device_id
+            )
+
+            # and now delete any access tokens which weren't associated with
+            # devices (or were associated with this device).
+            yield self._auth_handler.delete_access_tokens_for_user(
+                user_id, except_token_id=except_access_token_id
+            )
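
A hedged usage sketch of the new `set_password` signature; the `get_set_password_handler` accessor and the requester wiring are assumptions about the surrounding server plumbing:

```python
from twisted.internet import defer


@defer.inlineCallbacks
def change_alice_password(hs, requester):
    # logout_devices=True preserves the old behaviour: other devices and any
    # dangling access tokens are invalidated, except the requester's own.
    yield hs.get_set_password_handler().set_password(
        user_id="@alice:example.com",
        new_password="correct horse battery staple",
        logout_devices=True,
        requester=requester,
    )
```
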
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 669dbc8a48..cfd5dfc9e5 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -1143,9 +1143,14 @@ class SyncHandler(object):
                 user_id
             )
 
+            tracked_users = set(users_who_share_room)
+
+            # Always tell the user about their own devices
+            tracked_users.add(user_id)
+
             # Step 1a, check for changes in devices of users we share a room with
             users_that_have_changed = await self.store.get_users_whose_devices_changed(
-                since_token.device_list_key, users_who_share_room
+                since_token.device_list_key, tracked_users
             )
 
             # Step 1b, check for newly joined rooms
diff --git a/synapse/http/client.py b/synapse/http/client.py
index b15aad067f..b79e2fbe79 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -244,9 +244,6 @@ class SimpleHttpClient(object):
         pool.maxPersistentPerHost = max((100 * hs.config.caches.global_factor, 5))
         pool.cachedConnectionTimeout = 2 * 60
 
-        # The default context factory in Twisted 14.0.0 (which we require) is
-        # BrowserLikePolicyForHTTPS which will do regular cert validation
-        # 'like a browser'
         self.agent = ProxyAgent(
             self.reactor,
             connectTimeout=15,
diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py
index 647d26dc56..f5f917f5ae 100644
--- a/synapse/http/federation/matrix_federation_agent.py
+++ b/synapse/http/federation/matrix_federation_agent.py
@@ -45,7 +45,7 @@ class MatrixFederationAgent(object):
     Args:
         reactor (IReactor): twisted reactor to use for underlying requests
 
-        tls_client_options_factory (ClientTLSOptionsFactory|None):
+        tls_client_options_factory (FederationPolicyForHTTPS|None):
             factory to use for fetching client tls options, or none to disable TLS.
 
         _srv_resolver (SrvResolver|None):
diff --git a/synapse/logging/context.py b/synapse/logging/context.py
index 1b940842f6..860b99a4c6 100644
--- a/synapse/logging/context.py
+++ b/synapse/logging/context.py
@@ -27,10 +27,15 @@ import inspect
 import logging
 import threading
 import types
-from typing import Any, List
+from typing import TYPE_CHECKING, Optional, Tuple, TypeVar, Union
+
+from typing_extensions import Literal
 
 from twisted.internet import defer, threads
 
+if TYPE_CHECKING:
+    from synapse.logging.scopecontextmanager import _LogContextScope
+
 logger = logging.getLogger(__name__)
 
 try:
@@ -91,7 +96,7 @@ class ContextResourceUsage(object):
         "evt_db_fetch_count",
     ]
 
-    def __init__(self, copy_from=None):
+    def __init__(self, copy_from: "Optional[ContextResourceUsage]" = None) -> None:
         """Create a new ContextResourceUsage
 
         Args:
@@ -101,27 +106,28 @@ class ContextResourceUsage(object):
         if copy_from is None:
             self.reset()
         else:
-            self.ru_utime = copy_from.ru_utime
-            self.ru_stime = copy_from.ru_stime
-            self.db_txn_count = copy_from.db_txn_count
+            # FIXME: mypy can't infer the types set via reset() above, so specify explicitly for now
+            self.ru_utime = copy_from.ru_utime  # type: float
+            self.ru_stime = copy_from.ru_stime  # type: float
+            self.db_txn_count = copy_from.db_txn_count  # type: int
 
-            self.db_txn_duration_sec = copy_from.db_txn_duration_sec
-            self.db_sched_duration_sec = copy_from.db_sched_duration_sec
-            self.evt_db_fetch_count = copy_from.evt_db_fetch_count
+            self.db_txn_duration_sec = copy_from.db_txn_duration_sec  # type: float
+            self.db_sched_duration_sec = copy_from.db_sched_duration_sec  # type: float
+            self.evt_db_fetch_count = copy_from.evt_db_fetch_count  # type: int
 
-    def copy(self):
+    def copy(self) -> "ContextResourceUsage":
         return ContextResourceUsage(copy_from=self)
 
-    def reset(self):
+    def reset(self) -> None:
         self.ru_stime = 0.0
         self.ru_utime = 0.0
         self.db_txn_count = 0
 
-        self.db_txn_duration_sec = 0
-        self.db_sched_duration_sec = 0
+        self.db_txn_duration_sec = 0.0
+        self.db_sched_duration_sec = 0.0
         self.evt_db_fetch_count = 0
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return (
             "<ContextResourceUsage ru_stime='%r', ru_utime='%r', "
             "db_txn_count='%r', db_txn_duration_sec='%r', "
@@ -135,7 +141,7 @@ class ContextResourceUsage(object):
             self.evt_db_fetch_count,
         )
 
-    def __iadd__(self, other):
+    def __iadd__(self, other: "ContextResourceUsage") -> "ContextResourceUsage":
         """Add another ContextResourceUsage's stats to this one's.
 
         Args:
@@ -149,7 +155,7 @@ class ContextResourceUsage(object):
         self.evt_db_fetch_count += other.evt_db_fetch_count
         return self
 
-    def __isub__(self, other):
+    def __isub__(self, other: "ContextResourceUsage") -> "ContextResourceUsage":
         self.ru_utime -= other.ru_utime
         self.ru_stime -= other.ru_stime
         self.db_txn_count -= other.db_txn_count
@@ -158,17 +164,20 @@ class ContextResourceUsage(object):
         self.evt_db_fetch_count -= other.evt_db_fetch_count
         return self
 
-    def __add__(self, other):
+    def __add__(self, other: "ContextResourceUsage") -> "ContextResourceUsage":
         res = ContextResourceUsage(copy_from=self)
         res += other
         return res
 
-    def __sub__(self, other):
+    def __sub__(self, other: "ContextResourceUsage") -> "ContextResourceUsage":
         res = ContextResourceUsage(copy_from=self)
         res -= other
         return res
 
 
+LoggingContextOrSentinel = Union["LoggingContext", "LoggingContext.Sentinel"]
+
+
 class LoggingContext(object):
     """Additional context for log formatting. Contexts are scoped within a
     "with" block.
@@ -201,7 +210,15 @@ class LoggingContext(object):
     class Sentinel(object):
         """Sentinel to represent the root context"""
 
-        __slots__ = []  # type: List[Any]
+        __slots__ = ["previous_context", "alive", "request", "scope", "tag"]
+
+        def __init__(self) -> None:
+            # Minimal set for compatibility with LoggingContext
+            self.previous_context = None
+            self.alive = None
+            self.request = None
+            self.scope = None
+            self.tag = None
 
         def __str__(self):
             return "sentinel"
@@ -235,7 +252,7 @@ class LoggingContext(object):
 
     sentinel = Sentinel()
 
-    def __init__(self, name=None, parent_context=None, request=None):
+    def __init__(self, name=None, parent_context=None, request=None) -> None:
         self.previous_context = LoggingContext.current_context()
         self.name = name
 
@@ -250,7 +267,7 @@ class LoggingContext(object):
         self.request = None
         self.tag = ""
         self.alive = True
-        self.scope = None
+        self.scope = None  # type: Optional[_LogContextScope]
 
         self.parent_context = parent_context
 
@@ -261,13 +278,13 @@ class LoggingContext(object):
             # the request param overrides the request from the parent context
             self.request = request
 
-    def __str__(self):
+    def __str__(self) -> str:
         if self.request:
             return str(self.request)
         return "%s@%x" % (self.name, id(self))
 
     @classmethod
-    def current_context(cls):
+    def current_context(cls) -> LoggingContextOrSentinel:
         """Get the current logging context from thread local storage
 
         Returns:
@@ -276,7 +293,9 @@ class LoggingContext(object):
         return getattr(cls.thread_local, "current_context", cls.sentinel)
 
     @classmethod
-    def set_current_context(cls, context):
+    def set_current_context(
+        cls, context: LoggingContextOrSentinel
+    ) -> LoggingContextOrSentinel:
         """Set the current logging context in thread local storage
         Args:
             context(LoggingContext): The context to activate.
@@ -291,7 +310,7 @@ class LoggingContext(object):
             context.start()
         return current
 
-    def __enter__(self):
+    def __enter__(self) -> "LoggingContext":
         """Enters this logging context into thread local storage"""
         old_context = self.set_current_context(self)
         if self.previous_context != old_context:
@@ -304,7 +323,7 @@ class LoggingContext(object):
 
         return self
 
-    def __exit__(self, type, value, traceback):
+    def __exit__(self, type, value, traceback) -> None:
         """Restore the logging context in thread local storage to the state it
         was before this context was entered.
         Returns:
@@ -318,7 +337,6 @@ class LoggingContext(object):
                 logger.warning(
                     "Expected logging context %s but found %s", self, current
                 )
-        self.previous_context = None
         self.alive = False
 
         # if we have a parent, pass our CPU usage stats on
@@ -330,7 +348,7 @@ class LoggingContext(object):
             # reset them in case we get entered again
             self._resource_usage.reset()
 
-    def copy_to(self, record):
+    def copy_to(self, record) -> None:
         """Copy logging fields from this context to a log record or
         another LoggingContext
         """
@@ -341,14 +359,14 @@ class LoggingContext(object):
         # we also track the current scope:
         record.scope = self.scope
 
-    def copy_to_twisted_log_entry(self, record):
+    def copy_to_twisted_log_entry(self, record) -> None:
         """
         Copy logging fields from this context to a Twisted log record.
         """
         record["request"] = self.request
         record["scope"] = self.scope
 
-    def start(self):
+    def start(self) -> None:
         if get_thread_id() != self.main_thread:
             logger.warning("Started logcontext %s on different thread", self)
             return
@@ -358,7 +376,7 @@ class LoggingContext(object):
         if not self.usage_start:
             self.usage_start = get_thread_resource_usage()
 
-    def stop(self):
+    def stop(self) -> None:
         if get_thread_id() != self.main_thread:
             logger.warning("Stopped logcontext %s on different thread", self)
             return
@@ -378,7 +396,7 @@ class LoggingContext(object):
 
         self.usage_start = None
 
-    def get_resource_usage(self):
+    def get_resource_usage(self) -> ContextResourceUsage:
         """Get resources used by this logcontext so far.
 
         Returns:
@@ -398,11 +416,13 @@ class LoggingContext(object):
 
         return res
 
-    def _get_cputime(self):
+    def _get_cputime(self) -> Tuple[float, float]:
         """Get the cpu usage time so far
 
         Returns: Tuple[float, float]: seconds in user mode, seconds in system mode
         """
+        assert self.usage_start is not None
+
         current = get_thread_resource_usage()
 
         # Indicate to mypy that we know that self.usage_start is not None.
@@ -430,13 +450,13 @@ class LoggingContext(object):
 
         return utime_delta, stime_delta
 
-    def add_database_transaction(self, duration_sec):
+    def add_database_transaction(self, duration_sec: float) -> None:
         if duration_sec < 0:
             raise ValueError("DB txn time can only be non-negative")
         self._resource_usage.db_txn_count += 1
         self._resource_usage.db_txn_duration_sec += duration_sec
 
-    def add_database_scheduled(self, sched_sec):
+    def add_database_scheduled(self, sched_sec: float) -> None:
         """Record a use of the database pool
 
         Args:
@@ -447,7 +467,7 @@ class LoggingContext(object):
             raise ValueError("DB scheduling time can only be non-negative")
         self._resource_usage.db_sched_duration_sec += sched_sec
 
-    def record_event_fetch(self, event_count):
+    def record_event_fetch(self, event_count: int) -> None:
         """Record a number of events being fetched from the db
 
         Args:
@@ -464,10 +484,10 @@ class LoggingContextFilter(logging.Filter):
             missing fields
     """
 
-    def __init__(self, **defaults):
+    def __init__(self, **defaults) -> None:
         self.defaults = defaults
 
-    def filter(self, record):
+    def filter(self, record) -> Literal[True]:
         """Add each fields from the logging contexts to the record.
         Returns:
             True to include the record in the log output.
@@ -492,12 +512,13 @@ class PreserveLoggingContext(object):
 
     __slots__ = ["current_context", "new_context", "has_parent"]
 
-    def __init__(self, new_context=None):
+    def __init__(self, new_context: Optional[LoggingContextOrSentinel] = None) -> None:
         if new_context is None:
-            new_context = LoggingContext.sentinel
-        self.new_context = new_context
+            self.new_context = LoggingContext.sentinel  # type: LoggingContextOrSentinel
+        else:
+            self.new_context = new_context
 
-    def __enter__(self):
+    def __enter__(self) -> None:
         """Captures the current logging context"""
         self.current_context = LoggingContext.set_current_context(self.new_context)
 
@@ -506,7 +527,7 @@ class PreserveLoggingContext(object):
             if not self.current_context.alive:
                 logger.debug("Entering dead context: %s", self.current_context)
 
-    def __exit__(self, type, value, traceback):
+    def __exit__(self, type, value, traceback) -> None:
         """Restores the current logging context"""
         context = LoggingContext.set_current_context(self.current_context)
 
@@ -525,7 +546,9 @@ class PreserveLoggingContext(object):
                 logger.debug("Restoring dead context: %s", self.current_context)
 
 
-def nested_logging_context(suffix, parent_context=None):
+def nested_logging_context(
+    suffix: str, parent_context: Optional[LoggingContext] = None
+) -> LoggingContext:
     """Creates a new logging context as a child of another.
 
     The nested logging context will have a 'request' made up of the parent context's
@@ -546,10 +569,12 @@ def nested_logging_context(suffix, parent_context=None):
     Returns:
         LoggingContext: new logging context.
     """
-    if parent_context is None:
-        parent_context = LoggingContext.current_context()
+    if parent_context is not None:
+        context = parent_context  # type: LoggingContextOrSentinel
+    else:
+        context = LoggingContext.current_context()
     return LoggingContext(
-        parent_context=parent_context, request=parent_context.request + "-" + suffix
+        parent_context=context, request=str(context.request) + "-" + suffix
     )
 
 
@@ -654,7 +679,10 @@ def make_deferred_yieldable(deferred):
     return deferred
 
 
-def _set_context_cb(result, context):
+ResultT = TypeVar("ResultT")
+
+
+def _set_context_cb(result: ResultT, context: LoggingContext) -> ResultT:
     """A callback function which just sets the logging context"""
     LoggingContext.set_current_context(context)
     return result
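
Most of this hunk is annotation-only, but the typed pieces are easiest to check in use. A minimal sketch of the context-manager and nesting behaviour (the request names are illustrative):

    from synapse.logging.context import LoggingContext, nested_logging_context

    with LoggingContext(name="request-handler", request="GET-42") as ctx:
        # CPU and DB usage recorded in this block is accounted to ctx.
        with nested_logging_context("db"):
            # The child's request is "GET-42-db": str(parent.request) + "-" + suffix.
            pass
        usage = ctx.get_resource_usage()  # a ContextResourceUsage snapshot
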
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index 0dba997a23..d2fd29acb4 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -20,7 +20,7 @@ import os
 import platform
 import threading
 import time
-from typing import Dict, Union
+from typing import Callable, Dict, Iterable, Optional, Tuple, Union
 
 import six
 
@@ -59,10 +59,12 @@ class RegistryProxy(object):
 @attr.s(hash=True)
 class LaterGauge(object):
 
-    name = attr.ib()
-    desc = attr.ib()
-    labels = attr.ib(hash=False)
-    caller = attr.ib()
+    name = attr.ib(type=str)
+    desc = attr.ib(type=str)
+    labels = attr.ib(hash=False, type=Optional[Iterable[str]])
+    # callback: should either return a value (if there are no labels for this metric),
+    # or a dict mapping from a label tuple to a value
+    caller = attr.ib(type=Callable[[], Union[Dict[Tuple[str, ...], float], float]])
 
     def collect(self):
 
diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py
index b65bcd8806..8449ef82f7 100644
--- a/synapse/metrics/background_process_metrics.py
+++ b/synapse/metrics/background_process_metrics.py
@@ -17,6 +17,7 @@ import logging
 import threading
 from asyncio import iscoroutine
 from functools import wraps
+from typing import Dict, Set
 
 import six
 
@@ -80,13 +81,13 @@ _background_process_db_sched_duration = Counter(
 # map from description to a counter, so that we can name our logcontexts
 # incrementally. (It actually duplicates _background_process_start_count, but
 # it's much simpler to do so than to try to combine them.)
-_background_process_counts = {}  # type: dict[str, int]
+_background_process_counts = {}  # type: Dict[str, int]
 
 # map from description to the currently running background processes.
 #
 # it's kept as a dict of sets rather than a big set so that we can keep track
 # of process descriptions that no longer have any active processes.
-_background_processes = {}  # type: dict[str, set[_BackgroundProcess]]
+_background_processes = {}  # type: Dict[str, Set[_BackgroundProcess]]
 
 # A lock that covers the above dicts
 _bg_metrics_lock = threading.Lock()
diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py
index d680ee95e1..c7fffd72f2 100644
--- a/synapse/module_api/__init__.py
+++ b/synapse/module_api/__init__.py
@@ -17,6 +17,7 @@ import logging
 
 from twisted.internet import defer
 
+from synapse.http.site import SynapseRequest
 from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.types import UserID
 
@@ -211,3 +212,21 @@ class ModuleApi(object):
             Deferred[object]: result of func
         """
         return self._store.db.runInteraction(desc, func, *args, **kwargs)
+
+    def complete_sso_login(
+        self, registered_user_id: str, request: SynapseRequest, client_redirect_url: str
+    ):
+        """Complete a SSO login by redirecting the user to a page to confirm whether they
+        want their access token sent to `client_redirect_url`, or redirect them to that
+        URL with a token directly if the URL matches with one of the whitelisted clients.
+
+        Args:
+            registered_user_id: The MXID that has been registered as a previous step of
+                of this SSO login.
+            request: The request to respond to.
+            client_redirect_url: The URL to which to offer to redirect the user (or to
+                redirect them directly if whitelisted).
+        """
+        self._auth_handler.complete_sso_login(
+            registered_user_id, request, client_redirect_url,
+        )
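
A rough sketch of how a custom SSO module might invoke the new method once it has authenticated a user externally; the module class and hook name here are hypothetical, and only `complete_sso_login` comes from the hunk above:

    class ExampleSsoModule:  # hypothetical module shape
        def __init__(self, config, module_api):
            self._api = module_api

        def on_external_auth_success(self, request, mxid, client_redirect_url):
            # Hand back to Synapse: it either shows the confirmation page or,
            # for whitelisted clients, redirects straight away with a token.
            self._api.complete_sso_login(mxid, request, client_redirect_url)
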
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index 4ccaf178ce..73580c1c6c 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -555,10 +555,12 @@ class Mailer(object):
             else:
                 # If the reason room doesn't have a name, say who the messages
                 # are from explicitly to avoid, "messages in the Bob room"
+                room_id = reason["room_id"]
+
                 sender_ids = list(
                     {
                         notif_events[n["event_id"]].sender
-                        for n in notifs_by_room[reason["room_id"]]
+                        for n in notifs_by_room[room_id]
                     }
                 )
 
diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py
index a70e677281..11032491af 100644
--- a/synapse/push/push_rule_evaluator.py
+++ b/synapse/push/push_rule_evaluator.py
@@ -16,9 +16,11 @@
 
 import logging
 import re
+from typing import Optional, Pattern
 
 from six import string_types
 
+from synapse.events import EventBase
 from synapse.types import UserID
 from synapse.util.caches import register_cache
 from synapse.util.caches.lrucache import LruCache
@@ -56,18 +58,18 @@ def _test_ineq_condition(condition, number):
     rhs = m.group(2)
     if not rhs.isdigit():
         return False
-    rhs = int(rhs)
+    rhs_int = int(rhs)
 
     if ineq == "" or ineq == "==":
-        return number == rhs
+        return number == rhs_int
     elif ineq == "<":
-        return number < rhs
+        return number < rhs_int
     elif ineq == ">":
-        return number > rhs
+        return number > rhs_int
     elif ineq == ">=":
-        return number >= rhs
+        return number >= rhs_int
     elif ineq == "<=":
-        return number <= rhs
+        return number <= rhs_int
     else:
         return False
 
@@ -83,7 +85,13 @@ def tweaks_for_actions(actions):
 
 
 class PushRuleEvaluatorForEvent(object):
-    def __init__(self, event, room_member_count, sender_power_level, power_levels):
+    def __init__(
+        self,
+        event: EventBase,
+        room_member_count: int,
+        sender_power_level: int,
+        power_levels: dict,
+    ):
         self._event = event
         self._room_member_count = room_member_count
         self._sender_power_level = sender_power_level
@@ -92,7 +100,7 @@ class PushRuleEvaluatorForEvent(object):
         # Maps strings of e.g. 'content.body' -> event["content"]["body"]
         self._value_cache = _flatten_dict(event)
 
-    def matches(self, condition, user_id, display_name):
+    def matches(self, condition: dict, user_id: str, display_name: str) -> bool:
         if condition["kind"] == "event_match":
             return self._event_match(condition, user_id)
         elif condition["kind"] == "contains_display_name":
@@ -106,7 +114,7 @@ class PushRuleEvaluatorForEvent(object):
         else:
             return True
 
-    def _event_match(self, condition, user_id):
+    def _event_match(self, condition: dict, user_id: str) -> bool:
         pattern = condition.get("pattern", None)
 
         if not pattern:
@@ -134,7 +142,7 @@ class PushRuleEvaluatorForEvent(object):
 
             return _glob_matches(pattern, haystack)
 
-    def _contains_display_name(self, display_name):
+    def _contains_display_name(self, display_name: str) -> bool:
         if not display_name:
             return False
 
@@ -142,51 +150,52 @@ class PushRuleEvaluatorForEvent(object):
         if not body:
             return False
 
-        return _glob_matches(display_name, body, word_boundary=True)
+        # Similar to _glob_matches, but do not treat display_name as a glob.
+        r = regex_cache.get((display_name, False, True), None)
+        if not r:
+            r = re.escape(display_name)
+            r = _re_word_boundary(r)
+            r = re.compile(r, flags=re.IGNORECASE)
+            regex_cache[(display_name, False, True)] = r
+
+        return bool(r.search(body))
 
-    def _get_value(self, dotted_key):
+    def _get_value(self, dotted_key: str) -> Optional[str]:
         return self._value_cache.get(dotted_key, None)
 
 
-# Caches (glob, word_boundary) -> regex for push. See _glob_matches
+# Caches (string, is_glob, word_boundary) -> regex for push. See _glob_matches
 regex_cache = LruCache(50000)
 register_cache("cache", "regex_push_cache", regex_cache)
 
 
-def _glob_matches(glob, value, word_boundary=False):
+def _glob_matches(glob: str, value: str, word_boundary: bool = False) -> bool:
     """Tests if value matches glob.
 
     Args:
-        glob (string)
-        value (string): String to test against glob.
-        word_boundary (bool): Whether to match against word boundaries or entire
+        glob
+        value: String to test against glob.
+        word_boundary: Whether to match against word boundaries or entire
             string. Defaults to False.
-
-    Returns:
-        bool
     """
 
     try:
-        r = regex_cache.get((glob, word_boundary), None)
+        r = regex_cache.get((glob, True, word_boundary), None)
         if not r:
             r = _glob_to_re(glob, word_boundary)
-            regex_cache[(glob, word_boundary)] = r
+            regex_cache[(glob, True, word_boundary)] = r
         return r.search(value)
     except re.error:
         logger.warning("Failed to parse glob to regex: %r", glob)
         return False
 
 
-def _glob_to_re(glob, word_boundary):
+def _glob_to_re(glob: str, word_boundary: bool) -> Pattern:
     """Generates regex for a given glob.
 
     Args:
-        glob (string)
-        word_boundary (bool): Whether to match against word boundaries or entire
-            string. Defaults to False.
-
-    Returns:
-        regex object
+        glob
+        word_boundary: Whether to match against word boundaries or entire string.
     """
     if IS_GLOB.search(glob):
         r = re.escape(glob)
@@ -219,7 +228,7 @@ def _glob_to_re(glob, word_boundary):
         return re.compile(r, flags=re.IGNORECASE)
 
 
-def _re_word_boundary(r):
+def _re_word_boundary(r: str) -> str:
     """
     Adds word boundary characters to the start and end of an
     expression to require that the match occur as a whole word,
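
The net effect of the push-rule change is that `*` and `?` in a display name no longer act as wildcards. A standalone approximation of the escaped, word-boundary match that `_contains_display_name` now performs (the boundary pattern mirrors `_re_word_boundary`; this is a sketch, not the module's code):

    import re

    def display_name_matches(display_name: str, body: str) -> bool:
        # Escape the name so regex metacharacters match literally, then
        # require the match to occur as a whole word.
        pattern = r"(^|\W)" + re.escape(display_name) + r"(\W|$)"
        return bool(re.search(pattern, body, flags=re.IGNORECASE))

    assert display_name_matches("bob*", "hi bob*!")      # literal '*' matches
    assert not display_name_matches("b*b", "hello bob")  # no longer a glob
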
diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py
index 01789a9fb4..88d203aa44 100644
--- a/synapse/push/pusherpool.py
+++ b/synapse/push/pusherpool.py
@@ -15,11 +15,17 @@
 # limitations under the License.
 
 import logging
+from collections import defaultdict
+from threading import Lock
+from typing import Dict, Tuple, Union
 
 from twisted.internet import defer
 
+from synapse.metrics import LaterGauge
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.push import PusherConfigException
+from synapse.push.emailpusher import EmailPusher
+from synapse.push.httppusher import HttpPusher
 from synapse.push.pusher import PusherFactory
 from synapse.util.async_helpers import concurrently_execute
 
@@ -47,7 +53,29 @@ class PusherPool:
         self._should_start_pushers = _hs.config.start_pushers
         self.store = self.hs.get_datastore()
         self.clock = self.hs.get_clock()
-        self.pushers = {}
+
+        # map from user id to app_id:pushkey to pusher
+        self.pushers = {}  # type: Dict[str, Dict[str, Union[HttpPusher, EmailPusher]]]
+
+        # a lock for the pushers dict, since `count_pushers` is called from a different
+        # thread and we otherwise get concurrent modification errors
+        self._pushers_lock = Lock()
+
+        def count_pushers():
+            results = defaultdict(int)  # type: Dict[Tuple[str, str], int]
+            with self._pushers_lock:
+                for pushers in self.pushers.values():
+                    for pusher in pushers.values():
+                        k = (type(pusher).__name__, pusher.app_id)
+                        results[k] += 1
+            return results
+
+        LaterGauge(
+            name="synapse_pushers",
+            desc="the number of active pushers",
+            labels=["kind", "app_id"],
+            caller=count_pushers,
+        )
 
     def start(self):
         """Starts the pushers off in a background process.
@@ -271,11 +299,12 @@ class PusherPool:
             return
 
         appid_pushkey = "%s:%s" % (pusherdict["app_id"], pusherdict["pushkey"])
-        byuser = self.pushers.setdefault(pusherdict["user_name"], {})
 
-        if appid_pushkey in byuser:
-            byuser[appid_pushkey].on_stop()
-        byuser[appid_pushkey] = p
+        with self._pushers_lock:
+            byuser = self.pushers.setdefault(pusherdict["user_name"], {})
+            if appid_pushkey in byuser:
+                byuser[appid_pushkey].on_stop()
+            byuser[appid_pushkey] = p
 
         # Check if there *may* be push to process. We do this as this check is a
         # lot cheaper to do than actually fetching the exact rows we need to
@@ -304,7 +333,9 @@ class PusherPool:
         if appid_pushkey in byuser:
             logger.info("Stopping pusher %s / %s", user_id, appid_pushkey)
             byuser[appid_pushkey].on_stop()
-            del byuser[appid_pushkey]
+            with self._pushers_lock:
+                del byuser[appid_pushkey]
+
         yield self.store.delete_pusher_by_app_id_pushkey_user_id(
             app_id, pushkey, user_id
         )
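
The `count_pushers` callback above returns a dict keyed by label tuples, the labelled form of the `LaterGauge` callback contract annotated in the `synapse/metrics` hunk earlier. A toy sketch of both forms (metric names are illustrative):

    from synapse.metrics import LaterGauge

    queue = ["a", "b", "c"]

    # Unlabelled gauge: the callback returns a single float.
    LaterGauge(
        name="myapp_queue_depth", desc="items waiting", labels=None,
        caller=lambda: float(len(queue)),
    )

    # Labelled gauge: the callback returns {label_tuple: value}, one entry
    # per combination of the declared labels.
    LaterGauge(
        name="myapp_items", desc="items by kind", labels=["kind"],
        caller=lambda: {("widget",): 2.0, ("gadget",): 5.0},
    )
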
diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py
index 8794720101..7e23b565b9 100644
--- a/synapse/replication/http/federation.py
+++ b/synapse/replication/http/federation.py
@@ -18,7 +18,7 @@ import logging
 from twisted.internet import defer
 
 from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
-from synapse.events import event_type_from_format_version
+from synapse.events import make_event_from_dict
 from synapse.events.snapshot import EventContext
 from synapse.http.servlet import parse_json_object_from_request
 from synapse.replication.http._base import ReplicationEndpoint
@@ -38,6 +38,9 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
         {
             "events": [{
                 "event": { .. serialized event .. },
+                "room_version": .., // "1", "2", "3", etc: the version of the room
+                                    // containing the event
+                "event_format_version": .., // 1,2,3 etc: the event format version
                 "internal_metadata": { .. serialized internal_metadata .. },
                 "rejected_reason": ..,   // The event.rejected_reason field
                 "context": { .. serialized event context .. },
@@ -73,6 +76,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
             event_payloads.append(
                 {
                     "event": event.get_pdu_json(),
+                    "room_version": event.room_version.identifier,
                     "event_format_version": event.format_version,
                     "internal_metadata": event.internal_metadata.get_dict(),
                     "rejected_reason": event.rejected_reason,
@@ -95,12 +99,13 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
             event_and_contexts = []
             for event_payload in event_payloads:
                 event_dict = event_payload["event"]
-                format_ver = event_payload["event_format_version"]
+                room_ver = KNOWN_ROOM_VERSIONS[event_payload["room_version"]]
                 internal_metadata = event_payload["internal_metadata"]
                 rejected_reason = event_payload["rejected_reason"]
 
-                EventType = event_type_from_format_version(format_ver)
-                event = EventType(event_dict, internal_metadata, rejected_reason)
+                event = make_event_from_dict(
+                    event_dict, room_ver, internal_metadata, rejected_reason
+                )
 
                 context = EventContext.deserialize(
                     self.storage, event_payload["context"]
diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py
index 84b92f16ad..b74b088ff4 100644
--- a/synapse/replication/http/send_event.py
+++ b/synapse/replication/http/send_event.py
@@ -17,7 +17,8 @@ import logging
 
 from twisted.internet import defer
 
-from synapse.events import event_type_from_format_version
+from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
+from synapse.events import make_event_from_dict
 from synapse.events.snapshot import EventContext
 from synapse.http.servlet import parse_json_object_from_request
 from synapse.replication.http._base import ReplicationEndpoint
@@ -37,6 +38,9 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint):
 
         {
             "event": { .. serialized event .. },
+            "room_version": .., // "1", "2", "3", etc: the version of the room
+                                // containing the event
+            "event_format_version": .., // 1,2,3 etc: the event format version
             "internal_metadata": { .. serialized internal_metadata .. },
             "rejected_reason": ..,   // The event.rejected_reason field
             "context": { .. serialized event context .. },
@@ -77,6 +81,7 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint):
 
         payload = {
             "event": event.get_pdu_json(),
+            "room_version": event.room_version.identifier,
             "event_format_version": event.format_version,
             "internal_metadata": event.internal_metadata.get_dict(),
             "rejected_reason": event.rejected_reason,
@@ -93,12 +98,13 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint):
             content = parse_json_object_from_request(request)
 
             event_dict = content["event"]
-            format_ver = content["event_format_version"]
+            room_ver = KNOWN_ROOM_VERSIONS[content["room_version"]]
             internal_metadata = content["internal_metadata"]
             rejected_reason = content["rejected_reason"]
 
-            EventType = event_type_from_format_version(format_ver)
-            event = EventType(event_dict, internal_metadata, rejected_reason)
+            event = make_event_from_dict(
+                event_dict, room_ver, internal_metadata, rejected_reason
+            )
 
             requester = Requester.deserialize(self.store, content["requester"])
             context = EventContext.deserialize(self.storage, content["context"])
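
Both replication endpoints now carry the room version so the receiving process can rebuild the event with `make_event_from_dict`. A condensed sketch of the two halves of the round trip, using the calls shown in the hunks above:

    from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
    from synapse.events import make_event_from_dict

    def serialize_for_replication(event):
        return {
            "event": event.get_pdu_json(),
            "room_version": event.room_version.identifier,
            "internal_metadata": event.internal_metadata.get_dict(),
            "rejected_reason": event.rejected_reason,
        }

    def deserialize_from_replication(content):
        room_ver = KNOWN_ROOM_VERSIONS[content["room_version"]]
        return make_event_from_dict(
            content["event"], room_ver,
            content["internal_metadata"], content["rejected_reason"],
        )
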
diff --git a/synapse/res/templates/saml_error.html b/synapse/res/templates/saml_error.html
new file mode 100644
index 0000000000..bfd6449c5d
--- /dev/null
+++ b/synapse/res/templates/saml_error.html
@@ -0,0 +1,45 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <title>SSO error</title>
+</head>
+<body>
+    <p>Oops! Something went wrong during authentication<span id="errormsg"></span>.</p>
+    <p>
+        If you are seeing this page after clicking a link sent to you via email, make
+        sure you only click the confirmation link once, and that you open the
+        validation link in the same client you're logging in from.
+    </p>
+    <p>
+        Try logging in again from your Matrix client and if the problem persists
+        please contact the server's administrator.
+    </p>
+
+    <script type="text/javascript">
+        // Error handling to support Auth0 errors that we might get through a GET request
+        // to the validation endpoint. If an error is provided, it's either going to be
+        // located in the query string or in a query string-like URI fragment.
+        // We try to locate the error in either of these two locations, but if we can't
+        // we just don't print anything specific.
+        let searchStr = "";
+        if (window.location.search) {
+            // window.location.searchParams isn't always defined when
+            // window.location.search is, so it's more reliable to parse the latter.
+            searchStr = window.location.search;
+        } else if (window.location.hash) {
+            // Replace the # with a ? so that URLSearchParams does the right thing and
+            // doesn't parse the first parameter incorrectly.
+            searchStr = window.location.hash.replace("#", "?");
+        }
+
+        // We might end up with no error in the URL, so we need to check whether
+        // we actually have one before printing anything.
+        let errorDesc = new URLSearchParams(searchStr).get("error_description")
+        if (errorDesc) {
+
+            document.getElementById("errormsg").innerText = ` ("${errorDesc}")`;
+        }
+    </script>
+</body>
+</html>
\ No newline at end of file
diff --git a/synapse/res/templates/sso_redirect_confirm.html b/synapse/res/templates/sso_redirect_confirm.html
new file mode 100644
index 0000000000..20a15e1e74
--- /dev/null
+++ b/synapse/res/templates/sso_redirect_confirm.html
@@ -0,0 +1,14 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <title>SSO redirect confirmation</title>
+</head>
+    <body>
+        <p>The application at <span style="font-weight:bold">{{ display_url | e }}</span> is requesting full access to your <span style="font-weight:bold">{{ server_name }}</span> Matrix account.</p>
+        <p>If you don't recognise this address, you should ignore this and close this tab.</p>
+        <p>
+            <a href="{{ redirect_url | e }}">I trust this address</a>
+        </p>
+    </body>
+</html>
\ No newline at end of file
diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py
index c5b461a236..8551ac19b8 100644
--- a/synapse/rest/admin/users.py
+++ b/synapse/rest/admin/users.py
@@ -211,9 +211,7 @@ class UserRestServletV2(RestServlet):
                     if target_user == auth_user and not set_admin_to:
                         raise SynapseError(400, "You may not demote yourself.")
 
-                    await self.admin_handler.set_user_server_admin(
-                        target_user, set_admin_to
-                    )
+                    await self.store.set_server_admin(target_user, set_admin_to)
 
             if "password" in body:
                 if (
@@ -223,8 +221,9 @@ class UserRestServletV2(RestServlet):
                     raise SynapseError(400, "Invalid password")
                 else:
                     new_password = body["password"]
+                    logout_devices = True
                     await self.set_password_handler.set_password(
-                        target_user.to_string(), new_password, requester
+                        target_user.to_string(), new_password, logout_devices, requester
                     )
 
             if "deactivated" in body:
@@ -538,9 +537,10 @@ class ResetPasswordRestServlet(RestServlet):
         params = parse_json_object_from_request(request)
         assert_params_in_dict(params, ["new_password"])
         new_password = params["new_password"]
+        logout_devices = params.get("logout_devices", True)
 
         await self._set_password_handler.set_password(
-            target_user_id, new_password, requester
+            target_user_id, new_password, logout_devices, requester
         )
         return 200, {}
 
@@ -651,6 +651,6 @@ class UserAdminServlet(RestServlet):
         if target_user == auth_user and not set_admin_to:
             raise SynapseError(400, "You may not demote yourself.")
 
-        await self.store.set_user_server_admin(target_user, set_admin_to)
+        await self.store.set_server_admin(target_user, set_admin_to)
 
         return 200, {}
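
With the optional `logout_devices` parameter (defaulting to true), an admin can now reset a password without ending the user's other sessions. A sketch of such a request against the reset-password admin endpoint; the server URL and token are placeholders, and `requests` is just an illustrative client:

    import requests

    resp = requests.post(
        "https://homeserver.example/_synapse/admin/v1/reset_password/@alice:example.com",
        headers={"Authorization": "Bearer <admin_access_token>"},
        json={
            "new_password": "correct horse battery staple",
            "logout_devices": False,  # keep the user's existing sessions
        },
    )
    resp.raise_for_status()
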
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index 2c99536678..d0d4999795 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -28,7 +28,7 @@ from synapse.http.servlet import (
     parse_json_object_from_request,
     parse_string,
 )
-from synapse.http.site import SynapseRequest
+from synapse.push.mailer import load_jinja2_templates
 from synapse.rest.client.v2_alpha._base import client_patterns
 from synapse.rest.well_known import WellKnownBuilder
 from synapse.types import UserID, map_username_to_mxid_localpart
@@ -548,6 +548,16 @@ class SSOAuthHandler(object):
         self._registration_handler = hs.get_registration_handler()
         self._macaroon_gen = hs.get_macaroon_generator()
 
+        # Load the redirect page HTML template
+        self._template = load_jinja2_templates(
+            hs.config.sso_redirect_confirm_template_dir, ["sso_redirect_confirm.html"],
+        )[0]
+
+        self._server_name = hs.config.server_name
+
+        # cast to tuple for use with str.startswith
+        self._whitelisted_sso_clients = tuple(hs.config.sso_client_whitelist)
+
     async def on_successful_auth(
         self, username, request, client_redirect_url, user_display_name=None
     ):
@@ -580,36 +590,9 @@ class SSOAuthHandler(object):
                 localpart=localpart, default_display_name=user_display_name
             )
 
-        self.complete_sso_login(registered_user_id, request, client_redirect_url)
-
-    def complete_sso_login(
-        self, registered_user_id: str, request: SynapseRequest, client_redirect_url: str
-    ):
-        """Having figured out a mxid for this user, complete the HTTP request
-
-        Args:
-            registered_user_id:
-            request:
-            client_redirect_url:
-        """
-
-        login_token = self._macaroon_gen.generate_short_term_login_token(
-            registered_user_id
+        self._auth_handler.complete_sso_login(
+            registered_user_id, request, client_redirect_url
         )
-        redirect_url = self._add_login_token_to_redirect_url(
-            client_redirect_url, login_token
-        )
-        # Load page
-        request.redirect(redirect_url)
-        finish_request(request)
-
-    @staticmethod
-    def _add_login_token_to_redirect_url(url, token):
-        url_parts = list(urllib.parse.urlparse(url))
-        query = dict(urllib.parse.parse_qsl(url_parts[4]))
-        query.update({"loginToken": token})
-        url_parts[4] = urllib.parse.urlencode(query)
-        return urllib.parse.urlunparse(url_parts)
 
 
 def register_servlets(hs, http_server):
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index 64f51406fb..bffd43de5f 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -189,12 +189,6 @@ class RoomStateEventRestServlet(TransactionRestServlet):
 
         content = parse_json_object_from_request(request)
 
-        if event_type == EventTypes.Aliases:
-            # MSC2260
-            raise SynapseError(
-                400, "Cannot send m.room.aliases events via /rooms/{room_id}/state"
-            )
-
         event_dict = {
             "type": event_type,
             "content": content,
@@ -242,12 +236,6 @@ class RoomSendEventRestServlet(TransactionRestServlet):
         requester = await self.auth.get_user_by_req(request, allow_guest=True)
         content = parse_json_object_from_request(request)
 
-        if event_type == EventTypes.Aliases:
-            # MSC2260
-            raise SynapseError(
-                400, "Cannot send m.room.aliases events via /rooms/{room_id}/send"
-            )
-
         event_dict = {
             "type": event_type,
             "content": content,
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index dc837d6c75..631cc74cb4 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -265,8 +265,11 @@ class PasswordRestServlet(RestServlet):
 
         assert_params_in_dict(params, ["new_password"])
         new_password = params["new_password"]
+        logout_devices = params.get("logout_devices", True)
 
-        await self._set_password_handler.set_password(user_id, new_password, requester)
+        await self._set_password_handler.set_password(
+            user_id, new_password, logout_devices, requester
+        )
 
         return 200, {}
 
diff --git a/synapse/rest/client/v2_alpha/account_data.py b/synapse/rest/client/v2_alpha/account_data.py
index 64eb7fec3b..c1d4cd0caf 100644
--- a/synapse/rest/client/v2_alpha/account_data.py
+++ b/synapse/rest/client/v2_alpha/account_data.py
@@ -38,8 +38,12 @@ class AccountDataServlet(RestServlet):
         self.auth = hs.get_auth()
         self.store = hs.get_datastore()
         self.notifier = hs.get_notifier()
+        self._is_worker = hs.config.worker_app is not None
 
     async def on_PUT(self, request, user_id, account_data_type):
+        if self._is_worker:
+            raise Exception("Cannot handle PUT /account_data on worker")
+
         requester = await self.auth.get_user_by_req(request)
         if user_id != requester.user.to_string():
             raise AuthError(403, "Cannot add account data for other users.")
@@ -86,8 +90,12 @@ class RoomAccountDataServlet(RestServlet):
         self.auth = hs.get_auth()
         self.store = hs.get_datastore()
         self.notifier = hs.get_notifier()
+        self._is_worker = hs.config.worker_app is not None
 
     async def on_PUT(self, request, user_id, room_id, account_data_type):
+        if self._is_worker:
+            raise Exception("Cannot handle PUT /account_data on worker")
+
         requester = await self.auth.get_user_by_req(request)
         if user_id != requester.user.to_string():
             raise AuthError(403, "Cannot add account data for other users.")
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index 4b6d030a57..ab671f7334 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -18,8 +18,6 @@ from typing import Dict, Set
 from canonicaljson import encode_canonical_json, json
 from signedjson.sign import sign_json
 
-from twisted.internet import defer
-
 from synapse.api.errors import Codes, SynapseError
 from synapse.crypto.keyring import ServerKeyFetcher
 from synapse.http.server import (
@@ -125,8 +123,7 @@ class RemoteKey(DirectServeResource):
 
         await self.query_keys(request, query, query_remote_on_cache_miss=True)
 
-    @defer.inlineCallbacks
-    def query_keys(self, request, query, query_remote_on_cache_miss=False):
+    async def query_keys(self, request, query, query_remote_on_cache_miss=False):
         logger.info("Handling query for keys %r", query)
 
         store_queries = []
@@ -143,7 +140,7 @@ class RemoteKey(DirectServeResource):
             for key_id in key_ids:
                 store_queries.append((server_name, key_id, None))
 
-        cached = yield self.store.get_server_keys_json(store_queries)
+        cached = await self.store.get_server_keys_json(store_queries)
 
         json_results = set()
 
@@ -215,8 +212,8 @@ class RemoteKey(DirectServeResource):
                     json_results.add(bytes(result["key_json"]))
 
         if cache_misses and query_remote_on_cache_miss:
-            yield self.fetcher.get_keys(cache_misses)
-            yield self.query_keys(request, query, query_remote_on_cache_miss=False)
+            await self.fetcher.get_keys(cache_misses)
+            await self.query_keys(request, query, query_remote_on_cache_miss=False)
         else:
             signed_keys = []
             for key_json in json_results:
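
The `query_keys` change is the standard mechanical conversion from `@defer.inlineCallbacks` to a native coroutine. A minimal before/after sketch of the pattern on a toy method:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def lookup_old(store, key):
        result = yield store.get(key)  # Deferred resolved via the generator
        return result

    async def lookup_new(store, key):
        # Twisted Deferreds are awaitable, so `await` replaces `yield` and
        # the decorator disappears.
        result = await store.get(key)
        return result
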
diff --git a/synapse/rest/media/v1/_base.py b/synapse/rest/media/v1/_base.py
index ba28dd089d..503f2bed98 100644
--- a/synapse/rest/media/v1/_base.py
+++ b/synapse/rest/media/v1/_base.py
@@ -30,6 +30,22 @@ from synapse.util.stringutils import is_ascii
 
 logger = logging.getLogger(__name__)
 
+# list all text content types that will have the charset default to UTF-8 when
+# none is given
+TEXT_CONTENT_TYPES = [
+    "text/css",
+    "text/csv",
+    "text/html",
+    "text/calendar",
+    "text/plain",
+    "text/javascript",
+    "application/json",
+    "application/ld+json",
+    "application/rtf",
+    "image/svg+xml",
+    "text/xml",
+]
+
 
 def parse_media_id(request):
     try:
@@ -96,7 +112,14 @@ def add_file_headers(request, media_type, file_size, upload_name):
     def _quote(x):
         return urllib.parse.quote(x.encode("utf-8"))
 
-    request.setHeader(b"Content-Type", media_type.encode("UTF-8"))
+    # Default to a UTF-8 charset for text content types.
+    # e.g. UTF-8 is used for 'text/css' but not for 'text/css; charset=UTF-16'
+    if media_type.lower() in TEXT_CONTENT_TYPES:
+        content_type = media_type + "; charset=UTF-8"
+    else:
+        content_type = media_type
+
+    request.setHeader(b"Content-Type", content_type.encode("UTF-8"))
     if upload_name:
         # RFC6266 section 4.1 [1] defines both `filename` and `filename*`.
         #
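
The charset defaulting is an exact, lowercased membership test, so types that already carry parameters are left alone. A self-contained sketch of that logic (type list abbreviated):

    TEXT_CONTENT_TYPES = ["text/css", "text/plain", "application/json"]  # abbreviated

    def content_type_header(media_type: str) -> bytes:
        # Append a UTF-8 charset only for bare text types; a type that already
        # names a charset fails the exact lookup and passes through unchanged.
        if media_type.lower() in TEXT_CONTENT_TYPES:
            media_type += "; charset=UTF-8"
        return media_type.encode("UTF-8")

    assert content_type_header("text/css") == b"text/css; charset=UTF-8"
    assert content_type_header("text/css; charset=UTF-16") == b"text/css; charset=UTF-16"
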
diff --git a/synapse/rest/saml2/response_resource.py b/synapse/rest/saml2/response_resource.py
index 69ecc5e4b4..a545c13db7 100644
--- a/synapse/rest/saml2/response_resource.py
+++ b/synapse/rest/saml2/response_resource.py
@@ -14,7 +14,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from synapse.http.server import DirectServeResource, wrap_html_request_handler
+from synapse.http.server import (
+    DirectServeResource,
+    finish_request,
+    wrap_html_request_handler,
+)
 
 
 class SAML2ResponseResource(DirectServeResource):
@@ -24,8 +28,20 @@ class SAML2ResponseResource(DirectServeResource):
 
     def __init__(self, hs):
         super().__init__()
+        self._error_html_content = hs.config.saml2_error_html_content
         self._saml_handler = hs.get_saml_handler()
 
+    async def _async_render_GET(self, request):
+        # We don't expect any GET requests on this resource if everything goes right,
+        # but some IdPs sometimes end up responding with a 302 redirect on this endpoint.
+        # In this case, just tell the user that something went wrong and they should
+        # try to authenticate again.
+        request.setResponseCode(400)
+        request.setHeader(b"Content-Type", b"text/html; charset=utf-8")
+        request.setHeader(b"Content-Length", b"%d" % (len(self._error_html_content),))
+        request.write(self._error_html_content.encode("utf8"))
+        finish_request(request)
+
     @wrap_html_request_handler
     async def _async_render_POST(self, request):
         return await self._saml_handler.handle_saml_response(request)
diff --git a/synapse/server.py b/synapse/server.py
index fd2f69e928..1b980371de 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -26,7 +26,6 @@ import logging
 import os
 
 from twisted.mail.smtp import sendmail
-from twisted.web.client import BrowserLikePolicyForHTTPS
 
 from synapse.api.auth import Auth
 from synapse.api.filtering import Filtering
@@ -35,6 +34,7 @@ from synapse.appservice.api import ApplicationServiceApi
 from synapse.appservice.scheduler import ApplicationServiceScheduler
 from synapse.config.homeserver import HomeServerConfig
 from synapse.crypto import context_factory
+from synapse.crypto.context_factory import RegularPolicyForHTTPS
 from synapse.crypto.keyring import Keyring
 from synapse.events.builder import EventBuilderFactory
 from synapse.events.spamcheck import SpamChecker
@@ -310,7 +310,7 @@ class HomeServer(object):
         return (
             InsecureInterceptableContextFactory()
             if self.config.use_insecure_ssl_client_just_for_testing_do_not_use
-            else BrowserLikePolicyForHTTPS()
+            else RegularPolicyForHTTPS()
         )
 
     def build_simple_http_client(self):
@@ -420,7 +420,7 @@ class HomeServer(object):
         return PusherPool(self)
 
     def build_http_client(self):
-        tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+        tls_client_options_factory = context_factory.FederationPolicyForHTTPS(
             self.config
         )
         return MatrixFederationHttpClient(self, tls_client_options_factory)
diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py
index 617e5ec196..2fa529fcd0 100644
--- a/synapse/state/__init__.py
+++ b/synapse/state/__init__.py
@@ -660,28 +660,16 @@ class StateResolutionStore(object):
             allow_rejected=allow_rejected,
         )
 
-    def get_auth_chain(self, event_ids: List[str], ignore_events: Set[str]):
-        """Gets the full auth chain for a set of events (including rejected
-        events).
-
-        Includes the given event IDs in the result.
-
-        Note that:
-            1. All events must be state events.
-            2. For v1 rooms this may not have the full auth chain in the
-               presence of rejected events
-
-        Args:
-            event_ids: The event IDs of the events to fetch the auth chain for.
-                Must be state events.
-            ignore_events: Set of events to exclude from the returned auth
-                chain.
+    def get_auth_chain_difference(self, state_sets: List[Set[str]]):
+        """Given sets of state events figure out the auth chain difference (as
+        per state res v2 algorithm).
 
+        This is equivalent to fetching the full auth chain for each set of
+        state and returning the events that don't appear in every auth chain.
 
         Returns:
-            Deferred[list[str]]: List of event IDs of the auth chain.
+            Deferred[Set[str]]: Set of event IDs.
         """
 
-        return self.store.get_auth_chain_ids(
-            event_ids, include_given=True, ignore_events=ignore_events,
-        )
+        return self.store.get_auth_chain_difference(state_sets)
diff --git a/synapse/state/v2.py b/synapse/state/v2.py
index 0ffe6d8c14..18484e2fa6 100644
--- a/synapse/state/v2.py
+++ b/synapse/state/v2.py
@@ -227,36 +227,12 @@ def _get_auth_chain_difference(state_sets, event_map, state_res_store):
     Returns:
         Deferred[set[str]]: Set of event IDs
     """
-    common = set(itervalues(state_sets[0])).intersection(
-        *(itervalues(s) for s in state_sets[1:])
-    )
-
-    auth_sets = []
-    for state_set in state_sets:
-        auth_ids = {
-            eid
-            for key, eid in iteritems(state_set)
-            if (
-                key[0] in (EventTypes.Member, EventTypes.ThirdPartyInvite)
-                or key
-                in (
-                    (EventTypes.PowerLevels, ""),
-                    (EventTypes.Create, ""),
-                    (EventTypes.JoinRules, ""),
-                )
-            )
-            and eid not in common
-        }
 
-        auth_chain = yield state_res_store.get_auth_chain(auth_ids, common)
-        auth_ids.update(auth_chain)
-
-        auth_sets.append(auth_ids)
-
-    intersection = set(auth_sets[0]).intersection(*auth_sets[1:])
-    union = set().union(*auth_sets)
+    difference = yield state_res_store.get_auth_chain_difference(
+        [set(state_set.values()) for state_set in state_sets]
+    )
 
-    return union - intersection
+    return difference
 
 
 def _seperate(state_sets):
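
The deleted block computed the difference in Python from per-set auth chains; the new storage method returns the same set. As a reference for what "auth chain difference" means, the naive set arithmetic (full auth chains, including the state events themselves, assumed already fetched):

    def auth_chain_difference(full_auth_chains):
        # Events that appear in at least one chain but not in every chain.
        union = set().union(*full_auth_chains)
        intersection = set.intersection(*map(set, full_auth_chains))
        return union - intersection

    # Mirrors the A/B diagram in the event_federation hunk below:
    chains = [{"$A", "$C", "$D", "$E"}, {"$B", "$C", "$D", "$E"}]
    assert auth_chain_difference(chains) == {"$A", "$B"}
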
diff --git a/synapse/storage/data_stores/main/devices.py b/synapse/storage/data_stores/main/devices.py
index d55733a4cd..8af5f7de54 100644
--- a/synapse/storage/data_stores/main/devices.py
+++ b/synapse/storage/data_stores/main/devices.py
@@ -317,14 +317,16 @@ class DeviceWorkerStore(SQLBaseStore):
                     key_json = device.get("key_json", None)
                     if key_json:
                         result["keys"] = db_to_json(key_json)
+
+                        if "signatures" in device:
+                            for sig_user_id, sigs in device["signatures"].items():
+                                result["keys"].setdefault("signatures", {}).setdefault(
+                                    sig_user_id, {}
+                                ).update(sigs)
+
                     device_display_name = device.get("device_display_name", None)
                     if device_display_name:
                         result["device_display_name"] = device_display_name
-                    if "signatures" in device:
-                        for sig_user_id, sigs in device["signatures"].items():
-                            result["keys"].setdefault("signatures", {}).setdefault(
-                                sig_user_id, {}
-                            ).update(sigs)
                 else:
                     result["deleted"] = True
 
@@ -525,14 +527,16 @@ class DeviceWorkerStore(SQLBaseStore):
                 key_json = device.get("key_json", None)
                 if key_json:
                     result["keys"] = db_to_json(key_json)
+
+                    if "signatures" in device:
+                        for sig_user_id, sigs in device["signatures"].items():
+                            result["keys"].setdefault("signatures", {}).setdefault(
+                                sig_user_id, {}
+                            ).update(sigs)
+
                 device_display_name = device.get("device_display_name", None)
                 if device_display_name:
                     result["device_display_name"] = device_display_name
-                if "signatures" in device:
-                    for sig_user_id, sigs in device["signatures"].items():
-                        result["keys"].setdefault("signatures", {}).setdefault(
-                            sig_user_id, {}
-                        ).update(sigs)
 
                 results.append(result)
 
diff --git a/synapse/storage/data_stores/main/event_federation.py b/synapse/storage/data_stores/main/event_federation.py
index 49a7b8b433..62d4e9f599 100644
--- a/synapse/storage/data_stores/main/event_federation.py
+++ b/synapse/storage/data_stores/main/event_federation.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 import itertools
 import logging
-from typing import List, Optional, Set
+from typing import Dict, List, Optional, Set, Tuple
 
 from six.moves.queue import Empty, PriorityQueue
 
@@ -103,6 +103,154 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
 
         return list(results)
 
+    def get_auth_chain_difference(self, state_sets: List[Set[str]]):
+        """Given sets of state events figure out the auth chain difference (as
+        per state res v2 algorithm).
+
+        This is equivalent to fetching the full auth chain for each set of
+        state and returning the events that don't appear in every auth chain.
+
+        Returns:
+            Deferred[Set[str]]
+        """
+
+        return self.db.runInteraction(
+            "get_auth_chain_difference",
+            self._get_auth_chain_difference_txn,
+            state_sets,
+        )
+
+    def _get_auth_chain_difference_txn(
+        self, txn, state_sets: List[Set[str]]
+    ) -> Set[str]:
+
+        # Algorithm Description
+        # ~~~~~~~~~~~~~~~~~~~~~
+        #
+        # The idea here is to basically walk the auth graph of each state set in
+        # tandem, keeping track of which auth events are reachable by each state
+        # set. If we reach an auth event we've already visited (via a different
+        # state set) then we mark that auth event and all ancestors as reachable
+        # by the state set. This requires that we keep track of the auth chains
+        # in memory.
+        #
+        # Doing it in such a way means that we can stop early if all auth
+        # events we're currently walking are reachable by all state sets.
+        #
+        # *Note*: We can't stop walking an event's auth chain if it is reachable
+        # by all state sets. This is because other auth chains we're walking
+        # might be reachable only via the original auth chain. For example,
+        # given the following auth chain:
+        #
+        #       A -> C -> D -> E
+        #           /         /
+        #       B -´---------´
+        #
+        # and state sets {A} and {B} then walking the auth chains of A and B
+        # would immediately show that C is reachable by both. However, if we
+        # stopped at C then we'd only reach E via the auth chain of B and so E
+        # would erroneously get included in the returned difference.
+        #
+        # The other thing that we do is limit the number of auth chains we walk
+        # at once, due to practical limits (i.e. we can only query the database
+        # with a limited set of parameters). We pick the auth chains we walk
+        # each iteration based on their depth, in the hope that events with a
+        # lower depth are likely reachable by those with higher depths.
+        #
+        # We could use any ordering that we believe would give a rough
+        # topological ordering, e.g. origin server timestamp. If the ordering
+        # chosen is not topological then the algorithm still produces the right
+        # result, but perhaps a bit more inefficiently. This is why it is safe
+        # to use "depth" here.
+
+        initial_events = set(state_sets[0]).union(*state_sets[1:])
+
+        # Dict from events in auth chains to which sets *cannot* reach them.
+        # I.e. if the set is empty then all sets can reach the event.
+        event_to_missing_sets = {
+            event_id: {i for i, a in enumerate(state_sets) if event_id not in a}
+            for event_id in initial_events
+        }
+
+        # We need to get the depth of the initial events for sorting purposes.
+        sql = """
+            SELECT depth, event_id FROM events
+            WHERE %s
+            ORDER BY depth ASC
+        """
+        clause, args = make_in_list_sql_clause(
+            txn.database_engine, "event_id", initial_events
+        )
+        txn.execute(sql % (clause,), args)
+
+        # The sorted list of events whose auth chains we should walk.
+        search = txn.fetchall()  # type: List[Tuple[int, str]]
+
+        # Map from event to its auth events
+        event_to_auth_events = {}  # type: Dict[str, Set[str]]
+
+        base_sql = """
+            SELECT a.event_id, auth_id, depth
+            FROM event_auth AS a
+            INNER JOIN events AS e ON (e.event_id = a.auth_id)
+            WHERE
+        """
+
+        while search:
+            # Check whether all our current walks are reachable by all state
+            # sets. If so we can bail.
+            if all(not event_to_missing_sets[eid] for _, eid in search):
+                break
+
+            # Fetch the auth events, and their depths, of the last N events
+            # we're currently walking
+            search, chunk = search[:-100], search[-100:]
+            clause, args = make_in_list_sql_clause(
+                txn.database_engine, "a.event_id", [e_id for _, e_id in chunk]
+            )
+            txn.execute(base_sql + clause, args)
+
+            for event_id, auth_event_id, auth_event_depth in txn:
+                event_to_auth_events.setdefault(event_id, set()).add(auth_event_id)
+
+                sets = event_to_missing_sets.get(auth_event_id)
+                if sets is None:
+                    # First time we're seeing this event, so we add it to the
+                    # queue of things to fetch.
+                    search.append((auth_event_depth, auth_event_id))
+
+                    # Assume that this event is unreachable from any of the
+                    # state sets until proven otherwise
+                    sets = event_to_missing_sets[auth_event_id] = set(
+                        range(len(state_sets))
+                    )
+                else:
+                    # We've previously seen this event, so look up its auth
+                    # events and recursively mark all ancestors as reachable
+                    # by the current event's state set.
+                    a_ids = event_to_auth_events.get(auth_event_id)
+                    while a_ids:
+                        new_aids = set()
+                        for a_id in a_ids:
+                            event_to_missing_sets[a_id].intersection_update(
+                                event_to_missing_sets[event_id]
+                            )
+
+                            b = event_to_auth_events.get(a_id)
+                            if b:
+                                new_aids.update(b)
+
+                        a_ids = new_aids
+
+                # Mark that the auth event is reachable by the appropriate sets.
+                sets.intersection_update(event_to_missing_sets[event_id])
+
+            search.sort()
+
+        # Return all events where not all sets can reach them.
+        return {eid for eid, n in event_to_missing_sets.items() if n}
+
     def get_oldest_events_in_room(self, room_id):
         return self.db.runInteraction(
             "get_oldest_events_in_room", self._get_oldest_events_in_room_txn, room_id
diff --git a/synapse/storage/data_stores/main/event_push_actions.py b/synapse/storage/data_stores/main/event_push_actions.py
index 9988a6d3fc..8eed590929 100644
--- a/synapse/storage/data_stores/main/event_push_actions.py
+++ b/synapse/storage/data_stores/main/event_push_actions.py
@@ -608,6 +608,23 @@ class EventPushActionsWorkerStore(SQLBaseStore):
 
         return range_end
 
+    @defer.inlineCallbacks
+    def get_time_of_last_push_action_before(self, stream_ordering):
+        def f(txn):
+            sql = (
+                "SELECT e.received_ts"
+                " FROM event_push_actions AS ep"
+                " JOIN events e ON ep.room_id = e.room_id AND ep.event_id = e.event_id"
+                " WHERE ep.stream_ordering > ?"
+                " ORDER BY ep.stream_ordering ASC"
+                " LIMIT 1"
+            )
+            txn.execute(sql, (stream_ordering,))
+            return txn.fetchone()
+
+        result = yield self.db.runInteraction("get_time_of_last_push_action_before", f)
+        return result[0] if result else None
+
 
 class EventPushActionsStore(EventPushActionsWorkerStore):
     EPA_HIGHLIGHT_INDEX = "epa_highlight_index"
@@ -736,23 +753,6 @@ class EventPushActionsStore(EventPushActionsWorkerStore):
         return push_actions
 
     @defer.inlineCallbacks
-    def get_time_of_last_push_action_before(self, stream_ordering):
-        def f(txn):
-            sql = (
-                "SELECT e.received_ts"
-                " FROM event_push_actions AS ep"
-                " JOIN events e ON ep.room_id = e.room_id AND ep.event_id = e.event_id"
-                " WHERE ep.stream_ordering > ?"
-                " ORDER BY ep.stream_ordering ASC"
-                " LIMIT 1"
-            )
-            txn.execute(sql, (stream_ordering,))
-            return txn.fetchone()
-
-        result = yield self.db.runInteraction("get_time_of_last_push_action_before", f)
-        return result[0] if result else None
-
-    @defer.inlineCallbacks
     def get_latest_push_action_stream_ordering(self):
         def f(txn):
             txn.execute("SELECT MAX(stream_ordering) FROM event_push_actions")
diff --git a/synapse/storage/data_stores/main/events.py b/synapse/storage/data_stores/main/events.py
index 8ae23df00a..d593ef47b8 100644
--- a/synapse/storage/data_stores/main/events.py
+++ b/synapse/storage/data_stores/main/events.py
@@ -1168,7 +1168,11 @@ class EventsStore(
                 and original_event.internal_metadata.is_redacted()
             ):
                 # Redaction was allowed
-                pruned_json = encode_json(prune_event_dict(original_event.get_dict()))
+                pruned_json = encode_json(
+                    prune_event_dict(
+                        original_event.room_version, original_event.get_dict()
+                    )
+                )
             else:
                 # Redaction wasn't allowed
                 pruned_json = None
@@ -1929,7 +1933,9 @@ class EventsStore(
                 return
 
             # Prune the event's dict then convert it to JSON.
-            pruned_json = encode_json(prune_event_dict(event.get_dict()))
+            pruned_json = encode_json(
+                prune_event_dict(event.room_version, event.get_dict())
+            )
 
             # Update the event_json table to replace the event's JSON with the pruned
             # JSON.
diff --git a/synapse/storage/data_stores/main/events_worker.py b/synapse/storage/data_stores/main/events_worker.py
index 3704995bad..8d2bfeb4a3 100644
--- a/synapse/storage/data_stores/main/events_worker.py
+++ b/synapse/storage/data_stores/main/events_worker.py
@@ -28,9 +28,12 @@ from twisted.internet import defer
 
 from synapse.api.constants import EventTypes
 from synapse.api.errors import NotFoundError
-from synapse.api.room_versions import EventFormatVersions
-from synapse.events import FrozenEvent, event_type_from_format_version  # noqa: F401
-from synapse.events.snapshot import EventContext  # noqa: F401
+from synapse.api.room_versions import (
+    KNOWN_ROOM_VERSIONS,
+    EventFormatVersions,
+    RoomVersions,
+)
+from synapse.events import make_event_from_dict
 from synapse.events.utils import prune_event
 from synapse.logging.context import LoggingContext, PreserveLoggingContext
 from synapse.metrics.background_process_metrics import run_as_background_process
@@ -580,8 +583,49 @@ class EventsWorkerStore(SQLBaseStore):
                 # of a event format version, so it must be a V1 event.
                 format_version = EventFormatVersions.V1
 
-            original_ev = event_type_from_format_version(format_version)(
+            room_version_id = row["room_version_id"]
+
+            if not room_version_id:
+                # this should only happen for out-of-band membership events
+                if not internal_metadata.get("out_of_band_membership"):
+                    logger.warning(
+                        "Room %s for event %s is unknown", d["room_id"], event_id
+                    )
+                    continue
+
+                # take a wild stab at the room version based on the event format
+                if format_version == EventFormatVersions.V1:
+                    room_version = RoomVersions.V1
+                elif format_version == EventFormatVersions.V2:
+                    room_version = RoomVersions.V3
+                else:
+                    room_version = RoomVersions.V5
+            else:
+                room_version = KNOWN_ROOM_VERSIONS.get(room_version_id)
+                if not room_version:
+                    logger.error(
+                        "Event %s in room %s has unknown room version %s",
+                        event_id,
+                        d["room_id"],
+                        room_version_id,
+                    )
+                    continue
+
+                if room_version.event_format != format_version:
+                    logger.error(
+                        "Event %s in room %s with version %s has wrong format: "
+                        "expected %s, was %s",
+                        event_id,
+                        d["room_id"],
+                        room_version_id,
+                        room_version.event_format,
+                        format_version,
+                    )
+                    continue
+
+            original_ev = make_event_from_dict(
                 event_dict=d,
+                room_version=room_version,
                 internal_metadata_dict=internal_metadata,
                 rejected_reason=rejected_reason,
             )
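When an out-of-band membership event has no stored room version, the code above guesses one from the event format. A hedged sketch of just that mapping (`guess_room_version` is an illustrative name; the constants are synapse's own):

```python
from synapse.api.room_versions import EventFormatVersions, RoomVersions

def guess_room_version(format_version):
    """Mirrors the fallback branches above (illustrative helper)."""
    if format_version == EventFormatVersions.V1:
        return RoomVersions.V1  # room v1/v2 events use event format V1
    if format_version == EventFormatVersions.V2:
        return RoomVersions.V3  # event format V2 arrived with room v3
    return RoomVersions.V5  # anything newer: assume room v5
```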
@@ -661,6 +705,12 @@ class EventsWorkerStore(SQLBaseStore):
            of EventFormatVersions. 'None' means the event predates
            EventFormatVersions (so the event is format V1).
 
+         * room_version_id (str|None): The version of the room which contains the event.
+           Hopefully one of RoomVersions.
+
+           Due to historical reasons, there may be a few events in the database which
+           do not have an associated room; in this case None will be returned here.
+
          * rejected_reason (str|None): if the event was rejected, the reason
            why.
 
@@ -676,17 +726,18 @@ class EventsWorkerStore(SQLBaseStore):
         """
         event_dict = {}
         for evs in batch_iter(event_ids, 200):
-            sql = (
-                "SELECT "
-                " e.event_id, "
-                " e.internal_metadata,"
-                " e.json,"
-                " e.format_version, "
-                " rej.reason "
-                " FROM event_json as e"
-                " LEFT JOIN rejections as rej USING (event_id)"
-                " WHERE "
-            )
+            sql = """\
+                SELECT
+                  e.event_id,
+                  e.internal_metadata,
+                  e.json,
+                  e.format_version,
+                  r.room_version,
+                  rej.reason
+                FROM event_json as e
+                  LEFT JOIN rooms r USING (room_id)
+                  LEFT JOIN rejections as rej USING (event_id)
+                WHERE """
 
             clause, args = make_in_list_sql_clause(
                 txn.database_engine, "e.event_id", evs
@@ -701,7 +752,8 @@ class EventsWorkerStore(SQLBaseStore):
                     "internal_metadata": row[1],
                     "json": row[2],
                     "format_version": row[3],
-                    "rejected_reason": row[4],
+                    "room_version_id": row[4],
+                    "rejected_reason": row[5],
                     "redactions": [],
                 }
 
diff --git a/synapse/storage/data_stores/main/monthly_active_users.py b/synapse/storage/data_stores/main/monthly_active_users.py
index 1507a14e09..925bc5691b 100644
--- a/synapse/storage/data_stores/main/monthly_active_users.py
+++ b/synapse/storage/data_stores/main/monthly_active_users.py
@@ -43,13 +43,40 @@ class MonthlyActiveUsersWorkerStore(SQLBaseStore):
 
         def _count_users(txn):
             sql = "SELECT COALESCE(count(*), 0) FROM monthly_active_users"
-
             txn.execute(sql)
             (count,) = txn.fetchone()
             return count
 
         return self.db.runInteraction("count_users", _count_users)
 
+    @cached(num_args=0)
+    def get_monthly_active_count_by_service(self):
+        """Generates current count of monthly active users broken down by service.
+        A service is typically an appservice but also includes native matrix users.
+        Since the `monthly_active_users` table is populated from the `user_ips` table,
+        `config.track_appservice_user_ips` must be set to `true` for this
+        method to return anything other than native matrix users.
+
+        Returns:
+            Deferred[dict]: dict that includes a mapping between app_service_id
+                and the number of occurrences.
+
+        """
+
+        def _count_users_by_service(txn):
+            sql = """
+                SELECT COALESCE(appservice_id, 'native'), COALESCE(count(*), 0)
+                FROM monthly_active_users
+                LEFT JOIN users ON monthly_active_users.user_id=users.name
+                GROUP BY appservice_id;
+            """
+
+            txn.execute(sql)
+            result = txn.fetchall()
+            return dict(result)
+
+        return self.db.runInteraction("count_users_by_service", _count_users_by_service)
+
     @defer.inlineCallbacks
     def get_registered_reserved_users(self):
         """Of the reserved threepids defined in config, which are associated
@@ -292,6 +319,9 @@ class MonthlyActiveUsersStore(MonthlyActiveUsersWorkerStore):
 
         self._invalidate_cache_and_stream(txn, self.get_monthly_active_count, ())
         self._invalidate_cache_and_stream(
+            txn, self.get_monthly_active_count_by_service, ()
+        )
+        self._invalidate_cache_and_stream(
             txn, self.user_last_seen_monthly_active, (user_id,)
         )
 
diff --git a/synapse/storage/data_stores/main/registration.py b/synapse/storage/data_stores/main/registration.py
index 49306642ed..3e53c8568a 100644
--- a/synapse/storage/data_stores/main/registration.py
+++ b/synapse/storage/data_stores/main/registration.py
@@ -301,12 +301,16 @@ class RegistrationWorkerStore(SQLBaseStore):
             admin (bool): true iff the user is to be a server admin,
                 false otherwise.
         """
-        return self.db.simple_update_one(
-            table="users",
-            keyvalues={"name": user.to_string()},
-            updatevalues={"admin": 1 if admin else 0},
-            desc="set_server_admin",
-        )
+
+        def set_server_admin_txn(txn):
+            self.db.simple_update_one_txn(
+                txn, "users", {"name": user.to_string()}, {"admin": 1 if admin else 0}
+            )
+            self._invalidate_cache_and_stream(
+                txn, self.get_user_by_id, (user.to_string(),)
+            )
+
+        return self.db.runInteraction("set_server_admin", set_server_admin_txn)
 
     def _query_for_auth(self, txn, token):
         sql = (
diff --git a/synapse/storage/data_stores/main/schema/delta/57/rooms_version_column_3.sql.postgres b/synapse/storage/data_stores/main/schema/delta/57/rooms_version_column_3.sql.postgres
new file mode 100644
index 0000000000..92aaadde0d
--- /dev/null
+++ b/synapse/storage/data_stores/main/schema/delta/57/rooms_version_column_3.sql.postgres
@@ -0,0 +1,39 @@
+/* Copyright 2020 The Matrix.org Foundation C.I.C.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- When we first added the room_version column to the rooms table, it was populated from
+-- the current_state_events table. However, there was an issue causing a background
+-- update to clean up the current_state_events table for rooms where the server is no
+-- longer participating, before that column could be populated. Therefore, some rooms had
+-- a NULL room_version.
+
+-- The rooms_version_column_2.sql.* delta files were introduced to populate the column
+-- synchronously instead of in a background update, which fixed this issue.
+-- However, all of the instances of Synapse installed or updated in the meantime got
+-- their rooms table corrupted with NULL room_versions.
+
+-- This query fishes out the room versions from the create event using the state_events
+-- table instead of the current_state_events one, as the former still has all of the
+-- create events.
+
+UPDATE rooms SET room_version=(
+    SELECT COALESCE(json::json->'content'->>'room_version','1')
+    FROM state_events se INNER JOIN event_json ej USING (event_id)
+    WHERE se.room_id=rooms.room_id AND se.type='m.room.create' AND se.state_key=''
+    LIMIT 1
+) WHERE rooms.room_version IS NULL;
+
+-- see also rooms_version_column_3.sql.sqlite which has a copy of the above query, using
+-- sqlite syntax for the json extraction.
diff --git a/synapse/storage/data_stores/main/schema/delta/57/rooms_version_column_3.sql.sqlite b/synapse/storage/data_stores/main/schema/delta/57/rooms_version_column_3.sql.sqlite
new file mode 100644
index 0000000000..e19dab97cb
--- /dev/null
+++ b/synapse/storage/data_stores/main/schema/delta/57/rooms_version_column_3.sql.sqlite
@@ -0,0 +1,23 @@
+/* Copyright 2020 The Matrix.org Foundation C.I.C.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- see rooms_version_column_3.sql.postgres for details of what's going on here.
+
+UPDATE rooms SET room_version=(
+    SELECT COALESCE(json_extract(ej.json, '$.content.room_version'), '1')
+    FROM state_events se INNER JOIN event_json ej USING (event_id)
+    WHERE se.room_id=rooms.room_id AND se.type='m.room.create' AND se.state_key=''
+    LIMIT 1
+) WHERE rooms.room_version IS NULL;
diff --git a/synapse/storage/data_stores/main/state_deltas.py b/synapse/storage/data_stores/main/state_deltas.py
index 12c982cb26..725e12507f 100644
--- a/synapse/storage/data_stores/main/state_deltas.py
+++ b/synapse/storage/data_stores/main/state_deltas.py
@@ -15,6 +15,8 @@
 
 import logging
 
+from twisted.internet import defer
+
 from synapse.storage._base import SQLBaseStore
 
 logger = logging.getLogger(__name__)
@@ -56,7 +58,7 @@ class StateDeltasStore(SQLBaseStore):
             # if the CSDs haven't changed between prev_stream_id and now, we
             # know for certain that they haven't changed between prev_stream_id and
             # max_stream_id.
-            return max_stream_id, []
+            return defer.succeed((max_stream_id, []))
 
         def get_current_state_deltas_txn(txn):
             # First we calculate the max stream id that will give us less than
diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index 1953614401..e61595336c 100644
--- a/synapse/storage/database.py
+++ b/synapse/storage/database.py
@@ -15,9 +15,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
-import sys
 import time
-from typing import Iterable, Tuple
+from time import monotonic as monotonic_time
+from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Tuple
 
 from six import iteritems, iterkeys, itervalues
 from six.moves import intern, range
@@ -29,27 +29,21 @@ from twisted.internet import defer
 
 from synapse.api.errors import StoreError
 from synapse.config.database import DatabaseConnectionConfig
-from synapse.logging.context import LoggingContext, make_deferred_yieldable
+from synapse.logging.context import (
+    LoggingContext,
+    LoggingContextOrSentinel,
+    make_deferred_yieldable,
+)
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage.background_updates import BackgroundUpdater
-from synapse.storage.engines import PostgresEngine, Sqlite3Engine
+from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine
+from synapse.storage.types import Connection, Cursor
 from synapse.util.stringutils import exception_to_unicode
 
-# import a function which will return a monotonic time, in seconds
-try:
-    # on python 3, use time.monotonic, since time.clock can go backwards
-    from time import monotonic as monotonic_time
-except ImportError:
-    # ... but python 2 doesn't have it
-    from time import clock as monotonic_time
-
 logger = logging.getLogger(__name__)
 
-try:
-    MAX_TXN_ID = sys.maxint - 1
-except AttributeError:
-    # python 3 does not have a maximum int value
-    MAX_TXN_ID = 2 ** 63 - 1
+# python 3 does not have a maximum int value
+MAX_TXN_ID = 2 ** 63 - 1
 
 sql_logger = logging.getLogger("synapse.storage.SQL")
 transaction_logger = logging.getLogger("synapse.storage.txn")
@@ -77,7 +71,7 @@ UNIQUE_INDEX_BACKGROUND_UPDATES = {
 
 
 def make_pool(
-    reactor, db_config: DatabaseConnectionConfig, engine
+    reactor, db_config: DatabaseConnectionConfig, engine: BaseDatabaseEngine
 ) -> adbapi.ConnectionPool:
     """Get the connection pool for the database.
     """
@@ -90,7 +84,9 @@ def make_pool(
     )
 
 
-def make_conn(db_config: DatabaseConnectionConfig, engine):
+def make_conn(
+    db_config: DatabaseConnectionConfig, engine: BaseDatabaseEngine
+) -> Connection:
     """Make a new connection to the database and return it.
 
     Returns:
@@ -107,20 +103,27 @@ def make_conn(db_config: DatabaseConnectionConfig, engine):
     return db_conn
 
 
-class LoggingTransaction(object):
+# The type of entry which goes on our after_callbacks and exception_callbacks lists.
+#
+# Python 3.5.2 doesn't support Callable with an ellipsis, so we wrap it in quotes so
+# that mypy sees the type but the runtime python doesn't.
+_CallbackListEntry = Tuple["Callable[..., None]", Iterable[Any], Dict[str, Any]]
+
+
+class LoggingTransaction:
     """An object that almost-transparently proxies for the 'txn' object
     passed to the constructor. Adds logging and metrics to the .execute()
     method.
 
     Args:
         txn: The database transaction object to wrap.
-        name (str): The name of this transactions for logging.
-        database_engine (Sqlite3Engine|PostgresEngine)
-        after_callbacks(list|None): A list that callbacks will be appended to
+        name: The name of this transaction, for logging.
+        database_engine
+        after_callbacks: A list that callbacks will be appended to
             that have been added by `call_after` which should be run on
             successful completion of the transaction. None indicates that no
             callbacks should be allowed to be scheduled to run.
-        exception_callbacks(list|None): A list that callbacks will be appended
+        exception_callbacks: A list that callbacks will be appended
             to that have been added by `call_on_exception` which should be run
             if transaction ends with an error. None indicates that no callbacks
             should be allowed to be scheduled to run.
@@ -135,46 +138,67 @@ class LoggingTransaction(object):
     ]
 
     def __init__(
-        self, txn, name, database_engine, after_callbacks=None, exception_callbacks=None
+        self,
+        txn: Cursor,
+        name: str,
+        database_engine: BaseDatabaseEngine,
+        after_callbacks: Optional[List[_CallbackListEntry]] = None,
+        exception_callbacks: Optional[List[_CallbackListEntry]] = None,
     ):
-        object.__setattr__(self, "txn", txn)
-        object.__setattr__(self, "name", name)
-        object.__setattr__(self, "database_engine", database_engine)
-        object.__setattr__(self, "after_callbacks", after_callbacks)
-        object.__setattr__(self, "exception_callbacks", exception_callbacks)
+        self.txn = txn
+        self.name = name
+        self.database_engine = database_engine
+        self.after_callbacks = after_callbacks
+        self.exception_callbacks = exception_callbacks
 
-    def call_after(self, callback, *args, **kwargs):
+    def call_after(self, callback: "Callable[..., None]", *args, **kwargs):
         """Call the given callback on the main twisted thread after the
         transaction has finished. Used to invalidate the caches on the
         correct thread.
         """
+        # if self.after_callbacks is None, that means that whatever constructed the
+        # LoggingTransaction isn't expecting there to be any callbacks; assert that
+        # is not the case.
+        assert self.after_callbacks is not None
         self.after_callbacks.append((callback, args, kwargs))
 
-    def call_on_exception(self, callback, *args, **kwargs):
+    def call_on_exception(self, callback: "Callable[..., None]", *args, **kwargs):
+        # if self.exception_callbacks is None, that means that whatever constructed the
+        # LoggingTransaction isn't expecting there to be any callbacks; assert that
+        # is not the case.
+        assert self.exception_callbacks is not None
         self.exception_callbacks.append((callback, args, kwargs))
 
-    def __getattr__(self, name):
-        return getattr(self.txn, name)
+    def fetchall(self) -> List[Tuple]:
+        return self.txn.fetchall()
 
-    def __setattr__(self, name, value):
-        setattr(self.txn, name, value)
+    def fetchone(self) -> Tuple:
+        return self.txn.fetchone()
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[Tuple]:
         return self.txn.__iter__()
 
+    @property
+    def rowcount(self) -> int:
+        return self.txn.rowcount
+
+    @property
+    def description(self) -> Any:
+        return self.txn.description
+
     def execute_batch(self, sql, args):
         if isinstance(self.database_engine, PostgresEngine):
-            from psycopg2.extras import execute_batch
+            from psycopg2.extras import execute_batch  # type: ignore
 
             self._do_execute(lambda *x: execute_batch(self.txn, *x), sql, args)
         else:
             for val in args:
                 self.execute(sql, val)
 
-    def execute(self, sql, *args):
+    def execute(self, sql: str, *args: Any):
         self._do_execute(self.txn.execute, sql, *args)
 
-    def executemany(self, sql, *args):
+    def executemany(self, sql: str, *args: Any):
         self._do_execute(self.txn.executemany, sql, *args)
 
     def _make_sql_one_line(self, sql):
@@ -207,6 +231,9 @@ class LoggingTransaction(object):
             sql_logger.debug("[SQL time] {%s} %f sec", self.name, secs)
             sql_query_timer.labels(sql.split()[0]).observe(secs)
 
+    def close(self):
+        self.txn.close()
+
 
 class PerformanceCounters(object):
     def __init__(self):
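The callback lists are now explicitly typed: each `_CallbackListEntry` is a `(callable, args, kwargs)` tuple appended via `call_after`/`call_on_exception` and drained once the transaction settles. A minimal standalone sketch of that contract (the draining loop is what `runInteraction` effectively does after a successful commit):

```python
from typing import Any, Callable, Dict, Iterable, List, Tuple

# Same shape as _CallbackListEntry above.
CallbackEntry = Tuple[Callable[..., None], Iterable[Any], Dict[str, Any]]

after_callbacks = []  # type: List[CallbackEntry]
after_callbacks.append((print, ("cache invalidated for", "@user:hs"), {}))

# Run after a successful commit:
for cb, args, kwargs in after_callbacks:
    cb(*args, **kwargs)
```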
@@ -251,7 +278,9 @@ class Database(object):
 
     _TXN_ID = 0
 
-    def __init__(self, hs, database_config: DatabaseConnectionConfig, engine):
+    def __init__(
+        self, hs, database_config: DatabaseConnectionConfig, engine: BaseDatabaseEngine
+    ):
         self.hs = hs
         self._clock = hs.get_clock()
         self._database_config = database_config
@@ -259,9 +288,9 @@ class Database(object):
 
         self.updates = BackgroundUpdater(hs, self)
 
-        self._previous_txn_total_time = 0
-        self._current_txn_total_time = 0
-        self._previous_loop_ts = 0
+        self._previous_txn_total_time = 0.0
+        self._current_txn_total_time = 0.0
+        self._previous_loop_ts = 0.0
 
         # TODO(paul): These can eventually be removed once the metrics code
         #   is running in mainline, and we have some nice monitoring frontends
@@ -463,23 +492,23 @@ class Database(object):
             sql_txn_timer.labels(desc).observe(duration)
 
     @defer.inlineCallbacks
-    def runInteraction(self, desc, func, *args, **kwargs):
+    def runInteraction(self, desc: str, func: Callable, *args: Any, **kwargs: Any):
         """Starts a transaction on the database and runs a given function
 
         Arguments:
-            desc (str): description of the transaction, for logging and metrics
-            func (func): callback function, which will be called with a
+            desc: description of the transaction, for logging and metrics
+            func: callback function, which will be called with a
                 database transaction (twisted.enterprise.adbapi.Transaction) as
                 its first argument, followed by `args` and `kwargs`.
 
-            args (list): positional args to pass to `func`
-            kwargs (dict): named args to pass to `func`
+            args: positional args to pass to `func`
+            kwargs: named args to pass to `func`
 
         Returns:
             Deferred: The result of func
         """
-        after_callbacks = []
-        exception_callbacks = []
+        after_callbacks = []  # type: List[_CallbackListEntry]
+        exception_callbacks = []  # type: List[_CallbackListEntry]
 
         if LoggingContext.current_context() == LoggingContext.sentinel:
             logger.warning("Starting db txn '%s' from sentinel context", desc)
@@ -505,20 +534,22 @@ class Database(object):
         return result
 
     @defer.inlineCallbacks
-    def runWithConnection(self, func, *args, **kwargs):
+    def runWithConnection(self, func: Callable, *args: Any, **kwargs: Any):
         """Wraps the .runWithConnection() method on the underlying db_pool.
 
         Arguments:
-            func (func): callback function, which will be called with a
+            func: callback function, which will be called with a
                 database connection (twisted.enterprise.adbapi.Connection) as
                 its first argument, followed by `args` and `kwargs`.
-            args (list): positional args to pass to `func`
-            kwargs (dict): named args to pass to `func`
+            args: positional args to pass to `func`
+            kwargs: named args to pass to `func`
 
         Returns:
             Deferred: The result of func
         """
-        parent_context = LoggingContext.current_context()
+        parent_context = (
+            LoggingContext.current_context()
+        )  # type: Optional[LoggingContextOrSentinel]
         if parent_context == LoggingContext.sentinel:
             logger.warning(
                 "Starting db connection from sentinel context: metrics will be lost"
@@ -800,7 +831,7 @@ class Database(object):
                 return False
 
         # We didn't find any existing rows, so insert a new one
-        allvalues = {}
+        allvalues = {}  # type: Dict[str, Any]
         allvalues.update(keyvalues)
         allvalues.update(values)
         allvalues.update(insertion_values)
@@ -829,7 +860,7 @@ class Database(object):
         Returns:
             None
         """
-        allvalues = {}
+        allvalues = {}  # type: Dict[str, Any]
         allvalues.update(keyvalues)
         allvalues.update(insertion_values)
 
@@ -916,7 +947,7 @@ class Database(object):
         Returns:
             None
         """
-        allnames = []
+        allnames = []  # type: List[str]
         allnames.extend(key_names)
         allnames.extend(value_names)
 
@@ -1100,7 +1131,7 @@ class Database(object):
             keyvalues : dict of column names and values to select the rows with
             retcols : list of strings giving the names of the columns to return
         """
-        results = []
+        results = []  # type: List[Dict[str, Any]]
 
         if not iterable:
             return results
@@ -1439,7 +1470,7 @@ class Database(object):
             raise ValueError("order_direction must be one of 'ASC' or 'DESC'.")
 
         where_clause = "WHERE " if filters or keyvalues else ""
-        arg_list = []
+        arg_list = []  # type: List[Any]
         if filters:
             where_clause += " AND ".join("%s LIKE ?" % (k,) for k in filters)
             arg_list += list(filters.values())
diff --git a/synapse/storage/engines/__init__.py b/synapse/storage/engines/__init__.py
index 9d2d519922..035f9ea6e9 100644
--- a/synapse/storage/engines/__init__.py
+++ b/synapse/storage/engines/__init__.py
@@ -12,29 +12,31 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-import importlib
 import platform
 
-from ._base import IncorrectDatabaseSetup
+from ._base import BaseDatabaseEngine, IncorrectDatabaseSetup
 from .postgres import PostgresEngine
 from .sqlite import Sqlite3Engine
 
-SUPPORTED_MODULE = {"sqlite3": Sqlite3Engine, "psycopg2": PostgresEngine}
-
 
-def create_engine(database_config):
+def create_engine(database_config) -> BaseDatabaseEngine:
     name = database_config["name"]
-    engine_class = SUPPORTED_MODULE.get(name, None)
 
-    if engine_class:
+    if name == "sqlite3":
+        import sqlite3
+
+        return Sqlite3Engine(sqlite3, database_config)
+
+    if name == "psycopg2":
         # pypy requires psycopg2cffi rather than psycopg2
-        if name == "psycopg2" and platform.python_implementation() == "PyPy":
-            name = "psycopg2cffi"
-        module = importlib.import_module(name)
-        return engine_class(module, database_config)
+        if platform.python_implementation() == "PyPy":
+            import psycopg2cffi as psycopg2  # type: ignore
+        else:
+            import psycopg2  # type: ignore
+
+        return PostgresEngine(psycopg2, database_config)
 
     raise RuntimeError("Unsupported database engine '%s'" % (name,))
 
 
-__all__ = ["create_engine", "IncorrectDatabaseSetup"]
+__all__ = ["create_engine", "BaseDatabaseEngine", "IncorrectDatabaseSetup"]
diff --git a/synapse/storage/engines/_base.py b/synapse/storage/engines/_base.py
index ec5a4d198b..ab0bbe4bd3 100644
--- a/synapse/storage/engines/_base.py
+++ b/synapse/storage/engines/_base.py
@@ -12,7 +12,94 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import abc
+from typing import Generic, TypeVar
+
+from synapse.storage.types import Connection
 
 
 class IncorrectDatabaseSetup(RuntimeError):
     pass
+
+
+ConnectionType = TypeVar("ConnectionType", bound=Connection)
+
+
+class BaseDatabaseEngine(Generic[ConnectionType], metaclass=abc.ABCMeta):
+    def __init__(self, module, database_config: dict):
+        self.module = module
+
+    @property
+    @abc.abstractmethod
+    def single_threaded(self) -> bool:
+        ...
+
+    @property
+    @abc.abstractmethod
+    def can_native_upsert(self) -> bool:
+        """
+        Do we support native UPSERTs?
+        """
+        ...
+
+    @property
+    @abc.abstractmethod
+    def supports_tuple_comparison(self) -> bool:
+        """
+        Do we support comparing tuples, i.e. `(a, b) > (c, d)`?
+        """
+        ...
+
+    @property
+    @abc.abstractmethod
+    def supports_using_any_list(self) -> bool:
+        """
+        Do we support using `a = ANY(?)` and passing a list?
+        """
+        ...
+
+    @abc.abstractmethod
+    def check_database(
+        self, db_conn: ConnectionType, allow_outdated_version: bool = False
+    ) -> None:
+        ...
+
+    @abc.abstractmethod
+    def check_new_database(self, txn) -> None:
+        """Gets called when setting up a brand new database. This allows us to
+        apply stricter checks on new databases versus existing databases.
+        """
+        ...
+
+    @abc.abstractmethod
+    def convert_param_style(self, sql: str) -> str:
+        ...
+
+    @abc.abstractmethod
+    def on_new_connection(self, db_conn: ConnectionType) -> None:
+        ...
+
+    @abc.abstractmethod
+    def is_deadlock(self, error: Exception) -> bool:
+        ...
+
+    @abc.abstractmethod
+    def is_connection_closed(self, conn: ConnectionType) -> bool:
+        ...
+
+    @abc.abstractmethod
+    def lock_table(self, txn, table: str) -> None:
+        ...
+
+    @abc.abstractmethod
+    def get_next_state_group_id(self, txn) -> int:
+        """Returns an int that can be used as a new state_group ID
+        """
+        ...
+
+    @property
+    @abc.abstractmethod
+    def server_version(self) -> str:
+        """Gets a string giving the server version. For example: '3.22.0'
+        """
+        ...
diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py
index 53b3f372b0..6c7d08a6f2 100644
--- a/synapse/storage/engines/postgres.py
+++ b/synapse/storage/engines/postgres.py
@@ -15,16 +15,14 @@
 
 import logging
 
-from ._base import IncorrectDatabaseSetup
+from ._base import BaseDatabaseEngine, IncorrectDatabaseSetup
 
 logger = logging.getLogger(__name__)
 
 
-class PostgresEngine(object):
-    single_threaded = False
-
+class PostgresEngine(BaseDatabaseEngine):
     def __init__(self, database_module, database_config):
-        self.module = database_module
+        super().__init__(database_module, database_config)
         self.module.extensions.register_type(self.module.extensions.UNICODE)
 
         # Disables passing `bytes` to txn.execute, c.f. #6186. If you do
@@ -36,6 +34,10 @@ class PostgresEngine(object):
         self.synchronous_commit = database_config.get("synchronous_commit", True)
         self._version = None  # unknown as yet
 
+    @property
+    def single_threaded(self) -> bool:
+        return False
+
     def check_database(self, db_conn, allow_outdated_version: bool = False):
         # Get the version of PostgreSQL that we're using. As per the psycopg2
         # docs: The number is formed by converting the major, minor, and
diff --git a/synapse/storage/engines/sqlite.py b/synapse/storage/engines/sqlite.py
index 641e490697..3bc2e8b986 100644
--- a/synapse/storage/engines/sqlite.py
+++ b/synapse/storage/engines/sqlite.py
@@ -12,16 +12,19 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import struct
 import threading
+import typing
+
+from synapse.storage.engines import BaseDatabaseEngine
 
+if typing.TYPE_CHECKING:
+    import sqlite3  # noqa: F401
 
-class Sqlite3Engine(object):
-    single_threaded = True
 
+class Sqlite3Engine(BaseDatabaseEngine["sqlite3.Connection"]):
     def __init__(self, database_module, database_config):
-        self.module = database_module
+        super().__init__(database_module, database_config)
 
         database = database_config.get("args", {}).get("database")
         self._is_in_memory = database in (None, ":memory:",)
@@ -32,6 +35,10 @@ class Sqlite3Engine(object):
         self._current_state_group_id_lock = threading.Lock()
 
     @property
+    def single_threaded(self) -> bool:
+        return True
+
+    @property
     def can_native_upsert(self):
         """
         Do we support native UPSERTs? This requires SQLite3 3.24+, plus some
@@ -68,7 +75,6 @@ class Sqlite3Engine(object):
         return sql
 
     def on_new_connection(self, db_conn):
-
         # We need to import here to avoid an import loop.
         from synapse.storage.prepare_database import prepare_database
 
diff --git a/synapse/storage/types.py b/synapse/storage/types.py
new file mode 100644
index 0000000000..daff81c5ee
--- /dev/null
+++ b/synapse/storage/types.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Any, Iterable, Iterator, List, Tuple
+
+from typing_extensions import Protocol
+
+
+"""
+Some very basic protocol definitions for the DB-API2 classes specified in PEP-249
+"""
+
+
+class Cursor(Protocol):
+    def execute(self, sql: str, parameters: Iterable[Any] = ...) -> Any:
+        ...
+
+    def executemany(self, sql: str, parameters: Iterable[Iterable[Any]]) -> Any:
+        ...
+
+    def fetchall(self) -> List[Tuple]:
+        ...
+
+    def fetchone(self) -> Tuple:
+        ...
+
+    @property
+    def description(self) -> Any:
+        return None
+
+    @property
+    def rowcount(self) -> int:
+        return 0
+
+    def __iter__(self) -> Iterator[Tuple]:
+        ...
+
+    def close(self) -> None:
+        ...
+
+
+class Connection(Protocol):
+    def cursor(self) -> Cursor:
+        ...
+
+    def close(self) -> None:
+        ...
+
+    def commit(self) -> None:
+        ...
+
+    def rollback(self, *args, **kwargs) -> None:
+        ...
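Because these are structural Protocols, any DB-API2 driver satisfies them without inheriting from anything, so storage helpers can be typed driver-agnostically. A hedged sketch using the stdlib sqlite3 module (`count_rows` is illustrative):

```python
import sqlite3

from synapse.storage.types import Connection, Cursor

def count_rows(conn: Connection, table: str) -> int:
    cur = conn.cursor()  # type: Cursor
    cur.execute("SELECT count(*) FROM %s" % (table,))  # fixed table name only
    (count,) = cur.fetchone()
    return count

print(count_rows(sqlite3.connect(":memory:"), "sqlite_master"))  # 0
```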
diff --git a/synapse/types.py b/synapse/types.py
index f3cd465735..acf60baddc 100644
--- a/synapse/types.py
+++ b/synapse/types.py
@@ -23,7 +23,7 @@ import attr
 from signedjson.key import decode_verify_key_bytes
 from unpaddedbase64 import decode_base64
 
-from synapse.api.errors import SynapseError
+from synapse.api.errors import Codes, SynapseError
 
 # define a version of typing.Collection that works on python 3.5
 if sys.version_info[:3] >= (3, 6, 0):
@@ -166,11 +166,13 @@ class DomainSpecificString(namedtuple("DomainSpecificString", ("localpart", "dom
         return self
 
     @classmethod
-    def from_string(cls, s):
+    def from_string(cls, s: str):
         """Parse the string given by 's' into a structure object."""
         if len(s) < 1 or s[0:1] != cls.SIGIL:
             raise SynapseError(
-                400, "Expected %s string to start with '%s'" % (cls.__name__, cls.SIGIL)
+                400,
+                "Expected %s string to start with '%s'" % (cls.__name__, cls.SIGIL),
+                Codes.INVALID_PARAM,
             )
 
         parts = s[1:].split(":", 1)
@@ -179,6 +181,7 @@ class DomainSpecificString(namedtuple("DomainSpecificString", ("localpart", "dom
                 400,
                 "Expected %s of the form '%slocalname:domain'"
                 % (cls.__name__, cls.SIGIL),
+                Codes.INVALID_PARAM,
             )
 
         domain = parts[1]
@@ -235,11 +238,13 @@ class GroupID(DomainSpecificString):
     def from_string(cls, s):
         group_id = super(GroupID, cls).from_string(s)
         if not group_id.localpart:
-            raise SynapseError(400, "Group ID cannot be empty")
+            raise SynapseError(400, "Group ID cannot be empty", Codes.INVALID_PARAM)
 
         if contains_invalid_mxid_characters(group_id.localpart):
             raise SynapseError(
-                400, "Group ID can only contain characters a-z, 0-9, or '=_-./'"
+                400,
+                "Group ID can only contain characters a-z, 0-9, or '=_-./'",
+                Codes.INVALID_PARAM,
             )
 
         return group_id
diff --git a/synapse/visibility.py b/synapse/visibility.py
index e60d9756b7..bab41182b9 100644
--- a/synapse/visibility.py
+++ b/synapse/visibility.py
@@ -49,7 +49,7 @@ def filter_events_for_client(
     events,
     is_peeking=False,
     always_include_ids=frozenset(),
-    apply_retention_policies=True,
+    filter_send_to_client=True,
 ):
     """
     Check which events a user is allowed to see. If the user can see the event but its
@@ -65,10 +65,9 @@ def filter_events_for_client(
             events
         always_include_ids (set(event_id)): set of event ids to specifically
             include (unless sender is ignored)
-        apply_retention_policies (bool): Whether to filter out events that's older than
-            allowed by the room's retention policy. Useful when this function is called
-            to e.g. check whether a user should be allowed to see the state at a given
-            event rather than to know if it should send an event to a user's client(s).
+        filter_send_to_client (bool): Whether we're checking an event that's going to be
+            sent to a client. This might not always be the case since this function can
+            also be called to check whether a user can see the state at a given point.
 
     Returns:
         Deferred[list[synapse.events.EventBase]]
@@ -96,7 +95,7 @@ def filter_events_for_client(
 
     erased_senders = yield storage.main.are_users_erased((e.sender for e in events))
 
-    if apply_retention_policies:
+    if filter_send_to_client:
         room_ids = {e.room_id for e in events}
         retention_policies = {}
 
@@ -119,27 +118,36 @@ def filter_events_for_client(
 
                the original event if they can see it as normal.
         """
-        if not event.is_state() and event.sender in ignore_list:
-            return None
-
-        # Until MSC2261 has landed we can't redact malicious alias events, so for
-        # now we temporarily filter out m.room.aliases entirely to mitigate
-        # abuse, while we spec a better solution to advertising aliases
-        # on rooms.
-        if event.type == EventTypes.Aliases:
-            return None
-
-        # Don't try to apply the room's retention policy if the event is a state event, as
-        # MSC1763 states that retention is only considered for non-state events.
-        if apply_retention_policies and not event.is_state():
-            retention_policy = retention_policies[event.room_id]
-            max_lifetime = retention_policy.get("max_lifetime")
-
-            if max_lifetime is not None:
-                oldest_allowed_ts = storage.main.clock.time_msec() - max_lifetime
-
-                if event.origin_server_ts < oldest_allowed_ts:
-                    return None
+        # Only run these checks if the events are going to be sent to clients;
+        # otherwise we're probably just checking whether the user can see the
+        # state at a given point in the DAG, and that shouldn't be decided by
+        # these checks.
+        if filter_send_to_client:
+            if event.type == "org.matrix.dummy_event":
+                return None
+
+            if not event.is_state() and event.sender in ignore_list:
+                return None
+
+            # Until MSC2261 has landed we can't redact malicious alias events, so for
+            # now we temporarily filter out m.room.aliases entirely to mitigate
+            # abuse, while we spec a better solution to advertising aliases
+            # on rooms.
+            if event.type == EventTypes.Aliases:
+                return None
+
+            # Don't try to apply the room's retention policy if the event is a state
+            # event, as MSC1763 states that retention is only considered for non-state
+            # events.
+            if not event.is_state():
+                retention_policy = retention_policies[event.room_id]
+                max_lifetime = retention_policy.get("max_lifetime")
+
+                if max_lifetime is not None:
+                    oldest_allowed_ts = storage.main.clock.time_msec() - max_lifetime
+
+                    if event.origin_server_ts < oldest_allowed_ts:
+                        return None
 
         if event.event_id in always_include_ids:
             return event
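Of the checks now gated on `filter_send_to_client`, the retention check is the only one that needs external state; restated in memory with illustrative values (the real code reads the policy from room state and uses the homeserver clock):

```python
import time

def passes_retention(origin_server_ts_ms, max_lifetime_ms):
    """Mirrors the gated retention check above."""
    if max_lifetime_ms is None:
        return True  # no max_lifetime in the policy: never filtered
    oldest_allowed_ts = time.time() * 1000 - max_lifetime_ms
    return origin_server_ts_ms >= oldest_allowed_ts

print(passes_retention(int(time.time() * 1000), 86400000))  # True: brand new event
```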
diff --git a/tests/app/test_frontend_proxy.py b/tests/app/test_frontend_proxy.py
index 160e55aca9..d3feafa1b7 100644
--- a/tests/app/test_frontend_proxy.py
+++ b/tests/app/test_frontend_proxy.py
@@ -27,6 +27,11 @@ class FrontendProxyTests(HomeserverTestCase):
 
         return hs
 
+    def default_config(self, name="test"):
+        c = super().default_config(name)
+        c["worker_app"] = "synapse.app.frontend_proxy"
+        return c
+
     def test_listen_http_with_presence_enabled(self):
         """
         When presence is on, the stub servlet will not register.
diff --git a/tests/config/test_tls.py b/tests/config/test_tls.py
index 1be6ff563b..ec32d4b1ca 100644
--- a/tests/config/test_tls.py
+++ b/tests/config/test_tls.py
@@ -23,7 +23,7 @@ from OpenSSL import SSL
 
 from synapse.config._base import Config, RootConfig
 from synapse.config.tls import ConfigError, TlsConfig
-from synapse.crypto.context_factory import ClientTLSOptionsFactory
+from synapse.crypto.context_factory import FederationPolicyForHTTPS
 
 from tests.unittest import TestCase
 
@@ -180,12 +180,13 @@ s4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg=
         t = TestConfig()
         t.read_config(config, config_dir_path="", data_dir_path="")
 
-        cf = ClientTLSOptionsFactory(t)
+        cf = FederationPolicyForHTTPS(t)
+        options = _get_ssl_context_options(cf._verify_ssl_context)
 
         # The context has had NO_TLSv1_1 and NO_TLSv1_0 set, but not NO_TLSv1_2
-        self.assertNotEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1, 0)
-        self.assertNotEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_1, 0)
-        self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_2, 0)
+        self.assertNotEqual(options & SSL.OP_NO_TLSv1, 0)
+        self.assertNotEqual(options & SSL.OP_NO_TLSv1_1, 0)
+        self.assertEqual(options & SSL.OP_NO_TLSv1_2, 0)
 
     def test_tls_client_minimum_set_passed_through_1_0(self):
         """
@@ -195,12 +196,13 @@ s4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg=
         t = TestConfig()
         t.read_config(config, config_dir_path="", data_dir_path="")
 
-        cf = ClientTLSOptionsFactory(t)
+        cf = FederationPolicyForHTTPS(t)
+        options = _get_ssl_context_options(cf._verify_ssl_context)
 
         # The context has not had any of the NO_TLS set.
-        self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1, 0)
-        self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_1, 0)
-        self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_2, 0)
+        self.assertEqual(options & SSL.OP_NO_TLSv1, 0)
+        self.assertEqual(options & SSL.OP_NO_TLSv1_1, 0)
+        self.assertEqual(options & SSL.OP_NO_TLSv1_2, 0)
 
     def test_acme_disabled_in_generated_config_no_acme_domain_provied(self):
         """
@@ -273,7 +275,7 @@ s4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg=
         t = TestConfig()
         t.read_config(config, config_dir_path="", data_dir_path="")
 
-        cf = ClientTLSOptionsFactory(t)
+        cf = FederationPolicyForHTTPS(t)
 
         # Not in the whitelist
         opts = cf.get_options(b"notexample.com")
@@ -282,3 +284,10 @@ s4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg=
         # Caught by the wildcard
         opts = cf.get_options(idna.encode("テスト.ドメイン.テスト"))
         self.assertFalse(opts._verifier._verify_certs)
+
+
+def _get_ssl_context_options(ssl_context: SSL.Context) -> int:
+    """get the options bits from an openssl context object"""
+    # the OpenSSL.SSL.Context wrapper doesn't expose get_options, so we have to
+    # use the low-level interface
+    return SSL._lib.SSL_CTX_get_options(ssl_context._context)
diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py
index 45d55b9e94..ab5f5ac549 100644
--- a/tests/events/test_utils.py
+++ b/tests/events/test_utils.py
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from synapse.api.room_versions import RoomVersions
 from synapse.events import make_event_from_dict
 from synapse.events.utils import (
     copy_power_levels_contents,
@@ -36,9 +37,9 @@ class PruneEventTestCase(unittest.TestCase):
     """ Asserts that a new event constructed with `evdict` will look like
     `matchdict` when it is redacted. """
 
-    def run_test(self, evdict, matchdict):
+    def run_test(self, evdict, matchdict, **kwargs):
         self.assertEquals(
-            prune_event(make_event_from_dict(evdict)).get_dict(), matchdict
+            prune_event(make_event_from_dict(evdict, **kwargs)).get_dict(), matchdict
         )
 
     def test_minimal(self):
@@ -128,6 +129,36 @@ class PruneEventTestCase(unittest.TestCase):
             },
         )
 
+    def test_alias_event(self):
+        """Alias events have special behavior up through room version 6."""
+        self.run_test(
+            {
+                "type": "m.room.aliases",
+                "event_id": "$test:domain",
+                "content": {"aliases": ["test"]},
+            },
+            {
+                "type": "m.room.aliases",
+                "event_id": "$test:domain",
+                "content": {"aliases": ["test"]},
+                "signatures": {},
+                "unsigned": {},
+            },
+        )
+
+    def test_msc2432_alias_event(self):
+        """After MSC2432, alias events have no special behavior."""
+        self.run_test(
+            {"type": "m.room.aliases", "content": {"aliases": ["test"]}},
+            {
+                "type": "m.room.aliases",
+                "content": {},
+                "signatures": {},
+                "unsigned": {},
+            },
+            room_version=RoomVersions.MSC2432_DEV,
+        )
+
 
 class SerializeEventTestCase(unittest.TestCase):
     def serialize(self, ev, fields):
diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py
index 27b916aed4..5e40adba52 100644
--- a/tests/handlers/test_directory.py
+++ b/tests/handlers/test_directory.py
@@ -18,6 +18,7 @@ from mock import Mock
 
 from twisted.internet import defer
 
+import synapse
 import synapse.api.errors
 from synapse.api.constants import EventTypes
 from synapse.config.room_directory import RoomDirectoryConfig
@@ -87,50 +88,131 @@ class DirectoryTestCase(unittest.HomeserverTestCase):
             ignore_backoff=True,
         )
 
-    def test_delete_alias_not_allowed(self):
-        room_id = "!8765qwer:test"
+    def test_incoming_fed_query(self):
         self.get_success(
-            self.store.create_room_alias_association(self.my_room, room_id, ["test"])
+            self.store.create_room_alias_association(
+                self.your_room, "!8765asdf:test", ["test"]
+            )
+        )
+
+        response = self.get_success(
+            self.handler.on_directory_query({"room_alias": "#your-room:test"})
         )
 
+        self.assertEquals({"room_id": "!8765asdf:test", "servers": ["test"]}, response)
+
+
+class TestDeleteAlias(unittest.HomeserverTestCase):
+    servlets = [
+        synapse.rest.admin.register_servlets,
+        login.register_servlets,
+        room.register_servlets,
+        directory.register_servlets,
+    ]
+
+    def prepare(self, reactor, clock, hs):
+        self.store = hs.get_datastore()
+        self.handler = hs.get_handlers().directory_handler
+        self.state_handler = hs.get_state_handler()
+
+        # Create user
+        self.admin_user = self.register_user("admin", "pass", admin=True)
+        self.admin_user_tok = self.login("admin", "pass")
+
+        # Create a test room
+        self.room_id = self.helper.create_room_as(
+            self.admin_user, tok=self.admin_user_tok
+        )
+
+        self.test_alias = "#test:test"
+        self.room_alias = RoomAlias.from_string(self.test_alias)
+
+        # Create a test user.
+        self.test_user = self.register_user("user", "pass", admin=False)
+        self.test_user_tok = self.login("user", "pass")
+        self.helper.join(room=self.room_id, user=self.test_user, tok=self.test_user_tok)
+
+    def _create_alias(self, user):
+        # Create a new alias to this room.
+        self.get_success(
+            self.store.create_room_alias_association(
+                self.room_alias, self.room_id, ["test"], user
+            )
+        )
+
+    def test_delete_alias_not_allowed(self):
+        """A user that doesn't meet the expected guidelines cannot delete an alias."""
+        self._create_alias(self.admin_user)
         self.get_failure(
             self.handler.delete_association(
-                create_requester("@user:test"), self.my_room
+                create_requester(self.test_user), self.room_alias
             ),
             synapse.api.errors.AuthError,
         )
 
-    def test_delete_alias(self):
-        room_id = "!8765qwer:test"
-        user_id = "@user:test"
-        self.get_success(
-            self.store.create_room_alias_association(
-                self.my_room, room_id, ["test"], user_id
+    def test_delete_alias_creator(self):
+        """An alias creator can delete their own alias."""
+        # Create an alias from a different user.
+        self._create_alias(self.test_user)
+
+        # Delete the user's alias.
+        result = self.get_success(
+            self.handler.delete_association(
+                create_requester(self.test_user), self.room_alias
             )
         )
+        self.assertEquals(self.room_id, result)
+
+        # Confirm the alias is gone.
+        self.get_failure(
+            self.handler.get_association(self.room_alias),
+            synapse.api.errors.SynapseError,
+        )
+
+    def test_delete_alias_admin(self):
+        """A server admin can delete an alias created by another user."""
+        # Create an alias from a different user.
+        self._create_alias(self.test_user)
 
+        # Delete the user's alias as the admin.
         result = self.get_success(
-            self.handler.delete_association(create_requester(user_id), self.my_room)
+            self.handler.delete_association(
+                create_requester(self.admin_user), self.room_alias
+            )
         )
-        self.assertEquals(room_id, result)
+        self.assertEquals(self.room_id, result)
 
-        # The alias should not be found.
+        # Confirm the alias is gone.
         self.get_failure(
-            self.handler.get_association(self.my_room), synapse.api.errors.SynapseError
+            self.handler.get_association(self.room_alias),
+            synapse.api.errors.SynapseError,
         )
 
-    def test_incoming_fed_query(self):
-        self.get_success(
-            self.store.create_room_alias_association(
-                self.your_room, "!8765asdf:test", ["test"]
-            )
+    def test_delete_alias_sufficient_power(self):
+        """A user with a sufficient power level should be able to delete an alias."""
+        self._create_alias(self.admin_user)
+
+        # Increase the user's power level.
+        self.helper.send_state(
+            self.room_id,
+            "m.room.power_levels",
+            {"users": {self.test_user: 100}},
+            tok=self.admin_user_tok,
         )
 
-        response = self.get_success(
-            self.handler.on_directory_query({"room_alias": "#your-room:test"})
+        # They can now delete the alias.
+        result = self.get_success(
+            self.handler.delete_association(
+                create_requester(self.test_user), self.room_alias
+            )
         )
+        self.assertEquals(self.room_id, result)
 
-        self.assertEquals({"room_id": "!8765asdf:test", "servers": ["test"]}, response)
+        # Confirm the alias is gone.
+        self.get_failure(
+            self.handler.get_association(self.room_alias),
+            synapse.api.errors.SynapseError,
+        )
 
 
 class CanonicalAliasTestCase(unittest.HomeserverTestCase):
@@ -159,30 +241,42 @@ class CanonicalAliasTestCase(unittest.HomeserverTestCase):
         )
 
         self.test_alias = "#test:test"
-        self.room_alias = RoomAlias.from_string(self.test_alias)
+        self.room_alias = self._add_alias(self.test_alias)
+
+    def _add_alias(self, alias: str) -> RoomAlias:
+        """Add an alias to the test room."""
+        room_alias = RoomAlias.from_string(alias)
 
         # Create a new alias to this room.
         self.get_success(
             self.store.create_room_alias_association(
-                self.room_alias, self.room_id, ["test"], self.admin_user
+                room_alias, self.room_id, ["test"], self.admin_user
             )
         )
+        return room_alias
 
-    def test_remove_alias(self):
-        """Removing an alias that is the canonical alias should remove it there too."""
-        # Set this new alias as the canonical alias for this room
+    def _set_canonical_alias(self, content):
+        """Configure the canonical alias state on the room."""
         self.helper.send_state(
-            self.room_id,
-            "m.room.canonical_alias",
-            {"alias": self.test_alias, "alt_aliases": [self.test_alias]},
-            tok=self.admin_user_tok,
+            self.room_id, "m.room.canonical_alias", content, tok=self.admin_user_tok,
         )
 
-        data = self.get_success(
+    def _get_canonical_alias(self):
+        """Get the canonical alias state of the room."""
+        return self.get_success(
             self.state_handler.get_current_state(
                 self.room_id, EventTypes.CanonicalAlias, ""
             )
         )
+
+    def test_remove_alias(self):
+        """Removing an alias that is the canonical alias should remove it there too."""
+        # Set this new alias as the canonical alias for this room
+        self._set_canonical_alias(
+            {"alias": self.test_alias, "alt_aliases": [self.test_alias]}
+        )
+
+        data = self._get_canonical_alias()
         self.assertEqual(data["content"]["alias"], self.test_alias)
         self.assertEqual(data["content"]["alt_aliases"], [self.test_alias])
 
@@ -193,11 +287,7 @@ class CanonicalAliasTestCase(unittest.HomeserverTestCase):
             )
         )
 
-        data = self.get_success(
-            self.state_handler.get_current_state(
-                self.room_id, EventTypes.CanonicalAlias, ""
-            )
-        )
+        data = self._get_canonical_alias()
         self.assertNotIn("alias", data["content"])
         self.assertNotIn("alt_aliases", data["content"])
 
@@ -205,29 +295,17 @@ class CanonicalAliasTestCase(unittest.HomeserverTestCase):
         """Removing an alias listed as in alt_aliases should remove it there too."""
         # Create a second alias.
         other_test_alias = "#test2:test"
-        other_room_alias = RoomAlias.from_string(other_test_alias)
-        self.get_success(
-            self.store.create_room_alias_association(
-                other_room_alias, self.room_id, ["test"], self.admin_user
-            )
-        )
+        other_room_alias = self._add_alias(other_test_alias)
 
         # Set the alias as the canonical alias for this room.
-        self.helper.send_state(
-            self.room_id,
-            "m.room.canonical_alias",
+        self._set_canonical_alias(
             {
                 "alias": self.test_alias,
                 "alt_aliases": [self.test_alias, other_test_alias],
-            },
-            tok=self.admin_user_tok,
+            }
         )
 
-        data = self.get_success(
-            self.state_handler.get_current_state(
-                self.room_id, EventTypes.CanonicalAlias, ""
-            )
-        )
+        data = self._get_canonical_alias()
         self.assertEqual(data["content"]["alias"], self.test_alias)
         self.assertEqual(
             data["content"]["alt_aliases"], [self.test_alias, other_test_alias]
@@ -240,11 +318,7 @@ class CanonicalAliasTestCase(unittest.HomeserverTestCase):
             )
         )
 
-        data = self.get_success(
-            self.state_handler.get_current_state(
-                self.room_id, EventTypes.CanonicalAlias, ""
-            )
-        )
+        data = self._get_canonical_alias()
         self.assertEqual(data["content"]["alias"], self.test_alias)
         self.assertEqual(data["content"]["alt_aliases"], [self.test_alias])
 
diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py
index cfcd98ff7d..fdc1d918ff 100644
--- a/tests/http/federation/test_matrix_federation_agent.py
+++ b/tests/http/federation/test_matrix_federation_agent.py
@@ -31,7 +31,7 @@ from twisted.web.http_headers import Headers
 from twisted.web.iweb import IPolicyForHTTPS
 
 from synapse.config.homeserver import HomeServerConfig
-from synapse.crypto.context_factory import ClientTLSOptionsFactory
+from synapse.crypto.context_factory import FederationPolicyForHTTPS
 from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
 from synapse.http.federation.srv_resolver import Server
 from synapse.http.federation.well_known_resolver import (
@@ -79,7 +79,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
         self._config = config = HomeServerConfig()
         config.parse_config_dict(config_dict, "", "")
 
-        self.tls_factory = ClientTLSOptionsFactory(config)
+        self.tls_factory = FederationPolicyForHTTPS(config)
 
         self.well_known_cache = TTLCache("test_cache", timer=self.reactor.seconds)
         self.had_well_known_cache = TTLCache("test_cache", timer=self.reactor.seconds)
@@ -715,7 +715,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
         config = default_config("test", parse=True)
 
         # Build a new agent and WellKnownResolver with a different tls factory
-        tls_factory = ClientTLSOptionsFactory(config)
+        tls_factory = FederationPolicyForHTTPS(config)
         agent = MatrixFederationAgent(
             reactor=self.reactor,
             tls_client_options_factory=tls_factory,
diff --git a/tests/push/test_push_rule_evaluator.py b/tests/push/test_push_rule_evaluator.py
new file mode 100644
index 0000000000..9ae6a87d7b
--- /dev/null
+++ b/tests/push/test_push_rule_evaluator.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.api.room_versions import RoomVersions
+from synapse.events import FrozenEvent
+from synapse.push.push_rule_evaluator import PushRuleEvaluatorForEvent
+
+from tests import unittest
+
+
+class PushRuleEvaluatorTestCase(unittest.TestCase):
+    def setUp(self):
+        event = FrozenEvent(
+            {
+                "event_id": "$event_id",
+                "type": "m.room.history_visibility",
+                "sender": "@user:test",
+                "state_key": "",
+                "room_id": "@room:test",
+                "content": {"body": "foo bar baz"},
+            },
+            RoomVersions.V1,
+        )
+        room_member_count = 0
+        sender_power_level = 0
+        power_levels = {}
+        self.evaluator = PushRuleEvaluatorForEvent(
+            event, room_member_count, sender_power_level, power_levels
+        )
+
+    def test_display_name(self):
+        """Check for a matching display name in the body of the event."""
+        condition = {
+            "kind": "contains_display_name",
+        }
+
+        # Blank names are skipped.
+        self.assertFalse(self.evaluator.matches(condition, "@user:test", ""))
+
+        # Check a display name that doesn't match.
+        self.assertFalse(self.evaluator.matches(condition, "@user:test", "not found"))
+
+        # Check a display name which matches.
+        self.assertTrue(self.evaluator.matches(condition, "@user:test", "foo"))
+
+        # A display name that matches, but not a full word does not result in a match.
+        self.assertFalse(self.evaluator.matches(condition, "@user:test", "ba"))
+
+        # A display name should not be interpreted as a regular expression.
+        self.assertFalse(self.evaluator.matches(condition, "@user:test", "ba[rz]"))
+
+        # A display name with spaces should work fine.
+        self.assertTrue(self.evaluator.matches(condition, "@user:test", "foo bar"))
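
The semantics exercised above can be summarised with a small sketch (an illustration, not Synapse's actual matcher): the display name is escaped so that it matches literally, and it must appear as a whole word in the body.

    import re

    def contains_display_name(body, display_name):
        if not display_name:
            return False
        # Escape the name so "ba[rz]" is matched literally, then require
        # non-word characters (or string boundaries) on both sides.
        pattern = r"(^|\W)%s(\W|$)" % re.escape(display_name)
        return re.search(pattern, body, re.IGNORECASE) is not None

    assert contains_display_name("foo bar baz", "foo")
    assert contains_display_name("foo bar baz", "foo bar")
    assert not contains_display_name("foo bar baz", "ba")
    assert not contains_display_name("foo bar baz", "ba[rz]")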
diff --git a/tests/replication/slave/storage/test_events.py b/tests/replication/slave/storage/test_events.py
index d31210fbe4..f0561b30e3 100644
--- a/tests/replication/slave/storage/test_events.py
+++ b/tests/replication/slave/storage/test_events.py
@@ -15,6 +15,7 @@ import logging
 
 from canonicaljson import encode_canonical_json
 
+from synapse.api.room_versions import RoomVersions
 from synapse.events import FrozenEvent, _EventInternalMetadata, make_event_from_dict
 from synapse.events.snapshot import EventContext
 from synapse.handlers.room import RoomEventSource
@@ -58,6 +59,15 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase):
         self.unpatches = [patch__eq__(_EventInternalMetadata), patch__eq__(FrozenEvent)]
         return super(SlavedEventStoreTestCase, self).setUp()
 
+    def prepare(self, *args, **kwargs):
+        super().prepare(*args, **kwargs)
+
+        self.get_success(
+            self.master_store.store_room(
+                ROOM_ID, USER_ID, is_public=False, room_version=RoomVersions.V1,
+            )
+        )
+
     def tearDown(self):
         [unpatch() for unpatch in self.unpatches]
 
diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py
index e5984aaad8..0342aed416 100644
--- a/tests/rest/admin/test_admin.py
+++ b/tests/rest/admin/test_admin.py
@@ -870,6 +870,13 @@ class RoomTestCase(unittest.HomeserverTestCase):
         # Set this new alias as the canonical alias for this room
         self.helper.send_state(
             room_id,
+            "m.room.aliases",
+            {"aliases": [test_alias]},
+            tok=self.admin_user_tok,
+            state_key="test",
+        )
+        self.helper.send_state(
+            room_id,
             "m.room.canonical_alias",
             {"alias": test_alias},
             tok=self.admin_user_tok,
diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py
index cbe4a6a51f..6416fb5d2a 100644
--- a/tests/rest/admin/test_user.py
+++ b/tests/rest/admin/test_user.py
@@ -16,6 +16,7 @@
 import hashlib
 import hmac
 import json
+import urllib.parse
 
 from mock import Mock
 
@@ -371,22 +372,24 @@ class UserRestTestCase(unittest.HomeserverTestCase):
     def prepare(self, reactor, clock, hs):
         self.store = hs.get_datastore()
 
-        self.url = "/_synapse/admin/v2/users/@bob:test"
-
         self.admin_user = self.register_user("admin", "pass", admin=True)
         self.admin_user_tok = self.login("admin", "pass")
 
         self.other_user = self.register_user("user", "pass")
         self.other_user_token = self.login("user", "pass")
+        self.url_other_user = "/_synapse/admin/v2/users/%s" % urllib.parse.quote(
+            self.other_user
+        )
 
     def test_requester_is_no_admin(self):
         """
         If the user is not a server admin, an error is returned.
         """
         self.hs.config.registration_shared_secret = None
+        url = "/_synapse/admin/v2/users/@bob:test"
 
         request, channel = self.make_request(
-            "GET", self.url, access_token=self.other_user_token,
+            "GET", url, access_token=self.other_user_token,
         )
         self.render(request)
 
@@ -394,7 +397,7 @@ class UserRestTestCase(unittest.HomeserverTestCase):
         self.assertEqual("You are not a server admin", channel.json_body["error"])
 
         request, channel = self.make_request(
-            "PUT", self.url, access_token=self.other_user_token, content=b"{}",
+            "PUT", url, access_token=self.other_user_token, content=b"{}",
         )
         self.render(request)
 
@@ -417,24 +420,73 @@ class UserRestTestCase(unittest.HomeserverTestCase):
         self.assertEqual(404, channel.code, msg=channel.json_body)
         self.assertEqual("M_NOT_FOUND", channel.json_body["errcode"])
 
-    def test_requester_is_admin(self):
+    def test_create_server_admin(self):
         """
-        If the user is a server admin, a new user is created.
+        Check that a new admin user is created successfully.
         """
         self.hs.config.registration_shared_secret = None
+        url = "/_synapse/admin/v2/users/@bob:test"
 
+        # Create user (server admin)
         body = json.dumps(
             {
                 "password": "abc123",
                 "admin": True,
+                "displayname": "Bob's name",
                 "threepids": [{"medium": "email", "address": "bob@bob.bob"}],
             }
         )
 
+        request, channel = self.make_request(
+            "PUT",
+            url,
+            access_token=self.admin_user_tok,
+            content=body.encode(encoding="utf_8"),
+        )
+        self.render(request)
+
+        self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual("@bob:test", channel.json_body["name"])
+        self.assertEqual("Bob's name", channel.json_body["displayname"])
+        self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
+        self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
+        self.assertEqual(True, channel.json_body["admin"])
+
+        # Get user
+        request, channel = self.make_request(
+            "GET", url, access_token=self.admin_user_tok,
+        )
+        self.render(request)
+
+        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual("@bob:test", channel.json_body["name"])
+        self.assertEqual("Bob's name", channel.json_body["displayname"])
+        self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
+        self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
+        self.assertEqual(True, channel.json_body["admin"])
+        self.assertEqual(False, channel.json_body["is_guest"])
+        self.assertEqual(False, channel.json_body["deactivated"])
+
+    def test_create_user(self):
+        """
+        Check that a new regular user is created successfully.
+        """
+        self.hs.config.registration_shared_secret = None
+        url = "/_synapse/admin/v2/users/@bob:test"
+
         # Create user
+        body = json.dumps(
+            {
+                "password": "abc123",
+                "admin": False,
+                "displayname": "Bob's name",
+                "threepids": [{"medium": "email", "address": "bob@bob.bob"}],
+            }
+        )
+
         request, channel = self.make_request(
             "PUT",
-            self.url,
+            url,
             access_token=self.admin_user_tok,
             content=body.encode(encoding="utf_8"),
         )
@@ -442,29 +494,38 @@ class UserRestTestCase(unittest.HomeserverTestCase):
 
         self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
         self.assertEqual("@bob:test", channel.json_body["name"])
-        self.assertEqual("bob", channel.json_body["displayname"])
+        self.assertEqual("Bob's name", channel.json_body["displayname"])
         self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
         self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
+        self.assertEqual(False, channel.json_body["admin"])
 
         # Get user
         request, channel = self.make_request(
-            "GET", self.url, access_token=self.admin_user_tok,
+            "GET", url, access_token=self.admin_user_tok,
         )
         self.render(request)
 
         self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
         self.assertEqual("@bob:test", channel.json_body["name"])
-        self.assertEqual("bob", channel.json_body["displayname"])
-        self.assertEqual(1, channel.json_body["admin"])
-        self.assertEqual(0, channel.json_body["is_guest"])
-        self.assertEqual(0, channel.json_body["deactivated"])
+        self.assertEqual("Bob's name", channel.json_body["displayname"])
+        self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
+        self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
+        self.assertEqual(False, channel.json_body["admin"])
+        self.assertEqual(False, channel.json_body["is_guest"])
+        self.assertEqual(False, channel.json_body["deactivated"])
+
+    def test_set_password(self):
+        """
+        Test setting a new password for another user.
+        """
+        self.hs.config.registration_shared_secret = None
 
         # Change password
         body = json.dumps({"password": "hahaha"})
 
         request, channel = self.make_request(
             "PUT",
-            self.url,
+            self.url_other_user,
             access_token=self.admin_user_tok,
             content=body.encode(encoding="utf_8"),
         )
@@ -472,41 +533,133 @@ class UserRestTestCase(unittest.HomeserverTestCase):
 
         self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
 
+    def test_set_displayname(self):
+        """
+        Test setting the displayname of another user.
+        """
+        self.hs.config.registration_shared_secret = None
+
         # Modify user
+        body = json.dumps({"displayname": "foobar"})
+
+        request, channel = self.make_request(
+            "PUT",
+            self.url_other_user,
+            access_token=self.admin_user_tok,
+            content=body.encode(encoding="utf_8"),
+        )
+        self.render(request)
+
+        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual("@user:test", channel.json_body["name"])
+        self.assertEqual("foobar", channel.json_body["displayname"])
+
+        # Get user
+        request, channel = self.make_request(
+            "GET", self.url_other_user, access_token=self.admin_user_tok,
+        )
+        self.render(request)
+
+        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual("@user:test", channel.json_body["name"])
+        self.assertEqual("foobar", channel.json_body["displayname"])
+
+    def test_set_threepid(self):
+        """
+        Test setting the threepid of another user.
+        """
+        self.hs.config.registration_shared_secret = None
+
+        # Delete the old threepid and add a new one to the user.
         body = json.dumps(
-            {
-                "displayname": "foobar",
-                "deactivated": True,
-                "threepids": [{"medium": "email", "address": "bob2@bob.bob"}],
-            }
+            {"threepids": [{"medium": "email", "address": "bob3@bob.bob"}]}
         )
 
         request, channel = self.make_request(
             "PUT",
-            self.url,
+            self.url_other_user,
             access_token=self.admin_user_tok,
             content=body.encode(encoding="utf_8"),
         )
         self.render(request)
 
         self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
-        self.assertEqual("@bob:test", channel.json_body["name"])
-        self.assertEqual("foobar", channel.json_body["displayname"])
+        self.assertEqual("@user:test", channel.json_body["name"])
+        self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
+        self.assertEqual("bob3@bob.bob", channel.json_body["threepids"][0]["address"])
+
+        # Get user
+        request, channel = self.make_request(
+            "GET", self.url_other_user, access_token=self.admin_user_tok,
+        )
+        self.render(request)
+
+        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual("@user:test", channel.json_body["name"])
+        self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
+        self.assertEqual("bob3@bob.bob", channel.json_body["threepids"][0]["address"])
+
+    def test_deactivate_user(self):
+        """
+        Test deactivating another user.
+        """
+
+        # Deactivate user
+        body = json.dumps({"deactivated": True})
+
+        request, channel = self.make_request(
+            "PUT",
+            self.url_other_user,
+            access_token=self.admin_user_tok,
+            content=body.encode(encoding="utf_8"),
+        )
+        self.render(request)
+
+        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual("@user:test", channel.json_body["name"])
         self.assertEqual(True, channel.json_body["deactivated"])
         # the user is deactivated, the threepid will be deleted
 
         # Get user
         request, channel = self.make_request(
-            "GET", self.url, access_token=self.admin_user_tok,
+            "GET", self.url_other_user, access_token=self.admin_user_tok,
         )
         self.render(request)
 
         self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
-        self.assertEqual("@bob:test", channel.json_body["name"])
-        self.assertEqual("foobar", channel.json_body["displayname"])
-        self.assertEqual(1, channel.json_body["admin"])
-        self.assertEqual(0, channel.json_body["is_guest"])
-        self.assertEqual(1, channel.json_body["deactivated"])
+        self.assertEqual("@user:test", channel.json_body["name"])
+        self.assertEqual(True, channel.json_body["deactivated"])
+
+    def test_set_user_as_admin(self):
+        """
+        Test setting the admin flag on a user.
+        """
+        self.hs.config.registration_shared_secret = None
+
+        # Set a user as an admin
+        body = json.dumps({"admin": True})
+
+        request, channel = self.make_request(
+            "PUT",
+            self.url_other_user,
+            access_token=self.admin_user_tok,
+            content=body.encode(encoding="utf_8"),
+        )
+        self.render(request)
+
+        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual("@user:test", channel.json_body["name"])
+        self.assertEqual(True, channel.json_body["admin"])
+
+        # Get user
+        request, channel = self.make_request(
+            "GET", self.url_other_user, access_token=self.admin_user_tok,
+        )
+        self.render(request)
+
+        self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+        self.assertEqual("@user:test", channel.json_body["name"])
+        self.assertEqual(True, channel.json_body["admin"])
 
     def test_accidental_deactivation_prevention(self):
         """
@@ -514,13 +667,14 @@ class UserRestTestCase(unittest.HomeserverTestCase):
         for the deactivated body parameter
         """
         self.hs.config.registration_shared_secret = None
+        url = "/_synapse/admin/v2/users/@bob:test"
 
         # Create user
         body = json.dumps({"password": "abc123"})
 
         request, channel = self.make_request(
             "PUT",
-            self.url,
+            url,
             access_token=self.admin_user_tok,
             content=body.encode(encoding="utf_8"),
         )
@@ -532,7 +686,7 @@ class UserRestTestCase(unittest.HomeserverTestCase):
 
         # Get user
         request, channel = self.make_request(
-            "GET", self.url, access_token=self.admin_user_tok,
+            "GET", url, access_token=self.admin_user_tok,
         )
         self.render(request)
 
@@ -546,7 +700,7 @@ class UserRestTestCase(unittest.HomeserverTestCase):
 
         request, channel = self.make_request(
             "PUT",
-            self.url,
+            url,
             access_token=self.admin_user_tok,
             content=body.encode(encoding="utf_8"),
         )
@@ -556,7 +710,7 @@ class UserRestTestCase(unittest.HomeserverTestCase):
 
         # Check user is not deactivated
         request, channel = self.make_request(
-            "GET", self.url, access_token=self.admin_user_tok,
+            "GET", url, access_token=self.admin_user_tok,
         )
         self.render(request)
 
diff --git a/tests/rest/client/v1/test_directory.py b/tests/rest/client/v1/test_directory.py
index 914cf54927..633b7dbda0 100644
--- a/tests/rest/client/v1/test_directory.py
+++ b/tests/rest/client/v1/test_directory.py
@@ -51,30 +51,26 @@ class DirectoryTestCase(unittest.HomeserverTestCase):
         self.user = self.register_user("user", "test")
         self.user_tok = self.login("user", "test")
 
-    def test_cannot_set_alias_via_state_event(self):
-        self.ensure_user_joined_room()
-        url = "/_matrix/client/r0/rooms/%s/state/m.room.aliases/%s" % (
-            self.room_id,
-            self.hs.hostname,
-        )
-
-        data = {"aliases": [self.random_alias(5)]}
-        request_data = json.dumps(data)
-
-        request, channel = self.make_request(
-            "PUT", url, request_data, access_token=self.user_tok
-        )
-        self.render(request)
-        self.assertEqual(channel.code, 400, channel.result)
+    def test_state_event_not_in_room(self):
+        self.ensure_user_left_room()
+        self.set_alias_via_state_event(403)
 
     def test_directory_endpoint_not_in_room(self):
         self.ensure_user_left_room()
         self.set_alias_via_directory(403)
 
+    def test_state_event_in_room_too_long(self):
+        self.ensure_user_joined_room()
+        self.set_alias_via_state_event(400, alias_length=256)
+
     def test_directory_in_room_too_long(self):
         self.ensure_user_joined_room()
         self.set_alias_via_directory(400, alias_length=256)
 
+    def test_state_event_in_room(self):
+        self.ensure_user_joined_room()
+        self.set_alias_via_state_event(200)
+
     def test_directory_in_room(self):
         self.ensure_user_joined_room()
         self.set_alias_via_directory(200)
@@ -106,6 +102,21 @@ class DirectoryTestCase(unittest.HomeserverTestCase):
         self.render(request)
         self.assertEqual(channel.code, 200, channel.result)
 
+    def set_alias_via_state_event(self, expected_code, alias_length=5):
+        url = "/_matrix/client/r0/rooms/%s/state/m.room.aliases/%s" % (
+            self.room_id,
+            self.hs.hostname,
+        )
+
+        data = {"aliases": [self.random_alias(alias_length)]}
+        request_data = json.dumps(data)
+
+        request, channel = self.make_request(
+            "PUT", url, request_data, access_token=self.user_tok
+        )
+        self.render(request)
+        self.assertEqual(channel.code, expected_code, channel.result)
+
     def set_alias_via_directory(self, expected_code, alias_length=5):
         url = "/_matrix/client/r0/directory/room/%s" % self.random_alias(alias_length)
         data = {"room_id": self.room_id}
diff --git a/tests/rest/client/v1/test_login.py b/tests/rest/client/v1/test_login.py
index eae5411325..da2c9bfa1e 100644
--- a/tests/rest/client/v1/test_login.py
+++ b/tests/rest/client/v1/test_login.py
@@ -1,4 +1,7 @@
 import json
+import urllib.parse
+
+from mock import Mock
 
 import synapse.rest.admin
 from synapse.rest.client.v1 import login
@@ -252,3 +255,111 @@ class LoginRestServletTestCase(unittest.HomeserverTestCase):
         )
         self.render(request)
         self.assertEquals(channel.code, 200, channel.result)
+
+
+class CASRedirectConfirmTestCase(unittest.HomeserverTestCase):
+
+    servlets = [
+        login.register_servlets,
+    ]
+
+    def make_homeserver(self, reactor, clock):
+        self.base_url = "https://matrix.goodserver.com/"
+        self.redirect_path = "_synapse/client/login/sso/redirect/confirm"
+
+        config = self.default_config()
+        config["cas_config"] = {
+            "enabled": True,
+            "server_url": "https://fake.test",
+            "service_url": "https://matrix.goodserver.com:8448",
+        }
+
+        async def get_raw(uri, args):
+            """Return an example response payload from a call to the `/proxyValidate`
+            endpoint of a CAS server, copied from
+            https://apereo.github.io/cas/5.0.x/protocol/CAS-Protocol-V2-Specification.html#26-proxyvalidate-cas-20
+
+            This needs to be returned by an async function (as opposed to set as the
+            mock's return value) because the corresponding Synapse code awaits on it.
+            """
+            return """
+                <cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'>
+                  <cas:authenticationSuccess>
+                      <cas:user>username</cas:user>
+                      <cas:proxyGrantingTicket>PGTIOU-84678-8a9d...</cas:proxyGrantingTicket>
+                      <cas:proxies>
+                          <cas:proxy>https://proxy2/pgtUrl</cas:proxy>
+                          <cas:proxy>https://proxy1/pgtUrl</cas:proxy>
+                      </cas:proxies>
+                  </cas:authenticationSuccess>
+                </cas:serviceResponse>
+            """
+
+        mocked_http_client = Mock(spec=["get_raw"])
+        mocked_http_client.get_raw.side_effect = get_raw
+
+        self.hs = self.setup_test_homeserver(
+            config=config, proxied_http_client=mocked_http_client,
+        )
+
+        return self.hs
+
+    def test_cas_redirect_confirm(self):
+        """Tests that the SSO login flow serves a confirmation page before redirecting a
+        user to the redirect URL.
+        """
+        base_url = "/_matrix/client/r0/login/cas/ticket?redirectUrl"
+        redirect_url = "https://dodgy-site.com/"
+
+        url_parts = list(urllib.parse.urlparse(base_url))
+        query = dict(urllib.parse.parse_qsl(url_parts[4]))
+        query.update({"redirectUrl": redirect_url})
+        query.update({"ticket": "ticket"})
+        url_parts[4] = urllib.parse.urlencode(query)
+        cas_ticket_url = urllib.parse.urlunparse(url_parts)
+
+        # Get Synapse to call the fake CAS and serve the template.
+        request, channel = self.make_request("GET", cas_ticket_url)
+        self.render(request)
+
+        # Test that the response is HTML.
+        self.assertEqual(channel.code, 200)
+        content_type_header_value = ""
+        for header in channel.result.get("headers", []):
+            if header[0] == b"Content-Type":
+                content_type_header_value = header[1].decode("utf8")
+
+        self.assertTrue(content_type_header_value.startswith("text/html"))
+
+        # Test that the body isn't empty.
+        self.assertTrue(len(channel.result["body"]) > 0)
+
+        # And that it contains our redirect link
+        self.assertIn(redirect_url, channel.result["body"].decode("UTF-8"))
+
+    @override_config(
+        {
+            "sso": {
+                "client_whitelist": [
+                    "https://legit-site.com/",
+                    "https://other-site.com/",
+                ]
+            }
+        }
+    )
+    def test_cas_redirect_whitelisted(self):
+        """Tests that the SSO login flow serves a redirect to a whitelisted url
+        """
+        redirect_url = "https://legit-site.com/"
+        cas_ticket_url = (
+            "/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket"
+            % (urllib.parse.quote(redirect_url))
+        )
+
+        # Get Synapse to call the fake CAS and serve the template.
+        request, channel = self.make_request("GET", cas_ticket_url)
+        self.render(request)
+
+        self.assertEqual(channel.code, 302)
+        location_headers = channel.headers.getRawHeaders("Location")
+        self.assertEqual(location_headers[0][: len(redirect_url)], redirect_url)
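
The get_raw mock above relies on a general pattern: setting side_effect to an async function makes the mocked method return a coroutine, so the calling code can await it. A minimal standalone illustration (hypothetical values):

    import asyncio

    from mock import Mock

    async def fake_get_raw(uri, args):
        return "<cas:serviceResponse/>"

    client = Mock(spec=["get_raw"])
    client.get_raw.side_effect = fake_get_raw

    # Calling the mock invokes fake_get_raw and returns its coroutine.
    result = asyncio.get_event_loop().run_until_complete(client.get_raw("uri", {}))
    assert result == "<cas:serviceResponse/>"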
diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py
index 2f3df5f88f..7dd86d0c27 100644
--- a/tests/rest/client/v1/test_rooms.py
+++ b/tests/rest/client/v1/test_rooms.py
@@ -1821,3 +1821,163 @@ class RoomAliasListTestCase(unittest.HomeserverTestCase):
         )
         self.render(request)
         self.assertEqual(channel.code, expected_code, channel.result)
+
+
+class RoomCanonicalAliasTestCase(unittest.HomeserverTestCase):
+    servlets = [
+        synapse.rest.admin.register_servlets_for_client_rest_resource,
+        directory.register_servlets,
+        login.register_servlets,
+        room.register_servlets,
+    ]
+
+    def prepare(self, reactor, clock, homeserver):
+        self.room_owner = self.register_user("room_owner", "test")
+        self.room_owner_tok = self.login("room_owner", "test")
+
+        self.room_id = self.helper.create_room_as(
+            self.room_owner, tok=self.room_owner_tok
+        )
+
+        self.alias = "#alias:test"
+        self._set_alias_via_directory(self.alias)
+
+    def _set_alias_via_directory(self, alias: str, expected_code: int = 200):
+        url = "/_matrix/client/r0/directory/room/" + alias
+        data = {"room_id": self.room_id}
+        request_data = json.dumps(data)
+
+        request, channel = self.make_request(
+            "PUT", url, request_data, access_token=self.room_owner_tok
+        )
+        self.render(request)
+        self.assertEqual(channel.code, expected_code, channel.result)
+
+    def _get_canonical_alias(self, expected_code: int = 200) -> JsonDict:
+        """Calls the endpoint under test. returns the json response object."""
+        request, channel = self.make_request(
+            "GET",
+            "rooms/%s/state/m.room.canonical_alias" % (self.room_id,),
+            access_token=self.room_owner_tok,
+        )
+        self.render(request)
+        self.assertEqual(channel.code, expected_code, channel.result)
+        res = channel.json_body
+        self.assertIsInstance(res, dict)
+        return res
+
+    def _set_canonical_alias(self, content: str, expected_code: int = 200) -> JsonDict:
+        """Calls the endpoint under test. returns the json response object."""
+        request, channel = self.make_request(
+            "PUT",
+            "rooms/%s/state/m.room.canonical_alias" % (self.room_id,),
+            json.dumps(content),
+            access_token=self.room_owner_tok,
+        )
+        self.render(request)
+        self.assertEqual(channel.code, expected_code, channel.result)
+        res = channel.json_body
+        self.assertIsInstance(res, dict)
+        return res
+
+    def test_canonical_alias(self):
+        """Test a basic alias message."""
+        # There is no canonical alias to start with.
+        self._get_canonical_alias(expected_code=404)
+
+        # Create an alias.
+        self._set_canonical_alias({"alias": self.alias})
+
+        # Canonical alias now exists!
+        res = self._get_canonical_alias()
+        self.assertEqual(res, {"alias": self.alias})
+
+        # Now remove the alias.
+        self._set_canonical_alias({})
+
+        # There is an alias event, but it is empty.
+        res = self._get_canonical_alias()
+        self.assertEqual(res, {})
+
+    def test_alt_aliases(self):
+        """Test a canonical alias message with alt_aliases."""
+        # Create an alias.
+        self._set_canonical_alias({"alt_aliases": [self.alias]})
+
+        # Canonical alias now exists!
+        res = self._get_canonical_alias()
+        self.assertEqual(res, {"alt_aliases": [self.alias]})
+
+        # Now remove the alt_aliases.
+        self._set_canonical_alias({})
+
+        # There is an alias event, but it is empty.
+        res = self._get_canonical_alias()
+        self.assertEqual(res, {})
+
+    def test_alias_alt_aliases(self):
+        """Test a canonical alias message with an alias and alt_aliases."""
+        # Create an alias.
+        self._set_canonical_alias({"alias": self.alias, "alt_aliases": [self.alias]})
+
+        # Canonical alias now exists!
+        res = self._get_canonical_alias()
+        self.assertEqual(res, {"alias": self.alias, "alt_aliases": [self.alias]})
+
+        # Now remove the alias and alt_aliases.
+        self._set_canonical_alias({})
+
+        # There is an alias event, but it is empty.
+        res = self._get_canonical_alias()
+        self.assertEqual(res, {})
+
+    def test_partial_modify(self):
+        """Test removing only the alt_aliases."""
+        # Create an alias.
+        self._set_canonical_alias({"alias": self.alias, "alt_aliases": [self.alias]})
+
+        # Canonical alias now exists!
+        res = self._get_canonical_alias()
+        self.assertEqual(res, {"alias": self.alias, "alt_aliases": [self.alias]})
+
+        # Now remove the alt_aliases.
+        self._set_canonical_alias({"alias": self.alias})
+
+        # The canonical alias is still present; only the alt_aliases are gone.
+        res = self._get_canonical_alias()
+        self.assertEqual(res, {"alias": self.alias})
+
+    def test_add_alias(self):
+        """Test removing only the alt_aliases."""
+        # Create an additional alias.
+        second_alias = "#second:test"
+        self._set_alias_via_directory(second_alias)
+
+        # Add the canonical alias.
+        self._set_canonical_alias({"alias": self.alias, "alt_aliases": [self.alias]})
+
+        # Then add the second alias.
+        self._set_canonical_alias(
+            {"alias": self.alias, "alt_aliases": [self.alias, second_alias]}
+        )
+
+        # Canonical alias now exists!
+        res = self._get_canonical_alias()
+        self.assertEqual(
+            res, {"alias": self.alias, "alt_aliases": [self.alias, second_alias]}
+        )
+
+    def test_bad_data(self):
+        """Invalid data for alt_aliases should cause errors."""
+        self._set_canonical_alias({"alt_aliases": "@bad:test"}, expected_code=400)
+        self._set_canonical_alias({"alt_aliases": None}, expected_code=400)
+        self._set_canonical_alias({"alt_aliases": 0}, expected_code=400)
+        self._set_canonical_alias({"alt_aliases": 1}, expected_code=400)
+        self._set_canonical_alias({"alt_aliases": False}, expected_code=400)
+        self._set_canonical_alias({"alt_aliases": True}, expected_code=400)
+        self._set_canonical_alias({"alt_aliases": {}}, expected_code=400)
+
+    def test_bad_alias(self):
+        """An alias which does not point to the room raises a SynapseError."""
+        self._set_canonical_alias({"alias": "@unknown:test"}, expected_code=400)
+        self._set_canonical_alias({"alt_aliases": ["@unknown:test"]}, expected_code=400)
diff --git a/tests/state/test_v2.py b/tests/state/test_v2.py
index 5059ade850..a44960203e 100644
--- a/tests/state/test_v2.py
+++ b/tests/state/test_v2.py
@@ -603,7 +603,7 @@ class TestStateResolutionStore(object):
 
         return {eid: self.event_map[eid] for eid in event_ids if eid in self.event_map}
 
-    def get_auth_chain(self, event_ids, ignore_events):
+    def _get_auth_chain(self, event_ids):
         """Gets the full auth chain for a set of events (including rejected
         events).
 
@@ -617,9 +617,6 @@ class TestStateResolutionStore(object):
         Args:
             event_ids (list): The event IDs of the events to fetch the auth
                 chain for. Must be state events.
-            ignore_events: Set of events to exclude from the returned auth
-                chain.
-
         Returns:
             Deferred[list[str]]: List of event IDs of the auth chain.
         """
@@ -629,7 +626,7 @@ class TestStateResolutionStore(object):
         stack = list(event_ids)
         while stack:
             event_id = stack.pop()
-            if event_id in result or event_id in ignore_events:
+            if event_id in result:
                 continue
 
             result.add(event_id)
@@ -639,3 +636,9 @@ class TestStateResolutionStore(object):
                 stack.append(aid)
 
         return list(result)
+
+    def get_auth_chain_difference(self, auth_sets):
+        chains = [frozenset(self._get_auth_chain(a)) for a in auth_sets]
+
+        common = set(chains[0]).intersection(*chains[1:])
+        return set(chains[0]).union(*chains[1:]) - common
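
The set arithmetic above is the definition of the auth chain difference: events that appear in at least one of the auth chains but not in all of them. A small worked example (illustrative values):

    chains = [frozenset({"a", "e", "f"}), frozenset({"b", "e", "f"})]

    common = set(chains[0]).intersection(*chains[1:])        # {"e", "f"}
    difference = set(chains[0]).union(*chains[1:]) - common  # union minus core

    assert difference == {"a", "b"}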
diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py
index a331517f4d..3aeec0dc0f 100644
--- a/tests/storage/test_event_federation.py
+++ b/tests/storage/test_event_federation.py
@@ -13,19 +13,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from twisted.internet import defer
-
 import tests.unittest
 import tests.utils
 
 
-class EventFederationWorkerStoreTestCase(tests.unittest.TestCase):
-    @defer.inlineCallbacks
-    def setUp(self):
-        hs = yield tests.utils.setup_test_homeserver(self.addCleanup)
+class EventFederationWorkerStoreTestCase(tests.unittest.HomeserverTestCase):
+    def prepare(self, reactor, clock, hs):
         self.store = hs.get_datastore()
 
-    @defer.inlineCallbacks
     def test_get_prev_events_for_room(self):
         room_id = "@ROOM:local"
 
@@ -61,15 +56,14 @@ class EventFederationWorkerStoreTestCase(tests.unittest.TestCase):
             )
 
         for i in range(0, 20):
-            yield self.store.db.runInteraction("insert", insert_event, i)
+            self.get_success(self.store.db.runInteraction("insert", insert_event, i))
 
         # this should get the last ten
-        r = yield self.store.get_prev_events_for_room(room_id)
+        r = self.get_success(self.store.get_prev_events_for_room(room_id))
         self.assertEqual(10, len(r))
         for i in range(0, 10):
             self.assertEqual("$event_%i:local" % (19 - i), r[i])
 
-    @defer.inlineCallbacks
     def test_get_rooms_with_many_extremities(self):
         room1 = "#room1"
         room2 = "#room2"
@@ -86,25 +80,154 @@ class EventFederationWorkerStoreTestCase(tests.unittest.TestCase):
             )
 
         for i in range(0, 20):
-            yield self.store.db.runInteraction("insert", insert_event, i, room1)
-            yield self.store.db.runInteraction("insert", insert_event, i, room2)
-            yield self.store.db.runInteraction("insert", insert_event, i, room3)
+            self.get_success(
+                self.store.db.runInteraction("insert", insert_event, i, room1)
+            )
+            self.get_success(
+                self.store.db.runInteraction("insert", insert_event, i, room2)
+            )
+            self.get_success(
+                self.store.db.runInteraction("insert", insert_event, i, room3)
+            )
 
         # Test simple case
-        r = yield self.store.get_rooms_with_many_extremities(5, 5, [])
+        r = self.get_success(self.store.get_rooms_with_many_extremities(5, 5, []))
         self.assertEqual(len(r), 3)
 
         # Does filter work?
 
-        r = yield self.store.get_rooms_with_many_extremities(5, 5, [room1])
+        r = self.get_success(self.store.get_rooms_with_many_extremities(5, 5, [room1]))
         self.assertTrue(room2 in r)
         self.assertTrue(room3 in r)
         self.assertEqual(len(r), 2)
 
-        r = yield self.store.get_rooms_with_many_extremities(5, 5, [room1, room2])
+        r = self.get_success(
+            self.store.get_rooms_with_many_extremities(5, 5, [room1, room2])
+        )
         self.assertEqual(r, [room3])
 
         # Does filter and limit work?
 
-        r = yield self.store.get_rooms_with_many_extremities(5, 1, [room1])
+        r = self.get_success(self.store.get_rooms_with_many_extremities(5, 1, [room1]))
         self.assertTrue(r == [room2] or r == [room3])
+
+    def test_auth_difference(self):
+        room_id = "@ROOM:local"
+
+        # The silly auth graph we use to test the auth difference algorithm,
+        # where the top are the most recent events.
+        #
+        #   A   B
+        #    \ /
+        #  D  E
+        #  \  |
+        #   ` F   C
+        #     |  /|
+        #     G ´ |
+        #     | \ |
+        #     H   I
+        #     |   |
+        #     K   J
+
+        auth_graph = {
+            "a": ["e"],
+            "b": ["e"],
+            "c": ["g", "i"],
+            "d": ["f"],
+            "e": ["f"],
+            "f": ["g"],
+            "g": ["h", "i"],
+            "h": ["k"],
+            "i": ["j"],
+            "k": [],
+            "j": [],
+        }
+
+        depth_map = {
+            "a": 7,
+            "b": 7,
+            "c": 4,
+            "d": 6,
+            "e": 6,
+            "f": 5,
+            "g": 3,
+            "h": 2,
+            "i": 2,
+            "k": 1,
+            "j": 1,
+        }
+
+        # We rudely fiddle with the appropriate tables directly, as that's much
+        # easier than constructing events properly.
+
+        def insert_event(txn, event_id, stream_ordering):
+
+            depth = depth_map[event_id]
+
+            self.store.db.simple_insert_txn(
+                txn,
+                table="events",
+                values={
+                    "event_id": event_id,
+                    "room_id": room_id,
+                    "depth": depth,
+                    "topological_ordering": depth,
+                    "type": "m.test",
+                    "processed": True,
+                    "outlier": False,
+                    "stream_ordering": stream_ordering,
+                },
+            )
+
+            self.store.db.simple_insert_many_txn(
+                txn,
+                table="event_auth",
+                values=[
+                    {"event_id": event_id, "room_id": room_id, "auth_id": a}
+                    for a in auth_graph[event_id]
+                ],
+            )
+
+        next_stream_ordering = 0
+        for event_id in auth_graph:
+            next_stream_ordering += 1
+            self.get_success(
+                self.store.db.runInteraction(
+                    "insert", insert_event, event_id, next_stream_ordering
+                )
+            )
+
+        # Now actually test that various combinations give the right result:
+
+        difference = self.get_success(
+            self.store.get_auth_chain_difference([{"a"}, {"b"}])
+        )
+        self.assertSetEqual(difference, {"a", "b"})
+
+        difference = self.get_success(
+            self.store.get_auth_chain_difference([{"a"}, {"b"}, {"c"}])
+        )
+        self.assertSetEqual(difference, {"a", "b", "c", "e", "f"})
+
+        difference = self.get_success(
+            self.store.get_auth_chain_difference([{"a", "c"}, {"b"}])
+        )
+        self.assertSetEqual(difference, {"a", "b", "c"})
+
+        difference = self.get_success(
+            self.store.get_auth_chain_difference([{"a"}, {"b"}, {"d"}])
+        )
+        self.assertSetEqual(difference, {"a", "b", "d", "e"})
+
+        difference = self.get_success(
+            self.store.get_auth_chain_difference([{"a"}, {"b"}, {"c"}, {"d"}])
+        )
+        self.assertSetEqual(difference, {"a", "b", "c", "d", "e", "f"})
+
+        difference = self.get_success(
+            self.store.get_auth_chain_difference([{"a"}, {"b"}, {"e"}])
+        )
+        self.assertSetEqual(difference, {"a", "b"})
+
+        difference = self.get_success(self.store.get_auth_chain_difference([{"a"}]))
+        self.assertSetEqual(difference, set())
diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py
index 3c78faab45..bc53bf0951 100644
--- a/tests/storage/test_monthly_active_users.py
+++ b/tests/storage/test_monthly_active_users.py
@@ -303,3 +303,45 @@ class MonthlyActiveUsersTestCase(unittest.HomeserverTestCase):
         self.pump()
 
         self.store.upsert_monthly_active_user.assert_not_called()
+
+    def test_get_monthly_active_count_by_service(self):
+        appservice1_user1 = "@appservice1_user1:example.com"
+        appservice1_user2 = "@appservice1_user2:example.com"
+
+        appservice2_user1 = "@appservice2_user1:example.com"
+        native_user1 = "@native_user1:example.com"
+
+        service1 = "service1"
+        service2 = "service2"
+        native = "native"
+
+        self.store.register_user(
+            user_id=appservice1_user1, password_hash=None, appservice_id=service1
+        )
+        self.store.register_user(
+            user_id=appservice1_user2, password_hash=None, appservice_id=service1
+        )
+        self.store.register_user(
+            user_id=appservice2_user1, password_hash=None, appservice_id=service2
+        )
+        self.store.register_user(user_id=native_user1, password_hash=None)
+        self.pump()
+
+        count = self.store.get_monthly_active_count_by_service()
+        self.assertEqual({}, self.get_success(count))
+
+        self.store.upsert_monthly_active_user(native_user1)
+        self.store.upsert_monthly_active_user(appservice1_user1)
+        self.store.upsert_monthly_active_user(appservice1_user2)
+        self.store.upsert_monthly_active_user(appservice2_user1)
+        self.pump()
+
+        count = self.store.get_monthly_active_count()
+        self.assertEqual(4, self.get_success(count))
+
+        count = self.store.get_monthly_active_count_by_service()
+        result = self.get_success(count)
+
+        self.assertEqual(2, result[service1])
+        self.assertEqual(1, result[service2])
+        self.assertEqual(1, result[native])
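
Conceptually, the new per-service count groups monthly active users by the application service that registered them, bucketing users without one under "native". A sketch of the grouping (not the actual SQL query):

    from collections import Counter

    def count_by_service(mau_users, appservice_of):
        # appservice_of maps user_id -> appservice_id, or None for native users.
        return Counter(appservice_of(u) or "native" for u in mau_users)

    services = {"@a1:ex": "service1", "@a2:ex": "service1", "@b1:ex": "service2"}
    counts = count_by_service(["@a1:ex", "@a2:ex", "@b1:ex", "@n1:ex"], services.get)
    assert counts == {"service1": 2, "service2": 1, "native": 1}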
diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py
index bfa5d6f510..6c2351cf55 100644
--- a/tests/test_event_auth.py
+++ b/tests/test_event_auth.py
@@ -19,6 +19,7 @@ from synapse import event_auth
 from synapse.api.errors import AuthError
 from synapse.api.room_versions import RoomVersions
 from synapse.events import make_event_from_dict
+from synapse.types import get_domain_from_id
 
 
 class EventAuthTestCase(unittest.TestCase):
@@ -51,7 +52,7 @@ class EventAuthTestCase(unittest.TestCase):
             _random_state_event(joiner),
             auth_events,
             do_sig_check=False,
-        ),
+        )
 
     def test_state_default_level(self):
         """
@@ -87,6 +88,83 @@ class EventAuthTestCase(unittest.TestCase):
             RoomVersions.V1, _random_state_event(king), auth_events, do_sig_check=False,
         )
 
+    def test_alias_event(self):
+        """Alias events have special behavior up through room version 6."""
+        creator = "@creator:example.com"
+        other = "@other:example.com"
+        auth_events = {
+            ("m.room.create", ""): _create_event(creator),
+            ("m.room.member", creator): _join_event(creator),
+        }
+
+        # The creator should be able to send aliases.
+        event_auth.check(
+            RoomVersions.V1, _alias_event(creator), auth_events, do_sig_check=False,
+        )
+
+        # Reject an event with no state key.
+        with self.assertRaises(AuthError):
+            event_auth.check(
+                RoomVersions.V1,
+                _alias_event(creator, state_key=""),
+                auth_events,
+                do_sig_check=False,
+            )
+
+        # If the domain of the sender does not match the state key, reject.
+        with self.assertRaises(AuthError):
+            event_auth.check(
+                RoomVersions.V1,
+                _alias_event(creator, state_key="test.com"),
+                auth_events,
+                do_sig_check=False,
+            )
+
+        # Note that the member does *not* need to be in the room.
+        event_auth.check(
+            RoomVersions.V1, _alias_event(other), auth_events, do_sig_check=False,
+        )
+
+    def test_msc2432_alias_event(self):
+        """After MSC2432, alias events have no special behavior."""
+        creator = "@creator:example.com"
+        other = "@other:example.com"
+        auth_events = {
+            ("m.room.create", ""): _create_event(creator),
+            ("m.room.member", creator): _join_event(creator),
+        }
+
+        # The creator should be able to send aliases.
+        event_auth.check(
+            RoomVersions.MSC2432_DEV,
+            _alias_event(creator),
+            auth_events,
+            do_sig_check=False,
+        )
+
+        # No particular checks are done on the state key.
+        event_auth.check(
+            RoomVersions.MSC2432_DEV,
+            _alias_event(creator, state_key=""),
+            auth_events,
+            do_sig_check=False,
+        )
+        event_auth.check(
+            RoomVersions.MSC2432_DEV,
+            _alias_event(creator, state_key="test.com"),
+            auth_events,
+            do_sig_check=False,
+        )
+
+        # Per standard auth rules, the member must be in the room.
+        with self.assertRaises(AuthError):
+            event_auth.check(
+                RoomVersions.MSC2432_DEV,
+                _alias_event(other),
+                auth_events,
+                do_sig_check=False,
+            )
+
 
 # helpers for making events
 
@@ -131,6 +209,19 @@ def _power_levels_event(sender, content):
     )
 
 
+def _alias_event(sender, **kwargs):
+    data = {
+        "room_id": TEST_ROOM_ID,
+        "event_id": _get_event_id(),
+        "type": "m.room.aliases",
+        "sender": sender,
+        "state_key": get_domain_from_id(sender),
+        "content": {"aliases": []},
+    }
+    data.update(**kwargs)
+    return make_event_from_dict(data)
+
+
 def _random_state_event(sender):
     return make_event_from_dict(
         {
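
Taken together, the two alias tests pin down a single branch in the auth rules: pre-MSC2432 room versions special-case m.room.aliases (the state_key must equal the sender's domain, and no membership check applies), while MSC2432 rooms fall through to the standard rules. A rough sketch of that branch, assuming a room-version flag along the lines of the special_case_aliases_auth attribute introduced for MSC2432; the error message is illustrative, not Synapse's exact text:

    from synapse.api.errors import AuthError
    from synapse.types import get_domain_from_id

    def check_aliases_event(room_version, event):
        if not room_version.special_case_aliases_auth:
            # MSC2432: no special casing; the caller applies the standard
            # auth rules, which require the sender to be in the room.
            return False
        # Pre-MSC2432: the state_key must match the sender's domain (an
        # empty state_key fails this check too), and the usual membership
        # check is skipped entirely.
        if event.state_key != get_domain_from_id(event.sender):
            raise AuthError(403, "Alias event has wrong state_key")
        return True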
diff --git a/tests/test_types.py b/tests/test_types.py
index 8d97c751ea..480bea1bdc 100644
--- a/tests/test_types.py
+++ b/tests/test_types.py
@@ -75,7 +75,7 @@ class GroupIDTestCase(unittest.TestCase):
                 self.fail("Parsing '%s' should raise exception" % id_string)
             except SynapseError as exc:
                 self.assertEqual(400, exc.code)
-                self.assertEqual("M_UNKNOWN", exc.errcode)
+                self.assertEqual("M_INVALID_PARAM", exc.errcode)
 
 
 class MapUsernameTestCase(unittest.TestCase):
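
The updated assertion reflects that malformed group IDs are now rejected with M_INVALID_PARAM rather than the generic M_UNKNOWN. A small sketch of that contract; the malformed ID below is a hypothetical example, not one from the test's own list:

    from synapse.api.errors import Codes, SynapseError
    from synapse.types import GroupID

    try:
        GroupID.from_string("not-a-group-id")  # hypothetical malformed ID
    except SynapseError as exc:
        assert exc.code == 400
        assert exc.errcode == Codes.INVALID_PARAM  # "M_INVALID_PARAM"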
diff --git a/tox.ini b/tox.ini
index 4ccfde01b5..34d6322c4b 100644
--- a/tox.ini
+++ b/tox.ini
@@ -168,7 +168,6 @@ commands=
     coverage html
 
 [testenv:mypy]
-basepython = python3.7
 skip_install = True
 deps =
     {[base]deps}
@@ -179,19 +178,28 @@ env =
 extras = all
 commands = mypy \
             synapse/api \
-            synapse/config/ \
+            synapse/appservice \
+            synapse/config \
             synapse/events/spamcheck.py \
+            synapse/federation/federation_base.py \
+            synapse/federation/federation_client.py \
             synapse/federation/sender \
             synapse/federation/transport \
+            synapse/handlers/auth.py \
+            synapse/handlers/directory.py \
             synapse/handlers/presence.py \
             synapse/handlers/sync.py \
             synapse/handlers/ui_auth \
             synapse/logging/ \
+            synapse/metrics \
             synapse/module_api \
+            synapse/push/pusherpool.py \
+            synapse/push/push_rule_evaluator.py \
             synapse/replication \
             synapse/rest \
             synapse/spam_checker_api \
             synapse/storage/engines \
+            synapse/storage/database.py \
             synapse/streams
 
 # To find all folders that pass mypy you run: