-rw-r--r--  CHANGES.md | 74
-rw-r--r--  INSTALL.md | 20
-rw-r--r--  UPGRADE.rst | 21
-rw-r--r--  changelog.d/9045.misc | 1
-rw-r--r--  changelog.d/9062.feature | 1
-rw-r--r--  changelog.d/9121.bugfix | 1
-rw-r--r--  changelog.d/9123.misc | 1
-rw-r--r--  changelog.d/9129.misc | 1
-rw-r--r--  changelog.d/9135.doc | 1
-rw-r--r--  changelog.d/9163.bugfix | 1
-rw-r--r--  changelog.d/9164.bugfix | 1
-rw-r--r--  changelog.d/9165.bugfix | 1
-rw-r--r--  changelog.d/9176.misc | 1
-rw-r--r--  changelog.d/9180.misc | 1
-rw-r--r--  changelog.d/9181.misc | 1
-rw-r--r--  changelog.d/9183.feature | 1
-rw-r--r--  changelog.d/9184.misc | 1
-rw-r--r--  changelog.d/9188.misc | 1
-rw-r--r--  changelog.d/9190.misc | 1
-rw-r--r--  changelog.d/9191.misc | 1
-rw-r--r--  changelog.d/9198.misc | 1
-rw-r--r--  changelog.d/9199.removal | 1
-rw-r--r--  changelog.d/9200.misc | 1
-rw-r--r--  changelog.d/9209.feature | 1
-rw-r--r--  changelog.d/9217.misc | 1
-rw-r--r--  changelog.d/9218.bugfix | 1
-rw-r--r--  changelog.d/9222.misc | 1
-rw-r--r--  changelog.d/9223.misc | 1
-rw-r--r--  changelog.d/9227.misc | 1
-rw-r--r--  changelog.d/9229.bugfix | 1
-rw-r--r--  changelog.d/9235.bugfix | 1
-rw-r--r--  changelog.d/9240.misc | 1
-rw-r--r--  changelog.d/9244.doc | 1
-rw-r--r--  changelog.d/9257.bugfix | 1
-rw-r--r--  changelog.d/9291.doc | 1
-rw-r--r--  changelog.d/9296.bugfix | 1
-rw-r--r--  changelog.d/9297.feature | 1
-rw-r--r--  changelog.d/9299.misc | 1
-rw-r--r--  changelog.d/9300.feature | 1
-rw-r--r--  changelog.d/9301.feature | 1
-rw-r--r--  changelog.d/9302.bugfix | 1
-rw-r--r--  changelog.d/9305.misc | 1
-rw-r--r--  changelog.d/9307.misc | 1
-rw-r--r--  changelog.d/9308.doc | 1
-rw-r--r--  changelog.d/9310.doc | 1
-rw-r--r--  changelog.d/9311.feature | 1
-rw-r--r--  changelog.d/9317.doc | 1
-rw-r--r--  changelog.d/9322.doc | 1
-rwxr-xr-x  debian/build_virtualenv | 4
-rw-r--r--  debian/changelog | 6
-rw-r--r--  docker/Dockerfile-dhvirtualenv | 1
-rw-r--r--  docs/admin_api/rooms.md | 30
-rw-r--r--  docs/openid.md | 56
-rw-r--r--  docs/sample_config.yaml | 159
-rw-r--r--  docs/spam_checker.md | 3
-rw-r--r--  docs/turn-howto.md | 2
-rw-r--r--  docs/workers.md | 23
-rw-r--r--  mypy.ini | 42
-rw-r--r--  synapse/__init__.py | 2
-rw-r--r--  synapse/app/_base.py | 3
-rw-r--r--  synapse/app/generic_worker.py | 11
-rw-r--r--  synapse/app/homeserver.py | 16
-rw-r--r--  synapse/app/phone_stats_home.py | 9
-rw-r--r--  synapse/config/_base.py | 39
-rw-r--r--  synapse/config/_base.pyi | 2
-rw-r--r--  synapse/config/oidc_config.py | 76
-rw-r--r--  synapse/config/ratelimiting.py | 32
-rw-r--r--  synapse/config/registration.py | 2
-rw-r--r--  synapse/config/repository.py | 19
-rw-r--r--  synapse/config/saml2_config.py | 8
-rw-r--r--  synapse/config/server.py | 99
-rw-r--r--  synapse/config/sso.py | 98
-rw-r--r--  synapse/crypto/context_factory.py | 13
-rw-r--r--  synapse/events/spamcheck.py | 47
-rw-r--r--  synapse/federation/federation_client.py | 2
-rw-r--r--  synapse/handlers/auth.py | 54
-rw-r--r--  synapse/handlers/cas_handler.py | 3
-rw-r--r--  synapse/handlers/device.py | 12
-rw-r--r--  synapse/handlers/e2e_keys.py | 223
-rw-r--r--  synapse/handlers/e2e_room_keys.py | 91
-rw-r--r--  synapse/handlers/federation.py | 6
-rw-r--r--  synapse/handlers/identity.py | 28
-rw-r--r--  synapse/handlers/message.py | 2
-rw-r--r--  synapse/handlers/oidc_handler.py | 103
-rw-r--r--  synapse/handlers/register.py | 7
-rw-r--r--  synapse/handlers/room.py | 7
-rw-r--r--  synapse/handlers/room_member.py | 31
-rw-r--r--  synapse/handlers/saml_handler.py | 5
-rw-r--r--  synapse/handlers/sso.py | 205
-rw-r--r--  synapse/http/federation/matrix_federation_agent.py | 7
-rw-r--r--  synapse/http/server.py | 7
-rw-r--r--  synapse/logging/opentracing.py | 2
-rw-r--r--  synapse/module_api/__init__.py | 10
-rw-r--r--  synapse/push/mailer.py | 474
-rw-r--r--  synapse/python_dependencies.py | 8
-rw-r--r--  synapse/res/templates/sso.css | 129
-rw-r--r--  synapse/res/templates/sso_account_deactivated.html | 27
-rw-r--r--  synapse/res/templates/sso_auth_account_details.html | 188
-rw-r--r--  synapse/res/templates/sso_auth_account_details.js | 116
-rw-r--r--  synapse/res/templates/sso_auth_bad_user.html | 28
-rw-r--r--  synapse/res/templates/sso_auth_confirm.html | 33
-rw-r--r--  synapse/res/templates/sso_auth_success.html | 40
-rw-r--r--  synapse/res/templates/sso_error.html | 102
-rw-r--r--  synapse/res/templates/sso_footer.html | 19
-rw-r--r--  synapse/res/templates/sso_login_idp_picker.html | 74
-rw-r--r--  synapse/res/templates/sso_new_user_consent.html | 32
-rw-r--r--  synapse/res/templates/sso_partial_profile.html | 19
-rw-r--r--  synapse/res/templates/sso_redirect_confirm.html | 38
-rw-r--r--  synapse/res/username_picker/index.html | 19
-rw-r--r--  synapse/res/username_picker/script.js | 95
-rw-r--r--  synapse/res/username_picker/style.css | 27
-rw-r--r--  synapse/rest/admin/__init__.py | 2
-rw-r--r--  synapse/rest/admin/rooms.py | 39
-rw-r--r--  synapse/rest/client/v1/login.py | 2
-rw-r--r--  synapse/rest/client/v2_alpha/account.py | 12
-rw-r--r--  synapse/rest/client/v2_alpha/register.py | 6
-rw-r--r--  synapse/rest/consent/consent_resource.py | 1
-rw-r--r--  synapse/rest/media/v1/media_storage.py | 59
-rw-r--r--  synapse/rest/media/v1/upload_resource.py | 12
-rw-r--r--  synapse/rest/synapse/client/__init__.py | 54
-rw-r--r--  synapse/rest/synapse/client/new_user_consent.py | 97
-rw-r--r--  synapse/rest/synapse/client/oidc/__init__.py (renamed from synapse/rest/oidc/__init__.py) | 6
-rw-r--r--  synapse/rest/synapse/client/oidc/callback_resource.py (renamed from synapse/rest/oidc/callback_resource.py) | 0
-rw-r--r--  synapse/rest/synapse/client/pick_username.py | 101
-rw-r--r--  synapse/rest/synapse/client/saml2/__init__.py (renamed from synapse/rest/saml2/__init__.py) | 8
-rw-r--r--  synapse/rest/synapse/client/saml2/metadata_resource.py (renamed from synapse/rest/saml2/metadata_resource.py) | 0
-rw-r--r--  synapse/rest/synapse/client/saml2/response_resource.py (renamed from synapse/rest/saml2/response_resource.py) | 0
-rw-r--r--  synapse/rest/synapse/client/sso_register.py | 50
-rw-r--r--  synapse/storage/database.py | 14
-rw-r--r--  synapse/storage/databases/main/end_to_end_keys.py | 4
-rw-r--r--  synapse/storage/databases/main/metrics.py | 56
-rw-r--r--  synapse/storage/databases/main/registration.py | 40
-rw-r--r--  synapse/storage/engines/__init__.py | 8
-rw-r--r--  synapse/storage/engines/sqlite.py | 6
-rw-r--r--  synapse/storage/prepare_database.py | 4
-rw-r--r--  synapse/storage/types.py | 37
-rw-r--r--  synapse/storage/util/sequence.py | 8
-rw-r--r--  synapse/util/templates.py | 106
-rw-r--r--  synapse/visibility.py | 2
-rw-r--r--  tests/config/test_server.py | 61
-rw-r--r--  tests/handlers/test_cas.py | 8
-rw-r--r--  tests/handlers/test_federation.py | 46
-rw-r--r--  tests/handlers/test_oidc.py | 39
-rw-r--r--  tests/handlers/test_saml.py | 8
-rw-r--r--  tests/push/test_email.py | 81
-rw-r--r--  tests/rest/admin/test_room.py | 15
-rw-r--r--  tests/rest/client/v1/test_login.py | 46
-rw-r--r--  tests/rest/client/v1/test_rooms.py | 35
-rw-r--r--  tests/rest/client/v2_alpha/test_account.py | 90
-rw-r--r--  tests/rest/client/v2_alpha/test_auth.py | 6
-rw-r--r--  tests/rest/media/v1/test_media_storage.py | 94
-rw-r--r--  tests/server.py | 9
-rw-r--r--  tests/unittest.py | 5
-rw-r--r--  tests/utils.py | 1
154 files changed, 3467 insertions, 1046 deletions
diff --git a/CHANGES.md b/CHANGES.md
index fcd782fa94..16c11ff0cb 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,8 +1,78 @@
-Unreleased
-==========
+Synapse 1.27.0rc1 (2021-02-02)
+==============================
 
 Note that this release includes a change in Synapse to use Redis as a cache ─ as well as a pub/sub mechanism ─ if Redis support is enabled. No action is needed by server administrators, and we do not expect resource usage of the Redis instance to change dramatically.
 
+This release also changes the callback URI for OpenID Connect (OIDC) identity providers. If your server is configured to use single sign-on via an OIDC/OAuth2 IdP, you may need to make configuration changes. Please review [UPGRADE.rst](UPGRADE.rst) for more details on these changes.
+
+This release also changes escaping of variables in the HTML templates for SSO or email notifications. If you have customised these templates, please review [UPGRADE.rst](UPGRADE.rst) for more details on these changes.
+
+
+Features
+--------
+
+- Add an admin API for getting and deleting forward extremities for a room. ([\#9062](https://github.com/matrix-org/synapse/issues/9062))
+- Add an admin API for retrieving the current room state of a room. ([\#9168](https://github.com/matrix-org/synapse/issues/9168))
+- Add experimental support for allowing clients to pick an SSO Identity Provider ([MSC2858](https://github.com/matrix-org/matrix-doc/pull/2858)). ([\#9183](https://github.com/matrix-org/synapse/issues/9183), [\#9242](https://github.com/matrix-org/synapse/issues/9242))
+- Add an admin API endpoint for shadow-banning users. ([\#9209](https://github.com/matrix-org/synapse/issues/9209))
+- Add ratelimits to the 3PID `/requestToken` APIs. ([\#9238](https://github.com/matrix-org/synapse/issues/9238))
+- Add support to the OpenID Connect integration for adding the user's email address. ([\#9245](https://github.com/matrix-org/synapse/issues/9245))
+- Add ratelimits to invites in rooms and to specific users. ([\#9258](https://github.com/matrix-org/synapse/issues/9258))
+- Improve the user experience of setting up an account via single-sign on. ([\#9262](https://github.com/matrix-org/synapse/issues/9262), [\#9272](https://github.com/matrix-org/synapse/issues/9272), [\#9275](https://github.com/matrix-org/synapse/issues/9275), [\#9276](https://github.com/matrix-org/synapse/issues/9276), [\#9277](https://github.com/matrix-org/synapse/issues/9277), [\#9286](https://github.com/matrix-org/synapse/issues/9286), [\#9287](https://github.com/matrix-org/synapse/issues/9287))
+- Add phone home stats for encrypted messages. ([\#9283](https://github.com/matrix-org/synapse/issues/9283))
+- Update the redirect URI for OIDC authentication. ([\#9288](https://github.com/matrix-org/synapse/issues/9288))
+
+
+Bugfixes
+--------
+
+- Fix spurious errors in logs when deleting a non-existent pusher. ([\#9121](https://github.com/matrix-org/synapse/issues/9121))
+- Fix a long-standing bug where Synapse would return a 500 error when a thumbnail did not exist (and auto-generation of thumbnails was not enabled). ([\#9163](https://github.com/matrix-org/synapse/issues/9163))
+- Fix a long-standing bug where an internal server error was raised when attempting to preview an HTML document in an unknown character encoding. ([\#9164](https://github.com/matrix-org/synapse/issues/9164))
+- Fix a long-standing bug where invalid data could cause errors when calculating the presentable room name for push. ([\#9165](https://github.com/matrix-org/synapse/issues/9165))
+- Fix bug where we sometimes didn't detect that Redis connections had died, causing workers to not see new data. ([\#9218](https://github.com/matrix-org/synapse/issues/9218))
+- Fix a bug where `None` was passed to Synapse modules instead of an empty dictionary if an empty module `config` block was provided in the homeserver config. ([\#9229](https://github.com/matrix-org/synapse/issues/9229))
+- Fix a bug in the `make_room_admin` admin API where it failed if the admin with the greatest power level was not in the room. Contributed by Pankaj Yadav. ([\#9235](https://github.com/matrix-org/synapse/issues/9235))
+- Prevent password hashes from getting dropped if a client failed threepid validation during a User Interactive Auth stage. Removes a workaround for an ancient bug in Riot Web <v0.7.4. ([\#9265](https://github.com/matrix-org/synapse/issues/9265))
+- Fix single-sign-on when the endpoints are routed to synapse workers. ([\#9271](https://github.com/matrix-org/synapse/issues/9271))
+
+
+Improved Documentation
+----------------------
+
+- Add docs for using Gitea as OpenID provider. ([\#9134](https://github.com/matrix-org/synapse/issues/9134))
+- Add link to Matrix VoIP tester for turn-howto. ([\#9135](https://github.com/matrix-org/synapse/issues/9135))
+- Add notes on integrating with Facebook for SSO login. ([\#9244](https://github.com/matrix-org/synapse/issues/9244))
+
+
+Deprecations and Removals
+-------------------------
+
+- The `service_url` parameter in `cas_config` is deprecated in favor of `public_baseurl`. ([\#9199](https://github.com/matrix-org/synapse/issues/9199))
+- Add new endpoint `/_synapse/client/saml2` for SAML2 authentication callbacks, and deprecate the old endpoint `/_matrix/saml2`. ([\#9289](https://github.com/matrix-org/synapse/issues/9289))
+
+
+Internal Changes
+----------------
+
+- Add tests to `test_user.UsersListTestCase` for List Users Admin API. ([\#9045](https://github.com/matrix-org/synapse/issues/9045))
+- Various improvements to the federation client. ([\#9129](https://github.com/matrix-org/synapse/issues/9129))
+- Speed up chain cover calculation when persisting a batch of state events at once. ([\#9176](https://github.com/matrix-org/synapse/issues/9176))
+- Add a `long_description_type` to the package metadata. ([\#9180](https://github.com/matrix-org/synapse/issues/9180))
+- Speed up batch insertion when using PostgreSQL. ([\#9181](https://github.com/matrix-org/synapse/issues/9181), [\#9188](https://github.com/matrix-org/synapse/issues/9188))
+- Emit an error at startup if different Identity Providers are configured with the same `idp_id`. ([\#9184](https://github.com/matrix-org/synapse/issues/9184))
+- Improve performance of concurrent use of `StreamIDGenerators`. ([\#9190](https://github.com/matrix-org/synapse/issues/9190))
+- Add some missing source directories to the automatic linting script. ([\#9191](https://github.com/matrix-org/synapse/issues/9191))
+- Precompute joined hosts and store in Redis. ([\#9198](https://github.com/matrix-org/synapse/issues/9198), [\#9227](https://github.com/matrix-org/synapse/issues/9227))
+- Clean-up template loading code. ([\#9200](https://github.com/matrix-org/synapse/issues/9200))
+- Fix the Python 3.5 old dependencies build. ([\#9217](https://github.com/matrix-org/synapse/issues/9217))
+- Update `isort` to v5.7.0 to bypass a bug where it would disagree with `black` about formatting. ([\#9222](https://github.com/matrix-org/synapse/issues/9222))
+- Add type hints to handlers code. ([\#9223](https://github.com/matrix-org/synapse/issues/9223), [\#9232](https://github.com/matrix-org/synapse/issues/9232))
+- Fix Debian package building on Ubuntu 16.04 LTS (Xenial). ([\#9254](https://github.com/matrix-org/synapse/issues/9254))
+- Minor performance improvement during TLS handshake. ([\#9255](https://github.com/matrix-org/synapse/issues/9255))
+- Refactor the generation of summary text for email notifications. ([\#9260](https://github.com/matrix-org/synapse/issues/9260))
+- Restore PyPy compatibility by not calling CPython-specific GC methods when under PyPy. ([\#9270](https://github.com/matrix-org/synapse/issues/9270))
+
 
 Synapse 1.26.0 (2021-01-27)
 ===========================
diff --git a/INSTALL.md b/INSTALL.md
index d405d9fe55..b9e3f613d1 100644
--- a/INSTALL.md
+++ b/INSTALL.md
@@ -151,29 +151,15 @@ sudo pacman -S base-devel python python-pip \
 
 ##### CentOS/Fedora
 
-Installing prerequisites on CentOS 8 or Fedora>26:
+Installing prerequisites on CentOS or Fedora Linux:
 
 ```sh
 sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
-                 libwebp-devel tk-devel redhat-rpm-config \
-                 python3-virtualenv libffi-devel openssl-devel
+                 libwebp-devel libxml2-devel libxslt-devel libpq-devel \
+                 python3-virtualenv libffi-devel openssl-devel python3-devel
 sudo dnf groupinstall "Development Tools"
 ```
 
-Installing prerequisites on CentOS 7 or Fedora<=25:
-
-```sh
-sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
-                 lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
-                 python3-virtualenv libffi-devel openssl-devel
-sudo yum groupinstall "Development Tools"
-```
-
-Note that Synapse does not support versions of SQLite before 3.11, and CentOS 7
-uses SQLite 3.7. You may be able to work around this by installing a more
-recent SQLite version, but it is recommended that you instead use a Postgres
-database: see [docs/postgres.md](docs/postgres.md).
-
 ##### macOS
 
 Installing prerequisites on macOS:
diff --git a/UPGRADE.rst b/UPGRADE.rst
index e62e647a1d..22edfe0d60 100644
--- a/UPGRADE.rst
+++ b/UPGRADE.rst
@@ -88,15 +88,30 @@ for example:
 Upgrading to v1.27.0
 ====================
 
+Changes to callback URI for OAuth2 / OpenID Connect
+---------------------------------------------------
+
+This version changes the URI used for callbacks from OAuth2 identity providers. If
+your server is configured for single sign-on via an OpenID Connect or OAuth2 identity
+provider, you will need to add ``[synapse public baseurl]/_synapse/client/oidc/callback``
+to the list of permitted "redirect URIs" at the identity provider.
+
+See `docs/openid.md <docs/openid.md>`_ for more information on setting up OpenID
+Connect.
+
+(Note: a similar change is being made for SAML2; in this case the old URI
+``[synapse public baseurl]/_matrix/saml2`` is being deprecated, but will continue to
+work, so no immediate changes are required for existing installations.)
+
 Changes to HTML templates
 -------------------------
 
 The HTML templates for SSO and email notifications now have `Jinja2's autoescape <https://jinja.palletsprojects.com/en/2.11.x/api/#autoescaping>`_
-enabled for files ending in ``.html``, ``.htm``, and ``.xml``. If you hae customised
+enabled for files ending in ``.html``, ``.htm``, and ``.xml``. If you have customised
 these templates and see issues when viewing them you might need to update them.
 It is expected that most configurations will need no changes.
 
-If you have customised the templates *names* for these templates it is recommended
+If you have customised the *names* of these templates, it is recommended
 to verify they end in ``.html`` to ensure autoescape is enabled.
 
 The above applies to the following templates:
@@ -235,7 +250,7 @@ shown below:
 
           return {"localpart": localpart}
 
-Removal historical Synapse Admin API 
+Removal historical Synapse Admin API
 ------------------------------------
 
 Historically, the Synapse Admin API has been accessible under:
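
(Editorial aside, not part of the patch above: the new OIDC redirect URI described in the "Changes to callback URI" section of the UPGRADE.rst hunk is simply the homeserver's public base URL with ``_synapse/client/oidc/callback`` appended. A minimal sketch, using an invented ``public_baseurl`` value:)

```python
from urllib.parse import urljoin

# Illustrative value only; substitute your homeserver's configured public_baseurl.
public_baseurl = "https://matrix.example.com/"

# New callback path introduced in Synapse 1.27.0
# (the old path was "_synapse/oidc/callback").
oidc_callback = urljoin(public_baseurl, "_synapse/client/oidc/callback")

print(oidc_callback)  # -> https://matrix.example.com/_synapse/client/oidc/callback
```

(The printed value is what needs to be added to the identity provider's list of permitted redirect URIs.)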
diff --git a/changelog.d/9045.misc b/changelog.d/9045.misc
deleted file mode 100644
index 7f1886a0de..0000000000
--- a/changelog.d/9045.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add tests to `test_user.UsersListTestCase` for List Users Admin API.
\ No newline at end of file
diff --git a/changelog.d/9062.feature b/changelog.d/9062.feature
deleted file mode 100644
index 8b950fa062..0000000000
--- a/changelog.d/9062.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add admin API for getting and deleting forward extremities for a room.
diff --git a/changelog.d/9121.bugfix b/changelog.d/9121.bugfix
deleted file mode 100644
index a566878ec0..0000000000
--- a/changelog.d/9121.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix spurious errors in logs when deleting a non-existant pusher.
diff --git a/changelog.d/9123.misc b/changelog.d/9123.misc
new file mode 100644
index 0000000000..329600c40c
--- /dev/null
+++ b/changelog.d/9123.misc
@@ -0,0 +1 @@
+Add experimental support for running Synapse with PyPy.
diff --git a/changelog.d/9129.misc b/changelog.d/9129.misc
deleted file mode 100644
index 7800be3e7e..0000000000
--- a/changelog.d/9129.misc
+++ /dev/null
@@ -1 +0,0 @@
-Various improvements to the federation client.
diff --git a/changelog.d/9135.doc b/changelog.d/9135.doc
deleted file mode 100644
index d11ba70de4..0000000000
--- a/changelog.d/9135.doc
+++ /dev/null
@@ -1 +0,0 @@
-Add link to Matrix VoIP tester for turn-howto.
diff --git a/changelog.d/9163.bugfix b/changelog.d/9163.bugfix
deleted file mode 100644
index c51cf6ca80..0000000000
--- a/changelog.d/9163.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a long-standing bug where Synapse would return a 500 error when a thumbnail did not exist (and auto-generation of thumbnails was not enabled).
diff --git a/changelog.d/9164.bugfix b/changelog.d/9164.bugfix
deleted file mode 100644
index 1c54a256c1..0000000000
--- a/changelog.d/9164.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a long-standing bug where an internal server error was raised when attempting to preview an HTML document in an unknown character encoding.
diff --git a/changelog.d/9165.bugfix b/changelog.d/9165.bugfix
deleted file mode 100644
index 58db22f484..0000000000
--- a/changelog.d/9165.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a long-standing bug where invalid data could cause errors when calculating the presentable room name for push.
diff --git a/changelog.d/9176.misc b/changelog.d/9176.misc
deleted file mode 100644
index 9c41d7b0f9..0000000000
--- a/changelog.d/9176.misc
+++ /dev/null
@@ -1 +0,0 @@
-Speed up chain cover calculation when persisting a batch of state events at once.
diff --git a/changelog.d/9180.misc b/changelog.d/9180.misc
deleted file mode 100644
index 69dd86110d..0000000000
--- a/changelog.d/9180.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add a `long_description_type` to the package metadata.
diff --git a/changelog.d/9181.misc b/changelog.d/9181.misc
deleted file mode 100644
index 7820d09cd0..0000000000
--- a/changelog.d/9181.misc
+++ /dev/null
@@ -1 +0,0 @@
-Speed up batch insertion when using PostgreSQL.
diff --git a/changelog.d/9183.feature b/changelog.d/9183.feature
deleted file mode 100644
index 2d5c735042..0000000000
--- a/changelog.d/9183.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add experimental support for allowing clients to pick an SSO Identity Provider ([MSC2858](https://github.com/matrix-org/matrix-doc/pull/2858).
diff --git a/changelog.d/9184.misc b/changelog.d/9184.misc
deleted file mode 100644
index 70da3d6cf5..0000000000
--- a/changelog.d/9184.misc
+++ /dev/null
@@ -1 +0,0 @@
-Emit an error at startup if different Identity Providers are configured with the same `idp_id`.
diff --git a/changelog.d/9188.misc b/changelog.d/9188.misc
deleted file mode 100644
index 7820d09cd0..0000000000
--- a/changelog.d/9188.misc
+++ /dev/null
@@ -1 +0,0 @@
-Speed up batch insertion when using PostgreSQL.
diff --git a/changelog.d/9190.misc b/changelog.d/9190.misc
deleted file mode 100644
index 1b0cc56a92..0000000000
--- a/changelog.d/9190.misc
+++ /dev/null
@@ -1 +0,0 @@
-Improve performance of concurrent use of `StreamIDGenerators`.
diff --git a/changelog.d/9191.misc b/changelog.d/9191.misc
deleted file mode 100644
index b4bc6be13a..0000000000
--- a/changelog.d/9191.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add some missing source directories to the automatic linting script.
\ No newline at end of file
diff --git a/changelog.d/9198.misc b/changelog.d/9198.misc
deleted file mode 100644
index a6cb77fbb2..0000000000
--- a/changelog.d/9198.misc
+++ /dev/null
@@ -1 +0,0 @@
-Precompute joined hosts and store in Redis.
diff --git a/changelog.d/9199.removal b/changelog.d/9199.removal
deleted file mode 100644
index fbd2916cbf..0000000000
--- a/changelog.d/9199.removal
+++ /dev/null
@@ -1 +0,0 @@
-The `service_url` parameter in `cas_config` is deprecated in favor of `public_baseurl`.
diff --git a/changelog.d/9200.misc b/changelog.d/9200.misc
deleted file mode 100644
index 5f239ff9da..0000000000
--- a/changelog.d/9200.misc
+++ /dev/null
@@ -1 +0,0 @@
-Clean-up template loading code.
diff --git a/changelog.d/9209.feature b/changelog.d/9209.feature
deleted file mode 100644
index ec926e8eb4..0000000000
--- a/changelog.d/9209.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add an admin API endpoint for shadow-banning users.
diff --git a/changelog.d/9217.misc b/changelog.d/9217.misc
deleted file mode 100644
index 72bacc7110..0000000000
--- a/changelog.d/9217.misc
+++ /dev/null
@@ -1 +0,0 @@
-Fix the Python 3.5 old dependencies build.
diff --git a/changelog.d/9218.bugfix b/changelog.d/9218.bugfix
deleted file mode 100644
index 577fff5497..0000000000
--- a/changelog.d/9218.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix bug where we sometimes didn't detect that Redis connections had died, causing workers to not see new data.
diff --git a/changelog.d/9222.misc b/changelog.d/9222.misc
deleted file mode 100644
index 37490717b3..0000000000
--- a/changelog.d/9222.misc
+++ /dev/null
@@ -1 +0,0 @@
-Update `isort` to v5.7.0 to bypass a bug where it would disagree with `black` about formatting.
\ No newline at end of file
diff --git a/changelog.d/9223.misc b/changelog.d/9223.misc
deleted file mode 100644
index 9d44b621c9..0000000000
--- a/changelog.d/9223.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add type hints to handlers code.
diff --git a/changelog.d/9227.misc b/changelog.d/9227.misc
deleted file mode 100644
index a6cb77fbb2..0000000000
--- a/changelog.d/9227.misc
+++ /dev/null
@@ -1 +0,0 @@
-Precompute joined hosts and store in Redis.
diff --git a/changelog.d/9229.bugfix b/changelog.d/9229.bugfix
deleted file mode 100644
index 3ed32291de..0000000000
--- a/changelog.d/9229.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a bug where `None` was passed to Synapse modules instead of an empty dictionary if an empty module `config` block was provided in the homeserver config.
\ No newline at end of file
diff --git a/changelog.d/9235.bugfix b/changelog.d/9235.bugfix
deleted file mode 100644
index 7809c8673b..0000000000
--- a/changelog.d/9235.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a bug in the `make_room_admin` admin API where it failed if the admin with the greatest power level was not in the room. Contributed by Pankaj Yadav.
diff --git a/changelog.d/9240.misc b/changelog.d/9240.misc
new file mode 100644
index 0000000000..850201f6cd
--- /dev/null
+++ b/changelog.d/9240.misc
@@ -0,0 +1 @@
+Deny access to additional IP addresses by default.
diff --git a/changelog.d/9244.doc b/changelog.d/9244.doc
deleted file mode 100644
index 2ad81429fc..0000000000
--- a/changelog.d/9244.doc
+++ /dev/null
@@ -1 +0,0 @@
-Add notes on integrating with Facebook for SSO login.
diff --git a/changelog.d/9257.bugfix b/changelog.d/9257.bugfix
new file mode 100644
index 0000000000..5d0bd88dce
--- /dev/null
+++ b/changelog.d/9257.bugfix
@@ -0,0 +1 @@
+Fix long-standing bug where sending email push would fail for rooms that the server had since left.
diff --git a/changelog.d/9291.doc b/changelog.d/9291.doc
new file mode 100644
index 0000000000..422acd3891
--- /dev/null
+++ b/changelog.d/9291.doc
@@ -0,0 +1 @@
+Add note to `auto_join_rooms` config option explaining existing rooms must be publicly joinable.
diff --git a/changelog.d/9296.bugfix b/changelog.d/9296.bugfix
new file mode 100644
index 0000000000..d723f8c5bd
--- /dev/null
+++ b/changelog.d/9296.bugfix
@@ -0,0 +1 @@
+Fix bug in Synapse 1.27.0rc1 which meant the "session expired" error page during SSO registration was badly formatted.
diff --git a/changelog.d/9297.feature b/changelog.d/9297.feature
new file mode 100644
index 0000000000..a2d0b27da4
--- /dev/null
+++ b/changelog.d/9297.feature
@@ -0,0 +1 @@
+Further improvements to the user experience of registration via single sign-on.
diff --git a/changelog.d/9299.misc b/changelog.d/9299.misc
new file mode 100644
index 0000000000..c883a677ed
--- /dev/null
+++ b/changelog.d/9299.misc
@@ -0,0 +1 @@
+Update the `Cursor` type hints to better match PEP 249.
diff --git a/changelog.d/9300.feature b/changelog.d/9300.feature
new file mode 100644
index 0000000000..a2d0b27da4
--- /dev/null
+++ b/changelog.d/9300.feature
@@ -0,0 +1 @@
+Further improvements to the user experience of registration via single sign-on.
diff --git a/changelog.d/9301.feature b/changelog.d/9301.feature
new file mode 100644
index 0000000000..a2d0b27da4
--- /dev/null
+++ b/changelog.d/9301.feature
@@ -0,0 +1 @@
+Further improvements to the user experience of registration via single sign-on.
diff --git a/changelog.d/9302.bugfix b/changelog.d/9302.bugfix
new file mode 100644
index 0000000000..c1cdea52a3
--- /dev/null
+++ b/changelog.d/9302.bugfix
@@ -0,0 +1 @@
+Fix new ratelimiting for invites to respect the `ratelimit` flag on application services. Introduced in v1.27.0rc1.
diff --git a/changelog.d/9305.misc b/changelog.d/9305.misc
new file mode 100644
index 0000000000..456bfbfdd7
--- /dev/null
+++ b/changelog.d/9305.misc
@@ -0,0 +1 @@
+Add debug logging for SRV lookups. Contributed by @Bubu.
diff --git a/changelog.d/9307.misc b/changelog.d/9307.misc
new file mode 100644
index 0000000000..2f54d1ad07
--- /dev/null
+++ b/changelog.d/9307.misc
@@ -0,0 +1 @@
+Improve logging for OIDC login flow.
diff --git a/changelog.d/9308.doc b/changelog.d/9308.doc
new file mode 100644
index 0000000000..847f2908af
--- /dev/null
+++ b/changelog.d/9308.doc
@@ -0,0 +1 @@
+Correct name of Synapse's service file in TURN howto.
diff --git a/changelog.d/9310.doc b/changelog.d/9310.doc
new file mode 100644
index 0000000000..f61705b73a
--- /dev/null
+++ b/changelog.d/9310.doc
@@ -0,0 +1 @@
+Clarify the sample configuration for changes made to the template loading code.
diff --git a/changelog.d/9311.feature b/changelog.d/9311.feature
new file mode 100644
index 0000000000..293f2118e5
--- /dev/null
+++ b/changelog.d/9311.feature
@@ -0,0 +1 @@
+Add hook to spam checker modules that allow checking file uploads and remote downloads.
diff --git a/changelog.d/9317.doc b/changelog.d/9317.doc
new file mode 100644
index 0000000000..f4d508e090
--- /dev/null
+++ b/changelog.d/9317.doc
@@ -0,0 +1 @@
+Fix the braces in the `oidc_providers` section of the sample config.
diff --git a/changelog.d/9322.doc b/changelog.d/9322.doc
new file mode 100644
index 0000000000..c393a3a299
--- /dev/null
+++ b/changelog.d/9322.doc
@@ -0,0 +1 @@
+Update installation instructions on Fedora.
diff --git a/debian/build_virtualenv b/debian/build_virtualenv
index cbdde93f96..cf19084a9f 100755
--- a/debian/build_virtualenv
+++ b/debian/build_virtualenv
@@ -33,11 +33,13 @@ esac
 # Use --builtin-venv to use the better `venv` module from CPython 3.4+ rather
 # than the 2/3 compatible `virtualenv`.
 
+# Pin pip to 20.3.4 to fix breakage in 21.0 on py3.5 (xenial)
+
 dh_virtualenv \
     --install-suffix "matrix-synapse" \
     --builtin-venv \
     --python "$SNAKE" \
-    --upgrade-pip \
+    --upgrade-pip-to="20.3.4" \
     --preinstall="lxml" \
     --preinstall="mock" \
     --extra-pip-arg="--no-cache-dir" \
diff --git a/debian/changelog b/debian/changelog
index 57bab70a34..1a421a85bd 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.26.0+nmu1) UNRELEASED; urgency=medium
+
+  * Fix build on Ubuntu 16.04 LTS (Xenial).
+
+ -- Dan Callahan <danc@element.io>  Thu, 28 Jan 2021 16:21:03 +0000
+
 matrix-synapse-py3 (1.26.0) stable; urgency=medium
 
   [ Richard van der Hoff ]
diff --git a/docker/Dockerfile-dhvirtualenv b/docker/Dockerfile-dhvirtualenv
index e529293803..0d74630370 100644
--- a/docker/Dockerfile-dhvirtualenv
+++ b/docker/Dockerfile-dhvirtualenv
@@ -27,6 +27,7 @@ RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
         wget
 
 # fetch and unpack the package
+# TODO: Upgrade to 1.2.2 once xenial is dropped
 RUN mkdir /dh-virtualenv
 RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/spotify/dh-virtualenv/archive/ac6e1b1.tar.gz
 RUN tar -xv --strip-components=1 -C /dh-virtualenv -f /dh-virtualenv.tar.gz
diff --git a/docs/admin_api/rooms.md b/docs/admin_api/rooms.md
index 6d737c5be1..bc737b30f5 100644
--- a/docs/admin_api/rooms.md
+++ b/docs/admin_api/rooms.md
@@ -369,6 +369,36 @@ Response:
 }
 ```
 
+# Room State API
+
+The Room State admin API allows server admins to get a list of all state events in a room.
+
+The response includes the following fields:
+
+* `state` - The current state of the room at the time of request.
+
+## Usage
+
+A standard request:
+
+```
+GET /_synapse/admin/v1/rooms/<room_id>/state
+
+{}
+```
+
+Response:
+
+```json
+{
+  "state": [
+    {"type": "m.room.create", "state_key": "", "etc": true},
+    {"type": "m.room.power_levels", "state_key": "", "etc": true},
+    {"type": "m.room.name", "state_key": "", "etc": true}
+  ]
+}
+```
+
 # Delete Room API
 
 The Delete Room admin API allows server admins to remove rooms from server
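
(Editorial aside, not part of the patch above: a request against the new Room State endpoint might look like the following sketch. The homeserver URL, room ID and token are invented placeholders; admin APIs are authenticated with a server admin's access token.)

```python
import requests
from urllib.parse import quote

HOMESERVER = "https://matrix.example.com"   # placeholder
ROOM_ID = "!abcdefg:example.com"            # placeholder
ADMIN_TOKEN = "REPLACE_WITH_ADMIN_TOKEN"    # placeholder

resp = requests.get(
    f"{HOMESERVER}/_synapse/admin/v1/rooms/{quote(ROOM_ID)}/state",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
)
resp.raise_for_status()

# The response carries a single "state" key holding the room's current state events.
for event in resp.json()["state"]:
    print(event["type"], event.get("state_key", ""))
```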
diff --git a/docs/openid.md b/docs/openid.md
index a73f490dc9..9d19368845 100644
--- a/docs/openid.md
+++ b/docs/openid.md
@@ -54,7 +54,8 @@ Here are a few configs for providers that should work with Synapse.
 ### Microsoft Azure Active Directory
 Azure AD can act as an OpenID Connect Provider. Register a new application under
 *App registrations* in the Azure AD management console. The RedirectURI for your
-application should point to your matrix server: `[synapse public baseurl]/_synapse/oidc/callback`
+application should point to your matrix server:
+`[synapse public baseurl]/_synapse/client/oidc/callback`
 
 Go to *Certificates & secrets* and register a new client secret. Make note of your
 Directory (tenant) ID as it will be used in the Azure links.
@@ -94,7 +95,7 @@ staticClients:
 - id: synapse
   secret: secret
   redirectURIs:
-  - '[synapse public baseurl]/_synapse/oidc/callback'
+  - '[synapse public baseurl]/_synapse/client/oidc/callback'
   name: 'Synapse'
 ```
 
@@ -140,7 +141,7 @@ Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to
 | Enabled | `On` |
 | Client Protocol | `openid-connect` |
 | Access Type | `confidential` |
-| Valid Redirect URIs | `[synapse public baseurl]/_synapse/oidc/callback` |
+| Valid Redirect URIs | `[synapse public baseurl]/_synapse/client/oidc/callback` |
 
 5. Click `Save`
 6. On the Credentials tab, update the fields:
@@ -168,7 +169,7 @@ oidc_providers:
 ### [Auth0][auth0]
 
 1. Create a regular web application for Synapse
-2. Set the Allowed Callback URLs to `[synapse public baseurl]/_synapse/oidc/callback`
+2. Set the Allowed Callback URLs to `[synapse public baseurl]/_synapse/client/oidc/callback`
 3. Add a rule to add the `preferred_username` claim.
    <details>
     <summary>Code sample</summary>
@@ -217,7 +218,7 @@ login mechanism needs an attribute to uniquely identify users, and that endpoint
 does not return a `sub` property, an alternative `subject_claim` has to be set.
 
 1. Create a new OAuth application: https://github.com/settings/applications/new.
-2. Set the callback URL to `[synapse public baseurl]/_synapse/oidc/callback`.
+2. Set the callback URL to `[synapse public baseurl]/_synapse/client/oidc/callback`.
 
 Synapse config:
 
@@ -225,6 +226,7 @@ Synapse config:
 oidc_providers:
   - idp_id: github
     idp_name: Github
+    idp_brand: "org.matrix.github"  # optional: styling hint for clients
     discover: false
     issuer: "https://github.com/"
     client_id: "your-client-id" # TO BE FILLED
@@ -250,6 +252,7 @@ oidc_providers:
    oidc_providers:
      - idp_id: google
        idp_name: Google
+       idp_brand: "org.matrix.google"  # optional: styling hint for clients
        issuer: "https://accounts.google.com/"
        client_id: "your-client-id" # TO BE FILLED
        client_secret: "your-client-secret" # TO BE FILLED
@@ -260,13 +263,13 @@ oidc_providers:
            display_name_template: "{{ user.name }}"
    ```
 4. Back in the Google console, add this Authorized redirect URI: `[synapse
-   public baseurl]/_synapse/oidc/callback`.
+   public baseurl]/_synapse/client/oidc/callback`.
 
 ### Twitch
 
 1. Setup a developer account on [Twitch](https://dev.twitch.tv/)
 2. Obtain the OAuth 2.0 credentials by [creating an app](https://dev.twitch.tv/console/apps/)
-3. Add this OAuth Redirect URL: `[synapse public baseurl]/_synapse/oidc/callback`
+3. Add this OAuth Redirect URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
 
 Synapse config:
 
@@ -288,7 +291,7 @@ oidc_providers:
 
 1. Create a [new application](https://gitlab.com/profile/applications).
 2. Add the `read_user` and `openid` scopes.
-3. Add this Callback URL: `[synapse public baseurl]/_synapse/oidc/callback`
+3. Add this Callback URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
 
 Synapse config:
 
@@ -296,6 +299,7 @@ Synapse config:
 oidc_providers:
   - idp_id: gitlab
     idp_name: Gitlab
+    idp_brand: "org.matrix.gitlab"  # optional: styling hint for clients
     issuer: "https://gitlab.com/"
     client_id: "your-client-id" # TO BE FILLED
     client_secret: "your-client-secret" # TO BE FILLED
@@ -320,7 +324,7 @@ one so requires a little more configuration.
 2. Once the app is created, add "Facebook Login" and choose "Web". You don't
    need to go through the whole form here.
 3. In the left-hand menu, open "Products"/"Facebook Login"/"Settings".
-   * Add `[synapse public baseurl]/_synapse/oidc/callback` as an OAuth Redirect
+   * Add `[synapse public baseurl]/_synapse/client/oidc/callback` as an OAuth Redirect
      URL.
 4. In the left-hand menu, open "Settings/Basic". Here you can copy the "App ID"
    and "App Secret" for use below.
@@ -350,3 +354,37 @@ Relevant documents:
  * https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow
  * Using Facebook's Graph API: https://developers.facebook.com/docs/graph-api/using-graph-api/
  * Reference to the User endpoint: https://developers.facebook.com/docs/graph-api/reference/user
+
+### Gitea
+
+Gitea is, like Github, not an OpenID provider, but just an OAuth2 provider.
+
+The [`/user` API endpoint](https://try.gitea.io/api/swagger#/user/userGetCurrent)
+can be used to retrieve information on the authenticated user. As the Synapse
+login mechanism needs an attribute to uniquely identify users, and that endpoint
+does not return a `sub` property, an alternative `subject_claim` has to be set.
+
+1. Create a new application.
+2. Add this Callback URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
+
+Synapse config:
+
+```yaml
+oidc_providers:
+  - idp_id: gitea
+    idp_name: Gitea
+    discover: false
+    issuer: "https://your-gitea.com/"
+    client_id: "your-client-id" # TO BE FILLED
+    client_secret: "your-client-secret" # TO BE FILLED
+    client_auth_method: client_secret_post
+    scopes: [] # Gitea doesn't support Scopes
+    authorization_endpoint: "https://your-gitea.com/login/oauth/authorize"
+    token_endpoint: "https://your-gitea.com/login/oauth/access_token"
+    userinfo_endpoint: "https://your-gitea.com/api/v1/user"
+    user_mapping_provider:
+      config:
+        subject_claim: "id"
+        localpart_template: "{{ user.login }}"
+        display_name_template: "{{ user.full_name }}" 
+```
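
(Editorial aside, not part of the patch above: a quick way to see what the `user_mapping_provider` templates in the Gitea config would produce. The sample userinfo values are invented; only the keys referenced by the config are shown.)

```python
from jinja2 import Template

# Hypothetical payload from Gitea's /api/v1/user endpoint (values invented).
userinfo = {"id": 42, "login": "alice", "full_name": "Alice Example"}

localpart = Template("{{ user.login }}").render(user=userinfo)
display_name = Template("{{ user.full_name }}").render(user=userinfo)

print(localpart)     # alice -> used as the MXID localpart
print(display_name)  # Alice Example
```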
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml
index c2ccd68f3a..fbbf71edd9 100644
--- a/docs/sample_config.yaml
+++ b/docs/sample_config.yaml
@@ -169,6 +169,7 @@ pid_file: DATADIR/homeserver.pid
 #  - '100.64.0.0/10'
 #  - '192.0.0.0/24'
 #  - '169.254.0.0/16'
+#  - '192.88.99.0/24'
 #  - '198.18.0.0/15'
 #  - '192.0.2.0/24'
 #  - '198.51.100.0/24'
@@ -177,6 +178,9 @@ pid_file: DATADIR/homeserver.pid
 #  - '::1/128'
 #  - 'fe80::/10'
 #  - 'fc00::/7'
+#  - '2001:db8::/32'
+#  - 'ff00::/8'
+#  - 'fec0::/10'
 
 # List of IP address CIDR ranges that should be allowed for federation,
 # identity servers, push servers, and for checking key validity for
@@ -824,6 +828,9 @@ log_config: "CONFDIR/SERVERNAME.log.config"
 #     users are joining rooms the server is already in (this is cheap) vs
 #     "remote" for when users are trying to join rooms not on the server (which
 #     can be more expensive)
+#   - one for ratelimiting how often a user or IP can attempt to validate a 3PID.
+#   - two for ratelimiting how often invites can be sent in a room or to a
+#     specific user.
 #
 # The defaults are as shown below.
 #
@@ -857,7 +864,18 @@ log_config: "CONFDIR/SERVERNAME.log.config"
 #  remote:
 #    per_second: 0.01
 #    burst_count: 3
-
+#
+#rc_3pid_validation:
+#  per_second: 0.003
+#  burst_count: 5
+#
+#rc_invites:
+#  per_room:
+#    per_second: 0.3
+#    burst_count: 10
+#  per_user:
+#    per_second: 0.003
+#    burst_count: 5
 
 # Ratelimiting settings for incoming federation
 #
@@ -980,6 +998,7 @@ media_store_path: "DATADIR/media_store"
 #  - '100.64.0.0/10'
 #  - '192.0.0.0/24'
 #  - '169.254.0.0/16'
+#  - '192.88.99.0/24'
 #  - '198.18.0.0/15'
 #  - '192.0.2.0/24'
 #  - '198.51.100.0/24'
@@ -988,6 +1007,9 @@ media_store_path: "DATADIR/media_store"
 #  - '::1/128'
 #  - 'fe80::/10'
 #  - 'fc00::/7'
+#  - '2001:db8::/32'
+#  - 'ff00::/8'
+#  - 'fec0::/10'
 
 # List of IP address CIDR ranges that the URL preview spider is allowed
 # to access even if they are specified in url_preview_ip_range_blacklist.
@@ -1306,6 +1328,8 @@ account_threepid_delegates:
 # By default, any room aliases included in this list will be created
 # as a publicly joinable room when the first user registers for the
 # homeserver. This behaviour can be customised with the settings below.
+# If the room already exists, make certain it is a publicly joinable
+# room. The join rule of the room must be set to 'public'.
 #
 #auto_join_rooms:
 #  - "#example:example.com"
@@ -1552,10 +1576,10 @@ trusted_key_servers:
 # enable SAML login.
 #
 # Once SAML support is enabled, a metadata file will be exposed at
-# https://<server>:<port>/_matrix/saml2/metadata.xml, which you may be able to
+# https://<server>:<port>/_synapse/client/saml2/metadata.xml, which you may be able to
 # use to configure your SAML IdP with. Alternatively, you can manually configure
 # the IdP to use an ACS location of
-# https://<server>:<port>/_matrix/saml2/authn_response.
+# https://<server>:<port>/_synapse/client/saml2/authn_response.
 #
 saml2_config:
   # `sp_config` is the configuration for the pysaml2 Service Provider.
@@ -1727,10 +1751,14 @@ saml2_config:
 #       offer the user a choice of login mechanisms.
 #
 #   idp_icon: An optional icon for this identity provider, which is presented
-#       by identity picker pages. If given, must be an MXC URI of the format
-#       mxc://<server-name>/<media-id>. (An easy way to obtain such an MXC URI
-#       is to upload an image to an (unencrypted) room and then copy the "url"
-#       from the source of the event.)
+#       by clients and Synapse's own IdP picker page. If given, must be an
+#       MXC URI of the format mxc://<server-name>/<media-id>. (An easy way to
+#       obtain such an MXC URI is to upload an image to an (unencrypted) room
+#       and then copy the "url" from the source of the event.)
+#
+#   idp_brand: An optional brand for this identity provider, allowing clients
+#       to style the login flow according to the identity provider in question.
+#       See the spec for possible options here.
 #
 #   discover: set to 'false' to disable the use of the OIDC discovery mechanism
 #       to discover endpoints. Defaults to true.
@@ -1791,17 +1819,21 @@ saml2_config:
 #
 #           For the default provider, the following settings are available:
 #
-#             sub: name of the claim containing a unique identifier for the
-#                 user. Defaults to 'sub', which OpenID Connect compliant
-#                 providers should provide.
+#             subject_claim: name of the claim containing a unique identifier
+#                 for the user. Defaults to 'sub', which OpenID Connect
+#                 compliant providers should provide.
 #
 #             localpart_template: Jinja2 template for the localpart of the MXID.
 #                 If this is not set, the user will be prompted to choose their
-#                 own username.
+#                 own username (see 'sso_auth_account_details.html' in the 'sso'
+#                 section of this file).
 #
 #             display_name_template: Jinja2 template for the display name to set
 #                 on first login. If unset, no displayname will be set.
 #
+#             email_template: Jinja2 template for the email address of the user.
+#                 If unset, no email address will be added to the account.
+#
 #             extra_attributes: a map of Jinja2 templates for extra attributes
 #                 to send back to the client during login.
 #                 Note that these are non-standard and clients will ignore them
@@ -1837,6 +1869,12 @@ oidc_providers:
   #  userinfo_endpoint: "https://accounts.example.com/userinfo"
   #  jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
   #  skip_verification: true
+  #  user_mapping_provider:
+  #    config:
+  #      subject_claim: "id"
+  #      localpart_template: "{{ user.login }}"
+  #      display_name_template: "{{ user.name }}"
+  #      email_template: "{{ user.email }}"
 
   # For use with Keycloak
   #
@@ -1851,6 +1889,7 @@ oidc_providers:
   #
   #- idp_id: github
   #  idp_name: Github
+  #  idp_brand: org.matrix.github
   #  discover: false
   #  issuer: "https://github.com/"
   #  client_id: "your-client-id" # TO BE FILLED
@@ -1862,8 +1901,8 @@ oidc_providers:
   #  user_mapping_provider:
   #    config:
   #      subject_claim: "id"
-  #      localpart_template: "{ user.login }"
-  #      display_name_template: "{ user.name }"
+  #      localpart_template: "{{ user.login }}"
+  #      display_name_template: "{{ user.name }}"
 
 
 # Enable Central Authentication Service (CAS) for registration and login.
@@ -1932,15 +1971,19 @@ sso:
     #
     #   When rendering, this template is given the following variables:
     #     * redirect_url: the URL that the user will be redirected to after
-    #       login. Needs manual escaping (see
-    #       https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+    #       login.
     #
     #     * server_name: the homeserver's name.
     #
     #     * providers: a list of available Identity Providers. Each element is
     #       an object with the following attributes:
+    #
     #         * idp_id: unique identifier for the IdP
     #         * idp_name: user-facing name for the IdP
+    #         * idp_icon: if specified in the IdP config, an MXC URI for an icon
+    #              for the IdP
+    #         * idp_brand: if specified in the IdP config, a textual identifier
+    #              for the brand of the IdP
     #
     #   The rendered HTML page should contain a form which submits its results
     #   back as a GET request, with the following query parameters:
@@ -1950,33 +1993,101 @@ sso:
     #
     #     * idp: the 'idp_id' of the chosen IDP.
     #
+    # * HTML page to prompt new users to enter a userid and confirm other
+    #   details: 'sso_auth_account_details.html'. This is only shown if the
+    #   SSO implementation (with any user_mapping_provider) does not return
+    #   a localpart.
+    #
+    #   When rendering, this template is given the following variables:
+    #
+    #     * server_name: the homeserver's name.
+    #
+    #     * idp: details of the SSO Identity Provider that the user logged in
+    #       with: an object with the following attributes:
+    #
+    #         * idp_id: unique identifier for the IdP
+    #         * idp_name: user-facing name for the IdP
+    #         * idp_icon: if specified in the IdP config, an MXC URI for an icon
+    #              for the IdP
+    #         * idp_brand: if specified in the IdP config, a textual identifier
+    #              for the brand of the IdP
+    #
+    #     * user_attributes: an object containing details about the user that
+    #       we received from the IdP. May have the following attributes:
+    #
+    #         * display_name: the user's display_name
+    #         * emails: a list of email addresses
+    #
+    #   The template should render a form which submits the following fields:
+    #
+    #     * username: the localpart of the user's chosen user id
+    #
+    # * HTML page allowing the user to consent to the server's terms and
+    #   conditions: 'sso_new_user_consent.html'. This is only shown for new
+    #   users, and only if `user_consent.require_at_registration` is set.
+    #
+    #   When rendering, this template is given the following variables:
+    #
+    #     * server_name: the homeserver's name.
+    #
+    #     * user_id: the user's proposed matrix ID.
+    #
+    #     * user_profile.display_name: the user's proposed display name, if any.
+    #
+    #     * consent_version: the version of the terms that the user will be
+    #       shown
+    #
+    #     * terms_url: a link to the page showing the terms.
+    #
+    #   The template should render a form which submits the following fields:
+    #
+    #     * accepted_version: the version of the terms accepted by the user
+    #       (ie, 'consent_version' from the input variables).
+    #
     # * HTML page for a confirmation step before redirecting back to the client
     #   with the login token: 'sso_redirect_confirm.html'.
     #
-    #   When rendering, this template is given three variables:
-    #     * redirect_url: the URL the user is about to be redirected to. Needs
-    #                     manual escaping (see
-    #                     https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+    #   When rendering, this template is given the following variables:
+    #
+    #     * redirect_url: the URL the user is about to be redirected to.
     #
     #     * display_url: the same as `redirect_url`, but with the query
     #                    parameters stripped. The intention is to have a
     #                    human-readable URL to show to users, not to use it as
-    #                    the final address to redirect to. Needs manual escaping
-    #                    (see https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+    #                    the final address to redirect to.
     #
     #     * server_name: the homeserver's name.
     #
+    #     * new_user: a boolean indicating whether this is the user's first time
+    #          logging in.
+    #
+    #     * user_id: the user's matrix ID.
+    #
+    #     * user_profile.avatar_url: an MXC URI for the user's avatar, if any.
+    #           None if the user has not set an avatar.
+    #
+    #     * user_profile.display_name: the user's display name. None if the user
+    #           has not set a display name.
+    #
     # * HTML page which notifies the user that they are authenticating to confirm
     #   an operation on their account during the user interactive authentication
     #   process: 'sso_auth_confirm.html'.
     #
     #   When rendering, this template is given the following variables:
-    #     * redirect_url: the URL the user is about to be redirected to. Needs
-    #                     manual escaping (see
-    #                     https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+    #     * redirect_url: the URL the user is about to be redirected to.
     #
     #     * description: the operation which the user is being asked to confirm
     #
+    #     * idp: details of the Identity Provider that we will use to confirm
+    #       the user's identity: an object with the following attributes:
+    #
+    #         * idp_id: unique identifier for the IdP
+    #         * idp_name: user-facing name for the IdP
+    #         * idp_icon: if specified in the IdP config, an MXC URI for an icon
+    #              for the IdP
+    #         * idp_brand: if specified in the IdP config, a textual identifier
+    #              for the brand of the IdP
+    #
     # * HTML page shown after a successful user interactive authentication session:
     #   'sso_auth_success.html'.
     #
diff --git a/docs/spam_checker.md b/docs/spam_checker.md
index 5b4f6428e6..47a27bf85c 100644
--- a/docs/spam_checker.md
+++ b/docs/spam_checker.md
@@ -61,6 +61,9 @@ class ExampleSpamChecker:
 
     async def check_registration_for_spam(self, email_threepid, username, request_info):
         return RegistrationBehaviour.ALLOW  # allow all registrations
+
+    async def check_media_file_for_spam(self, file_wrapper, file_info):
+        return False  # allow all media
 ```
 
 ## Configuration
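
(Editorial aside, not part of the patch above: a hedged sketch of how the new `check_media_file_for_spam` callback might be used. Only the method signature and its return contract shown in the diff are taken as given; the `length` attribute read from `file_info` below is an assumption and may be named differently in your Synapse version.)

```python
import logging

logger = logging.getLogger(__name__)

# Illustrative threshold; a real module would read this from its config block.
MAX_MEDIA_BYTES = 10 * 1024 * 1024


class ExampleMediaSpamChecker:
    async def check_media_file_for_spam(self, file_wrapper, file_info):
        # Returning True marks the file as spam (the upload or remote download
        # is rejected); returning False allows it, matching the example above.
        size = getattr(file_info, "length", None)  # assumed attribute name
        if size is not None and size > MAX_MEDIA_BYTES:
            logger.info("Rejecting media of %d bytes as spam", size)
            return True
        return False
```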
diff --git a/docs/turn-howto.md b/docs/turn-howto.md
index e8f13ad484..41738bbe69 100644
--- a/docs/turn-howto.md
+++ b/docs/turn-howto.md
@@ -187,7 +187,7 @@ After updating the homeserver configuration, you must restart synapse:
     ```
   * If you use systemd:
     ```
-    systemctl restart synapse.service
+    systemctl restart matrix-synapse.service
     ```
 ... and then reload any clients (or wait an hour for them to refresh their
 settings).
diff --git a/docs/workers.md b/docs/workers.md
index d2927d95a6..f7fc6df119 100644
--- a/docs/workers.md
+++ b/docs/workers.md
@@ -228,7 +228,6 @@ expressions:
     ^/_matrix/client/(api/v1|r0|unstable)/joined_groups$
     ^/_matrix/client/(api/v1|r0|unstable)/publicised_groups$
     ^/_matrix/client/(api/v1|r0|unstable)/publicised_groups/
-    ^/_synapse/client/password_reset/email/submit_token$
 
     # Registration/login requests
     ^/_matrix/client/(api/v1|r0|unstable)/login$
@@ -259,25 +258,29 @@ Additionally, the following endpoints should be included if Synapse is configure
 to use SSO (you only need to include the ones for whichever SSO provider you're
 using):
 
+    # for all SSO providers
+    ^/_matrix/client/(api/v1|r0|unstable)/login/sso/redirect
+    ^/_synapse/client/pick_idp$
+    ^/_synapse/client/pick_username
+    ^/_synapse/client/new_user_consent$
+    ^/_synapse/client/sso_register$
+
     # OpenID Connect requests.
-    ^/_matrix/client/(api/v1|r0|unstable)/login/sso/redirect$
-    ^/_synapse/oidc/callback$
+    ^/_synapse/client/oidc/callback$
 
     # SAML requests.
-    ^/_matrix/client/(api/v1|r0|unstable)/login/sso/redirect$
-    ^/_matrix/saml2/authn_response$
+    ^/_synapse/client/saml2/authn_response$
 
     # CAS requests.
-    ^/_matrix/client/(api/v1|r0|unstable)/login/(cas|sso)/redirect$
     ^/_matrix/client/(api/v1|r0|unstable)/login/cas/ticket$
 
-Note that a HTTP listener with `client` and `federation` resources must be
-configured in the `worker_listeners` option in the worker config.
-
-Ensure that all SSO logins go to a single process (usually the main process).
+Ensure that all SSO logins go to a single process.
 For multiple workers not handling the SSO endpoints properly, see
 [#7530](https://github.com/matrix-org/synapse/issues/7530).
 
+Note that a HTTP listener with `client` and `federation` resources must be
+configured in the `worker_listeners` option in the worker config.
+
 #### Load balancing
 
 It is possible to run multiple instances of this worker app, with incoming requests
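
(Editorial aside, not part of the patch above: a small sketch for sanity-checking reverse-proxy routing rules against the updated SSO patterns. The two regular expressions are copied from the list in the workers.md hunk; the request paths are illustrative.)

```python
import re

WORKER_SSO_PATTERNS = [
    r"^/_synapse/client/oidc/callback$",
    r"^/_synapse/client/saml2/authn_response$",
]


def routable_to_worker(path: str) -> bool:
    """Return True if the request path matches one of the worker patterns."""
    return any(re.match(pattern, path) for pattern in WORKER_SSO_PATTERNS)


print(routable_to_worker("/_synapse/client/oidc/callback"))  # True (new path)
print(routable_to_worker("/_synapse/oidc/callback"))         # False (pre-1.27 path)
```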
diff --git a/mypy.ini b/mypy.ini
index f3700d323c..68a4533973 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -23,47 +23,7 @@ files =
   synapse/events/validator.py,
   synapse/events/spamcheck.py,
   synapse/federation,
-  synapse/handlers/_base.py,
-  synapse/handlers/account_data.py,
-  synapse/handlers/account_validity.py,
-  synapse/handlers/acme.py,
-  synapse/handlers/acme_issuing_service.py,
-  synapse/handlers/admin.py,
-  synapse/handlers/appservice.py,
-  synapse/handlers/auth.py,
-  synapse/handlers/cas_handler.py,
-  synapse/handlers/deactivate_account.py,
-  synapse/handlers/device.py,
-  synapse/handlers/devicemessage.py,
-  synapse/handlers/directory.py,
-  synapse/handlers/events.py,
-  synapse/handlers/federation.py,
-  synapse/handlers/groups_local.py,
-  synapse/handlers/identity.py,
-  synapse/handlers/initial_sync.py,
-  synapse/handlers/message.py,
-  synapse/handlers/oidc_handler.py,
-  synapse/handlers/pagination.py,
-  synapse/handlers/password_policy.py,
-  synapse/handlers/presence.py,
-  synapse/handlers/profile.py,
-  synapse/handlers/read_marker.py,
-  synapse/handlers/receipts.py,
-  synapse/handlers/register.py,
-  synapse/handlers/room.py,
-  synapse/handlers/room_list.py,
-  synapse/handlers/room_member.py,
-  synapse/handlers/room_member_worker.py,
-  synapse/handlers/saml_handler.py,
-  synapse/handlers/search.py,
-  synapse/handlers/set_password.py,
-  synapse/handlers/sso.py,
-  synapse/handlers/state_deltas.py,
-  synapse/handlers/stats.py,
-  synapse/handlers/sync.py,
-  synapse/handlers/typing.py,
-  synapse/handlers/user_directory.py,
-  synapse/handlers/ui_auth,
+  synapse/handlers,
   synapse/http/client.py,
   synapse/http/federation/matrix_federation_agent.py,
   synapse/http/federation/well_known_resolver.py,
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 93601dbad0..06b3820be5 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -48,7 +48,7 @@ try:
 except ImportError:
     pass
 
-__version__ = "1.26.0"
+__version__ = "1.27.0rc1"
 
 if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
     # We import here so that we don't have to install a bunch of deps when
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 395e202b89..9840a9d55b 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -16,6 +16,7 @@
 import gc
 import logging
 import os
+import platform
 import signal
 import socket
 import sys
@@ -339,7 +340,7 @@ async def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerCon
     # rest of time. Doing so means less work each GC (hopefully).
     #
     # This only works on Python 3.7
-    if sys.version_info >= (3, 7):
+    if platform.python_implementation() == "CPython" and sys.version_info >= (3, 7):
         gc.collect()
         gc.freeze()
 
diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py
index e60988fa4a..516f2464b4 100644
--- a/synapse/app/generic_worker.py
+++ b/synapse/app/generic_worker.py
@@ -22,6 +22,7 @@ from typing import Dict, Iterable, Optional, Set
 from typing_extensions import ContextManager
 
 from twisted.internet import address
+from twisted.web.resource import IResource
 
 import synapse
 import synapse.events
@@ -90,9 +91,8 @@ from synapse.replication.tcp.streams import (
     ToDeviceStream,
 )
 from synapse.rest.admin import register_servlets_for_media_repo
-from synapse.rest.client.v1 import events, room
+from synapse.rest.client.v1 import events, login, room
 from synapse.rest.client.v1.initial_sync import InitialSyncRestServlet
-from synapse.rest.client.v1.login import LoginRestServlet
 from synapse.rest.client.v1.profile import (
     ProfileAvatarURLRestServlet,
     ProfileDisplaynameRestServlet,
@@ -127,6 +127,7 @@ from synapse.rest.client.v2_alpha.sendtodevice import SendToDeviceRestServlet
 from synapse.rest.client.versions import VersionsRestServlet
 from synapse.rest.health import HealthResource
 from synapse.rest.key.v2 import KeyApiV2Resource
+from synapse.rest.synapse.client import build_synapse_client_resource_tree
 from synapse.server import HomeServer, cache_in_self
 from synapse.storage.databases.main.censor_events import CensorEventsStore
 from synapse.storage.databases.main.client_ips import ClientIpWorkerStore
@@ -507,7 +508,7 @@ class GenericWorkerServer(HomeServer):
             site_tag = port
 
         # We always include a health resource.
-        resources = {"/health": HealthResource()}
+        resources = {"/health": HealthResource()}  # type: Dict[str, IResource]
 
         for res in listener_config.http_options.resources:
             for name in res.names:
@@ -517,7 +518,7 @@ class GenericWorkerServer(HomeServer):
                     resource = JsonResource(self, canonical_json=False)
 
                     RegisterRestServlet(self).register(resource)
-                    LoginRestServlet(self).register(resource)
+                    login.register_servlets(self, resource)
                     ThreepidRestServlet(self).register(resource)
                     DevicesRestServlet(self).register(resource)
                     KeyQueryServlet(self).register(resource)
@@ -557,6 +558,8 @@ class GenericWorkerServer(HomeServer):
                     groups.register_servlets(self, resource)
 
                     resources.update({CLIENT_API_PREFIX: resource})
+
+                    resources.update(build_synapse_client_resource_tree(self))
                 elif name == "federation":
                     resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
                 elif name == "media":
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 57a2f5237c..244657cb88 100644
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -60,8 +60,7 @@ from synapse.rest import ClientRestResource
 from synapse.rest.admin import AdminRestResource
 from synapse.rest.health import HealthResource
 from synapse.rest.key.v2 import KeyApiV2Resource
-from synapse.rest.synapse.client.pick_idp import PickIdpResource
-from synapse.rest.synapse.client.pick_username import pick_username_resource
+from synapse.rest.synapse.client import build_synapse_client_resource_tree
 from synapse.rest.well_known import WellKnownResource
 from synapse.server import HomeServer
 from synapse.storage import DataStore
@@ -190,21 +189,10 @@ class SynapseHomeServer(HomeServer):
                     "/_matrix/client/versions": client_resource,
                     "/.well-known/matrix/client": WellKnownResource(self),
                     "/_synapse/admin": AdminRestResource(self),
-                    "/_synapse/client/pick_username": pick_username_resource(self),
-                    "/_synapse/client/pick_idp": PickIdpResource(self),
+                    **build_synapse_client_resource_tree(self),
                 }
             )
 
-            if self.get_config().oidc_enabled:
-                from synapse.rest.oidc import OIDCResource
-
-                resources["/_synapse/oidc"] = OIDCResource(self)
-
-            if self.get_config().saml2_enabled:
-                from synapse.rest.saml2 import SAML2Resource
-
-                resources["/_matrix/saml2"] = SAML2Resource(self)
-
             if self.get_config().threepid_behaviour_email == ThreepidBehaviour.LOCAL:
                 from synapse.rest.synapse.client.password_reset import (
                     PasswordResetSubmitTokenResource,
diff --git a/synapse/app/phone_stats_home.py b/synapse/app/phone_stats_home.py
index c38cf8231f..8f86cecb76 100644
--- a/synapse/app/phone_stats_home.py
+++ b/synapse/app/phone_stats_home.py
@@ -93,15 +93,20 @@ async def phone_stats_home(hs, stats, stats_process=_stats_process):
 
     stats["daily_active_users"] = await hs.get_datastore().count_daily_users()
     stats["monthly_active_users"] = await hs.get_datastore().count_monthly_users()
+    daily_active_e2ee_rooms = await hs.get_datastore().count_daily_active_e2ee_rooms()
+    stats["daily_active_e2ee_rooms"] = daily_active_e2ee_rooms
+    stats["daily_e2ee_messages"] = await hs.get_datastore().count_daily_e2ee_messages()
+    daily_sent_e2ee_messages = await hs.get_datastore().count_daily_sent_e2ee_messages()
+    stats["daily_sent_e2ee_messages"] = daily_sent_e2ee_messages
     stats["daily_active_rooms"] = await hs.get_datastore().count_daily_active_rooms()
     stats["daily_messages"] = await hs.get_datastore().count_daily_messages()
+    daily_sent_messages = await hs.get_datastore().count_daily_sent_messages()
+    stats["daily_sent_messages"] = daily_sent_messages
 
     r30_results = await hs.get_datastore().count_r30_users()
     for name, count in r30_results.items():
         stats["r30_users_" + name] = count
 
-    daily_sent_messages = await hs.get_datastore().count_daily_sent_messages()
-    stats["daily_sent_messages"] = daily_sent_messages
     stats["cache_factor"] = hs.config.caches.global_factor
     stats["event_cache_size"] = hs.config.caches.event_cache_size
 
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index 6a0768ce00..a851f8801d 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -18,18 +18,18 @@
 import argparse
 import errno
 import os
-import time
-import urllib.parse
 from collections import OrderedDict
 from hashlib import sha256
 from textwrap import dedent
-from typing import Any, Callable, Iterable, List, MutableMapping, Optional
+from typing import Any, Iterable, List, MutableMapping, Optional
 
 import attr
 import jinja2
 import pkg_resources
 import yaml
 
+from synapse.util.templates import _create_mxc_to_http_filter, _format_ts_filter
+
 
 class ConfigError(Exception):
     """Represents a problem parsing the configuration
@@ -262,6 +262,7 @@ class Config:
             # Search the custom template directory as well
             search_directories.insert(0, custom_template_directory)
 
+        # TODO: switch to synapse.util.templates.build_jinja_env
         loader = jinja2.FileSystemLoader(search_directories)
         env = jinja2.Environment(loader=loader, autoescape=jinja2.select_autoescape(),)
 
@@ -277,38 +278,6 @@ class Config:
         return [env.get_template(filename) for filename in filenames]
 
 
-def _format_ts_filter(value: int, format: str):
-    return time.strftime(format, time.localtime(value / 1000))
-
-
-def _create_mxc_to_http_filter(public_baseurl: str) -> Callable:
-    """Create and return a jinja2 filter that converts MXC urls to HTTP
-
-    Args:
-        public_baseurl: The public, accessible base URL of the homeserver
-    """
-
-    def mxc_to_http_filter(value, width, height, resize_method="crop"):
-        if value[0:6] != "mxc://":
-            return ""
-
-        server_and_media_id = value[6:]
-        fragment = None
-        if "#" in server_and_media_id:
-            server_and_media_id, fragment = server_and_media_id.split("#", 1)
-            fragment = "#" + fragment
-
-        params = {"width": width, "height": height, "method": resize_method}
-        return "%s_matrix/media/v1/thumbnail/%s?%s%s" % (
-            public_baseurl,
-            server_and_media_id,
-            urllib.parse.urlencode(params),
-            fragment or "",
-        )
-
-    return mxc_to_http_filter
-
-
 class RootConfig:
     """
     Holder of an application's configuration.
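(For reference, assuming the relocated helpers in `synapse.util.templates` keep the behaviour shown in the code removed above, their output looks roughly like this; the base URL and timestamp are invented for illustration:)

    # mxc_to_http_filter("mxc://example.com/abc123", 32, 32), with a
    # public_baseurl of "https://matrix.example.com/", yields:
    #   https://matrix.example.com/_matrix/media/v1/thumbnail/example.com/abc123?width=32&height=32&method=crop
    #
    # _format_ts_filter(1612345678000, "%Y-%m-%d") formats the millisecond
    # timestamp via time.strftime in the local timezone, e.g. "2021-02-03".
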
diff --git a/synapse/config/_base.pyi b/synapse/config/_base.pyi
index 7ed07a801d..70025b5d60 100644
--- a/synapse/config/_base.pyi
+++ b/synapse/config/_base.pyi
@@ -54,7 +54,7 @@ class RootConfig:
     tls: tls.TlsConfig
     database: database.DatabaseConfig
     logging: logger.LoggingConfig
-    ratelimit: ratelimiting.RatelimitConfig
+    ratelimiting: ratelimiting.RatelimitConfig
     media: repository.ContentRepositoryConfig
     captcha: captcha.CaptchaConfig
     voip: voip.VoipConfig
diff --git a/synapse/config/oidc_config.py b/synapse/config/oidc_config.py
index 0162d7f7b0..9d8196d8c3 100644
--- a/synapse/config/oidc_config.py
+++ b/synapse/config/oidc_config.py
@@ -14,7 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import string
 from collections import Counter
 from typing import Iterable, Optional, Tuple, Type
 
@@ -54,7 +53,7 @@ class OIDCConfig(Config):
                     "Multiple OIDC providers have the idp_id %r." % idp_id
                 )
 
-        self.oidc_callback_url = self.public_baseurl + "_synapse/oidc/callback"
+        self.oidc_callback_url = self.public_baseurl + "_synapse/client/oidc/callback"
 
     @property
     def oidc_enabled(self) -> bool:
@@ -78,10 +77,14 @@ class OIDCConfig(Config):
         #       offer the user a choice of login mechanisms.
         #
         #   idp_icon: An optional icon for this identity provider, which is presented
-        #       by identity picker pages. If given, must be an MXC URI of the format
-        #       mxc://<server-name>/<media-id>. (An easy way to obtain such an MXC URI
-        #       is to upload an image to an (unencrypted) room and then copy the "url"
-        #       from the source of the event.)
+        #       by clients and Synapse's own IdP picker page. If given, must be an
+        #       MXC URI of the format mxc://<server-name>/<media-id>. (An easy way to
+        #       obtain such an MXC URI is to upload an image to an (unencrypted) room
+        #       and then copy the "url" from the source of the event.)
+        #
+        #   idp_brand: An optional brand for this identity provider, allowing clients
+        #       to style the login flow according to the identity provider in question.
+        #       See the spec for possible options here.
         #
         #   discover: set to 'false' to disable the use of the OIDC discovery mechanism
         #       to discover endpoints. Defaults to true.
@@ -142,17 +145,21 @@ class OIDCConfig(Config):
         #
         #           For the default provider, the following settings are available:
         #
-        #             sub: name of the claim containing a unique identifier for the
-        #                 user. Defaults to 'sub', which OpenID Connect compliant
-        #                 providers should provide.
+        #             subject_claim: name of the claim containing a unique identifier
+        #                 for the user. Defaults to 'sub', which OpenID Connect
+        #                 compliant providers should provide.
         #
         #             localpart_template: Jinja2 template for the localpart of the MXID.
         #                 If this is not set, the user will be prompted to choose their
-        #                 own username.
+        #                 own username (see 'sso_auth_account_details.html' in the 'sso'
+        #                 section of this file).
         #
         #             display_name_template: Jinja2 template for the display name to set
         #                 on first login. If unset, no displayname will be set.
         #
+        #             email_template: Jinja2 template for the email address of the user.
+        #                 If unset, no email address will be added to the account.
+        #
         #             extra_attributes: a map of Jinja2 templates for extra attributes
         #                 to send back to the client during login.
         #                 Note that these are non-standard and clients will ignore them
@@ -188,6 +195,12 @@ class OIDCConfig(Config):
           #  userinfo_endpoint: "https://accounts.example.com/userinfo"
           #  jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
           #  skip_verification: true
+          #  user_mapping_provider:
+          #    config:
+          #      subject_claim: "id"
+          #      localpart_template: "{{{{ user.login }}}}"
+          #      display_name_template: "{{{{ user.name }}}}"
+          #      email_template: "{{{{ user.email }}}}"
 
           # For use with Keycloak
           #
@@ -202,6 +215,7 @@ class OIDCConfig(Config):
           #
           #- idp_id: github
           #  idp_name: Github
+          #  idp_brand: org.matrix.github
           #  discover: false
           #  issuer: "https://github.com/"
           #  client_id: "your-client-id" # TO BE FILLED
@@ -213,8 +227,8 @@ class OIDCConfig(Config):
           #  user_mapping_provider:
           #    config:
           #      subject_claim: "id"
-          #      localpart_template: "{{ user.login }}"
-          #      display_name_template: "{{ user.name }}"
+          #      localpart_template: "{{{{ user.login }}}}"
+          #      display_name_template: "{{{{ user.name }}}}"
         """.format(
             mapping_provider=DEFAULT_USER_MAPPING_PROVIDER
         )
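(The quadruple braces in the sample configuration above are deliberate: this string is later passed through `str.format()` via the `.format(mapping_provider=...)` call, which consumes one level of brace escaping. A quick standard-library illustration:)

    >>> 'localpart_template: "{{{{ user.login }}}}"'.format()
    'localpart_template: "{{ user.login }}"'

so the emitted sample config shows the literal Jinja2 placeholder `{{ user.login }}` that operators actually write.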
@@ -225,11 +239,22 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
     "type": "object",
     "required": ["issuer", "client_id", "client_secret"],
     "properties": {
-        # TODO: fix the maxLength here depending on what MSC2528 decides
-        #   remember that we prefix the ID given here with `oidc-`
-        "idp_id": {"type": "string", "minLength": 1, "maxLength": 128},
+        "idp_id": {
+            "type": "string",
+            "minLength": 1,
+            # MSC2858 allows a maxlen of 255, but we prefix with "oidc-"
+            "maxLength": 250,
+            "pattern": "^[A-Za-z0-9._~-]+$",
+        },
         "idp_name": {"type": "string"},
         "idp_icon": {"type": "string"},
+        "idp_brand": {
+            "type": "string",
+            # MSC2758-style namespaced identifier
+            "minLength": 1,
+            "maxLength": 255,
+            "pattern": "^[a-z][a-z0-9_.-]*$",
+        },
         "discover": {"type": "boolean"},
         "issuer": {"type": "string"},
         "client_id": {"type": "string"},
@@ -348,25 +373,8 @@ def _parse_oidc_config_dict(
             config_path + ("user_mapping_provider", "module"),
         )
 
-    # MSC2858 will apply certain limits in what can be used as an IdP id, so let's
-    # enforce those limits now.
-    # TODO: factor out this stuff to a generic function
     idp_id = oidc_config.get("idp_id", "oidc")
 
-    # TODO: update this validity check based on what MSC2858 decides.
-    valid_idp_chars = set(string.ascii_lowercase + string.digits + "-._")
-
-    if any(c not in valid_idp_chars for c in idp_id):
-        raise ConfigError(
-            'idp_id may only contain a-z, 0-9, "-", ".", "_"',
-            config_path + ("idp_id",),
-        )
-
-    if idp_id[0] not in string.ascii_lowercase:
-        raise ConfigError(
-            "idp_id must start with a-z", config_path + ("idp_id",),
-        )
-
     # prefix the given IDP with a prefix specific to the SSO mechanism, to avoid
     # clashes with other mechs (such as SAML, CAS).
     #
@@ -392,6 +400,7 @@ def _parse_oidc_config_dict(
         idp_id=idp_id,
         idp_name=oidc_config.get("idp_name", "OIDC"),
         idp_icon=idp_icon,
+        idp_brand=oidc_config.get("idp_brand"),
         discover=oidc_config.get("discover", True),
         issuer=oidc_config["issuer"],
         client_id=oidc_config["client_id"],
@@ -422,6 +431,9 @@ class OidcProviderConfig:
     # Optional MXC URI for icon for this IdP.
     idp_icon = attr.ib(type=Optional[str])
 
+    # Optional brand identifier for this IdP.
+    idp_brand = attr.ib(type=Optional[str])
+
     # whether the OIDC discovery mechanism is used to discover endpoints
     discover = attr.ib(type=bool)
 
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index 14b8836197..def33a60ad 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -24,7 +24,7 @@ class RateLimitConfig:
         defaults={"per_second": 0.17, "burst_count": 3.0},
     ):
         self.per_second = config.get("per_second", defaults["per_second"])
-        self.burst_count = config.get("burst_count", defaults["burst_count"])
+        self.burst_count = int(config.get("burst_count", defaults["burst_count"]))
 
 
 class FederationRateLimitConfig:
@@ -102,6 +102,20 @@ class RatelimitConfig(Config):
             defaults={"per_second": 0.01, "burst_count": 3},
         )
 
+        self.rc_3pid_validation = RateLimitConfig(
+            config.get("rc_3pid_validation") or {},
+            defaults={"per_second": 0.003, "burst_count": 5},
+        )
+
+        self.rc_invites_per_room = RateLimitConfig(
+            config.get("rc_invites", {}).get("per_room", {}),
+            defaults={"per_second": 0.3, "burst_count": 10},
+        )
+        self.rc_invites_per_user = RateLimitConfig(
+            config.get("rc_invites", {}).get("per_user", {}),
+            defaults={"per_second": 0.003, "burst_count": 5},
+        )
+
     def generate_config_section(self, **kwargs):
         return """\
         ## Ratelimiting ##
@@ -131,6 +145,9 @@ class RatelimitConfig(Config):
         #     users are joining rooms the server is already in (this is cheap) vs
         #     "remote" for when users are trying to join rooms not on the server (which
         #     can be more expensive)
+        #   - one for ratelimiting how often a user or IP can attempt to validate a 3PID.
+        #   - two for ratelimiting how often invites can be sent in a room or to a
+        #     specific user.
         #
         # The defaults are as shown below.
         #
@@ -164,7 +181,18 @@ class RatelimitConfig(Config):
         #  remote:
         #    per_second: 0.01
         #    burst_count: 3
-
+        #
+        #rc_3pid_validation:
+        #  per_second: 0.003
+        #  burst_count: 5
+        #
+        #rc_invites:
+        #  per_room:
+        #    per_second: 0.3
+        #    burst_count: 10
+        #  per_user:
+        #    per_second: 0.003
+        #    burst_count: 5
 
         # Ratelimiting settings for incoming federation
         #
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index ac48913a0b..afb3e0b2a1 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -378,6 +378,8 @@ class RegistrationConfig(Config):
         # By default, any room aliases included in this list will be created
         # as a publicly joinable room when the first user registers for the
         # homeserver. This behaviour can be customised with the settings below.
+        # If the room already exists, make certain it is a publicly joinable
+        # room. The join rule of the room must be set to 'public'.
         #
         #auto_join_rooms:
         #  - "#example:example.com"
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index 850ac3ebd6..fcaea8fb93 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -17,9 +17,7 @@ import os
 from collections import namedtuple
 from typing import Dict, List
 
-from netaddr import IPSet
-
-from synapse.config.server import DEFAULT_IP_RANGE_BLACKLIST
+from synapse.config.server import DEFAULT_IP_RANGE_BLACKLIST, generate_ip_set
 from synapse.python_dependencies import DependencyException, check_requirements
 from synapse.util.module_loader import load_module
 
@@ -187,16 +185,17 @@ class ContentRepositoryConfig(Config):
                     "to work"
                 )
 
-            self.url_preview_ip_range_blacklist = IPSet(
-                config["url_preview_ip_range_blacklist"]
-            )
-
             # we always blacklist '0.0.0.0' and '::', which are supposed to be
             # unroutable addresses.
-            self.url_preview_ip_range_blacklist.update(["0.0.0.0", "::"])
+            self.url_preview_ip_range_blacklist = generate_ip_set(
+                config["url_preview_ip_range_blacklist"],
+                ["0.0.0.0", "::"],
+                config_path=("url_preview_ip_range_blacklist",),
+            )
 
-            self.url_preview_ip_range_whitelist = IPSet(
-                config.get("url_preview_ip_range_whitelist", ())
+            self.url_preview_ip_range_whitelist = generate_ip_set(
+                config.get("url_preview_ip_range_whitelist", ()),
+                config_path=("url_preview_ip_range_whitelist",),
             )
 
             self.url_preview_url_blacklist = config.get("url_preview_url_blacklist", ())
diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py
index f33dfa0d6a..ad865a667f 100644
--- a/synapse/config/saml2_config.py
+++ b/synapse/config/saml2_config.py
@@ -194,8 +194,8 @@ class SAML2Config(Config):
             optional_attributes.add(self.saml2_grandfathered_mxid_source_attribute)
         optional_attributes -= required_attributes
 
-        metadata_url = public_baseurl + "_matrix/saml2/metadata.xml"
-        response_url = public_baseurl + "_matrix/saml2/authn_response"
+        metadata_url = public_baseurl + "_synapse/client/saml2/metadata.xml"
+        response_url = public_baseurl + "_synapse/client/saml2/authn_response"
         return {
             "entityid": metadata_url,
             "service": {
@@ -233,10 +233,10 @@ class SAML2Config(Config):
         # enable SAML login.
         #
         # Once SAML support is enabled, a metadata file will be exposed at
-        # https://<server>:<port>/_matrix/saml2/metadata.xml, which you may be able to
+        # https://<server>:<port>/_synapse/client/saml2/metadata.xml, which you may be able to
         # use to configure your SAML IdP with. Alternatively, you can manually configure
         # the IdP to use an ACS location of
-        # https://<server>:<port>/_matrix/saml2/authn_response.
+        # https://<server>:<port>/_synapse/client/saml2/authn_response.
         #
         saml2_config:
           # `sp_config` is the configuration for the pysaml2 Service Provider.
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 47a0370173..b5e82ba3d0 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -15,6 +15,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import itertools
 import logging
 import os.path
 import re
@@ -23,7 +24,7 @@ from typing import Any, Dict, Iterable, List, Optional, Set
 
 import attr
 import yaml
-from netaddr import IPSet
+from netaddr import AddrFormatError, IPNetwork, IPSet
 
 from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
 from synapse.util.stringutils import parse_and_validate_server_name
@@ -40,6 +41,66 @@ logger = logging.Logger(__name__)
 # in the list.
 DEFAULT_BIND_ADDRESSES = ["::", "0.0.0.0"]
 
+
+def _6to4(network: IPNetwork) -> IPNetwork:
+    """Convert an IPv4 network into a 6to4 IPv6 network per RFC 3056."""
+
+    # 6to4 networks consist of:
+    # * 2002 as the first 16 bits
+    # * The first IPv4 address in the network hex-encoded as the next 32 bits
+    # * The new prefix length needs to include the bits from the 2002 prefix.
+    hex_network = hex(network.first)[2:]
+    hex_network = ("0" * (8 - len(hex_network))) + hex_network
+    return IPNetwork(
+        "2002:%s:%s::/%d" % (hex_network[:4], hex_network[4:], 16 + network.prefixlen,)
+    )
+
+
+def generate_ip_set(
+    ip_addresses: Optional[Iterable[str]],
+    extra_addresses: Optional[Iterable[str]] = None,
+    config_path: Optional[Iterable[str]] = None,
+) -> IPSet:
+    """
+    Generate an IPSet from a list of IP addresses or CIDRs.
+
+    Additionally, for each IPv4 network in the list of IP addresses, also
+    includes the corresponding IPv6 networks.
+
+    This includes:
+
+    * IPv4-Compatible IPv6 Address (see RFC 4291, section 2.5.5.1)
+    * IPv4-Mapped IPv6 Address (see RFC 4291, section 2.5.5.2)
+    * 6to4 Address (see RFC 3056, section 2)
+
+    Args:
+        ip_addresses: An iterable of IP addresses or CIDRs.
+        extra_addresses: An iterable of IP addresses or CIDRs.
+        config_path: The path in the configuration for error messages.
+
+    Returns:
+        A new IP set.
+    """
+    result = IPSet()
+    for ip in itertools.chain(ip_addresses or (), extra_addresses or ()):
+        try:
+            network = IPNetwork(ip)
+        except AddrFormatError as e:
+            raise ConfigError(
+                "Invalid IP range provided: %s." % (ip,), config_path
+            ) from e
+        result.add(network)
+
+        # It is possible that these already exist in the set, but that's OK.
+        if ":" not in str(network):
+            result.add(IPNetwork(network).ipv6(ipv4_compatible=True))
+            result.add(IPNetwork(network).ipv6(ipv4_compatible=False))
+            result.add(_6to4(network))
+
+    return result
+
+
+# IP ranges that are considered private / unroutable / don't make sense.
 DEFAULT_IP_RANGE_BLACKLIST = [
     # Localhost
     "127.0.0.0/8",
@@ -53,6 +114,8 @@ DEFAULT_IP_RANGE_BLACKLIST = [
     "192.0.0.0/24",
     # Link-local networks.
     "169.254.0.0/16",
+    # Formerly used for 6to4 relay.
+    "192.88.99.0/24",
     # Testing networks.
     "198.18.0.0/15",
     "192.0.2.0/24",
@@ -66,6 +129,12 @@ DEFAULT_IP_RANGE_BLACKLIST = [
     "fe80::/10",
     # Unique local addresses.
     "fc00::/7",
+    # Testing networks.
+    "2001:db8::/32",
+    # Multicast.
+    "ff00::/8",
+    # Site-local addresses
+    "fec0::/10",
 ]
 
 DEFAULT_ROOM_VERSION = "6"
@@ -294,17 +363,15 @@ class ServerConfig(Config):
         )
 
         # Attempt to create an IPSet from the given ranges
-        try:
-            self.ip_range_blacklist = IPSet(ip_range_blacklist)
-        except Exception as e:
-            raise ConfigError("Invalid range(s) provided in ip_range_blacklist.") from e
+
         # Always blacklist 0.0.0.0, ::
-        self.ip_range_blacklist.update(["0.0.0.0", "::"])
+        self.ip_range_blacklist = generate_ip_set(
+            ip_range_blacklist, ["0.0.0.0", "::"], config_path=("ip_range_blacklist",)
+        )
 
-        try:
-            self.ip_range_whitelist = IPSet(config.get("ip_range_whitelist", ()))
-        except Exception as e:
-            raise ConfigError("Invalid range(s) provided in ip_range_whitelist.") from e
+        self.ip_range_whitelist = generate_ip_set(
+            config.get("ip_range_whitelist", ()), config_path=("ip_range_whitelist",)
+        )
 
         # The federation_ip_range_blacklist is used for backwards-compatibility
         # and only applies to federation and identity servers. If it is not given,
@@ -312,14 +379,12 @@ class ServerConfig(Config):
         federation_ip_range_blacklist = config.get(
             "federation_ip_range_blacklist", ip_range_blacklist
         )
-        try:
-            self.federation_ip_range_blacklist = IPSet(federation_ip_range_blacklist)
-        except Exception as e:
-            raise ConfigError(
-                "Invalid range(s) provided in federation_ip_range_blacklist."
-            ) from e
         # Always blacklist 0.0.0.0, ::
-        self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
+        self.federation_ip_range_blacklist = generate_ip_set(
+            federation_ip_range_blacklist,
+            ["0.0.0.0", "::"],
+            config_path=("federation_ip_range_blacklist",),
+        )
 
         self.start_pushers = config.get("start_pushers", True)
 
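(A short usage sketch of the new `generate_ip_set` helper; it assumes only that `netaddr` is installed, and the 10.0.0.0/8 range is illustrative rather than taken from this changeset:)

    from netaddr import IPAddress
    from synapse.config.server import generate_ip_set

    blacklist = generate_ip_set(
        ["10.0.0.0/8"],                    # ranges from the config file
        ["0.0.0.0", "::"],                 # always-blocked addresses, as above
        config_path=("ip_range_blacklist",),
    )

    # Each IPv4 range is mirrored into its IPv6 forms, so v4-mapped and 6to4
    # addresses that decode back into 10.0.0.0/8 are also caught:
    assert IPAddress("::ffff:10.1.2.3") in blacklist
    assert IPAddress("2002:a01:203::1") in blacklist
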
diff --git a/synapse/config/sso.py b/synapse/config/sso.py
index 59be825532..6c60c6fea4 100644
--- a/synapse/config/sso.py
+++ b/synapse/config/sso.py
@@ -27,7 +27,7 @@ class SSOConfig(Config):
         sso_config = config.get("sso") or {}  # type: Dict[str, Any]
 
         # The sso-specific template_dir
-        template_dir = sso_config.get("template_dir")
+        self.sso_template_dir = sso_config.get("template_dir")
 
         # Read templates from disk
         (
@@ -48,7 +48,7 @@ class SSOConfig(Config):
                 "sso_auth_success.html",
                 "sso_auth_bad_user.html",
             ],
-            template_dir,
+            self.sso_template_dir,
         )
 
         # These templates have no placeholders, so render them here
@@ -106,15 +106,19 @@ class SSOConfig(Config):
             #
             #   When rendering, this template is given the following variables:
             #     * redirect_url: the URL that the user will be redirected to after
-            #       login. Needs manual escaping (see
-            #       https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+            #       login.
             #
             #     * server_name: the homeserver's name.
             #
             #     * providers: a list of available Identity Providers. Each element is
             #       an object with the following attributes:
+            #
             #         * idp_id: unique identifier for the IdP
             #         * idp_name: user-facing name for the IdP
+            #         * idp_icon: if specified in the IdP config, an MXC URI for an icon
+            #              for the IdP
+            #         * idp_brand: if specified in the IdP config, a textual identifier
+            #              for the brand of the IdP
             #
             #   The rendered HTML page should contain a form which submits its results
             #   back as a GET request, with the following query parameters:
@@ -124,33 +128,101 @@ class SSOConfig(Config):
             #
             #     * idp: the 'idp_id' of the chosen IDP.
             #
+            # * HTML page to prompt new users to enter a userid and confirm other
+            #   details: 'sso_auth_account_details.html'. This is only shown if the
+            #   SSO implementation (with any user_mapping_provider) does not return
+            #   a localpart.
+            #
+            #   When rendering, this template is given the following variables:
+            #
+            #     * server_name: the homeserver's name.
+            #
+            #     * idp: details of the SSO Identity Provider that the user logged in
+            #       with: an object with the following attributes:
+            #
+            #         * idp_id: unique identifier for the IdP
+            #         * idp_name: user-facing name for the IdP
+            #         * idp_icon: if specified in the IdP config, an MXC URI for an icon
+            #              for the IdP
+            #         * idp_brand: if specified in the IdP config, a textual identifier
+            #              for the brand of the IdP
+            #
+            #     * user_attributes: an object containing details about the user that
+            #       we received from the IdP. May have the following attributes:
+            #
+            #         * display_name: the user's display_name
+            #         * emails: a list of email addresses
+            #
+            #   The template should render a form which submits the following fields:
+            #
+            #     * username: the localpart of the user's chosen user id
+            #
+            # * HTML page allowing the user to consent to the server's terms and
+            #   conditions. This is only shown for new users, and only if
+            #   `user_consent.require_at_registration` is set.
+            #
+            #   When rendering, this template is given the following variables:
+            #
+            #     * server_name: the homeserver's name.
+            #
+            #     * user_id: the user's proposed Matrix ID.
+            #
+            #     * user_profile.display_name: the user's proposed display name, if any.
+            #
+            #     * consent_version: the version of the terms that the user will be
+            #       shown
+            #
+            #     * terms_url: a link to the page showing the terms.
+            #
+            #   The template should render a form which submits the following fields:
+            #
+            #     * accepted_version: the version of the terms accepted by the user
+            #       (ie, 'consent_version' from the input variables).
+            #
             # * HTML page for a confirmation step before redirecting back to the client
             #   with the login token: 'sso_redirect_confirm.html'.
             #
-            #   When rendering, this template is given three variables:
-            #     * redirect_url: the URL the user is about to be redirected to. Needs
-            #                     manual escaping (see
-            #                     https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+            #   When rendering, this template is given the following variables:
+            #
+            #     * redirect_url: the URL the user is about to be redirected to.
             #
             #     * display_url: the same as `redirect_url`, but with the query
             #                    parameters stripped. The intention is to have a
             #                    human-readable URL to show to users, not to use it as
-            #                    the final address to redirect to. Needs manual escaping
-            #                    (see https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+            #                    the final address to redirect to.
             #
             #     * server_name: the homeserver's name.
             #
+            #     * new_user: a boolean indicating whether this is the user's first time
+            #          logging in.
+            #
+            #     * user_id: the user's matrix ID.
+            #
+            #     * user_profile.avatar_url: an MXC URI for the user's avatar, if any.
+            #           None if the user has not set an avatar.
+            #
+            #     * user_profile.display_name: the user's display name. None if the user
+            #           has not set a display name.
+            #
             # * HTML page which notifies the user that they are authenticating to confirm
             #   an operation on their account during the user interactive authentication
             #   process: 'sso_auth_confirm.html'.
             #
             #   When rendering, this template is given the following variables:
-            #     * redirect_url: the URL the user is about to be redirected to. Needs
-            #                     manual escaping (see
-            #                     https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+            #     * redirect_url: the URL the user is about to be redirected to.
             #
             #     * description: the operation which the user is being asked to confirm
             #
+            #     * idp: details of the Identity Provider that we will use to confirm
+            #       the user's identity: an object with the following attributes:
+            #
+            #         * idp_id: unique identifier for the IdP
+            #         * idp_name: user-facing name for the IdP
+            #         * idp_icon: if specified in the IdP config, an MXC URI for an icon
+            #              for the IdP
+            #         * idp_brand: if specified in the IdP config, a textual identifier
+            #              for the brand of the IdP
+            #
             # * HTML page shown after a successful user interactive authentication session:
             #   'sso_auth_success.html'.
             #
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py
index 74b67b230a..14b21796d9 100644
--- a/synapse/crypto/context_factory.py
+++ b/synapse/crypto/context_factory.py
@@ -125,19 +125,24 @@ class FederationPolicyForHTTPS:
         self._no_verify_ssl_context = _no_verify_ssl.getContext()
         self._no_verify_ssl_context.set_info_callback(_context_info_cb)
 
-    def get_options(self, host: bytes):
+        self._should_verify = self._config.federation_verify_certificates
+
+        self._federation_certificate_verification_whitelist = (
+            self._config.federation_certificate_verification_whitelist
+        )
 
+    def get_options(self, host: bytes):
         # IPolicyForHTTPS.get_options takes bytes, but we want to compare
         # against the str whitelist. The hostnames in the whitelist are already
         # IDNA-encoded like the hosts will be here.
         ascii_host = host.decode("ascii")
 
         # Check if certificate verification has been enabled
-        should_verify = self._config.federation_verify_certificates
+        should_verify = self._should_verify
 
         # Check if we've disabled certificate verification for this host
-        if should_verify:
-            for regex in self._config.federation_certificate_verification_whitelist:
+        if self._should_verify:
+            for regex in self._federation_certificate_verification_whitelist:
                 if regex.match(ascii_host):
                     should_verify = False
                     break
diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py
index e7e3a7b9a4..8cfc0bb3cb 100644
--- a/synapse/events/spamcheck.py
+++ b/synapse/events/spamcheck.py
@@ -17,6 +17,8 @@
 import inspect
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
 
+from synapse.rest.media.v1._base import FileInfo
+from synapse.rest.media.v1.media_storage import ReadableFileWrapper
 from synapse.spam_checker_api import RegistrationBehaviour
 from synapse.types import Collection
 from synapse.util.async_helpers import maybe_awaitable
@@ -214,3 +216,48 @@ class SpamChecker:
                     return behaviour
 
         return RegistrationBehaviour.ALLOW
+
+    async def check_media_file_for_spam(
+        self, file_wrapper: ReadableFileWrapper, file_info: FileInfo
+    ) -> bool:
+        """Checks if a piece of newly uploaded media should be blocked.
+
+        This will be called for local uploads, downloads of remote media, each
+        thumbnail generated for those, and web pages/images used for URL
+        previews.
+
+        Note that care should be taken to not do blocking IO operations in the
+        main thread. For example, to get the contents of a file a module
+        should do::
+
+            async def check_media_file_for_spam(
+                self, file: ReadableFileWrapper, file_info: FileInfo
+            ) -> bool:
+                buffer = BytesIO()
+                await file.write_chunks_to(buffer.write)
+
+                if buffer.getvalue() == b"Hello World":
+                    return True
+
+                return False
+
+
+        Args:
+            file: An object that allows reading the contents of the media.
+            file_info: Metadata about the file.
+
+        Returns:
+            True if the media should be blocked or False if it should be
+            allowed.
+        """
+
+        for spam_checker in self.spam_checkers:
+            # For backwards compatibility, only run if the method exists on the
+            # spam checker
+            checker = getattr(spam_checker, "check_media_file_for_spam", None)
+            if checker:
+                spam = await maybe_awaitable(checker(file_wrapper, file_info))
+                if spam:
+                    return True
+
+        return False
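(Putting the new hook in context: a hypothetical module wired up through the existing `spam_checker` configuration might look like the sketch below. The module name and config key are invented; only `check_media_file_for_spam` and the `write_chunks_to` pattern come from the code above.)

    # my_synapse_checks.py -- hypothetical spam-checker module
    import hashlib
    from io import BytesIO

    class MediaHashChecker:
        def __init__(self, config, api=None):
            self._blocked = set(config.get("blocked_sha256", []))

        @staticmethod
        def parse_config(config):
            return config or {}

        async def check_media_file_for_spam(self, file_wrapper, file_info):
            # Pull the media into memory and compare its digest to a blocklist.
            buffer = BytesIO()
            await file_wrapper.write_chunks_to(buffer.write)
            digest = hashlib.sha256(buffer.getvalue()).hexdigest()
            return digest in self._blocked   # True means "block this media"

and, in homeserver.yaml (again illustrative):

    spam_checker:
      - module: "my_synapse_checks.MediaHashChecker"
        config:
          blocked_sha256: []
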
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index d330ae5dbc..40e1451201 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -810,7 +810,7 @@ class FederationClient(FederationBase):
                         "User's homeserver does not support this room version",
                         Codes.UNSUPPORTED_ROOM_VERSION,
                     )
-            elif e.code == 403:
+            elif e.code in (403, 429):
                 raise e.to_synapse_error()
             else:
                 raise
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 0e98db22b3..648fe91f53 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -61,6 +61,7 @@ from synapse.http.site import SynapseRequest
 from synapse.logging.context import defer_to_thread
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.module_api import ModuleApi
+from synapse.storage.roommember import ProfileInfo
 from synapse.types import JsonDict, Requester, UserID
 from synapse.util import stringutils as stringutils
 from synapse.util.async_helpers import maybe_awaitable
@@ -567,16 +568,6 @@ class AuthHandler(BaseHandler):
                         session.session_id, login_type, result
                     )
             except LoginError as e:
-                if login_type == LoginType.EMAIL_IDENTITY:
-                    # riot used to have a bug where it would request a new
-                    # validation token (thus sending a new email) each time it
-                    # got a 401 with a 'flows' field.
-                    # (https://github.com/vector-im/vector-web/issues/2447).
-                    #
-                    # Grandfather in the old behaviour for now to avoid
-                    # breaking old riot deployments.
-                    raise
-
                 # this step failed. Merge the error dict into the response
                 # so that the client can have another go.
                 errordict = e.error_dict()
@@ -1387,7 +1378,9 @@ class AuthHandler(BaseHandler):
         )
 
         return self._sso_auth_confirm_template.render(
-            description=session.description, redirect_url=redirect_url,
+            description=session.description,
+            redirect_url=redirect_url,
+            idp=sso_auth_provider,
         )
 
     async def complete_sso_login(
@@ -1396,6 +1389,7 @@ class AuthHandler(BaseHandler):
         request: Request,
         client_redirect_url: str,
         extra_attributes: Optional[JsonDict] = None,
+        new_user: bool = False,
     ):
         """Having figured out a mxid for this user, complete the HTTP request
 
@@ -1406,6 +1400,8 @@ class AuthHandler(BaseHandler):
                 process.
             extra_attributes: Extra attributes which will be passed to the client
                 during successful login. Must be JSON serializable.
+            new_user: True if we should use wording appropriate to a user who has just
+                registered.
         """
         # If the account has been deactivated, do not proceed with the login
         # flow.
@@ -1414,8 +1410,17 @@ class AuthHandler(BaseHandler):
             respond_with_html(request, 403, self._sso_account_deactivated_template)
             return
 
+        profile = await self.store.get_profileinfo(
+            UserID.from_string(registered_user_id).localpart
+        )
+
         self._complete_sso_login(
-            registered_user_id, request, client_redirect_url, extra_attributes
+            registered_user_id,
+            request,
+            client_redirect_url,
+            extra_attributes,
+            new_user=new_user,
+            user_profile_data=profile,
         )
 
     def _complete_sso_login(
@@ -1424,12 +1429,18 @@ class AuthHandler(BaseHandler):
         request: Request,
         client_redirect_url: str,
         extra_attributes: Optional[JsonDict] = None,
+        new_user: bool = False,
+        user_profile_data: Optional[ProfileInfo] = None,
     ):
         """
         The synchronous portion of complete_sso_login.
 
         This exists purely for backwards compatibility of synapse.module_api.ModuleApi.
         """
+
+        if user_profile_data is None:
+            user_profile_data = ProfileInfo(None, None)
+
         # Store any extra attributes which will be passed in the login response.
         # Note that this is per-user so it may overwrite a previous value, this
         # is considered OK since the newest SSO attributes should be most valid.
@@ -1461,12 +1472,27 @@ class AuthHandler(BaseHandler):
         # Remove the query parameters from the redirect URL to get a shorter version of
         # it. This is only to display a human-readable URL in the template, but not the
         # URL we redirect users to.
-        redirect_url_no_params = client_redirect_url.split("?")[0]
+        url_parts = urllib.parse.urlsplit(client_redirect_url)
+
+        if url_parts.scheme == "https":
+            # for an https uri, just show the netloc (i.e. the hostname:
+            # specifically, the bit between "//" and "/", which includes any
+            # potential "username:password@" prefix).
+            display_url = url_parts.netloc
+        else:
+            # for other uris, strip the query-params (including the login token) and
+            # fragment.
+            display_url = urllib.parse.urlunsplit(
+                (url_parts.scheme, url_parts.netloc, url_parts.path, "", "")
+            )
 
         html = self._sso_redirect_confirm_template.render(
-            display_url=redirect_url_no_params,
+            display_url=display_url,
             redirect_url=redirect_url,
             server_name=self._server_name,
+            new_user=new_user,
+            user_id=registered_user_id,
+            user_profile=user_profile_data,
         )
         respond_with_html(request, 200, html)
 
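(A hand-worked illustration of the new `display_url` behaviour above: https URLs show only the host, while other schemes keep scheme, host and path but lose the query string and fragment.)

    # client_redirect_url = "https://app.example.com/?loginToken=secret"
    #   -> display_url == "app.example.com"
    # client_redirect_url = "myapp://callback?loginToken=secret"
    #   -> display_url == "myapp://callback"
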
diff --git a/synapse/handlers/cas_handler.py b/synapse/handlers/cas_handler.py
index 21b6bc4992..bd35d1fb87 100644
--- a/synapse/handlers/cas_handler.py
+++ b/synapse/handlers/cas_handler.py
@@ -80,9 +80,10 @@ class CasHandler:
         # user-facing name of this auth provider
         self.idp_name = "CAS"
 
-        # we do not currently support icons for CAS auth, but this is required by
+        # we do not currently support brands/icons for CAS auth, but this is required by
         # the SsoIdentityProvider protocol type.
         self.idp_icon = None
+        self.idp_brand = None
 
         self._sso_handler = hs.get_sso_handler()
 
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index debb1b4f29..0863154f7a 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -15,7 +15,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
-from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Set, Tuple
+from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Set, Tuple
 
 from synapse.api import errors
 from synapse.api.constants import EventTypes
@@ -62,7 +62,7 @@ class DeviceWorkerHandler(BaseHandler):
         self._auth_handler = hs.get_auth_handler()
 
     @trace
-    async def get_devices_by_user(self, user_id: str) -> List[Dict[str, Any]]:
+    async def get_devices_by_user(self, user_id: str) -> List[JsonDict]:
         """
         Retrieve the given user's devices
 
@@ -85,7 +85,7 @@ class DeviceWorkerHandler(BaseHandler):
         return devices
 
     @trace
-    async def get_device(self, user_id: str, device_id: str) -> Dict[str, Any]:
+    async def get_device(self, user_id: str, device_id: str) -> JsonDict:
         """ Retrieve the given device
 
         Args:
@@ -598,7 +598,7 @@ class DeviceHandler(DeviceWorkerHandler):
 
 
 def _update_device_from_client_ips(
-    device: Dict[str, Any], client_ips: Dict[Tuple[str, str], Dict[str, Any]]
+    device: JsonDict, client_ips: Dict[Tuple[str, str], JsonDict]
 ) -> None:
     ip = client_ips.get((device["user_id"], device["device_id"]), {})
     device.update({"last_seen_ts": ip.get("last_seen"), "last_seen_ip": ip.get("ip")})
@@ -946,8 +946,8 @@ class DeviceListUpdater:
     async def process_cross_signing_key_update(
         self,
         user_id: str,
-        master_key: Optional[Dict[str, Any]],
-        self_signing_key: Optional[Dict[str, Any]],
+        master_key: Optional[JsonDict],
+        self_signing_key: Optional[JsonDict],
     ) -> List[str]:
         """Process the given new master and self-signing key for the given remote user.
 
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index 929752150d..8f3a6b35a4 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -16,7 +16,7 @@
 # limitations under the License.
 
 import logging
-from typing import Dict, List, Optional, Tuple
+from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Tuple
 
 import attr
 from canonicaljson import encode_canonical_json
@@ -31,6 +31,7 @@ from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.logging.opentracing import log_kv, set_tag, tag_args, trace
 from synapse.replication.http.devices import ReplicationUserDevicesResyncRestServlet
 from synapse.types import (
+    JsonDict,
     UserID,
     get_domain_from_id,
     get_verify_key_from_cross_signing_key,
@@ -40,11 +41,14 @@ from synapse.util.async_helpers import Linearizer
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.retryutils import NotRetryingDestination
 
+if TYPE_CHECKING:
+    from synapse.app.homeserver import HomeServer
+
 logger = logging.getLogger(__name__)
 
 
 class E2eKeysHandler:
-    def __init__(self, hs):
+    def __init__(self, hs: "HomeServer"):
         self.store = hs.get_datastore()
         self.federation = hs.get_federation_client()
         self.device_handler = hs.get_device_handler()
@@ -78,7 +82,9 @@ class E2eKeysHandler:
         )
 
     @trace
-    async def query_devices(self, query_body, timeout, from_user_id):
+    async def query_devices(
+        self, query_body: JsonDict, timeout: int, from_user_id: str
+    ) -> JsonDict:
         """ Handle a device key query from a client
 
         {
@@ -98,12 +104,14 @@ class E2eKeysHandler:
         }
 
         Args:
-            from_user_id (str): the user making the query.  This is used when
+            from_user_id: the user making the query.  This is used when
                 adding cross-signing signatures to limit what signatures users
                 can see.
         """
 
-        device_keys_query = query_body.get("device_keys", {})
+        device_keys_query = query_body.get(
+            "device_keys", {}
+        )  # type: Dict[str, Iterable[str]]
 
         # separate users by domain.
         # make a map from domain to user_id to device_ids
@@ -121,7 +129,8 @@ class E2eKeysHandler:
         set_tag("remote_key_query", remote_queries)
 
         # First get local devices.
-        failures = {}
+        # A map of destination -> failure response.
+        failures = {}  # type: Dict[str, JsonDict]
         results = {}
         if local_query:
             local_result = await self.query_local_devices(local_query)
@@ -135,9 +144,10 @@ class E2eKeysHandler:
         )
 
         # Now attempt to get any remote devices from our local cache.
-        remote_queries_not_in_cache = {}
+        # A map of destination -> user ID -> device IDs.
+        remote_queries_not_in_cache = {}  # type: Dict[str, Dict[str, Iterable[str]]]
         if remote_queries:
-            query_list = []
+            query_list = []  # type: List[Tuple[str, Optional[str]]]
             for user_id, device_ids in remote_queries.items():
                 if device_ids:
                     query_list.extend((user_id, device_id) for device_id in device_ids)
@@ -284,15 +294,15 @@ class E2eKeysHandler:
         return ret
 
     async def get_cross_signing_keys_from_cache(
-        self, query, from_user_id
+        self, query: Iterable[str], from_user_id: Optional[str]
     ) -> Dict[str, Dict[str, dict]]:
         """Get cross-signing keys for users from the database
 
         Args:
-            query (Iterable[string]) an iterable of user IDs.  A dict whose keys
+            query: an iterable of user IDs.  A dict whose keys
                 are user IDs satisfies this, so the query format used for
                 query_devices can be used here.
-            from_user_id (str): the user making the query.  This is used when
+            from_user_id: the user making the query.  This is used when
                 adding cross-signing signatures to limit what signatures users
                 can see.
 
@@ -315,14 +325,12 @@ class E2eKeysHandler:
             if "self_signing" in user_info:
                 self_signing_keys[user_id] = user_info["self_signing"]
 
-        if (
-            from_user_id in keys
-            and keys[from_user_id] is not None
-            and "user_signing" in keys[from_user_id]
-        ):
-            # users can see other users' master and self-signing keys, but can
-            # only see their own user-signing keys
-            user_signing_keys[from_user_id] = keys[from_user_id]["user_signing"]
+        # users can see other users' master and self-signing keys, but can
+        # only see their own user-signing keys
+        if from_user_id:
+            from_user_key = keys.get(from_user_id)
+            if from_user_key and "user_signing" in from_user_key:
+                user_signing_keys[from_user_id] = from_user_key["user_signing"]
 
         return {
             "master_keys": master_keys,
@@ -344,9 +352,9 @@ class E2eKeysHandler:
             A map from user_id -> device_id -> device details
         """
         set_tag("local_query", query)
-        local_query = []
+        local_query = []  # type: List[Tuple[str, Optional[str]]]
 
-        result_dict = {}
+        result_dict = {}  # type: Dict[str, Dict[str, dict]]
         for user_id, device_ids in query.items():
             # we use UserID.from_string to catch invalid user ids
             if not self.is_mine(UserID.from_string(user_id)):
@@ -380,10 +388,14 @@ class E2eKeysHandler:
         log_kv(results)
         return result_dict
 
-    async def on_federation_query_client_keys(self, query_body):
+    async def on_federation_query_client_keys(
+        self, query_body: Dict[str, Dict[str, Optional[List[str]]]]
+    ) -> JsonDict:
         """ Handle a device key query from a federated server
         """
-        device_keys_query = query_body.get("device_keys", {})
+        device_keys_query = query_body.get(
+            "device_keys", {}
+        )  # type: Dict[str, Optional[List[str]]]
         res = await self.query_local_devices(device_keys_query)
         ret = {"device_keys": res}
 
@@ -397,31 +409,34 @@ class E2eKeysHandler:
         return ret
 
     @trace
-    async def claim_one_time_keys(self, query, timeout):
-        local_query = []
-        remote_queries = {}
+    async def claim_one_time_keys(
+        self, query: Dict[str, Dict[str, Dict[str, str]]], timeout: int
+    ) -> JsonDict:
+        local_query = []  # type: List[Tuple[str, str, str]]
+        remote_queries = {}  # type: Dict[str, Dict[str, Dict[str, str]]]
 
-        for user_id, device_keys in query.get("one_time_keys", {}).items():
+        for user_id, one_time_keys in query.get("one_time_keys", {}).items():
             # we use UserID.from_string to catch invalid user ids
             if self.is_mine(UserID.from_string(user_id)):
-                for device_id, algorithm in device_keys.items():
+                for device_id, algorithm in one_time_keys.items():
                     local_query.append((user_id, device_id, algorithm))
             else:
                 domain = get_domain_from_id(user_id)
-                remote_queries.setdefault(domain, {})[user_id] = device_keys
+                remote_queries.setdefault(domain, {})[user_id] = one_time_keys
 
         set_tag("local_key_query", local_query)
         set_tag("remote_key_query", remote_queries)
 
         results = await self.store.claim_e2e_one_time_keys(local_query)
 
-        json_result = {}
-        failures = {}
+        # A map of user ID -> device ID -> key ID -> key.
+        json_result = {}  # type: Dict[str, Dict[str, Dict[str, JsonDict]]]
+        failures = {}  # type: Dict[str, JsonDict]
         for user_id, device_keys in results.items():
             for device_id, keys in device_keys.items():
-                for key_id, json_bytes in keys.items():
+                for key_id, json_str in keys.items():
                     json_result.setdefault(user_id, {})[device_id] = {
-                        key_id: json_decoder.decode(json_bytes)
+                        key_id: json_decoder.decode(json_str)
                     }
 
         @trace
@@ -468,7 +483,9 @@ class E2eKeysHandler:
         return {"one_time_keys": json_result, "failures": failures}
 
     @tag_args
-    async def upload_keys_for_user(self, user_id, device_id, keys):
+    async def upload_keys_for_user(
+        self, user_id: str, device_id: str, keys: JsonDict
+    ) -> JsonDict:
 
         time_now = self.clock.time_msec()
 
@@ -543,8 +560,8 @@ class E2eKeysHandler:
         return {"one_time_key_counts": result}
 
     async def _upload_one_time_keys_for_user(
-        self, user_id, device_id, time_now, one_time_keys
-    ):
+        self, user_id: str, device_id: str, time_now: int, one_time_keys: JsonDict
+    ) -> None:
         logger.info(
             "Adding one_time_keys %r for device %r for user %r at %d",
             one_time_keys.keys(),
@@ -585,12 +602,14 @@ class E2eKeysHandler:
         log_kv({"message": "Inserting new one_time_keys.", "keys": new_keys})
         await self.store.add_e2e_one_time_keys(user_id, device_id, time_now, new_keys)
 
-    async def upload_signing_keys_for_user(self, user_id, keys):
+    async def upload_signing_keys_for_user(
+        self, user_id: str, keys: JsonDict
+    ) -> JsonDict:
         """Upload signing keys for cross-signing
 
         Args:
-            user_id (string): the user uploading the keys
-            keys (dict[string, dict]): the signing keys
+            user_id: the user uploading the keys
+            keys: the signing keys
         """
 
         # if a master key is uploaded, then check it.  Otherwise, load the
@@ -667,16 +686,17 @@ class E2eKeysHandler:
 
         return {}
 
-    async def upload_signatures_for_device_keys(self, user_id, signatures):
+    async def upload_signatures_for_device_keys(
+        self, user_id: str, signatures: JsonDict
+    ) -> JsonDict:
         """Upload device signatures for cross-signing
 
         Args:
-            user_id (string): the user uploading the signatures
-            signatures (dict[string, dict[string, dict]]): map of users to
-                devices to signed keys. This is the submission from the user; an
-                exception will be raised if it is malformed.
+            user_id: the user uploading the signatures
+            signatures: map of users to devices to signed keys. This is the submission
+                from the user; an exception will be raised if it is malformed.
         Returns:
-            dict: response to be sent back to the client.  The response will have
+            The response to be sent back to the client.  The response will have
                 a "failures" key, which will be a dict mapping users to devices
                 to errors for the signatures that failed.
         Raises:
@@ -719,7 +739,9 @@ class E2eKeysHandler:
 
         return {"failures": failures}
 
-    async def _process_self_signatures(self, user_id, signatures):
+    async def _process_self_signatures(
+        self, user_id: str, signatures: JsonDict
+    ) -> Tuple[List["SignatureListItem"], Dict[str, Dict[str, dict]]]:
         """Process uploaded signatures of the user's own keys.
 
         Signatures of the user's own keys from this API come in two forms:
@@ -731,15 +753,14 @@ class E2eKeysHandler:
             signatures (dict[string, dict]): map of devices to signed keys
 
         Returns:
-            (list[SignatureListItem], dict[string, dict[string, dict]]):
-            a list of signatures to store, and a map of users to devices to failure
-            reasons
+            A tuple of a list of signatures to store, and a map of users to
+            devices to failure reasons
 
         Raises:
             SynapseError: if the input is malformed
         """
-        signature_list = []
-        failures = {}
+        signature_list = []  # type: List[SignatureListItem]
+        failures = {}  # type: Dict[str, Dict[str, JsonDict]]
         if not signatures:
             return signature_list, failures
 
@@ -834,19 +855,24 @@ class E2eKeysHandler:
         return signature_list, failures
 
     def _check_master_key_signature(
-        self, user_id, master_key_id, signed_master_key, stored_master_key, devices
-    ):
+        self,
+        user_id: str,
+        master_key_id: str,
+        signed_master_key: JsonDict,
+        stored_master_key: JsonDict,
+        devices: Dict[str, Dict[str, JsonDict]],
+    ) -> List["SignatureListItem"]:
         """Check signatures of a user's master key made by their devices.
 
         Args:
-            user_id (string): the user whose master key is being checked
-            master_key_id (string): the ID of the user's master key
-            signed_master_key (dict): the user's signed master key that was uploaded
-            stored_master_key (dict): our previously-stored copy of the user's master key
-            devices (iterable(dict)): the user's devices
+            user_id: the user whose master key is being checked
+            master_key_id: the ID of the user's master key
+            signed_master_key: the user's signed master key that was uploaded
+            stored_master_key: our previously-stored copy of the user's master key
+            devices: the user's devices
 
         Returns:
-            list[SignatureListItem]: a list of signatures to store
+            A list of signatures to store
 
         Raises:
             SynapseError: if a signature is invalid
@@ -877,25 +903,26 @@ class E2eKeysHandler:
 
         return master_key_signature_list
 
-    async def _process_other_signatures(self, user_id, signatures):
+    async def _process_other_signatures(
+        self, user_id: str, signatures: Dict[str, dict]
+    ) -> Tuple[List["SignatureListItem"], Dict[str, Dict[str, dict]]]:
         """Process uploaded signatures of other users' keys.  These will be the
         target user's master keys, signed by the uploading user's user-signing
         key.
 
         Args:
-            user_id (string): the user uploading the keys
-            signatures (dict[string, dict]): map of users to devices to signed keys
+            user_id: the user uploading the keys
+            signatures: map of users to devices to signed keys
 
         Returns:
-            (list[SignatureListItem], dict[string, dict[string, dict]]):
-            a list of signatures to store, and a map of users to devices to failure
+            A list of signatures to store, and a map of users to devices to failure
             reasons
 
         Raises:
             SynapseError: if the input is malformed
         """
-        signature_list = []
-        failures = {}
+        signature_list = []  # type: List[SignatureListItem]
+        failures = {}  # type: Dict[str, Dict[str, JsonDict]]
         if not signatures:
             return signature_list, failures
 
@@ -983,7 +1010,7 @@ class E2eKeysHandler:
 
     async def _get_e2e_cross_signing_verify_key(
         self, user_id: str, key_type: str, from_user_id: str = None
-    ):
+    ) -> Tuple[JsonDict, str, VerifyKey]:
         """Fetch locally or remotely query for a cross-signing public key.
 
         First, attempt to fetch the cross-signing public key from storage.
@@ -997,8 +1024,7 @@ class E2eKeysHandler:
                 This affects what signatures are fetched.
 
         Returns:
-            dict, str, VerifyKey: the raw key data, the key ID, and the
-                signedjson verify key
+            The raw key data, the key ID, and the signedjson verify key
 
         Raises:
             NotFoundError: if the key is not found
@@ -1135,16 +1161,18 @@ class E2eKeysHandler:
         return desired_key, desired_key_id, desired_verify_key
 
 
-def _check_cross_signing_key(key, user_id, key_type, signing_key=None):
+def _check_cross_signing_key(
+    key: JsonDict, user_id: str, key_type: str, signing_key: Optional[VerifyKey] = None
+) -> None:
     """Check a cross-signing key uploaded by a user.  Performs some basic sanity
     checking, and ensures that it is signed, if a signature is required.
 
     Args:
-        key (dict): the key data to verify
-        user_id (str): the user whose key is being checked
-        key_type (str): the type of key that the key should be
-        signing_key (VerifyKey): (optional) the signing key that the key should
-            be signed with.  If omitted, signatures will not be checked.
+        key: the key data to verify
+        user_id: the user whose key is being checked
+        key_type: the type of key that the key should be
+        signing_key: the signing key that the key should be signed with.  If
+            omitted, signatures will not be checked.
     """
     if (
         key.get("user_id") != user_id
@@ -1162,16 +1190,21 @@ def _check_cross_signing_key(key, user_id, key_type, signing_key=None):
             )
 
 
-def _check_device_signature(user_id, verify_key, signed_device, stored_device):
+def _check_device_signature(
+    user_id: str,
+    verify_key: VerifyKey,
+    signed_device: JsonDict,
+    stored_device: JsonDict,
+) -> None:
     """Check that a signature on a device or cross-signing key is correct and
     matches the copy of the device/key that we have stored.  Throws an
     exception if an error is detected.
 
     Args:
-        user_id (str): the user ID whose signature is being checked
-        verify_key (VerifyKey): the key to verify the device with
-        signed_device (dict): the uploaded signed device data
-        stored_device (dict): our previously stored copy of the device
+        user_id: the user ID whose signature is being checked
+        verify_key: the key to verify the device with
+        signed_device: the uploaded signed device data
+        stored_device: our previously stored copy of the device
 
     Raises:
         SynapseError: if the signature was invalid or the sent device is not the
@@ -1201,7 +1234,7 @@ def _check_device_signature(user_id, verify_key, signed_device, stored_device):
         raise SynapseError(400, "Invalid signature", Codes.INVALID_SIGNATURE)
 
 
-def _exception_to_failure(e):
+def _exception_to_failure(e: Exception) -> JsonDict:
     if isinstance(e, SynapseError):
         return {"status": e.code, "errcode": e.errcode, "message": str(e)}
 
@@ -1218,7 +1251,7 @@ def _exception_to_failure(e):
     return {"status": 503, "message": str(e)}
 
 
-def _one_time_keys_match(old_key_json, new_key):
+def _one_time_keys_match(old_key_json: str, new_key: JsonDict) -> bool:
     old_key = json_decoder.decode(old_key_json)
 
     # if either is a string rather than an object, they must match exactly
@@ -1239,16 +1272,16 @@ class SignatureListItem:
     """An item in the signature list as used by upload_signatures_for_device_keys.
     """
 
-    signing_key_id = attr.ib()
-    target_user_id = attr.ib()
-    target_device_id = attr.ib()
-    signature = attr.ib()
+    signing_key_id = attr.ib(type=str)
+    target_user_id = attr.ib(type=str)
+    target_device_id = attr.ib(type=str)
+    signature = attr.ib(type=JsonDict)
 
 
 class SigningKeyEduUpdater:
     """Handles incoming signing key updates from federation and updates the DB"""
 
-    def __init__(self, hs, e2e_keys_handler):
+    def __init__(self, hs: "HomeServer", e2e_keys_handler: E2eKeysHandler):
         self.store = hs.get_datastore()
         self.federation = hs.get_federation_client()
         self.clock = hs.get_clock()
@@ -1257,7 +1290,7 @@ class SigningKeyEduUpdater:
         self._remote_edu_linearizer = Linearizer(name="remote_signing_key")
 
         # user_id -> list of updates waiting to be handled.
-        self._pending_updates = {}
+        self._pending_updates = {}  # type: Dict[str, List[Tuple[JsonDict, JsonDict]]]
 
         # Recently seen stream ids. We don't bother keeping these in the DB,
         # but they're useful to have them about to reduce the number of spurious
@@ -1270,13 +1303,15 @@ class SigningKeyEduUpdater:
             iterable=True,
         )
 
-    async def incoming_signing_key_update(self, origin, edu_content):
+    async def incoming_signing_key_update(
+        self, origin: str, edu_content: JsonDict
+    ) -> None:
         """Called on incoming signing key update from federation. Responsible for
         parsing the EDU and adding to pending updates list.
 
         Args:
-            origin (string): the server that sent the EDU
-            edu_content (dict): the contents of the EDU
+            origin: the server that sent the EDU
+            edu_content: the contents of the EDU
         """
 
         user_id = edu_content.pop("user_id")
@@ -1299,11 +1334,11 @@ class SigningKeyEduUpdater:
 
         await self._handle_signing_key_updates(user_id)
 
-    async def _handle_signing_key_updates(self, user_id):
+    async def _handle_signing_key_updates(self, user_id: str) -> None:
         """Actually handle pending updates.
 
         Args:
-            user_id (string): the user whose updates we are processing
+            user_id: the user whose updates we are processing
         """
 
         device_handler = self.e2e_keys_handler.device_handler
@@ -1315,7 +1350,7 @@ class SigningKeyEduUpdater:
                 # This can happen since we batch updates
                 return
 
-            device_ids = []
+            device_ids = []  # type: List[str]
 
             logger.info("pending updates: %r", pending_updates)
 
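Editor's note (not part of the patch): the new annotations on claim_one_time_keys pin down the nested dict shapes flowing through the handler. A minimal, self-contained sketch of those shapes; the user, device and key IDs are invented for illustration, and JsonDict stands in for synapse.types.JsonDict.

    from typing import Any, Dict

    JsonDict = Dict[str, Any]  # stand-in for synapse.types.JsonDict

    # query: "one_time_keys" -> user ID -> device ID -> algorithm
    query = {
        "one_time_keys": {"@alice:example.com": {"DEVICEID": "signed_curve25519"}}
    }  # type: Dict[str, Dict[str, Dict[str, str]]]

    # json_result: user ID -> device ID -> key ID -> key, as returned to the client
    json_result = {
        "@alice:example.com": {
            "DEVICEID": {"signed_curve25519:AAAAAQ": {"key": "base64+curve25519+key"}}
        }
    }  # type: Dict[str, Dict[str, Dict[str, JsonDict]]]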
diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py
index f01b090772..622cae23be 100644
--- a/synapse/handlers/e2e_room_keys.py
+++ b/synapse/handlers/e2e_room_keys.py
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 import logging
+from typing import TYPE_CHECKING, List, Optional
 
 from synapse.api.errors import (
     Codes,
@@ -24,8 +25,12 @@ from synapse.api.errors import (
     SynapseError,
 )
 from synapse.logging.opentracing import log_kv, trace
+from synapse.types import JsonDict
 from synapse.util.async_helpers import Linearizer
 
+if TYPE_CHECKING:
+    from synapse.app.homeserver import HomeServer
+
 logger = logging.getLogger(__name__)
 
 
@@ -37,7 +42,7 @@ class E2eRoomKeysHandler:
     The actual payload of the encrypted keys is completely opaque to the handler.
     """
 
-    def __init__(self, hs):
+    def __init__(self, hs: "HomeServer"):
         self.store = hs.get_datastore()
 
         # Used to lock whenever a client is uploading key data.  This prevents collisions
@@ -48,21 +53,27 @@ class E2eRoomKeysHandler:
         self._upload_linearizer = Linearizer("upload_room_keys_lock")
 
     @trace
-    async def get_room_keys(self, user_id, version, room_id=None, session_id=None):
+    async def get_room_keys(
+        self,
+        user_id: str,
+        version: str,
+        room_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+    ) -> List[JsonDict]:
         """Bulk get the E2E room keys for a given backup, optionally filtered to a given
         room, or a given session.
         See EndToEndRoomKeyStore.get_e2e_room_keys for full details.
 
         Args:
-            user_id(str): the user whose keys we're getting
-            version(str): the version ID of the backup we're getting keys from
-            room_id(string): room ID to get keys for, for None to get keys for all rooms
-            session_id(string): session ID to get keys for, for None to get keys for all
+            user_id: the user whose keys we're getting
+            version: the version ID of the backup we're getting keys from
+            room_id: room ID to get keys for, or None to get keys for all rooms
+            session_id: session ID to get keys for, or None to get keys for all
                 sessions
         Raises:
             NotFoundError: if the backup version does not exist
         Returns:
-            A deferred list of dicts giving the session_data and message metadata for
+            A list of dicts giving the session_data and message metadata for
             these room keys.
         """
 
@@ -86,17 +97,23 @@ class E2eRoomKeysHandler:
             return results
 
     @trace
-    async def delete_room_keys(self, user_id, version, room_id=None, session_id=None):
+    async def delete_room_keys(
+        self,
+        user_id: str,
+        version: str,
+        room_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+    ) -> JsonDict:
         """Bulk delete the E2E room keys for a given backup, optionally filtered to a given
         room or a given session.
         See EndToEndRoomKeyStore.delete_e2e_room_keys for full details.
 
         Args:
-            user_id(str): the user whose backup we're deleting
-            version(str): the version ID of the backup we're deleting
-            room_id(string): room ID to delete keys for, for None to delete keys for all
+            user_id: the user whose backup we're deleting
+            version: the version ID of the backup we're deleting
+            room_id: room ID to delete keys for, or None to delete keys for all
                 rooms
-            session_id(string): session ID to delete keys for, for None to delete keys
+            session_id: session ID to delete keys for, or None to delete keys
                 for all sessions
         Raises:
             NotFoundError: if the backup version does not exist
@@ -128,15 +145,17 @@ class E2eRoomKeysHandler:
             return {"etag": str(version_etag), "count": count}
 
     @trace
-    async def upload_room_keys(self, user_id, version, room_keys):
+    async def upload_room_keys(
+        self, user_id: str, version: str, room_keys: JsonDict
+    ) -> JsonDict:
         """Bulk upload a list of room keys into a given backup version, asserting
         that the given version is the current backup version.  room_keys are merged
         into the current backup as described in RoomKeysServlet.on_PUT().
 
         Args:
-            user_id(str): the user whose backup we're setting
-            version(str): the version ID of the backup we're updating
-            room_keys(dict): a nested dict describing the room_keys we're setting:
+            user_id: the user whose backup we're setting
+            version: the version ID of the backup we're updating
+            room_keys: a nested dict describing the room_keys we're setting:
 
         {
             "rooms": {
@@ -254,14 +273,16 @@ class E2eRoomKeysHandler:
             return {"etag": str(version_etag), "count": count}
 
     @staticmethod
-    def _should_replace_room_key(current_room_key, room_key):
+    def _should_replace_room_key(
+        current_room_key: Optional[JsonDict], room_key: JsonDict
+    ) -> bool:
         """
         Determine whether to replace a given current_room_key (if any)
         with a newly uploaded room_key backup
 
         Args:
-            current_room_key (dict): Optional, the current room_key dict if any
-            room_key (dict): The new room_key dict which may or may not be fit to
+            current_room_key: Optional, the current room_key dict if any
+            room_key: The new room_key dict which may or may not be fit to
                 replace the current_room_key
 
         Returns:
@@ -286,14 +307,14 @@ class E2eRoomKeysHandler:
         return True
 
     @trace
-    async def create_version(self, user_id, version_info):
+    async def create_version(self, user_id: str, version_info: JsonDict) -> str:
         """Create a new backup version.  This automatically becomes the new
         backup version for the user's keys; previous backups will no longer be
         writeable to.
 
         Args:
-            user_id(str): the user whose backup version we're creating
-            version_info(dict): metadata about the new version being created
+            user_id: the user whose backup version we're creating
+            version_info: metadata about the new version being created
 
         {
             "algorithm": "m.megolm_backup.v1",
@@ -301,7 +322,7 @@ class E2eRoomKeysHandler:
         }
 
         Returns:
-            A deferred of a string that gives the new version number.
+            The new version number.
         """
 
         # TODO: Validate the JSON to make sure it has the right keys.
@@ -313,17 +334,19 @@ class E2eRoomKeysHandler:
             )
             return new_version
 
-    async def get_version_info(self, user_id, version=None):
+    async def get_version_info(
+        self, user_id: str, version: Optional[str] = None
+    ) -> JsonDict:
         """Get the info about a given version of the user's backup
 
         Args:
-            user_id(str): the user whose current backup version we're querying
-            version(str): Optional; if None gives the most recent version
+            user_id: the user whose current backup version we're querying
+            version: Optional; if None gives the most recent version
                 otherwise a historical one.
         Raises:
             NotFoundError: if the requested backup version doesn't exist
         Returns:
-            A deferred of a info dict that gives the info about the new version.
+            An info dict giving the information about the requested backup version.
 
         {
             "version": "1234",
@@ -346,7 +369,7 @@ class E2eRoomKeysHandler:
             return res
 
     @trace
-    async def delete_version(self, user_id, version=None):
+    async def delete_version(self, user_id: str, version: Optional[str] = None) -> None:
         """Deletes a given version of the user's e2e_room_keys backup
 
         Args:
@@ -366,17 +389,19 @@ class E2eRoomKeysHandler:
                     raise
 
     @trace
-    async def update_version(self, user_id, version, version_info):
+    async def update_version(
+        self, user_id: str, version: str, version_info: JsonDict
+    ) -> JsonDict:
         """Update the info about a given version of the user's backup
 
         Args:
-            user_id(str): the user whose current backup version we're updating
-            version(str): the backup version we're updating
-            version_info(dict): the new information about the backup
+            user_id: the user whose current backup version we're updating
+            version: the backup version we're updating
+            version_info: the new information about the backup
         Raises:
             NotFoundError: if the requested backup version doesn't exist
         Returns:
-            A deferred of an empty dict.
+            An empty dict.
         """
         if "version" not in version_info:
             version_info["version"] = version
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index b6dc7f99b6..eddc7582d0 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -1617,6 +1617,12 @@ class FederationHandler(BaseHandler):
         if event.state_key == self._server_notices_mxid:
             raise SynapseError(HTTPStatus.FORBIDDEN, "Cannot invite this user")
 
+        # We retrieve the room member handler here so as not to cause a cyclic dependency
+        member_handler = self.hs.get_room_member_handler()
+        # We don't rate limit based on room ID, as that should be done by the
+        # sending server.
+        member_handler.ratelimit_invite(None, event.state_key)
+
         # keep a record of the room version, if we don't yet know it.
         # (this may get overwritten if we later get a different room version in a
         # join dance).
diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py
index f61844d688..4f7137539b 100644
--- a/synapse/handlers/identity.py
+++ b/synapse/handlers/identity.py
@@ -27,9 +27,11 @@ from synapse.api.errors import (
     HttpResponseException,
     SynapseError,
 )
+from synapse.api.ratelimiting import Ratelimiter
 from synapse.config.emailconfig import ThreepidBehaviour
 from synapse.http import RequestTimedOutError
 from synapse.http.client import SimpleHttpClient
+from synapse.http.site import SynapseRequest
 from synapse.types import JsonDict, Requester
 from synapse.util import json_decoder
 from synapse.util.hash import sha256_and_url_safe_base64
@@ -57,6 +59,32 @@ class IdentityHandler(BaseHandler):
 
         self._web_client_location = hs.config.invite_client_location
 
+        # Ratelimiters for `/requestToken` endpoints.
+        self._3pid_validation_ratelimiter_ip = Ratelimiter(
+            clock=hs.get_clock(),
+            rate_hz=hs.config.ratelimiting.rc_3pid_validation.per_second,
+            burst_count=hs.config.ratelimiting.rc_3pid_validation.burst_count,
+        )
+        self._3pid_validation_ratelimiter_address = Ratelimiter(
+            clock=hs.get_clock(),
+            rate_hz=hs.config.ratelimiting.rc_3pid_validation.per_second,
+            burst_count=hs.config.ratelimiting.rc_3pid_validation.burst_count,
+        )
+
+    def ratelimit_request_token_requests(
+        self, request: SynapseRequest, medium: str, address: str,
+    ):
+        """Used to ratelimit requests to `/requestToken` by IP and address.
+
+        Args:
+            request: The associated request
+            medium: The type of threepid, e.g. "msisdn" or "email"
+            address: The actual threepid ID, e.g. the phone number or email address
+        """
+
+        self._3pid_validation_ratelimiter_ip.ratelimit((medium, request.getClientIP()))
+        self._3pid_validation_ratelimiter_address.ratelimit((medium, address))
+
     async def threepid_from_creds(
         self, id_server: str, creds: Dict[str, str]
     ) -> Optional[JsonDict]:
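Editor's note (not part of the patch): the two new /requestToken ratelimiters are keyed on (medium, client IP) and (medium, threepid address) respectively, so one mailbox or phone number cannot be hammered from many IPs, and one IP cannot spray many addresses. A toy stand-in for Synapse's Ratelimiter showing only the keying; the rate and burst values are illustrative, the real ones come from the rc_3pid_validation settings.

    import time
    from typing import Dict, List, Tuple


    class DemoRatelimiter:
        """Simplified sliding-window limiter, for illustration only."""

        def __init__(self, rate_hz: float, burst_count: int) -> None:
            self.rate_hz = rate_hz
            self.burst_count = burst_count
            self._actions = {}  # type: Dict[Tuple[str, str], List[float]]

        def ratelimit(self, key: Tuple[str, str]) -> None:
            now = time.time()
            window = self.burst_count / self.rate_hz
            recent = [t for t in self._actions.get(key, []) if now - t < window]
            if len(recent) >= self.burst_count:
                raise RuntimeError("429 Too Many Requests")
            recent.append(now)
            self._actions[key] = recent


    ip_limiter = DemoRatelimiter(rate_hz=0.003, burst_count=5)
    address_limiter = DemoRatelimiter(rate_hz=0.003, burst_count=5)

    # a request to /requestToken must pass both checks
    ip_limiter.ratelimit(("email", "203.0.113.1"))
    address_limiter.ratelimit(("email", "alice@example.com"))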
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index e2a7d567fa..a15336bf00 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -174,7 +174,7 @@ class MessageHandler:
                 raise NotFoundError("Can't find event for token %s" % (at_token,))
 
             visible_events = await filter_events_for_client(
-                self.storage, user_id, last_events, filter_send_to_client=False
+                self.storage, user_id, last_events, filter_send_to_client=False,
             )
 
             event = last_events[0]
diff --git a/synapse/handlers/oidc_handler.py b/synapse/handlers/oidc_handler.py
index 1607e12935..3adc75fa4a 100644
--- a/synapse/handlers/oidc_handler.py
+++ b/synapse/handlers/oidc_handler.py
@@ -102,7 +102,7 @@ class OidcHandler:
                 ) from e
 
     async def handle_oidc_callback(self, request: SynapseRequest) -> None:
-        """Handle an incoming request to /_synapse/oidc/callback
+        """Handle an incoming request to /_synapse/client/oidc/callback
 
         Since we might want to display OIDC-related errors in a user-friendly
         way, we don't raise SynapseError from here. Instead, we call
@@ -123,7 +123,6 @@ class OidcHandler:
         Args:
             request: the incoming request from the browser.
         """
-
         # The provider might redirect with an error.
         # In that case, just display it as-is.
         if b"error" in request.args:
@@ -137,8 +136,12 @@ class OidcHandler:
             # either the provider misbehaving or Synapse being misconfigured.
             # The only exception of that is "access_denied", where the user
             # probably cancelled the login flow. In other cases, log those errors.
-            if error != "access_denied":
-                logger.error("Error from the OIDC provider: %s %s", error, description)
+            logger.log(
+                logging.INFO if error == "access_denied" else logging.ERROR,
+                "Received OIDC callback with error: %s %s",
+                error,
+                description,
+            )
 
             self._sso_handler.render_error(request, error, description)
             return
@@ -149,7 +152,7 @@ class OidcHandler:
         # Fetch the session cookie
         session = request.getCookie(SESSION_COOKIE_NAME)  # type: Optional[bytes]
         if session is None:
-            logger.info("No session cookie found")
+            logger.info("Received OIDC callback, with no session cookie")
             self._sso_handler.render_error(
                 request, "missing_session", "No session cookie found"
             )
@@ -169,7 +172,7 @@ class OidcHandler:
 
         # Check for the state query parameter
         if b"state" not in request.args:
-            logger.info("State parameter is missing")
+            logger.info("Received OIDC callback, with no state parameter")
             self._sso_handler.render_error(
                 request, "invalid_request", "State parameter is missing"
             )
@@ -183,14 +186,16 @@ class OidcHandler:
                 session, state
             )
         except (MacaroonDeserializationException, ValueError) as e:
-            logger.exception("Invalid session")
+            logger.exception("Invalid session for OIDC callback")
             self._sso_handler.render_error(request, "invalid_session", str(e))
             return
         except MacaroonInvalidSignatureException as e:
-            logger.exception("Could not verify session")
+            logger.exception("Could not verify session for OIDC callback")
             self._sso_handler.render_error(request, "mismatching_session", str(e))
             return
 
+        logger.info("Received OIDC callback for IdP %s", session_data.idp_id)
+
         oidc_provider = self._providers.get(session_data.idp_id)
         if not oidc_provider:
             logger.error("OIDC session uses unknown IdP %r", oidc_provider)
@@ -274,6 +279,9 @@ class OidcProvider:
         # MXC URI for icon for this auth provider
         self.idp_icon = provider.idp_icon
 
+        # optional brand identifier for this auth provider
+        self.idp_brand = provider.idp_brand
+
         self._sso_handler = hs.get_sso_handler()
 
         self._sso_handler.register_identity_provider(self)
@@ -562,6 +570,7 @@ class OidcProvider:
         Returns:
             UserInfo: an object representing the user.
         """
+        logger.debug("Using the OAuth2 access_token to request userinfo")
         metadata = await self.load_metadata()
 
         resp = await self._http_client.get_json(
@@ -569,6 +578,8 @@ class OidcProvider:
             headers={"Authorization": ["Bearer {}".format(token["access_token"])]},
         )
 
+        logger.debug("Retrieved user info from userinfo endpoint: %r", resp)
+
         return UserInfo(resp)
 
     async def _parse_id_token(self, token: Token, nonce: str) -> UserInfo:
@@ -597,17 +608,19 @@ class OidcProvider:
             claims_cls = ImplicitIDToken
 
         alg_values = metadata.get("id_token_signing_alg_values_supported", ["RS256"])
-
         jwt = JsonWebToken(alg_values)
 
         claim_options = {"iss": {"values": [metadata["issuer"]]}}
 
+        id_token = token["id_token"]
+        logger.debug("Attempting to decode JWT id_token %r", id_token)
+
         # Try to decode the keys in cache first, then retry by forcing the keys
         # to be reloaded
         jwk_set = await self.load_jwks()
         try:
             claims = jwt.decode(
-                token["id_token"],
+                id_token,
                 key=jwk_set,
                 claims_cls=claims_cls,
                 claims_options=claim_options,
@@ -617,13 +630,15 @@ class OidcProvider:
             logger.info("Reloading JWKS after decode error")
             jwk_set = await self.load_jwks(force=True)  # try reloading the jwks
             claims = jwt.decode(
-                token["id_token"],
+                id_token,
                 key=jwk_set,
                 claims_cls=claims_cls,
                 claims_options=claim_options,
                 claims_params=claims_params,
             )
 
+        logger.debug("Decoded id_token JWT %r; validating", claims)
+
         claims.validate(leeway=120)  # allows 2 min of clock skew
         return UserInfo(claims)
 
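Editor's note (not part of the patch): the id_token handling above follows a "try the cached JWKS, then force a reload and retry once" pattern. A generic sketch of that pattern; decode_fn and load_keys are hypothetical callables standing in for jwt.decode and load_jwks.

    from typing import Callable, List


    def decode_with_key_reload(
        decode_fn: Callable[[List[dict]], dict],
        load_keys: Callable[[bool], List[dict]],
    ) -> dict:
        keys = load_keys(False)  # use the cached key set first
        try:
            return decode_fn(keys)
        except Exception:
            # on a decode error, force a key reload and retry once
            return decode_fn(load_keys(True))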
@@ -640,7 +655,7 @@ class OidcProvider:
 
           - ``client_id``: the client ID set in ``oidc_config.client_id``
           - ``response_type``: ``code``
-          - ``redirect_uri``: the callback URL ; ``{base url}/_synapse/oidc/callback``
+          - ``redirect_uri``: the callback URL; ``{base url}/_synapse/client/oidc/callback``
           - ``scope``: the list of scopes set in ``oidc_config.scopes``
           - ``state``: a random string
           - ``nonce``: a random string
@@ -681,7 +696,7 @@ class OidcProvider:
         request.addCookie(
             SESSION_COOKIE_NAME,
             cookie,
-            path="/_synapse/oidc",
+            path="/_synapse/client/oidc",
             max_age="3600",
             httpOnly=True,
             sameSite="lax",
@@ -702,7 +717,7 @@ class OidcProvider:
     async def handle_oidc_callback(
         self, request: SynapseRequest, session_data: "OidcSessionData", code: str
     ) -> None:
-        """Handle an incoming request to /_synapse/oidc/callback
+        """Handle an incoming request to /_synapse/client/oidc/callback
 
         By this time we have already validated the session on the synapse side, and
         now need to do the provider-specific operations. This includes:
@@ -723,19 +738,18 @@ class OidcProvider:
         """
         # Exchange the code with the provider
         try:
-            logger.debug("Exchanging code")
+            logger.debug("Exchanging OAuth2 code for a token")
             token = await self._exchange_code(code)
         except OidcError as e:
-            logger.exception("Could not exchange code")
+            logger.exception("Could not exchange OAuth2 code")
             self._sso_handler.render_error(request, e.error, e.error_description)
             return
 
-        logger.debug("Successfully obtained OAuth2 access token")
+        logger.debug("Successfully obtained OAuth2 token data: %r", token)
 
         # Now that we have a token, get the userinfo, either by decoding the
         # `id_token` or by fetching the `userinfo_endpoint`.
         if self._uses_userinfo:
-            logger.debug("Fetching userinfo")
             try:
                 userinfo = await self._fetch_userinfo(token)
             except Exception as e:
@@ -743,7 +757,6 @@ class OidcProvider:
                 self._sso_handler.render_error(request, "fetch_error", str(e))
                 return
         else:
-            logger.debug("Extracting userinfo from id_token")
             try:
                 userinfo = await self._parse_id_token(token, nonce=session_data.nonce)
             except Exception as e:
@@ -1056,7 +1069,8 @@ class OidcSessionData:
 
 
 UserAttributeDict = TypedDict(
-    "UserAttributeDict", {"localpart": Optional[str], "display_name": Optional[str]}
+    "UserAttributeDict",
+    {"localpart": Optional[str], "display_name": Optional[str], "emails": List[str]},
 )
 C = TypeVar("C")
 
@@ -1135,11 +1149,12 @@ def jinja_finalize(thing):
 env = Environment(finalize=jinja_finalize)
 
 
-@attr.s
+@attr.s(slots=True, frozen=True)
 class JinjaOidcMappingConfig:
     subject_claim = attr.ib(type=str)
     localpart_template = attr.ib(type=Optional[Template])
     display_name_template = attr.ib(type=Optional[Template])
+    email_template = attr.ib(type=Optional[Template])
     extra_attributes = attr.ib(type=Dict[str, Template])
 
 
@@ -1156,23 +1171,17 @@ class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]):
     def parse_config(config: dict) -> JinjaOidcMappingConfig:
         subject_claim = config.get("subject_claim", "sub")
 
-        localpart_template = None  # type: Optional[Template]
-        if "localpart_template" in config:
+        def parse_template_config(option_name: str) -> Optional[Template]:
+            if option_name not in config:
+                return None
             try:
-                localpart_template = env.from_string(config["localpart_template"])
+                return env.from_string(config[option_name])
             except Exception as e:
-                raise ConfigError(
-                    "invalid jinja template", path=["localpart_template"]
-                ) from e
+                raise ConfigError("invalid jinja template", path=[option_name]) from e
 
-        display_name_template = None  # type: Optional[Template]
-        if "display_name_template" in config:
-            try:
-                display_name_template = env.from_string(config["display_name_template"])
-            except Exception as e:
-                raise ConfigError(
-                    "invalid jinja template", path=["display_name_template"]
-                ) from e
+        localpart_template = parse_template_config("localpart_template")
+        display_name_template = parse_template_config("display_name_template")
+        email_template = parse_template_config("email_template")
 
         extra_attributes = {}  # type: Dict[str, Template]
         if "extra_attributes" in config:
@@ -1192,6 +1201,7 @@ class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]):
             subject_claim=subject_claim,
             localpart_template=localpart_template,
             display_name_template=display_name_template,
+            email_template=email_template,
             extra_attributes=extra_attributes,
         )
 
@@ -1213,16 +1223,23 @@ class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]):
             # a usable mxid.
             localpart += str(failures) if failures else ""
 
-        display_name = None  # type: Optional[str]
-        if self._config.display_name_template is not None:
-            display_name = self._config.display_name_template.render(
-                user=userinfo
-            ).strip()
+        def render_template_field(template: Optional[Template]) -> Optional[str]:
+            if template is None:
+                return None
+            return template.render(user=userinfo).strip()
+
+        display_name = render_template_field(self._config.display_name_template)
+        if display_name == "":
+            display_name = None
 
-            if display_name == "":
-                display_name = None
+        emails = []  # type: List[str]
+        email = render_template_field(self._config.email_template)
+        if email:
+            emails.append(email)
 
-        return UserAttributeDict(localpart=localpart, display_name=display_name)
+        return UserAttributeDict(
+            localpart=localpart, display_name=display_name, emails=emails
+        )
 
     async def get_extra_attributes(self, userinfo: UserInfo, token: Token) -> JsonDict:
         extras = {}  # type: Dict[str, str]
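Editor's note (not part of the patch): the refactored mapping provider now funnels localpart_template, display_name_template and the new email_template through one parse/render path (parse_template_config and render_template_field above). A self-contained sketch of that pattern using plain Jinja2; the claim name and template string are examples, not Synapse defaults.

    from typing import Optional

    from jinja2 import Environment, Template

    env = Environment()


    def parse_template(source: Optional[str]) -> Optional[Template]:
        # mirrors parse_template_config: missing option -> None; a bad template
        # raises, which Synapse wraps in a ConfigError
        if source is None:
            return None
        return env.from_string(source)


    def render(template: Optional[Template], userinfo: dict) -> Optional[str]:
        # mirrors render_template_field: unset template -> None, else render and strip
        if template is None:
            return None
        return template.render(user=userinfo).strip()


    email_template = parse_template("{{ user.email }}")
    print(render(email_template, {"email": "alice@example.com"}))  # alice@example.com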
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index a2cf0f6f3e..49b085269b 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -14,8 +14,9 @@
 # limitations under the License.
 
 """Contains functions for registering clients."""
+
 import logging
-from typing import TYPE_CHECKING, List, Optional, Tuple
+from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple
 
 from synapse import types
 from synapse.api.constants import MAX_USERID_LENGTH, EventTypes, JoinRules, LoginType
@@ -152,7 +153,7 @@ class RegistrationHandler(BaseHandler):
         user_type: Optional[str] = None,
         default_display_name: Optional[str] = None,
         address: Optional[str] = None,
-        bind_emails: List[str] = [],
+        bind_emails: Iterable[str] = [],
         by_admin: bool = False,
         user_agent_ips: Optional[List[Tuple[str, str]]] = None,
     ) -> str:
@@ -693,6 +694,8 @@ class RegistrationHandler(BaseHandler):
             access_token: The access token of the newly logged in device, or
                 None if `inhibit_login` enabled.
         """
+        # TODO: 3pid registration can actually happen on the workers. Consider
+        # refactoring it.
         if self.hs.config.worker_app:
             await self._post_registration_client(
                 user_id=user_id, auth_result=auth_result, access_token=access_token
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 99fd063084..1336a23a3a 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -127,6 +127,10 @@ class RoomCreationHandler(BaseHandler):
 
         self.third_party_event_rules = hs.get_third_party_event_rules()
 
+        self._invite_burst_count = (
+            hs.config.ratelimiting.rc_invites_per_room.burst_count
+        )
+
     async def upgrade_room(
         self, requester: Requester, old_room_id: str, new_version: RoomVersion
     ) -> str:
@@ -663,6 +667,9 @@ class RoomCreationHandler(BaseHandler):
             invite_3pid_list = []
             invite_list = []
 
+        if len(invite_list) + len(invite_3pid_list) > self._invite_burst_count:
+            raise SynapseError(400, "Cannot invite so many users at once")
+
         await self.event_creation_handler.assert_accepted_privacy_policy(requester)
 
         power_level_content_override = config.get("power_level_content_override")
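Editor's note (not part of the patch): the room-creation check above caps how many invites (user IDs plus third-party IDs) a single createRoom request may carry, using the per-room invite burst count. A condensed sketch with an invented limit:

    invite_burst_count = 10  # in Synapse this comes from the invite ratelimiting config
    invite_list = ["@bob:example.com", "@carol:example.com"]
    invite_3pid_list = [{"medium": "email", "address": "dave@example.com"}]

    if len(invite_list) + len(invite_3pid_list) > invite_burst_count:
        # Synapse raises SynapseError(400, "Cannot invite so many users at once")
        raise ValueError("Cannot invite so many users at once")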
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index e001e418f9..a5da97cfe0 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -85,6 +85,17 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
             burst_count=hs.config.ratelimiting.rc_joins_remote.burst_count,
         )
 
+        self._invites_per_room_limiter = Ratelimiter(
+            clock=self.clock,
+            rate_hz=hs.config.ratelimiting.rc_invites_per_room.per_second,
+            burst_count=hs.config.ratelimiting.rc_invites_per_room.burst_count,
+        )
+        self._invites_per_user_limiter = Ratelimiter(
+            clock=self.clock,
+            rate_hz=hs.config.ratelimiting.rc_invites_per_user.per_second,
+            burst_count=hs.config.ratelimiting.rc_invites_per_user.burst_count,
+        )
+
         # This is only used to get at ratelimit function, and
         # maybe_kick_guest_users. It's fine there are multiple of these as
         # it doesn't store state.
@@ -144,6 +155,16 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         """
         raise NotImplementedError()
 
+    def ratelimit_invite(self, room_id: Optional[str], invitee_user_id: str):
+        """Ratelimit invites by room and by target user.
+
+        If the room ID is missing, we only rate limit by the target user.
+        """
+        if room_id:
+            self._invites_per_room_limiter.ratelimit(room_id)
+
+        self._invites_per_user_limiter.ratelimit(invitee_user_id)
+
     async def _local_membership_update(
         self,
         requester: Requester,
@@ -387,8 +408,14 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
                 raise SynapseError(403, "This room has been blocked on this server")
 
         if effective_membership_state == Membership.INVITE:
+            target_id = target.to_string()
+            if ratelimit:
+                # Don't ratelimit application services.
+                if not requester.app_service or requester.app_service.is_rate_limited():
+                    self.ratelimit_invite(room_id, target_id)
+
             # block any attempts to invite the server notices mxid
-            if target.to_string() == self._server_notices_mxid:
+            if target_id == self._server_notices_mxid:
                 raise SynapseError(HTTPStatus.FORBIDDEN, "Cannot invite this user")
 
             block_invite = False
@@ -412,7 +439,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
                     block_invite = True
 
                 if not await self.spam_checker.user_may_invite(
-                    requester.user.to_string(), target.to_string(), room_id
+                    requester.user.to_string(), target_id, room_id
                 ):
                     logger.info("Blocking invite due to spam checker")
                     block_invite = True
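Editor's note (not part of the patch): the membership path above only applies the new invite limiter to ordinary users. Application services skip it unless their registration opts back into rate limiting, and the per-room check is skipped when no room ID is known (the federation invite path). A small sketch of that gating logic, with simplified stand-ins for Requester and AppService:

    from dataclasses import dataclass
    from typing import Optional


    @dataclass
    class AppService:
        rate_limited: bool = True

        def is_rate_limited(self) -> bool:
            return self.rate_limited


    @dataclass
    class Requester:
        app_service: Optional[AppService] = None


    def should_ratelimit_invite(requester: Requester, ratelimit: bool) -> bool:
        if not ratelimit:
            return False
        # don't ratelimit application services, unless they ask to be limited
        return not requester.app_service or requester.app_service.is_rate_limited()


    assert should_ratelimit_invite(Requester(), ratelimit=True)
    assert not should_ratelimit_invite(Requester(AppService(rate_limited=False)), True)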
diff --git a/synapse/handlers/saml_handler.py b/synapse/handlers/saml_handler.py
index 38461cf79d..e88fd59749 100644
--- a/synapse/handlers/saml_handler.py
+++ b/synapse/handlers/saml_handler.py
@@ -78,9 +78,10 @@ class SamlHandler(BaseHandler):
         # user-facing name of this auth provider
         self.idp_name = "SAML"
 
-        # we do not currently support icons for SAML auth, but this is required by
+        # we do not currently support icons/brands for SAML auth, but this is required by
         # the SsoIdentityProvider protocol type.
         self.idp_icon = None
+        self.idp_brand = None
 
         # a map from saml session id to Saml2SessionData object
         self._outstanding_requests_dict = {}  # type: Dict[str, Saml2SessionData]
@@ -132,7 +133,7 @@ class SamlHandler(BaseHandler):
         raise Exception("prepare_for_authenticate didn't return a Location header")
 
     async def handle_saml_response(self, request: SynapseRequest) -> None:
-        """Handle an incoming request to /_matrix/saml2/authn_response
+        """Handle an incoming request to /_synapse/client/saml2/authn_response
 
         Args:
             request: the incoming request from the browser. We'll
diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py
index afc1341d09..96ccd991ed 100644
--- a/synapse/handlers/sso.py
+++ b/synapse/handlers/sso.py
@@ -14,21 +14,31 @@
 # limitations under the License.
 import abc
 import logging
-from typing import TYPE_CHECKING, Awaitable, Callable, Dict, List, Mapping, Optional
+from typing import (
+    TYPE_CHECKING,
+    Awaitable,
+    Callable,
+    Dict,
+    Iterable,
+    Mapping,
+    Optional,
+    Set,
+)
 from urllib.parse import urlencode
 
 import attr
 from typing_extensions import NoReturn, Protocol
 
 from twisted.web.http import Request
+from twisted.web.iweb import IRequest
 
 from synapse.api.constants import LoginType
 from synapse.api.errors import Codes, NotFoundError, RedirectException, SynapseError
 from synapse.handlers.ui_auth import UIAuthSessionDataConstants
 from synapse.http import get_request_user_agent
-from synapse.http.server import respond_with_html
+from synapse.http.server import respond_with_html, respond_with_redirect
 from synapse.http.site import SynapseRequest
-from synapse.types import JsonDict, UserID, contains_invalid_mxid_characters
+from synapse.types import Collection, JsonDict, UserID, contains_invalid_mxid_characters
 from synapse.util.async_helpers import Linearizer
 from synapse.util.stringutils import random_string
 
@@ -80,6 +90,11 @@ class SsoIdentityProvider(Protocol):
         """Optional MXC URI for user-facing icon"""
         return None
 
+    @property
+    def idp_brand(self) -> Optional[str]:
+        """Optional branding identifier"""
+        return None
+
     @abc.abstractmethod
     async def handle_redirect_request(
         self,
@@ -109,7 +124,7 @@ class UserAttributes:
     # enter one.
     localpart = attr.ib(type=Optional[str])
     display_name = attr.ib(type=Optional[str], default=None)
-    emails = attr.ib(type=List[str], default=attr.Factory(list))
+    emails = attr.ib(type=Collection[str], default=attr.Factory(list))
 
 
 @attr.s(slots=True)
@@ -124,7 +139,7 @@ class UsernameMappingSession:
 
     # attributes returned by the ID mapper
     display_name = attr.ib(type=Optional[str])
-    emails = attr.ib(type=List[str])
+    emails = attr.ib(type=Collection[str])
 
     # An optional dictionary of extra attributes to be provided to the client in the
     # login response.
@@ -136,6 +151,12 @@ class UsernameMappingSession:
     # expiry time for the session, in milliseconds
     expiry_time_ms = attr.ib(type=int)
 
+    # choices made by the user
+    chosen_localpart = attr.ib(type=Optional[str], default=None)
+    use_display_name = attr.ib(type=bool, default=True)
+    emails_to_use = attr.ib(type=Collection[str], default=())
+    terms_accepted_version = attr.ib(type=Optional[str], default=None)
+
 
 # the HTTP cookie used to track the mapping session id
 USERNAME_MAPPING_SESSION_COOKIE_NAME = b"username_mapping_session"
@@ -170,6 +191,8 @@ class SsoHandler:
         # map from idp_id to SsoIdentityProvider
         self._identity_providers = {}  # type: Dict[str, SsoIdentityProvider]
 
+        self._consent_at_registration = hs.config.consent.user_consent_at_registration
+
     def register_identity_provider(self, p: SsoIdentityProvider):
         p_id = p.idp_id
         assert p_id not in self._identity_providers
@@ -382,6 +405,8 @@ class SsoHandler:
                 to an additional page. (e.g. to prompt for more information)
 
         """
+        new_user = False
+
         # grab a lock while we try to find a mapping for this user. This seems...
         # optimistic, especially for implementations that end up redirecting to
         # interstitial pages.
@@ -422,9 +447,14 @@ class SsoHandler:
                     get_request_user_agent(request),
                     request.getClientIP(),
                 )
+                new_user = True
 
         await self._auth_handler.complete_sso_login(
-            user_id, request, client_redirect_url, extra_login_attributes
+            user_id,
+            request,
+            client_redirect_url,
+            extra_login_attributes,
+            new_user=new_user,
         )
 
     async def _call_attribute_mapper(
@@ -514,7 +544,7 @@ class SsoHandler:
         logger.info("Recorded registration session id %s", session_id)
 
         # Set the cookie and redirect to the username picker
-        e = RedirectException(b"/_synapse/client/pick_username")
+        e = RedirectException(b"/_synapse/client/pick_username/account_details")
         e.cookies.append(
             b"%s=%s; path=/"
             % (USERNAME_MAPPING_SESSION_COOKIE_NAME, session_id.encode("ascii"))
@@ -642,6 +672,25 @@ class SsoHandler:
         )
         respond_with_html(request, 200, html)
 
+    def get_mapping_session(self, session_id: str) -> UsernameMappingSession:
+        """Look up the given username mapping session
+
+        If it is not found, raises a SynapseError with an http code of 400
+
+        Args:
+            session_id: session to look up
+        Returns:
+            active mapping session
+        Raises:
+            SynapseError if the session is not found/has expired
+        """
+        self._expire_old_sessions()
+        session = self._username_mapping_sessions.get(session_id)
+        if session:
+            return session
+        logger.info("Couldn't find session id %s", session_id)
+        raise SynapseError(400, "unknown session")
+
     async def check_username_availability(
         self, localpart: str, session_id: str,
     ) -> bool:
@@ -658,12 +707,7 @@ class SsoHandler:
 
         # make sure that there is a valid mapping session, to stop people dictionary-
         # scanning for accounts
-
-        self._expire_old_sessions()
-        session = self._username_mapping_sessions.get(session_id)
-        if not session:
-            logger.info("Couldn't find session id %s", session_id)
-            raise SynapseError(400, "unknown session")
+        self.get_mapping_session(session_id)
 
         logger.info(
             "[session %s] Checking for availability of username %s",
@@ -680,7 +724,12 @@ class SsoHandler:
         return not user_infos
 
     async def handle_submit_username_request(
-        self, request: SynapseRequest, localpart: str, session_id: str
+        self,
+        request: SynapseRequest,
+        session_id: str,
+        localpart: str,
+        use_display_name: bool,
+        emails_to_use: Iterable[str],
     ) -> None:
         """Handle a request to the username-picker 'submit' endpoint
 
@@ -690,21 +739,103 @@ class SsoHandler:
             request: HTTP request
             localpart: localpart requested by the user
             session_id: ID of the username mapping session, extracted from a cookie
+            use_display_name: whether the user wants to use the suggested display name
+            emails_to_use: emails that the user would like to use
         """
-        self._expire_old_sessions()
-        session = self._username_mapping_sessions.get(session_id)
-        if not session:
-            logger.info("Couldn't find session id %s", session_id)
-            raise SynapseError(400, "unknown session")
+        try:
+            session = self.get_mapping_session(session_id)
+        except SynapseError as e:
+            self.render_error(request, "bad_session", e.msg, code=e.code)
+            return
+
+        # update the session with the user's choices
+        session.chosen_localpart = localpart
+        session.use_display_name = use_display_name
+
+        emails_from_idp = set(session.emails)
+        filtered_emails = set()  # type: Set[str]
+
+        # we iterate through the list rather than just building a set conjunction, so
+        # that we can log attempts to use unknown addresses
+        for email in emails_to_use:
+            if email in emails_from_idp:
+                filtered_emails.add(email)
+            else:
+                logger.warning(
+                    "[session %s] ignoring user request to use unknown email address %r",
+                    session_id,
+                    email,
+                )
+        session.emails_to_use = filtered_emails
 
-        logger.info("[session %s] Registering localpart %s", session_id, localpart)
+        # we may now need to collect consent from the user, in which case, redirect
+        # to the consent-extraction-unit
+        if self._consent_at_registration:
+            redirect_url = b"/_synapse/client/new_user_consent"
+
+        # otherwise, redirect to the completion page
+        else:
+            redirect_url = b"/_synapse/client/sso_register"
+
+        respond_with_redirect(request, redirect_url)
+
+    async def handle_terms_accepted(
+        self, request: Request, session_id: str, terms_version: str
+    ):
+        """Handle a request to the new-user 'consent' endpoint
+
+        Will serve an HTTP response to the request.
+
+        Args:
+            request: HTTP request
+            session_id: ID of the username mapping session, extracted from a cookie
+            terms_version: the version of the terms which the user viewed and consented
+                to
+        """
+        logger.info(
+            "[session %s] User consented to terms version %s",
+            session_id,
+            terms_version,
+        )
+        try:
+            session = self.get_mapping_session(session_id)
+        except SynapseError as e:
+            self.render_error(request, "bad_session", e.msg, code=e.code)
+            return
+
+        session.terms_accepted_version = terms_version
+
+        # we're done; now we can register the user
+        respond_with_redirect(request, b"/_synapse/client/sso_register")
+
+    async def register_sso_user(self, request: Request, session_id: str) -> None:
+        """Called once we have all the info we need to register a new user.
+
+        Does so and serves an HTTP response
+
+        Args:
+            request: HTTP request
+            session_id: ID of the username mapping session, extracted from a cookie
+        """
+        try:
+            session = self.get_mapping_session(session_id)
+        except SynapseError as e:
+            self.render_error(request, "bad_session", e.msg, code=e.code)
+            return
+
+        logger.info(
+            "[session %s] Registering localpart %s",
+            session_id,
+            session.chosen_localpart,
+        )
 
         attributes = UserAttributes(
-            localpart=localpart,
-            display_name=session.display_name,
-            emails=session.emails,
+            localpart=session.chosen_localpart, emails=session.emails_to_use,
         )
 
+        if session.use_display_name:
+            attributes.display_name = session.display_name
+
         # the following will raise a 400 error if the username has been taken in the
         # meantime.
         user_id = await self._register_mapped_user(
@@ -715,7 +846,12 @@ class SsoHandler:
             request.getClientIP(),
         )
 
-        logger.info("[session %s] Registered userid %s", session_id, user_id)
+        logger.info(
+            "[session %s] Registered userid %s with attributes %s",
+            session_id,
+            user_id,
+            attributes,
+        )
 
         # delete the mapping session and the cookie
         del self._username_mapping_sessions[session_id]
@@ -728,11 +864,21 @@ class SsoHandler:
             path=b"/",
         )
 
+        auth_result = {}
+        if session.terms_accepted_version:
+            # TODO: make this less awful.
+            auth_result[LoginType.TERMS] = True
+
+        await self._registration_handler.post_registration_actions(
+            user_id, auth_result, access_token=None
+        )
+
         await self._auth_handler.complete_sso_login(
             user_id,
             request,
             session.client_redirect_url,
             session.extra_login_attributes,
+            new_user=True,
         )
 
     def _expire_old_sessions(self):
@@ -746,3 +892,14 @@ class SsoHandler:
         for session_id in to_expire:
             logger.info("Expiring mapping session %s", session_id)
             del self._username_mapping_sessions[session_id]
+
+
+def get_username_mapping_session_cookie_from_request(request: IRequest) -> str:
+    """Extract the session ID from the cookie
+
+    Raises a SynapseError if the cookie isn't found
+    """
+    session_id = request.getCookie(USERNAME_MAPPING_SESSION_COOKIE_NAME)
+    if not session_id:
+        raise SynapseError(code=400, msg="missing session_id")
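+    # the cookie value is bytes; decode to a str, replacing any unexpected characters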
+    return session_id.decode("ascii", errors="replace")
diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py
index 4c06a117d3..113fd47134 100644
--- a/synapse/http/federation/matrix_federation_agent.py
+++ b/synapse/http/federation/matrix_federation_agent.py
@@ -323,12 +323,19 @@ class MatrixHostnameEndpoint:
         if port or _is_ip_literal(host):
             return [Server(host, port or 8448)]
 
+        logger.debug("Looking up SRV record for %s", host.decode(errors="replace"))
         server_list = await self._srv_resolver.resolve_service(b"_matrix._tcp." + host)
 
         if server_list:
+            logger.debug(
+                "Got %s from SRV lookup for %s",
+                ", ".join(map(str, server_list)),
+                host.decode(errors="replace"),
+            )
             return server_list
 
         # No SRV records, so we fallback to host and 8448
+        logger.debug("No SRV records for %s", host.decode(errors="replace"))
         return [Server(host, 8448)]
 
 
diff --git a/synapse/http/server.py b/synapse/http/server.py
index d69d579b3a..8249732b27 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -761,6 +761,13 @@ def set_clickjacking_protection_headers(request: Request):
     request.setHeader(b"Content-Security-Policy", b"frame-ancestors 'none';")
 
 
+def respond_with_redirect(request: Request, url: bytes) -> None:
+    """Write a 302 response to the request, if it is still alive."""
+    logger.debug("Redirect to %s", url.decode("utf-8"))
+    request.redirect(url)
+    finish_request(request)
+
+
 def finish_request(request: Request):
     """ Finish writing the response to the request.
 
diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py
index ab586c318c..0538350f38 100644
--- a/synapse/logging/opentracing.py
+++ b/synapse/logging/opentracing.py
@@ -791,7 +791,7 @@ def tag_args(func):
 
     @wraps(func)
     def _tag_args_inner(*args, **kwargs):
-        argspec = inspect.getargspec(func)
+        argspec = inspect.getfullargspec(func)
         for i, arg in enumerate(argspec.args[1:]):
             set_tag("ARG_" + arg, args[i])
         set_tag("args", args[len(argspec.args) :])
diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py
index 72ab5750cc..401d577293 100644
--- a/synapse/module_api/__init__.py
+++ b/synapse/module_api/__init__.py
@@ -279,7 +279,11 @@ class ModuleApi:
         )
 
     async def complete_sso_login_async(
-        self, registered_user_id: str, request: SynapseRequest, client_redirect_url: str
+        self,
+        registered_user_id: str,
+        request: SynapseRequest,
+        client_redirect_url: str,
+        new_user: bool = False,
     ):
         """Complete a SSO login by redirecting the user to a page to confirm whether they
         want their access token sent to `client_redirect_url`, or redirect them to that
@@ -291,9 +295,11 @@ class ModuleApi:
             request: The request to respond to.
             client_redirect_url: The URL to which to offer to redirect the user (or to
                 redirect them directly if whitelisted).
+            new_user: set to true to use consent wording appropriate to a user
+                who has just registered.
         """
         await self._auth_handler.complete_sso_login(
-            registered_user_id, request, client_redirect_url,
+            registered_user_id, request, client_redirect_url, new_user=new_user
         )
 
     @defer.inlineCallbacks
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index 745b1dde94..d10201b6b3 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -34,6 +34,7 @@ from synapse.push.presentable_names import (
     descriptor_from_member_events,
     name_from_member_event,
 )
+from synapse.storage.state import StateFilter
 from synapse.types import StateMap, UserID
 from synapse.util.async_helpers import concurrently_execute
 from synapse.visibility import filter_events_for_client
@@ -110,6 +111,7 @@ class Mailer:
 
         self.sendmail = self.hs.get_sendmail()
         self.store = self.hs.get_datastore()
+        self.state_store = self.hs.get_storage().state
         self.macaroon_gen = self.hs.get_macaroon_generator()
         self.state_handler = self.hs.get_state_handler()
         self.storage = hs.get_storage()
@@ -217,7 +219,17 @@ class Mailer:
         push_actions: Iterable[Dict[str, Any]],
         reason: Dict[str, Any],
     ) -> None:
-        """Send email regarding a user's room notifications"""
+        """
+        Send email regarding a user's room notifications
+
+        Args:
+            app_id: The application receiving the notification.
+            user_id: The user receiving the notification.
+            email_address: The email address receiving the notification.
+            push_actions: All outstanding notifications.
+            reason: The notification that triggered the sending of this
+                email.
+        """
         rooms_in_order = deduped_ordered_list([pa["room_id"] for pa in push_actions])
 
         notif_events = await self.store.get_events(
@@ -241,7 +253,7 @@ class Mailer:
         except StoreError:
             user_display_name = user_id
 
-        async def _fetch_room_state(room_id):
+        async def _fetch_room_state(room_id: str) -> None:
             room_state = await self.store.get_current_state_ids(room_id)
             state_by_room[room_id] = room_state
 
@@ -255,7 +267,7 @@ class Mailer:
         rooms = []
 
         for r in rooms_in_order:
-            roomvars = await self.get_room_vars(
+            roomvars = await self._get_room_vars(
                 r, user_id, notifs_by_room[r], notif_events, state_by_room[r]
             )
             rooms.append(roomvars)
@@ -267,13 +279,25 @@ class Mailer:
             fallback_to_members=True,
         )
 
-        summary_text = await self.make_summary_text(
-            notifs_by_room, state_by_room, notif_events, user_id, reason
-        )
+        if len(notifs_by_room) == 1:
+            # Only one room has new stuff
+            room_id = list(notifs_by_room.keys())[0]
+
+            summary_text = await self._make_summary_text_single_room(
+                room_id,
+                notifs_by_room[room_id],
+                state_by_room[room_id],
+                notif_events,
+                user_id,
+            )
+        else:
+            summary_text = await self._make_summary_text(
+                notifs_by_room, state_by_room, notif_events, reason
+            )
 
         template_vars = {
             "user_display_name": user_display_name,
-            "unsubscribe_link": self.make_unsubscribe_link(
+            "unsubscribe_link": self._make_unsubscribe_link(
                 user_id, app_id, email_address
             ),
             "summary_text": summary_text,
@@ -337,7 +361,7 @@ class Mailer:
             )
         )
 
-    async def get_room_vars(
+    async def _get_room_vars(
         self,
         room_id: str,
         user_id: str,
@@ -345,6 +369,20 @@ class Mailer:
         notif_events: Dict[str, EventBase],
         room_state_ids: StateMap[str],
     ) -> Dict[str, Any]:
+        """
+        Generate the variables for notifications on a per-room basis.
+
+        Args:
+            room_id: The room ID
+            user_id: The user receiving the notification.
+            notifs: The outstanding push actions for this room.
+            notif_events: The events related to the above notifications.
+            room_state_ids: The event IDs of the current room state.
+
+        Returns:
+             A dictionary to be added to the template context.
+        """
+
         # Check if one of the notifs is an invite event for the user.
         is_invite = False
         for n in notifs:
@@ -361,12 +399,12 @@ class Mailer:
             "hash": string_ordinal_total(room_id),  # See sender avatar hash
             "notifs": [],
             "invite": is_invite,
-            "link": self.make_room_link(room_id),
+            "link": self._make_room_link(room_id),
         }  # type: Dict[str, Any]
 
         if not is_invite:
             for n in notifs:
-                notifvars = await self.get_notif_vars(
+                notifvars = await self._get_notif_vars(
                     n, user_id, notif_events[n["event_id"]], room_state_ids
                 )
 
@@ -393,13 +431,26 @@ class Mailer:
 
         return room_vars
 
-    async def get_notif_vars(
+    async def _get_notif_vars(
         self,
         notif: Dict[str, Any],
         user_id: str,
         notif_event: EventBase,
         room_state_ids: StateMap[str],
     ) -> Dict[str, Any]:
+        """
+        Generate the variables for a single notification.
+
+        Args:
+            notif: The outstanding notification for this room.
+            user_id: The user receiving the notification.
+            notif_event: The event related to the above notification.
+            room_state_ids: The event IDs of the current room state.
+
+        Returns:
+             A dictionary to be added to the template context.
+        """
+
         results = await self.store.get_events_around(
             notif["room_id"],
             notif["event_id"],
@@ -408,7 +459,7 @@ class Mailer:
         )
 
         ret = {
-            "link": self.make_notif_link(notif),
+            "link": self._make_notif_link(notif),
             "ts": notif["received_ts"],
             "messages": [],
         }
@@ -419,22 +470,51 @@ class Mailer:
         the_events.append(notif_event)
 
         for event in the_events:
-            messagevars = await self.get_message_vars(notif, event, room_state_ids)
+            messagevars = await self._get_message_vars(notif, event, room_state_ids)
             if messagevars is not None:
                 ret["messages"].append(messagevars)
 
         return ret
 
-    async def get_message_vars(
+    async def _get_message_vars(
         self, notif: Dict[str, Any], event: EventBase, room_state_ids: StateMap[str]
     ) -> Optional[Dict[str, Any]]:
+        """
+        Generate the variables for a single event, if possible.
+
+        Args:
+            notif: The outstanding notification for this room.
+            event: The event under consideration.
+            room_state_ids: The event IDs of the current room state.
+
+        Returns:
+             A dictionary to be added to the template context, or None if the
+             event cannot be processed.
+        """
         if event.type != EventTypes.Message and event.type != EventTypes.Encrypted:
             return None
 
-        sender_state_event_id = room_state_ids[("m.room.member", event.sender)]
-        sender_state_event = await self.store.get_event(sender_state_event_id)
-        sender_name = name_from_member_event(sender_state_event)
-        sender_avatar_url = sender_state_event.content.get("avatar_url")
+        # Get the sender's name and avatar from the room state.
+        type_state_key = ("m.room.member", event.sender)
+        sender_state_event_id = room_state_ids.get(type_state_key)
+        if sender_state_event_id:
+            sender_state_event = await self.store.get_event(
+                sender_state_event_id
+            )  # type: Optional[EventBase]
+        else:
+            # Attempt to check the historical state for the room.
+            historical_state = await self.state_store.get_state_for_event(
+                event.event_id, StateFilter.from_types((type_state_key,))
+            )
+            sender_state_event = historical_state.get(type_state_key)
+
+        if sender_state_event:
+            sender_name = name_from_member_event(sender_state_event)
+            sender_avatar_url = sender_state_event.content.get("avatar_url")
+        else:
+            # No state could be found, fallback to the MXID.
+            sender_name = event.sender
+            sender_avatar_url = None
 
         # 'hash' for deterministically picking default images: use
         # sender_hash % the number of default images to choose from
@@ -459,18 +539,25 @@ class Mailer:
         ret["msgtype"] = msgtype
 
         if msgtype == "m.text":
-            self.add_text_message_vars(ret, event)
+            self._add_text_message_vars(ret, event)
         elif msgtype == "m.image":
-            self.add_image_message_vars(ret, event)
+            self._add_image_message_vars(ret, event)
 
         if "body" in event.content:
             ret["body_text_plain"] = event.content["body"]
 
         return ret
 
-    def add_text_message_vars(
+    def _add_text_message_vars(
         self, messagevars: Dict[str, Any], event: EventBase
     ) -> None:
+        """
+        Potentially add a sanitised message body to the message variables.
+
+        Args:
+            messagevars: The template context to be modified.
+            event: The event under consideration.
+        """
         msgformat = event.content.get("format")
 
         messagevars["format"] = msgformat
@@ -483,149 +570,232 @@ class Mailer:
         elif body:
             messagevars["body_text_html"] = safe_text(body)
 
-    def add_image_message_vars(
+    def _add_image_message_vars(
         self, messagevars: Dict[str, Any], event: EventBase
     ) -> None:
         """
         Potentially add an image URL to the message variables.
+
+        Args:
+            messagevars: The template context to be modified.
+            event: The event under consideration.
         """
         if "url" in event.content:
             messagevars["image_url"] = event.content["url"]
 
-    async def make_summary_text(
+    async def _make_summary_text_single_room(
         self,
-        notifs_by_room: Dict[str, List[Dict[str, Any]]],
-        room_state_ids: Dict[str, StateMap[str]],
+        room_id: str,
+        notifs: List[Dict[str, Any]],
+        room_state_ids: StateMap[str],
         notif_events: Dict[str, EventBase],
         user_id: str,
-        reason: Dict[str, Any],
-    ):
-        if len(notifs_by_room) == 1:
-            # Only one room has new stuff
-            room_id = list(notifs_by_room.keys())[0]
+    ) -> str:
+        """
+        Make a summary text for the email when only a single room has notifications.
 
-            # If the room has some kind of name, use it, but we don't
-            # want the generated-from-names one here otherwise we'll
-            # end up with, "new message from Bob in the Bob room"
-            room_name = await calculate_room_name(
-                self.store, room_state_ids[room_id], user_id, fallback_to_members=False
-            )
+        Args:
+            room_id: The ID of the room.
+            notifs: The push actions for this room.
+            room_state_ids: The state map for the room.
+            notif_events: A map of event ID -> notification event.
+            user_id: The user receiving the notification.
+
+        Returns:
+            The summary text.
+        """
+        # If the room has some kind of name, use it, but we don't
+        # want the generated-from-names one here otherwise we'll
+        # end up with, "new message from Bob in the Bob room"
+        room_name = await calculate_room_name(
+            self.store, room_state_ids, user_id, fallback_to_members=False
+        )
+
+        # See if one of the notifs is an invite event for the user
+        invite_event = None
+        for n in notifs:
+            ev = notif_events[n["event_id"]]
+            if ev.type == EventTypes.Member and ev.state_key == user_id:
+                if ev.content.get("membership") == Membership.INVITE:
+                    invite_event = ev
+                    break
 
-            # See if one of the notifs is an invite event for the user
-            invite_event = None
-            for n in notifs_by_room[room_id]:
-                ev = notif_events[n["event_id"]]
-                if ev.type == EventTypes.Member and ev.state_key == user_id:
-                    if ev.content.get("membership") == Membership.INVITE:
-                        invite_event = ev
-                        break
-
-            if invite_event:
-                inviter_member_event_id = room_state_ids[room_id].get(
-                    ("m.room.member", invite_event.sender)
+        if invite_event:
+            inviter_member_event_id = room_state_ids.get(
+                ("m.room.member", invite_event.sender)
+            )
+            inviter_name = invite_event.sender
+            if inviter_member_event_id:
+                inviter_member_event = await self.store.get_event(
+                    inviter_member_event_id, allow_none=True
                 )
-                inviter_name = invite_event.sender
-                if inviter_member_event_id:
-                    inviter_member_event = await self.store.get_event(
-                        inviter_member_event_id, allow_none=True
-                    )
-                    if inviter_member_event:
-                        inviter_name = name_from_member_event(inviter_member_event)
-
-                if room_name is None:
-                    return self.email_subjects.invite_from_person % {
-                        "person": inviter_name,
-                        "app": self.app_name,
-                    }
-                else:
-                    return self.email_subjects.invite_from_person_to_room % {
-                        "person": inviter_name,
-                        "room": room_name,
-                        "app": self.app_name,
-                    }
+                if inviter_member_event:
+                    inviter_name = name_from_member_event(inviter_member_event)
+
+            if room_name is None:
+                return self.email_subjects.invite_from_person % {
+                    "person": inviter_name,
+                    "app": self.app_name,
+                }
 
+            return self.email_subjects.invite_from_person_to_room % {
+                "person": inviter_name,
+                "room": room_name,
+                "app": self.app_name,
+            }
+
+        if len(notifs) == 1:
+            # There is just the one notification, so give some detail
             sender_name = None
-            if len(notifs_by_room[room_id]) == 1:
-                # There is just the one notification, so give some detail
-                event = notif_events[notifs_by_room[room_id][0]["event_id"]]
-                if ("m.room.member", event.sender) in room_state_ids[room_id]:
-                    state_event_id = room_state_ids[room_id][
-                        ("m.room.member", event.sender)
-                    ]
-                    state_event = await self.store.get_event(state_event_id)
-                    sender_name = name_from_member_event(state_event)
-
-                if sender_name is not None and room_name is not None:
-                    return self.email_subjects.message_from_person_in_room % {
-                        "person": sender_name,
-                        "room": room_name,
-                        "app": self.app_name,
-                    }
-                elif sender_name is not None:
-                    return self.email_subjects.message_from_person % {
-                        "person": sender_name,
-                        "app": self.app_name,
-                    }
-            else:
-                # There's more than one notification for this room, so just
-                # say there are several
-                if room_name is not None:
-                    return self.email_subjects.messages_in_room % {
-                        "room": room_name,
-                        "app": self.app_name,
-                    }
-                else:
-                    # If the room doesn't have a name, say who the messages
-                    # are from explicitly to avoid, "messages in the Bob room"
-                    sender_ids = list(
-                        {
-                            notif_events[n["event_id"]].sender
-                            for n in notifs_by_room[room_id]
-                        }
-                    )
-
-                    member_events = await self.store.get_events(
-                        [
-                            room_state_ids[room_id][("m.room.member", s)]
-                            for s in sender_ids
-                        ]
-                    )
-
-                    return self.email_subjects.messages_from_person % {
-                        "person": descriptor_from_member_events(member_events.values()),
-                        "app": self.app_name,
-                    }
-        else:
-            # Stuff's happened in multiple different rooms
+            event = notif_events[notifs[0]["event_id"]]
+            if ("m.room.member", event.sender) in room_state_ids:
+                state_event_id = room_state_ids[("m.room.member", event.sender)]
+                state_event = await self.store.get_event(state_event_id)
+                sender_name = name_from_member_event(state_event)
+
+            if sender_name is not None and room_name is not None:
+                return self.email_subjects.message_from_person_in_room % {
+                    "person": sender_name,
+                    "room": room_name,
+                    "app": self.app_name,
+                }
+            elif sender_name is not None:
+                return self.email_subjects.message_from_person % {
+                    "person": sender_name,
+                    "app": self.app_name,
+                }
 
-            # ...but we still refer to the 'reason' room which triggered the mail
-            if reason["room_name"] is not None:
-                return self.email_subjects.messages_in_room_and_others % {
-                    "room": reason["room_name"],
+            # The sender is unknown, just use the room name (or ID).
+            return self.email_subjects.messages_in_room % {
+                "room": room_name or room_id,
+                "app": self.app_name,
+            }
+        else:
+            # There's more than one notification for this room, so just
+            # say there are several
+            if room_name is not None:
+                return self.email_subjects.messages_in_room % {
+                    "room": room_name,
                     "app": self.app_name,
                 }
+
+            return await self._make_summary_text_from_member_events(
+                room_id, notifs, room_state_ids, notif_events
+            )
+
+    async def _make_summary_text(
+        self,
+        notifs_by_room: Dict[str, List[Dict[str, Any]]],
+        room_state_ids: Dict[str, StateMap[str]],
+        notif_events: Dict[str, EventBase],
+        reason: Dict[str, Any],
+    ) -> str:
+        """
+        Make a summary text for the email when multiple rooms have notifications.
+
+        Args:
+            notifs_by_room: A map of room ID to the push actions for that room.
+            room_state_ids: A map of room ID to the state map for that room.
+            notif_events: A map of event ID -> notification event.
+            reason: The reason this notification is being sent.
+
+        Returns:
+            The summary text.
+        """
+        # Stuff's happened in multiple different rooms
+        # ...but we still refer to the 'reason' room which triggered the mail
+        if reason["room_name"] is not None:
+            return self.email_subjects.messages_in_room_and_others % {
+                "room": reason["room_name"],
+                "app": self.app_name,
+            }
+
+        room_id = reason["room_id"]
+        return await self._make_summary_text_from_member_events(
+            room_id, notifs_by_room[room_id], room_state_ids[room_id], notif_events
+        )
+
+    async def _make_summary_text_from_member_events(
+        self,
+        room_id: str,
+        notifs: List[Dict[str, Any]],
+        room_state_ids: StateMap[str],
+        notif_events: Dict[str, EventBase],
+    ) -> str:
+        """
+        Make a summary text for the email from the senders of the messages.
+
+        Args:
+            room_id: The ID of the room.
+            notifs: The push actions for this room.
+            room_state_ids: The state map for the room.
+            notif_events: A map of event ID -> notification event.
+
+        Returns:
+            The summary text.
+        """
+        # If the room doesn't have a name, say who the messages
+        # are from explicitly to avoid, "messages in the Bob room"
+
+        # Find the latest event ID for each sender; note that the notifications
+        # are already sorted by descending received_ts.
+        sender_ids = {}
+        for n in notifs:
+            sender = notif_events[n["event_id"]].sender
+            if sender not in sender_ids:
+                sender_ids[sender] = n["event_id"]
+
+        # Get the actual member events (in order to calculate a pretty
+        # description of the senders).
+        member_event_ids = []
+        member_events = {}
+        for sender_id, event_id in sender_ids.items():
+            type_state_key = ("m.room.member", sender_id)
+            sender_state_event_id = room_state_ids.get(type_state_key)
+            if sender_state_event_id:
+                member_event_ids.append(sender_state_event_id)
             else:
-                # If the reason room doesn't have a name, say who the messages
-                # are from explicitly to avoid, "messages in the Bob room"
-                room_id = reason["room_id"]
-
-                sender_ids = list(
-                    {
-                        notif_events[n["event_id"]].sender
-                        for n in notifs_by_room[room_id]
-                    }
+                # Attempt to check the historical state for the room.
+                historical_state = await self.state_store.get_state_for_event(
+                    event_id, StateFilter.from_types((type_state_key,))
                 )
+                sender_state_event = historical_state.get(type_state_key)
+                if sender_state_event:
+                    member_events[event_id] = sender_state_event
+        member_events.update(await self.store.get_events(member_event_ids))
+
+        if not member_events:
+            # No member events were found! Maybe the room is empty?
+            # Fallback to the room ID (note that if there was a room name this
+            # would already have been used previously).
+            return self.email_subjects.messages_in_room % {
+                "room": room_id,
+                "app": self.app_name,
+            }
+
+        # There was a single sender.
+        if len(member_events) == 1:
+            return self.email_subjects.messages_from_person % {
+                "person": descriptor_from_member_events(member_events.values()),
+                "app": self.app_name,
+            }
+
+        # There was more than one sender, use the first one and a tweaked template.
+        return self.email_subjects.messages_from_person_and_others % {
+            "person": descriptor_from_member_events(list(member_events.values())[:1]),
+            "app": self.app_name,
+        }
 
-                member_events = await self.store.get_events(
-                    [room_state_ids[room_id][("m.room.member", s)] for s in sender_ids]
-                )
+    def _make_room_link(self, room_id: str) -> str:
+        """
+        Generate a link to open a room in the web client.
 
-                return self.email_subjects.messages_from_person_and_others % {
-                    "person": descriptor_from_member_events(member_events.values()),
-                    "app": self.app_name,
-                }
+        Args:
+            room_id: The room ID to generate a link to.
 
-    def make_room_link(self, room_id: str) -> str:
+        Returns:
+             A link to open a room in the web client.
+        """
         if self.hs.config.email_riot_base_url:
             base_url = "%s/#/room" % (self.hs.config.email_riot_base_url)
         elif self.app_name == "Vector":
@@ -635,7 +805,16 @@ class Mailer:
             base_url = "https://matrix.to/#"
         return "%s/%s" % (base_url, room_id)
 
-    def make_notif_link(self, notif: Dict[str, str]) -> str:
+    def _make_notif_link(self, notif: Dict[str, str]) -> str:
+        """
+        Generate a link to open an event in the web client.
+
+        Args:
+            notif: The notification to generate a link for.
+
+        Returns:
+             A link to open the notification in the web client.
+        """
         if self.hs.config.email_riot_base_url:
             return "%s/#/room/%s/%s" % (
                 self.hs.config.email_riot_base_url,
@@ -651,9 +830,20 @@ class Mailer:
         else:
             return "https://matrix.to/#/%s/%s" % (notif["room_id"], notif["event_id"])
 
-    def make_unsubscribe_link(
+    def _make_unsubscribe_link(
         self, user_id: str, app_id: str, email_address: str
     ) -> str:
+        """
+        Generate a link to unsubscribe from email notifications.
+
+        Args:
+            user_id: The user receiving the notification.
+            app_id: The application receiving the notification.
+            email_address: The email address receiving the notification.
+
+        Returns:
+             A link to unsubscribe from email notifications.
+        """
         params = {
             "access_token": self.macaroon_gen.generate_delete_pusher_token(user_id),
             "app_id": app_id,
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index bfd46a3730..8a2b73b75e 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -86,8 +86,12 @@ REQUIREMENTS = [
 
 CONDITIONAL_REQUIREMENTS = {
     "matrix-synapse-ldap3": ["matrix-synapse-ldap3>=0.1"],
-    # we use execute_values with the fetch param, which arrived in psycopg 2.8.
-    "postgres": ["psycopg2>=2.8"],
+    "postgres": [
+        # we use execute_values with the fetch param, which arrived in psycopg 2.8.
+        "psycopg2>=2.8 ; platform_python_implementation != 'PyPy'",
+        "psycopg2cffi>=2.8 ; platform_python_implementation == 'PyPy'",
+        "psycopg2cffi-compat==1.1 ; platform_python_implementation == 'PyPy'",
+    ],
     # ACME support is required to provision TLS certificates from authorities
     # that use the protocol, such as Let's Encrypt.
     "acme": [
diff --git a/synapse/res/templates/sso.css b/synapse/res/templates/sso.css
new file mode 100644
index 0000000000..338214f5d0
--- /dev/null
+++ b/synapse/res/templates/sso.css
@@ -0,0 +1,129 @@
+body, input, select, textarea {
+  font-family: "Inter", "Helvetica", "Arial", sans-serif;
+  font-size: 14px;
+  color: #17191C;
+}
+
+header, footer {
+  max-width: 480px;
+  width: 100%;
+  margin: 24px auto;
+  text-align: center;
+}
+
+@media screen and (min-width: 800px) {
+  header {
+    margin-top: 90px;
+  }
+}
+
+header {
+  min-height: 60px;
+}
+
+header p {
+  color: #737D8C;
+  line-height: 24px;
+}
+
+h1 {
+  font-size: 24px;
+}
+
+a {
+  color: #418DED;
+}
+
+.error_page h1 {
+  color: #FE2928;
+}
+
+h2 {
+  font-size: 14px;
+}
+
+h2 img {
+  vertical-align: middle;
+  margin-right: 8px;
+  width: 24px;
+  height: 24px;
+}
+
+label {
+  cursor: pointer;
+}
+
+main {
+  max-width: 360px;
+  width: 100%;
+  margin: 24px auto;
+}
+
+.primary-button {
+  border: none;
+  -webkit-appearance: none;
+  -moz-appearance: none;
+  appearance: none;
+  text-decoration: none;
+  padding: 12px;
+  color: white;
+  background-color: #418DED;
+  font-weight: bold;
+  display: block;
+  border-radius: 12px;
+  width: 100%;
+  box-sizing: border-box;
+  margin: 16px 0;
+  cursor: pointer;
+  text-align: center;
+}
+
+.profile {
+  display: flex;
+  flex-direction: column;
+  align-items: center;
+  justify-content: center;
+  margin: 24px;
+  padding: 13px;
+  border: 1px solid #E9ECF1;
+  border-radius: 4px;
+}
+
+.profile.with-avatar {
+  margin-top: 42px; /* (36px / 2) + 24px */
+}
+
+.profile .avatar {
+  width: 36px;
+  height: 36px;
+  border-radius: 100%;
+  display: block;
+  margin-top: -32px;
+  margin-bottom: 8px;
+}
+
+.profile .display-name {
+  font-weight: bold;
+  margin-bottom: 4px;
+  font-size: 15px;
+  line-height: 18px;
+}
+.profile .user-id {
+  color: #737D8C;
+  font-size: 12px;
+  line-height: 12px;
+}
+
+footer {
+  margin-top: 80px;
+}
+
+footer svg {
+  display: block;
+  width: 46px;
+  margin: 0px auto 12px auto;
+}
+
+footer p {
+  color: #737D8C;
+}
\ No newline at end of file
diff --git a/synapse/res/templates/sso_account_deactivated.html b/synapse/res/templates/sso_account_deactivated.html
index 4eb8db9fb4..c3e4deed93 100644
--- a/synapse/res/templates/sso_account_deactivated.html
+++ b/synapse/res/templates/sso_account_deactivated.html
@@ -1,10 +1,25 @@
 <!DOCTYPE html>
 <html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>SSO account deactivated</title>
-</head>
-    <body>
-        <p>This account has been deactivated.</p>
+    <head>
+        <meta charset="UTF-8">
+        <title>SSO account deactivated</title>
+        <meta name="viewport" content="width=device-width, user-scalable=no">
+        <style type="text/css">
+            {% include "sso.css" without context %}
+        </style>
+    </head>
+    <body class="error_page">
+        <header>
+            <h1>Your account has been deactivated</h1>
+            <p>
+                <strong>No account found</strong>
+            </p>
+            <p>
+                Your account might have been deactivated by the server administrator.
+                You can either try to create a new account or contact the server’s
+                administrator.
+            </p>
+        </header>
+        {% include "sso_footer.html" without context %}
     </body>
 </html>
diff --git a/synapse/res/templates/sso_auth_account_details.html b/synapse/res/templates/sso_auth_account_details.html
new file mode 100644
index 0000000000..f4fdc40b22
--- /dev/null
+++ b/synapse/res/templates/sso_auth_account_details.html
@@ -0,0 +1,188 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <title>Create your account</title>
+    <meta charset="utf-8">
+    <meta name="viewport" content="width=device-width, user-scalable=no">
+    <script type="text/javascript">
+      let wasKeyboard = false;
+      document.addEventListener("mousedown", function() { wasKeyboard = false; });
+      document.addEventListener("keydown", function() { wasKeyboard = true; });
+      document.addEventListener("focusin", function() {
+        if (wasKeyboard) {
+          document.body.classList.add("keyboard-focus");
+        } else {
+          document.body.classList.remove("keyboard-focus");
+        }
+      });
+    </script>
+    <style type="text/css">
+      {% include "sso.css" without context %}
+
+      body.keyboard-focus :focus, body.keyboard-focus .username_input:focus-within {
+        outline: 3px solid #17191C;
+        outline-offset: 4px;
+      }
+
+      .username_input {
+        display: flex;
+        border: 2px solid #418DED;
+        border-radius: 8px;
+        padding: 12px;
+        position: relative;
+        margin: 16px 0;
+        align-items: center;
+        font-size: 12px;
+      }
+
+      .username_input.invalid {
+        border-color: #FE2928;
+      }
+
+      .username_input.invalid input, .username_input.invalid label {
+        color: #FE2928;
+      }
+
+      .username_input div, .username_input input {
+        line-height: 18px;
+        font-size: 14px;
+      }
+
+      .username_input label {
+        position: absolute;
+        top: -5px;
+        left: 14px;
+        font-size: 10px;
+        line-height: 10px;
+        background: white;
+        padding: 0 2px;
+      }
+
+      .username_input input {
+        flex: 1;
+        display: block;
+        min-width: 0;
+        border: none;
+      }
+
+      /* only clear the outline if we know it will be shown on the parent div using :focus-within */
+      @supports selector(:focus-within) {
+        .username_input input {
+          outline: none !important;
+        }
+      }
+
+      .username_input div {
+        color: #8D99A5;
+      }
+
+      .idp-pick-details {
+        border: 1px solid #E9ECF1;
+        border-radius: 8px;
+        margin: 24px 0;
+      }
+
+      .idp-pick-details h2 {
+        margin: 0;
+        padding: 8px 12px;
+      }
+
+      .idp-pick-details .idp-detail {
+        border-top: 1px solid #E9ECF1;
+        padding: 12px;
+        display: block;
+      }
+      .idp-pick-details .check-row {
+        display: flex;
+        align-items: center;
+      }
+
+      .idp-pick-details .check-row .name {
+        flex: 1;
+      }
+
+      .idp-pick-details .use, .idp-pick-details .idp-value {
+        color: #737D8C;
+      }
+
+      .idp-pick-details .idp-value {
+        margin: 0;
+        margin-top: 8px;
+      }
+
+      .idp-pick-details .avatar {
+        width: 53px;
+        height: 53px;
+        border-radius: 100%;
+        display: block;
+        margin-top: 8px;
+      }
+
+      output {
+        padding: 0 14px;
+        display: block;
+      }
+
+      output.error {
+        color: #FE2928;
+      }
+    </style>
+  </head>
+  <body>
+    <header>
+      <h1>Your account is nearly ready</h1>
+      <p>Check your details before creating an account on {{ server_name }}</p>
+    </header>
+    <main>
+      <form method="post" class="form__input" id="form">
+        <div class="username_input" id="username_input">
+          <label for="field-username">Username</label>
+          <div class="prefix">@</div>
+          <input type="text" name="username" id="field-username" autofocus>
+          <div class="postfix">:{{ server_name }}</div>
+        </div>
+        <output for="username_input" id="field-username-output"></output>
+        <input type="submit" value="Continue" class="primary-button">
+        {% if user_attributes.avatar_url or user_attributes.display_name or user_attributes.emails %}
+        <section class="idp-pick-details">
+          <h2><img src="{{ idp.idp_icon | mxc_to_http(24, 24) }}"/>Information from {{ idp.idp_name }}</h2>
+          {% if user_attributes.avatar_url %}
+          <label class="idp-detail idp-avatar" for="idp-avatar">
+            <div class="check-row">
+              <span class="name">Avatar</span>
+              <span class="use">Use</span>
+              <input type="checkbox" name="use_avatar" id="idp-avatar" value="true" checked>
+            </div>
+            <img src="{{ user_attributes.avatar_url }}" class="avatar" />
+          </label>
+          {% endif %}
+          {% if user_attributes.display_name %}
+          <label class="idp-detail" for="idp-displayname">
+            <div class="check-row">
+              <span class="name">Display name</span>
+              <span class="use">Use</span>
+              <input type="checkbox" name="use_display_name" id="idp-displayname" value="true" checked>
+            </div>
+            <p class="idp-value">{{ user_attributes.display_name }}</p>
+          </label>
+          {% endif %}
+          {% for email in user_attributes.emails %}
+          <label class="idp-detail" for="idp-email{{ loop.index }}">
+            <div class="check-row">
+              <span class="name">E-mail</span>
+              <span class="use">Use</span>
+              <input type="checkbox" name="use_email" id="idp-email{{ loop.index }}" value="{{ email }}" checked>
+            </div>
+            <p class="idp-value">{{ email }}</p>
+          </label>
+          {% endfor %}
+        </section>
+        {% endif %}
+      </form>
+    </main>
+    {% include "sso_footer.html" without context %}
+    <script type="text/javascript">
+      {% include "sso_auth_account_details.js" without context %}
+    </script>
+  </body>
+</html>
diff --git a/synapse/res/templates/sso_auth_account_details.js b/synapse/res/templates/sso_auth_account_details.js
new file mode 100644
index 0000000000..3c45df9078
--- /dev/null
+++ b/synapse/res/templates/sso_auth_account_details.js
@@ -0,0 +1,116 @@
+const usernameField = document.getElementById("field-username");
+const usernameOutput = document.getElementById("field-username-output");
+const form = document.getElementById("form");
+
+// needed to validate on change event when no input was changed
+let needsValidation = true;
+let isValid = false;
+
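+// Debounce-style helper: every call resets the timer, so `fn` only runs after
+// `wait` ms have passed without another call.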
+function throttle(fn, wait) {
+    let timeout;
+    const throttleFn = function() {
+        const args = Array.from(arguments);
+        if (timeout) {
+            clearTimeout(timeout);
+        }
+        timeout = setTimeout(fn.bind.apply(fn, [null].concat(args)), wait);
+    };
+    throttleFn.cancelQueued = function() {
+        clearTimeout(timeout);
+    };
+    return throttleFn;
+}
+
+function checkUsernameAvailable(username) {
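+    // ask the server, via the relative "check" endpoint, whether this username is still available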
+    let check_uri = 'check?username=' + encodeURIComponent(username);
+    return fetch(check_uri, {
+        // include the cookie
+        "credentials": "same-origin",
+    }).then(function(response) {
+        if(!response.ok) {
+            // for non-200 responses, raise the body of the response as an exception
+            return response.text().then((text) => { throw new Error(text); });
+        } else {
+            return response.json();
+        }
+    }).then(function(json) {
+        if(json.error) {
+            return {message: json.error};
+        } else if(json.available) {
+            return {available: true};
+        } else {
+            return {message: username + " is not available, please choose another."};
+        }
+    });
+}
+
+const allowedUsernameCharacters = new RegExp("^[a-z0-9\\.\\_\\-\\/\\=]+$");
+const allowedCharactersString = "lowercase letters, digits, ., _, -, /, =";
+
+function reportError(error) {
+    throttledCheckUsernameAvailable.cancelQueued();
+    usernameOutput.innerText = error;
+    usernameOutput.classList.add("error");
+    usernameField.parentElement.classList.add("invalid");
+    usernameField.focus();
+}
+
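+// Validate the username locally (presence, length, allowed characters) before
+// asking the server whether it is available.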
+function validateUsername(username) {
+    isValid = false;
+    needsValidation = false;
+    usernameOutput.innerText = "";
+    usernameField.parentElement.classList.remove("invalid");
+    usernameOutput.classList.remove("error");
+    if (!username) {
+        return reportError("Please provide a username");
+    }
+    if (username.length > 255) {
+        return reportError("Too long, please choose something shorter");
+    }
+    if (!allowedUsernameCharacters.test(username)) {
+        return reportError("Invalid username, please only use " + allowedCharactersString);
+    }
+    usernameOutput.innerText = "Checking if username is available …";
+    throttledCheckUsernameAvailable(username);
+}
+
+const throttledCheckUsernameAvailable = throttle(function(username) {
+    const handleError = function(err) {
+        // don't prevent form submission on error
+        usernameOutput.innerText = "";
+        isValid = true;
+    };
+    try {
+        checkUsernameAvailable(username).then(function(result) {
+            if (!result.available) {
+                reportError(result.message);
+            } else {
+                isValid = true;
+                usernameOutput.innerText = "";
+            }
+        }, handleError);
+    } catch (err) {
+        handleError(err);
+    }
+}, 500);
+
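+// Block form submission until the username has been validated; the first submit
+// runs validation instead of posting the form.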
+form.addEventListener("submit", function(evt) {
+    if (needsValidation) {
+        validateUsername(usernameField.value);
+        evt.preventDefault();
+        return;
+    }
+    if (!isValid) {
+        evt.preventDefault();
+        usernameField.focus();
+        return;
+    }
+});
+usernameField.addEventListener("input", function(evt) {
+    validateUsername(usernameField.value);
+});
+usernameField.addEventListener("change", function(evt) {
+    if (needsValidation) {
+        validateUsername(usernameField.value);
+    }
+});
diff --git a/synapse/res/templates/sso_auth_bad_user.html b/synapse/res/templates/sso_auth_bad_user.html
index f7099098c7..da579ffe69 100644
--- a/synapse/res/templates/sso_auth_bad_user.html
+++ b/synapse/res/templates/sso_auth_bad_user.html
@@ -1,18 +1,26 @@
-<html>
-<head>
-    <title>Authentication Failed</title>
-</head>
-    <body>
-        <div>
+<!DOCTYPE html>
+<html lang="en">
+    <head>
+        <meta charset="UTF-8">
+        <title>Authentication failed</title>
+        <meta name="viewport" content="width=device-width, user-scalable=no">
+        <style type="text/css">
+            {% include "sso.css" without context %}
+        </style>
+    </head>
+    <body class="error_page">
+        <header>
+            <h1>That doesn't look right</h1>
             <p>
-                We were unable to validate your <tt>{{ server_name }}</tt> account via
-                single-sign-on (SSO), because the SSO Identity Provider returned
-                different details than when you logged in.
+                <strong>We were unable to validate your {{ server_name }} account</strong>
+                via single&nbsp;sign&#8209;on&nbsp;(SSO), because the SSO Identity
+                Provider returned different details than when you logged in.
             </p>
             <p>
                 Try the operation again, and ensure that you use the same details on
                 the Identity Provider as when you log into your account.
             </p>
-        </div>
+        </header>
+        {% include "sso_footer.html" without context %}
     </body>
 </html>
diff --git a/synapse/res/templates/sso_auth_confirm.html b/synapse/res/templates/sso_auth_confirm.html
index 4e7ca3a2ed..f9d0456f0a 100644
--- a/synapse/res/templates/sso_auth_confirm.html
+++ b/synapse/res/templates/sso_auth_confirm.html
@@ -1,14 +1,29 @@
-<html>
-<head>
-    <title>Authentication</title>
-</head>
+<!DOCTYPE html>
+<html lang="en">
+    <head>
+        <meta charset="UTF-8">
+        <title>Confirm it's you</title>
+        <meta name="viewport" content="width=device-width, user-scalable=no">
+        <style type="text/css">
+            {% include "sso.css" without context %}
+        </style>
+    </head>
     <body>
-        <div>
+        <header>
+            <h1>Confirm it's you to continue</h1>
             <p>
-                A client is trying to {{ description }}. To confirm this action,
-                <a href="{{ redirect_url }}">re-authenticate with single sign-on</a>.
-                If you did not expect this, your account may be compromised!
+                A client is trying to {{ description }}. To confirm this action,
+                re-authorize your account with single sign-on.
             </p>
-        </div>
+            <p><strong>
+                If you did not expect this, your account may be compromised.
+            </strong></p>
+        </header>
+        <main>
+            <a href="{{ redirect_url }}" class="primary-button">
+                Continue with {{ idp.idp_name }}
+            </a>
+        </main>
+        {% include "sso_footer.html" without context %}
     </body>
 </html>
diff --git a/synapse/res/templates/sso_auth_success.html b/synapse/res/templates/sso_auth_success.html
index 03f1419467..1ed3967e87 100644
--- a/synapse/res/templates/sso_auth_success.html
+++ b/synapse/res/templates/sso_auth_success.html
@@ -1,18 +1,28 @@
-<html>
-<head>
-    <title>Authentication Successful</title>
-    <script>
-    if (window.onAuthDone) {
-        window.onAuthDone();
-    } else if (window.opener && window.opener.postMessage) {
-        window.opener.postMessage("authDone", "*");
-    }
-    </script>
-</head>
+<!DOCTYPE html>
+<html lang="en">
+    <head>
+        <meta charset="UTF-8">
+        <title>Authentication successful</title>
+        <meta name="viewport" content="width=device-width, user-scalable=no">
+        <style type="text/css">
+            {% include "sso.css" without context %}
+        </style>
+        <script>
+            if (window.onAuthDone) {
+                window.onAuthDone();
+            } else if (window.opener && window.opener.postMessage) {
+                window.opener.postMessage("authDone", "*");
+            }
+        </script>
+    </head>
     <body>
-        <div>
-            <p>Thank you</p>
-            <p>You may now close this window and return to the application</p>
-        </div>
+        <header>
+            <h1>Thank you</h1>
+            <p>
+                Now that we know it’s you, you can close this window and return
+                to the application.
+            </p>
+        </header>
+        {% include "sso_footer.html" without context %}
     </body>
 </html>
diff --git a/synapse/res/templates/sso_error.html b/synapse/res/templates/sso_error.html
index af8459719a..472309c350 100644
--- a/synapse/res/templates/sso_error.html
+++ b/synapse/res/templates/sso_error.html
@@ -1,53 +1,69 @@
 <!DOCTYPE html>
 <html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>SSO error</title>
-</head>
-<body>
+    <head>
+        <meta charset="UTF-8">
+        <title>Authentication failed</title>
+        <meta name="viewport" content="width=device-width, user-scalable=no">
+        <style type="text/css">
+            {% include "sso.css" without context %}
+
+            #error_code {
+                margin-top: 56px;
+            }
+        </style>
+    </head>
+    <body class="error_page">
 {# If an error of unauthorised is returned it means we have actively rejected their login #}
 {% if error == "unauthorised" %}
-    <p>You are not allowed to log in here.</p>
+        <header>
+            <p>You are not allowed to log in here.</p>
+        </header>
 {% else %}
-    <p>
-        There was an error during authentication:
-    </p>
-    <div id="errormsg" style="margin:20px 80px">{{ error_description }}</div>
-    <p>
-        If you are seeing this page after clicking a link sent to you via email, make
-        sure you only click the confirmation link once, and that you open the
-        validation link in the same client you're logging in from.
-    </p>
-    <p>
-        Try logging in again from your Matrix client and if the problem persists
-        please contact the server's administrator.
-    </p>
-    <p>Error: <code>{{ error }}</code></p>
+        <header>
+            <h1>There was an error</h1>
+            <p>
+                <strong id="errormsg">{{ error_description }}</strong>
+            </p>
+            <p>
+                If you are seeing this page after clicking a link sent to you via email,
+                make sure you only click the confirmation link once, and that you open
+                the validation link in the same client you're logging in from.
+            </p>
+            <p>
+                Try logging in again from your Matrix client and if the problem persists
+                please contact the server's administrator.
+            </p>
+            <div id="error_code">
+                <p><strong>Error code</strong></p>
+                <p>{{ error }}</p>
+            </div>
+        </header>
+        {% include "sso_footer.html" without context %}
 
-    <script type="text/javascript">
-        // Error handling to support Auth0 errors that we might get through a GET request
-        // to the validation endpoint. If an error is provided, it's either going to be
-        // located in the query string or in a query string-like URI fragment.
-        // We try to locate the error from any of these two locations, but if we can't
-        // we just don't print anything specific.
-        let searchStr = "";
-        if (window.location.search) {
-            // window.location.searchParams isn't always defined when
-            // window.location.search is, so it's more reliable to parse the latter.
-            searchStr = window.location.search;
-        } else if (window.location.hash) {
-            // Replace the # with a ? so that URLSearchParams does the right thing and
-            // doesn't parse the first parameter incorrectly.
-            searchStr = window.location.hash.replace("#", "?");
-        }
+        <script type="text/javascript">
+            // Error handling to support Auth0 errors that we might get through a GET request
+            // to the validation endpoint. If an error is provided, it's either going to be
+            // located in the query string or in a query string-like URI fragment.
+            // We try to locate the error from any of these two locations, but if we can't
+            // we just don't print anything specific.
+            let searchStr = "";
+            if (window.location.search) {
+                // window.location.searchParams isn't always defined when
+                // window.location.search is, so it's more reliable to parse the latter.
+                searchStr = window.location.search;
+            } else if (window.location.hash) {
+                // Replace the # with a ? so that URLSearchParams does the right thing and
+                // doesn't parse the first parameter incorrectly.
+                searchStr = window.location.hash.replace("#", "?");
+            }
 
-        // We might end up with no error in the URL, so we need to check if we have one
-        // to print one.
-        let errorDesc = new URLSearchParams(searchStr).get("error_description")
-        if (errorDesc) {
-            document.getElementById("errormsg").innerText = errorDesc;
-        }
-    </script>
+            // We might end up with no error in the URL, so we need to check if we have one
+            // to print one.
+            let errorDesc = new URLSearchParams(searchStr).get("error_description")
+            if (errorDesc) {
+                document.getElementById("errormsg").innerText = errorDesc;
+            }
+        </script>
 {% endif %}
 </body>
 </html>
diff --git a/synapse/res/templates/sso_footer.html b/synapse/res/templates/sso_footer.html
new file mode 100644
index 0000000000..588a3d508d
--- /dev/null
+++ b/synapse/res/templates/sso_footer.html
@@ -0,0 +1,19 @@
+<footer>
+	<svg role="img" aria-label="[Matrix logo]" viewBox="0 0 200 85" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+          <g id="parent" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
+              <g id="child" transform="translate(-122.000000, -6.000000)" fill="#000000" fill-rule="nonzero">
+                  <g id="matrix-logo" transform="translate(122.000000, 6.000000)">
+                      <polygon id="left-bracket" points="2.24708861 1.93811009 2.24708861 82.7268844 8.10278481 82.7268844 8.10278481 84.6652459 0 84.6652459 0 0 8.10278481 0 8.10278481 1.93811009"></polygon>
+                      <path d="M24.8073418,27.5493174 L24.8073418,31.6376991 L24.924557,31.6376991 C26.0227848,30.0814294 27.3455696,28.8730642 28.8951899,28.0163743 C30.4437975,27.1611927 32.2189873,26.7318422 34.218481,26.7318422 C36.1394937,26.7318422 37.8946835,27.102622 39.4825316,27.8416679 C41.0708861,28.5819706 42.276962,29.8856073 43.1005063,31.7548404 C44.0017722,30.431345 45.2270886,29.2629486 46.7767089,28.2506569 C48.3253165,27.2388679 50.158481,26.7318422 52.2764557,26.7318422 C53.8843038,26.7318422 55.3736709,26.9269101 56.7473418,27.3162917 C58.1189873,27.7056734 59.295443,28.3285835 60.2759494,29.185022 C61.255443,30.0422147 62.02,31.1615927 62.5701266,32.5426532 C63.1187342,33.9262275 63.3936709,35.5898349 63.3936709,37.5372459 L63.3936709,57.7443688 L55.0410127,57.7441174 L55.0410127,40.6319376 C55.0410127,39.6201486 55.0020253,38.6661761 54.9232911,37.7700202 C54.8440506,36.8751211 54.6293671,36.0968606 54.2764557,35.4339817 C53.9232911,34.772611 53.403038,34.2464807 52.7177215,33.8568477 C52.0313924,33.4689743 51.0997468,33.2731523 49.9235443,33.2731523 C48.7473418,33.2731523 47.7962025,33.4983853 47.0706329,33.944578 C46.344557,34.393033 45.7764557,34.9774826 45.3650633,35.6969211 C44.9534177,36.4181193 44.6787342,37.2353431 44.5417722,38.150855 C44.4037975,39.0653615 44.3356962,39.9904257 44.3356962,40.9247908 L44.3356962,57.7443688 L35.9835443,57.7443688 L35.9835443,40.8079009 C35.9835443,39.9124991 35.963038,39.0263982 35.9253165,38.150855 C35.8853165,37.2743064 35.7192405,36.4666349 35.424557,35.7263321 C35.1303797,34.9872862 34.64,34.393033 33.9539241,33.944578 C33.2675949,33.4983853 32.2579747,33.2731523 30.9248101,33.2731523 C30.5321519,33.2731523 30.0126582,33.3608826 29.3663291,33.5365945 C28.7192405,33.7118037 28.0913924,34.0433688 27.4840506,34.5292789 C26.875443,35.0164459 26.3564557,35.7172826 25.9250633,36.6315376 C25.4934177,37.5470495 25.2779747,38.7436 25.2779747,40.2229486 L25.2779747,57.7441174 L16.9260759,57.7443688 L16.9260759,27.5493174 L24.8073418,27.5493174 Z" id="m"></path>
+                      <path d="M68.7455696,31.9886202 C69.6075949,30.7033339 70.7060759,29.672189 72.0397468,28.8926716 C73.3724051,28.1141596 74.8716456,27.5596239 76.5387342,27.2283101 C78.2050633,26.8977505 79.8817722,26.7315908 81.5678481,26.7315908 C83.0974684,26.7315908 84.6458228,26.8391798 86.2144304,27.0525982 C87.7827848,27.2675248 89.2144304,27.6865688 90.5086076,28.3087248 C91.8025316,28.9313835 92.8610127,29.7983798 93.6848101,30.9074514 C94.5083544,32.0170257 94.92,33.4870734 94.92,35.3173431 L94.92,51.026844 C94.92,52.3913138 94.998481,53.6941963 95.1556962,54.9400165 C95.3113924,56.1865908 95.5863291,57.120956 95.9787342,57.7436147 L87.5091139,57.7436147 C87.3518987,57.276055 87.2240506,56.7996972 87.1265823,56.3125303 C87.0278481,55.8266202 86.9592405,55.3301523 86.9207595,54.8236294 C85.5873418,56.1865908 84.0182278,57.1405633 82.2156962,57.6857982 C80.4113924,58.2295248 78.5683544,58.503022 76.6860759,58.503022 C75.2346835,58.503022 73.8817722,58.3275615 72.6270886,57.9776459 C71.3718987,57.6269761 70.2744304,57.082244 69.3334177,56.3411872 C68.3921519,55.602644 67.656962,54.6680275 67.1275949,53.5390972 C66.5982278,52.410167 66.3331646,51.065556 66.3331646,49.5087835 C66.3331646,47.7961578 66.6367089,46.384178 67.2455696,45.2756092 C67.8529114,44.1652807 68.6367089,43.2799339 69.5987342,42.6173064 C70.5589873,41.9556844 71.6567089,41.4592165 72.8924051,41.1284055 C74.1273418,40.7978459 75.3721519,40.5356606 76.6270886,40.3398385 C77.8820253,40.1457761 79.116962,39.9896716 80.3329114,39.873033 C81.5483544,39.7558917 82.6270886,39.5804312 83.5681013,39.3469028 C84.5093671,39.1133743 85.2536709,38.7732624 85.8032911,38.3250587 C86.3513924,37.8773578 86.6063291,37.2252881 86.5678481,36.3680954 C86.5678481,35.4731963 86.4210127,34.7620532 86.1268354,34.2366771 C85.8329114,33.7113009 85.4405063,33.3018092 84.9506329,33.0099615 C84.4602532,32.7181138 83.8916456,32.5232972 83.2450633,32.4255119 C82.5977215,32.3294862 81.9010127,32.2797138 81.156962,32.2797138 C79.5098734,32.2797138 78.2159494,32.6303835 77.2746835,33.3312202 C76.3339241,34.0320569 75.7837975,35.2007046 75.6275949,36.8354037 L67.275443,36.8354037 C67.3924051,34.8892495 67.8817722,33.2726495 68.7455696,31.9886202 Z M85.2440506,43.6984752 C84.7149367,43.873433 84.1460759,44.0189798 83.5387342,44.1361211 C82.9306329,44.253011 82.2936709,44.350545 81.6270886,44.4279688 C80.96,44.5066495 80.2934177,44.6034294 79.6273418,44.7203193 C78.9994937,44.8362037 78.3820253,44.9933138 77.7749367,45.1871248 C77.1663291,45.3829468 76.636962,45.6451321 76.1865823,45.9759431 C75.7349367,46.3070055 75.3724051,46.7263009 75.0979747,47.2313156 C74.8232911,47.7375872 74.6863291,48.380356 74.6863291,49.1588679 C74.6863291,49.8979138 74.8232911,50.5218294 75.0979747,51.026844 C75.3724051,51.5338697 75.7455696,51.9328037 76.2159494,52.2246514 C76.6863291,52.5164991 77.2349367,52.7213706 77.8632911,52.8375064 C78.4898734,52.9546477 79.136962,53.012967 79.8037975,53.012967 C81.4506329,53.012967 82.724557,52.740978 83.6273418,52.1952404 C84.5288608,51.6507596 85.1949367,50.9981872 85.6270886,50.2382771 C86.0579747,49.4793725 86.323038,48.7119211 86.4212658,47.9321523 C86.518481,47.1536404 86.5681013,46.5304789 86.5681013,46.063422 L86.5681013,42.9677248 C86.2146835,43.2799339 85.7736709,43.5230147 85.2440506,43.6984752 Z" id="a"></path>
+                      <path d="M116.917975,27.5493174 L116.917975,33.0976917 L110.801266,33.0976917 L110.801266,48.0492936 C110.801266,49.4502128 111.036203,50.3850807 111.507089,50.8518862 C111.976962,51.3191945 112.918734,51.5527229 114.33038,51.5527229 C114.801013,51.5527229 115.251392,51.5336183 115.683038,51.4944037 C116.114177,51.4561945 116.526076,51.3968697 116.917975,51.3194459 L116.917975,57.7438661 C116.212152,57.860756 115.427595,57.9381798 114.565316,57.9778972 C113.702785,58.0153523 112.859747,58.0357138 112.036203,58.0357138 C110.742278,58.0357138 109.516456,57.9477321 108.36,57.7722716 C107.202785,57.5975651 106.183544,57.2577046 105.301519,56.7509303 C104.418987,56.2454128 103.722785,55.5242147 103.213418,54.5898495 C102.703038,53.6562385 102.448608,52.4292716 102.448608,50.9099541 L102.448608,33.0976917 L97.3903797,33.0976917 L97.3903797,27.5493174 L102.448608,27.5493174 L102.448608,18.4967596 L110.801013,18.4967596 L110.801013,27.5493174 L116.917975,27.5493174 Z" id="t"></path>
+                      <path d="M128.857975,27.5493174 L128.857975,33.1565138 L128.975696,33.1565138 C129.367089,32.2213945 129.896203,31.3559064 130.563544,30.557033 C131.23038,29.7596679 131.99443,29.0776844 132.857215,28.5130936 C133.719241,27.9495083 134.641266,27.5113596 135.622532,27.1988991 C136.601772,26.8879468 137.622025,26.7315908 138.681013,26.7315908 C139.229873,26.7315908 139.836962,26.8296275 140.504304,27.0239413 L140.504304,34.7336477 C140.111646,34.6552183 139.641013,34.586844 139.092658,34.5290275 C138.543291,34.4704569 138.014177,34.4410459 137.504304,34.4410459 C135.974937,34.4410459 134.681013,34.6949358 133.622785,35.2004532 C132.564051,35.7067248 131.711392,36.397255 131.064051,37.2735523 C130.417215,38.1501009 129.955443,39.1714422 129.681266,40.3398385 C129.407089,41.5074807 129.269873,42.7736624 129.269873,44.1361211 L129.269873,57.7438661 L120.917722,57.7438661 L120.917722,27.5493174 L128.857975,27.5493174 Z" id="r"></path>
+                      <path d="M144.033165,22.8767376 L144.033165,16.0435798 L152.386076,16.0435798 L152.386076,22.8767376 L144.033165,22.8767376 Z M152.386076,27.5493174 L152.386076,57.7438661 L144.033165,57.7438661 L144.033165,27.5493174 L152.386076,27.5493174 Z" id="i"></path>
+                      <polygon id="x" points="156.738228 27.5493174 166.266582 27.5493174 171.619494 35.4337303 176.913418 27.5493174 186.147848 27.5493174 176.148861 41.6831927 187.383544 57.7441174 177.85443 57.7441174 171.501772 48.2245028 165.148861 57.7441174 155.797468 57.7441174 166.737468 41.8589046"></polygon>
+                      <polygon id="right-bracket" points="197.580759 82.7268844 197.580759 1.93811009 191.725063 1.93811009 191.725063 0 199.828354 0 199.828354 84.6652459 191.725063 84.6652459 191.725063 82.7268844"></polygon>
+                  </g>
+              </g>
+          </g>
+      </svg>
+      <p>An open network for secure, decentralized communication.<br>© 2021 The Matrix.org Foundation C.I.C.</p>
+</footer>
\ No newline at end of file
diff --git a/synapse/res/templates/sso_login_idp_picker.html b/synapse/res/templates/sso_login_idp_picker.html
index 62a640dad2..53b82db84e 100644
--- a/synapse/res/templates/sso_login_idp_picker.html
+++ b/synapse/res/templates/sso_login_idp_picker.html
@@ -2,30 +2,60 @@
 <html lang="en">
     <head>
         <meta charset="UTF-8">
-        <link rel="stylesheet" href="/_matrix/static/client/login/style.css">
-        <title>{{ server_name }} Login</title>
+        <title>Choose identity provider</title>
+        <style type="text/css">
+          {% include "sso.css" without context %}
+
+          .providers {
+            list-style: none;
+            padding: 0;
+          }
+
+          .providers li {
+            margin: 12px;
+          }
+
+          .providers a {
+            display: block;
+            border-radius: 4px;
+            border: 1px solid #17191C;
+            padding: 8px;
+            text-align: center;
+            text-decoration: none;
+            color: #17191C;
+            display: flex;
+            align-items: center;
+            font-weight: bold;
+          }
+
+          .providers a img {
+            width: 24px;
+            height: 24px;
+          }
+          .providers a span {
+            flex: 1;
+          }
+        </style>
     </head>
     <body>
-        <div id="container">
-            <h1 id="title">{{ server_name }} Login</h1>
-            <div class="login_flow">
-                <p>Choose one of the following identity providers:</p>
-            <form>
-                <input type="hidden" name="redirectUrl" value="{{ redirect_url }}">
-                <ul class="radiobuttons">
-{% for p in providers %}
-                    <li>
-                        <input type="radio" name="idp" id="prov{{ loop.index }}" value="{{ p.idp_id }}">
-                        <label for="prov{{ loop.index }}">{{ p.idp_name }}</label>
-{% if p.idp_icon %}
+        <header>
+            <h1>Log in to {{ server_name }} </h1>
+            <p>Choose an identity provider to log in</p>
+        </header>
+        <main>
+            <ul class="providers">
+                {% for p in providers %}
+                <li>
+                    <a href="pick_idp?idp={{ p.idp_id }}&redirectUrl={{ redirect_url | urlencode }}">
+                        {% if p.idp_icon %}
                         <img src="{{ p.idp_icon | mxc_to_http(32, 32) }}"/>
-{% endif %}
-                    </li>
-{% endfor %}
-                </ul>
-                <input type="submit" class="button button--full-width" id="button-submit" value="Submit">
-            </form>
-            </div>
-        </div>
+                        {% endif %}
+                        <span>{{ p.idp_name }}</span>
+                    </a>
+                </li>
+                {% endfor %}
+            </ul>
+        </main>
+        {% include "sso_footer.html" without context %}
     </body>
 </html>
diff --git a/synapse/res/templates/sso_new_user_consent.html b/synapse/res/templates/sso_new_user_consent.html
new file mode 100644
index 0000000000..68c8b9f33a
--- /dev/null
+++ b/synapse/res/templates/sso_new_user_consent.html
@@ -0,0 +1,32 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <title>Agree to terms and conditions</title>
+    <meta name="viewport" content="width=device-width, user-scalable=no">
+    <style type="text/css">
+      {% include "sso.css" without context %}
+
+      #consent_form {
+        margin-top: 56px;
+      }
+    </style>
+</head>
+    <body>
+        <header>
+            <h1>Your account is nearly ready</h1>
+            <p>Agree to the terms to create your account.</p>
+        </header>
+        <main>
+            {% include "sso_partial_profile.html" %}
+            <form method="post" action="{{my_url}}" id="consent_form">
+                <p>
+                    <input id="accepted_version" type="checkbox" name="accepted_version" value="{{ consent_version }}" required>
+                    <label for="accepted_version">I have read and agree to the <a href="{{ terms_url }}" target="_blank" rel="noopener">terms and conditions</a>.</label>
+                </p>
+                <input type="submit" class="primary-button" value="Continue"/>
+            </form>
+        </main>
+        {% include "sso_footer.html" without context %}
+    </body>
+</html>
diff --git a/synapse/res/templates/sso_partial_profile.html b/synapse/res/templates/sso_partial_profile.html
new file mode 100644
index 0000000000..c9c76c455e
--- /dev/null
+++ b/synapse/res/templates/sso_partial_profile.html
@@ -0,0 +1,19 @@
+{# html fragment to be included in SSO pages, to show the user's profile #}
+
+<div class="profile{% if user_profile.avatar_url %} with-avatar{% endif %}">
+    {% if user_profile.avatar_url %}
+    <img src="{{ user_profile.avatar_url | mxc_to_http(64, 64) }}" class="avatar" />
+    {% endif %}
+    {# users that signed up with SSO will have a display_name of some sort;
+       however that is not the case for users who signed up via other
+       methods, so we need to handle that.
+    #}
+    {% if user_profile.display_name %}
+        <div class="display-name">{{ user_profile.display_name }}</div>
+    {% else %}
+        {# split the userid on ':', take the part before the first ':',
+           and then remove the leading '@'. #}
+        <div class="display-name">{{ user_id.split(":")[0][1:] }}</div>
+    {% endif %}
+    <div class="user-id">{{ user_id }}</div>
+</div>
diff --git a/synapse/res/templates/sso_redirect_confirm.html b/synapse/res/templates/sso_redirect_confirm.html
index a45a50916c..1b01471ac8 100644
--- a/synapse/res/templates/sso_redirect_confirm.html
+++ b/synapse/res/templates/sso_redirect_confirm.html
@@ -2,13 +2,39 @@
 <html lang="en">
 <head>
     <meta charset="UTF-8">
-    <title>SSO redirect confirmation</title>
+    <title>Continue to your account</title>
+    <meta name="viewport" content="width=device-width, user-scalable=no">
+    <style type="text/css">
+      {% include "sso.css" without context %}
+
+      .confirm-trust {
+        margin: 34px 0;
+        color: #8D99A5;
+      }
+      .confirm-trust strong {
+        color: #17191C;
+      }
+
+      .confirm-trust::before {
+        content: "";
+        background-image: url('data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTgiIGhlaWdodD0iMTgiIHZpZXdCb3g9IjAgMCAxOCAxOCIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZmlsbC1ydWxlPSJldmVub2RkIiBjbGlwLXJ1bGU9ImV2ZW5vZGQiIGQ9Ik0xNi41IDlDMTYuNSAxMy4xNDIxIDEzLjE0MjEgMTYuNSA5IDE2LjVDNC44NTc4NiAxNi41IDEuNSAxMy4xNDIxIDEuNSA5QzEuNSA0Ljg1Nzg2IDQuODU3ODYgMS41IDkgMS41QzEzLjE0MjEgMS41IDE2LjUgNC44NTc4NiAxNi41IDlaTTcuMjUgOUM3LjI1IDkuNDY1OTYgNy41Njg2OSA5Ljg1NzQ4IDggOS45Njg1VjEyLjM3NUM4IDEyLjkyNzMgOC40NDc3MiAxMy4zNzUgOSAxMy4zNzVIMTAuMTI1QzEwLjY3NzMgMTMuMzc1IDExLjEyNSAxMi45MjczIDExLjEyNSAxMi4zNzVDMTEuMTI1IDExLjgyMjcgMTAuNjc3MyAxMS4zNzUgMTAuMTI1IDExLjM3NUgxMFY5QzEwIDguOTY1NDggOS45OTgyNSA4LjkzMTM3IDkuOTk0ODQgOC44OTc3NkM5Ljk0MzYzIDguMzkzNSA5LjUxNzc3IDggOSA4SDguMjVDNy42OTc3MiA4IDcuMjUgOC40NDc3MiA3LjI1IDlaTTkgNy41QzkuNjIxMzIgNy41IDEwLjEyNSA2Ljk5NjMyIDEwLjEyNSA2LjM3NUMxMC4xMjUgNS43NTM2OCA5LjYyMTMyIDUuMjUgOSA1LjI1QzguMzc4NjggNS4yNSA3Ljg3NSA1Ljc1MzY4IDcuODc1IDYuMzc1QzcuODc1IDYuOTk2MzIgOC4zNzg2OCA3LjUgOSA3LjVaIiBmaWxsPSIjQzFDNkNEIi8+Cjwvc3ZnPgoK');
+        background-repeat: no-repeat;
+        width: 24px;
+        height: 24px;
+        display: block;
+        float: left;
+      }
+    </style>
 </head>
     <body>
-        <p>The application at <span style="font-weight:bold">{{ display_url }}</span> is requesting full access to your <span style="font-weight:bold">{{ server_name }}</span> Matrix account.</p>
-        <p>If you don't recognise this address, you should ignore this and close this tab.</p>
-        <p>
-            <a href="{{ redirect_url }}">I trust this address</a>
-        </p>
+        <header>
+            <h1>Continue to your account</h1>
+        </header>
+        <main>
+            {% include "sso_partial_profile.html" %}
+            <p class="confirm-trust">Continuing will grant <strong>{{ display_url }}</strong> access to your account.</p>
+            <a href="{{ redirect_url }}" class="primary-button">Continue</a>
+        </main>
+        {% include "sso_footer.html" without context %}
     </body>
 </html>
diff --git a/synapse/res/username_picker/index.html b/synapse/res/username_picker/index.html
deleted file mode 100644
index 37ea8bb6d8..0000000000
--- a/synapse/res/username_picker/index.html
+++ /dev/null
@@ -1,19 +0,0 @@
-<!DOCTYPE html>
-<html lang="en">
-  <head>
-    <title>Synapse Login</title>
-    <link rel="stylesheet" href="style.css" type="text/css" />
-  </head>
-  <body>
-    <div class="card">
-      <form method="post" class="form__input" id="form" action="submit">
-        <label for="field-username">Please pick your username:</label>
-        <input type="text" name="username" id="field-username" autofocus="">
-        <input type="submit" class="button button--full-width" id="button-submit" value="Submit">
-      </form>
-      <!-- this is used for feedback -->
-      <div role=alert class="tooltip hidden" id="message"></div>
-      <script src="script.js"></script>
-    </div>
-  </body>
-</html>
diff --git a/synapse/res/username_picker/script.js b/synapse/res/username_picker/script.js
deleted file mode 100644
index 416a7c6f41..0000000000
--- a/synapse/res/username_picker/script.js
+++ /dev/null
@@ -1,95 +0,0 @@
-let inputField = document.getElementById("field-username");
-let inputForm = document.getElementById("form");
-let submitButton = document.getElementById("button-submit");
-let message = document.getElementById("message");
-
-// Submit username and receive response
-function showMessage(messageText) {
-    // Unhide the message text
-    message.classList.remove("hidden");
-
-    message.textContent = messageText;
-};
-
-function doSubmit() {
-    showMessage("Success. Please wait a moment for your browser to redirect.");
-
-    // remove the event handler before re-submitting the form.
-    delete inputForm.onsubmit;
-    inputForm.submit();
-}
-
-function onResponse(response) {
-    // Display message
-    showMessage(response);
-
-    // Enable submit button and input field
-    submitButton.classList.remove('button--disabled');
-    submitButton.value = "Submit";
-};
-
-let allowedUsernameCharacters = RegExp("[^a-z0-9\\.\\_\\=\\-\\/]");
-function usernameIsValid(username) {
-    return !allowedUsernameCharacters.test(username);
-}
-let allowedCharactersString = "lowercase letters, digits, ., _, -, /, =";
-
-function buildQueryString(params) {
-    return Object.keys(params)
-        .map(k => encodeURIComponent(k) + '=' + encodeURIComponent(params[k]))
-        .join('&');
-}
-
-function submitUsername(username) {
-    if(username.length == 0) {
-        onResponse("Please enter a username.");
-        return;
-    }
-    if(!usernameIsValid(username)) {
-        onResponse("Invalid username. Only the following characters are allowed: " + allowedCharactersString);
-        return;
-    }
-
-    // if this browser doesn't support fetch, skip the availability check.
-    if(!window.fetch) {
-        doSubmit();
-        return;
-    }
-
-    let check_uri = 'check?' + buildQueryString({"username": username});
-    fetch(check_uri, {
-        // include the cookie
-        "credentials": "same-origin",
-    }).then((response) => {
-        if(!response.ok) {
-            // for non-200 responses, raise the body of the response as an exception
-            return response.text().then((text) => { throw text; });
-        } else {
-            return response.json();
-        }
-    }).then((json) => {
-        if(json.error) {
-            throw json.error;
-        } else if(json.available) {
-            doSubmit();
-        } else {
-            onResponse("This username is not available, please choose another.");
-        }
-    }).catch((err) => {
-        onResponse("Error checking username availability: " + err);
-    });
-}
-
-function clickSubmit() {
-    event.preventDefault();
-    if(submitButton.classList.contains('button--disabled')) { return; }
-
-    // Disable submit button and input field
-    submitButton.classList.add('button--disabled');
-
-    // Submit username
-    submitButton.value = "Checking...";
-    submitUsername(inputField.value);
-};
-
-inputForm.onsubmit = clickSubmit;
diff --git a/synapse/res/username_picker/style.css b/synapse/res/username_picker/style.css
deleted file mode 100644
index 745bd4c684..0000000000
--- a/synapse/res/username_picker/style.css
+++ /dev/null
@@ -1,27 +0,0 @@
-input[type="text"] {
-  font-size: 100%;
-  background-color: #ededf0;
-  border: 1px solid #fff;
-  border-radius: .2em;
-  padding: .5em .9em;
-  display: block;
-  width: 26em;
-}
-
-.button--disabled {
-  border-color: #fff;
-  background-color: transparent;
-  color: #000;
-  text-transform: none;
-}
-
-.hidden {
-  display: none;
-}
-
-.tooltip {
-  background-color: #f9f9fa;
-  padding: 1em;
-  margin: 1em 0;
-}
-
diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py
index cb24998b47..8457db1e22 100644
--- a/synapse/rest/admin/__init__.py
+++ b/synapse/rest/admin/__init__.py
@@ -45,6 +45,7 @@ from synapse.rest.admin.rooms import (
     RoomEventContextServlet,
     RoomMembersRestServlet,
     RoomRestServlet,
+    RoomStateRestServlet,
     ShutdownRoomRestServlet,
 )
 from synapse.rest.admin.server_notice_servlet import SendServerNoticeServlet
@@ -214,6 +215,7 @@ def register_servlets(hs, http_server):
     """
     register_servlets_for_client_rest_resource(hs, http_server)
     ListRoomRestServlet(hs).register(http_server)
+    RoomStateRestServlet(hs).register(http_server)
     RoomRestServlet(hs).register(http_server)
     RoomMembersRestServlet(hs).register(http_server)
     DeleteRoomRestServlet(hs).register(http_server)
diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py
index c673ff07e7..491f9ca095 100644
--- a/synapse/rest/admin/rooms.py
+++ b/synapse/rest/admin/rooms.py
@@ -295,6 +295,45 @@ class RoomMembersRestServlet(RestServlet):
         return 200, ret
 
 
+class RoomStateRestServlet(RestServlet):
+    """
+    Get full state within a room.
+    """
+
+    PATTERNS = admin_patterns("/rooms/(?P<room_id>[^/]+)/state")
+
+    def __init__(self, hs: "HomeServer"):
+        self.hs = hs
+        self.auth = hs.get_auth()
+        self.store = hs.get_datastore()
+        self.clock = hs.get_clock()
+        self._event_serializer = hs.get_event_client_serializer()
+
+    async def on_GET(
+        self, request: SynapseRequest, room_id: str
+    ) -> Tuple[int, JsonDict]:
+        requester = await self.auth.get_user_by_req(request)
+        await assert_user_is_admin(self.auth, requester.user)
+
+        ret = await self.store.get_room(room_id)
+        if not ret:
+            raise NotFoundError("Room not found")
+
+        event_ids = await self.store.get_current_state_ids(room_id)
+        events = await self.store.get_events(event_ids.values())
+        now = self.clock.time_msec()
+        room_state = await self._event_serializer.serialize_events(
+            events.values(),
+            now,
+            # We don't bother bundling aggregations in when asked for state
+            # events, as clients won't use them.
+            bundle_aggregations=False,
+        )
+        ret = {"state": room_state}
+
+        return 200, ret
+
+
 class JoinRoomAliasServlet(RestServlet):
 
     PATTERNS = admin_patterns("/join/(?P<room_identifier>[^/]*)")
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index 0a561eea60..0fb9419e58 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -333,6 +333,8 @@ def _get_auth_flow_dict_for_idp(idp: SsoIdentityProvider) -> JsonDict:
     e = {"id": idp.idp_id, "name": idp.idp_name}  # type: JsonDict
     if idp.idp_icon:
         e["icon"] = idp.idp_icon
+    if idp.idp_brand:
+        e["brand"] = idp.idp_brand
     return e
 
 
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index 65e68d641b..a84a2fb385 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -54,7 +54,7 @@ logger = logging.getLogger(__name__)
 class EmailPasswordRequestTokenRestServlet(RestServlet):
     PATTERNS = client_patterns("/account/password/email/requestToken$")
 
-    def __init__(self, hs):
+    def __init__(self, hs: "HomeServer"):
         super().__init__()
         self.hs = hs
         self.datastore = hs.get_datastore()
@@ -103,6 +103,8 @@ class EmailPasswordRequestTokenRestServlet(RestServlet):
             # Raise if the provided next_link value isn't valid
             assert_valid_next_link(self.hs, next_link)
 
+        self.identity_handler.ratelimit_request_token_requests(request, "email", email)
+
         # The email will be sent to the stored address.
         # This avoids a potential account hijack by requesting a password reset to
         # an email address which is controlled by the attacker but which, after
@@ -379,6 +381,8 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
                 Codes.THREEPID_DENIED,
             )
 
+        self.identity_handler.ratelimit_request_token_requests(request, "email", email)
+
         if next_link:
             # Raise if the provided next_link value isn't valid
             assert_valid_next_link(self.hs, next_link)
@@ -430,7 +434,7 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
 class MsisdnThreepidRequestTokenRestServlet(RestServlet):
     PATTERNS = client_patterns("/account/3pid/msisdn/requestToken$")
 
-    def __init__(self, hs):
+    def __init__(self, hs: "HomeServer"):
         self.hs = hs
         super().__init__()
         self.store = self.hs.get_datastore()
@@ -458,6 +462,10 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
                 Codes.THREEPID_DENIED,
             )
 
+        self.identity_handler.ratelimit_request_token_requests(
+            request, "msisdn", msisdn
+        )
+
         if next_link:
             # Raise if the provided next_link value isn't valid
             assert_valid_next_link(self.hs, next_link)
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py
index b093183e79..10e1891174 100644
--- a/synapse/rest/client/v2_alpha/register.py
+++ b/synapse/rest/client/v2_alpha/register.py
@@ -126,6 +126,8 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
                 Codes.THREEPID_DENIED,
             )
 
+        self.identity_handler.ratelimit_request_token_requests(request, "email", email)
+
         existing_user_id = await self.hs.get_datastore().get_user_id_by_threepid(
             "email", email
         )
@@ -205,6 +207,10 @@ class MsisdnRegisterRequestTokenRestServlet(RestServlet):
                 Codes.THREEPID_DENIED,
             )
 
+        self.identity_handler.ratelimit_request_token_requests(
+            request, "msisdn", msisdn
+        )
+
         existing_user_id = await self.hs.get_datastore().get_user_id_by_threepid(
             "msisdn", msisdn
         )
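The requestToken handlers above now call ratelimit_request_token_requests before sending out a validation email or SMS. The following is an illustration of the general idea only, a toy sliding-window limiter keyed on the (medium, address) pair; it is not the Synapse implementation, and the limits and names are made up.

    # Illustrative only: a toy per-3PID sliding-window limiter, sketching the
    # kind of check ratelimit_request_token_requests performs.
    import time
    from collections import defaultdict, deque


    class ThreepidTokenRateLimiter:
        def __init__(self, max_requests: int = 3, window_seconds: float = 3600.0):
            self.max_requests = max_requests
            self.window_seconds = window_seconds
            # (medium, address) -> timestamps of recent requestToken calls
            self._requests = defaultdict(deque)

        def check(self, medium: str, address: str) -> bool:
            """Return True if another requestToken call is allowed for this 3PID."""
            now = time.monotonic()
            timestamps = self._requests[(medium, address)]
            # Drop timestamps that have fallen outside the window.
            while timestamps and now - timestamps[0] > self.window_seconds:
                timestamps.popleft()
            if len(timestamps) >= self.max_requests:
                return False
            timestamps.append(now)
            return True


    limiter = ThreepidTokenRateLimiter()
    print(limiter.check("email", "alice@example.com"))  # True until the limit is hit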
diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py
index b3e4d5612e..8b9ef26cf2 100644
--- a/synapse/rest/consent/consent_resource.py
+++ b/synapse/rest/consent/consent_resource.py
@@ -100,6 +100,7 @@ class ConsentResource(DirectServeHtmlResource):
 
         consent_template_directory = hs.config.user_consent_template_dir
 
+        # TODO: switch to synapse.util.templates.build_jinja_env
         loader = jinja2.FileSystemLoader(consent_template_directory)
         self._jinja_env = jinja2.Environment(
             loader=loader, autoescape=jinja2.select_autoescape(["html", "htm", "xml"])
diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py
index 89cdd605aa..aba6d689a8 100644
--- a/synapse/rest/media/v1/media_storage.py
+++ b/synapse/rest/media/v1/media_storage.py
@@ -16,13 +16,17 @@ import contextlib
 import logging
 import os
 import shutil
-from typing import IO, TYPE_CHECKING, Any, Optional, Sequence
+from typing import IO, TYPE_CHECKING, Any, Callable, Optional, Sequence
+
+import attr
 
 from twisted.internet.defer import Deferred
 from twisted.internet.interfaces import IConsumer
 from twisted.protocols.basic import FileSender
 
+from synapse.api.errors import NotFoundError
 from synapse.logging.context import defer_to_thread, make_deferred_yieldable
+from synapse.util import Clock
 from synapse.util.file_consumer import BackgroundFileConsumer
 
 from ._base import FileInfo, Responder
@@ -58,6 +62,8 @@ class MediaStorage:
         self.local_media_directory = local_media_directory
         self.filepaths = filepaths
         self.storage_providers = storage_providers
+        self.spam_checker = hs.get_spam_checker()
+        self.clock = hs.get_clock()
 
     async def store_file(self, source: IO, file_info: FileInfo) -> str:
         """Write `source` to the on disk media store, and also any other
@@ -127,18 +133,29 @@ class MediaStorage:
                     f.flush()
                     f.close()
 
+                    spam = await self.spam_checker.check_media_file_for_spam(
+                        ReadableFileWrapper(self.clock, fname), file_info
+                    )
+                    if spam:
+                        logger.info("Blocking media due to spam checker")
+                        # Note that we'll delete the stored media, due to the
+                        # try/except below. The media also won't be stored in
+                        # the DB.
+                        raise SpamMediaException()
+
                     for provider in self.storage_providers:
                         await provider.store_file(path, file_info)
 
                     finished_called[0] = True
 
                 yield f, fname, finish
-        except Exception:
+        except Exception as e:
             try:
                 os.remove(fname)
             except Exception:
                 pass
-            raise
+
+            raise e from None
 
         if not finished_called:
             raise Exception("Finished callback not called")
@@ -302,3 +319,39 @@ class FileResponder(Responder):
 
     def __exit__(self, exc_type, exc_val, exc_tb):
         self.open_file.close()
+
+
+class SpamMediaException(NotFoundError):
+    """The media was blocked by a spam checker, so we simply 404 the request (in
+    the same way as if it was quarantined).
+    """
+
+
+@attr.s(slots=True)
+class ReadableFileWrapper:
+    """Wrapper that allows reading a file in chunks, yielding to the reactor,
+    and writing to a callback.
+
+    This is a simplified `FileSender` that takes an IO object rather than an
+    `IConsumer`.
+    """
+
+    CHUNK_SIZE = 2 ** 14
+
+    clock = attr.ib(type=Clock)
+    path = attr.ib(type=str)
+
+    async def write_chunks_to(self, callback: Callable[[bytes], None]):
+        """Reads the file in chunks and calls the callback with each chunk.
+        """
+
+        with open(self.path, "rb") as file:
+            while True:
+                chunk = file.read(self.CHUNK_SIZE)
+                if not chunk:
+                    break
+
+                callback(chunk)
+
+                # We yield to the reactor by sleeping for 0 seconds.
+                await self.clock.sleep(0)
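The media store now passes each upload through check_media_file_for_spam, handing the checker a ReadableFileWrapper and the FileInfo. A sketch of a spam-checker module implementing that hook is shown below; the hook signature and write_chunks_to() come from the diff above, while the hash-blocklist logic and module scaffolding are illustrative assumptions.

    # Sketch of a spam-checker module using the new media hook (assumptions
    # noted inline; returning True rejects the upload as spam).
    import hashlib


    class HashBlockingSpamChecker:
        def __init__(self, config, api=None):
            # "blocked_sha256" is a hypothetical config key for this sketch.
            self._blocked_hashes = set(config.get("blocked_sha256", []))

        async def check_media_file_for_spam(self, file_wrapper, file_info) -> bool:
            digest = hashlib.sha256()

            def consume(chunk: bytes) -> None:
                digest.update(chunk)

            # ReadableFileWrapper reads the file in chunks, yielding to the
            # reactor between chunks.
            await file_wrapper.write_chunks_to(consume)
            return digest.hexdigest() in self._blocked_hashes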
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index 6da76ae994..1136277794 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -22,6 +22,7 @@ from twisted.web.http import Request
 from synapse.api.errors import Codes, SynapseError
 from synapse.http.server import DirectServeJsonResource, respond_with_json
 from synapse.http.servlet import parse_string
+from synapse.rest.media.v1.media_storage import SpamMediaException
 
 if TYPE_CHECKING:
     from synapse.app.homeserver import HomeServer
@@ -86,9 +87,14 @@ class UploadResource(DirectServeJsonResource):
         #     disposition = headers.getRawHeaders(b"Content-Disposition")[0]
         # TODO(markjh): parse content-dispostion
 
-        content_uri = await self.media_repo.create_content(
-            media_type, upload_name, request.content, content_length, requester.user
-        )
+        try:
+            content_uri = await self.media_repo.create_content(
+                media_type, upload_name, request.content, content_length, requester.user
+            )
+        except SpamMediaException:
+            # For media uploads we want to respond with a 400 rather than the
+            # default 404, as a 404 here would just be confusing.
+            raise SynapseError(400, "Bad content")
 
         logger.info("Uploaded content with URI %r", content_uri)
 
diff --git a/synapse/rest/synapse/client/__init__.py b/synapse/rest/synapse/client/__init__.py
index c0b733488b..e5ef515090 100644
--- a/synapse/rest/synapse/client/__init__.py
+++ b/synapse/rest/synapse/client/__init__.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2020 The Matrix.org Foundation C.I.C.
+# Copyright 2021 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,3 +12,55 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
+from typing import TYPE_CHECKING, Mapping
+
+from twisted.web.resource import Resource
+
+from synapse.rest.synapse.client.new_user_consent import NewUserConsentResource
+from synapse.rest.synapse.client.pick_idp import PickIdpResource
+from synapse.rest.synapse.client.pick_username import pick_username_resource
+from synapse.rest.synapse.client.sso_register import SsoRegisterResource
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
+
+def build_synapse_client_resource_tree(hs: "HomeServer") -> Mapping[str, Resource]:
+    """Builds a resource tree to include synapse-specific client resources
+
+    These are resources which should be loaded on all workers which expose a C-S API:
+    ie, the main process, and any generic workers so configured.
+
+    Returns:
+         map from path to Resource.
+    """
+    resources = {
+        # SSO bits. These are always loaded, whether or not SSO login is actually
+        # enabled (they just won't work very well if it's not)
+        "/_synapse/client/pick_idp": PickIdpResource(hs),
+        "/_synapse/client/pick_username": pick_username_resource(hs),
+        "/_synapse/client/new_user_consent": NewUserConsentResource(hs),
+        "/_synapse/client/sso_register": SsoRegisterResource(hs),
+    }
+
+    # provider-specific SSO bits. Only load these if they are enabled, since they
+    # rely on optional dependencies.
+    if hs.config.oidc_enabled:
+        from synapse.rest.synapse.client.oidc import OIDCResource
+
+        resources["/_synapse/client/oidc"] = OIDCResource(hs)
+
+    if hs.config.saml2_enabled:
+        from synapse.rest.synapse.client.saml2 import SAML2Resource
+
+        res = SAML2Resource(hs)
+        resources["/_synapse/client/saml2"] = res
+
+        # This is also mounted under '/_matrix' for backwards-compatibility.
+        resources["/_matrix/saml2"] = res
+
+    return resources
+
+
+__all__ = ["build_synapse_client_resource_tree"]
diff --git a/synapse/rest/synapse/client/new_user_consent.py b/synapse/rest/synapse/client/new_user_consent.py
new file mode 100644
index 0000000000..b2e0f93810
--- /dev/null
+++ b/synapse/rest/synapse/client/new_user_consent.py
@@ -0,0 +1,97 @@
+# -*- coding: utf-8 -*-
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+from typing import TYPE_CHECKING
+
+from twisted.web.http import Request
+
+from synapse.api.errors import SynapseError
+from synapse.handlers.sso import get_username_mapping_session_cookie_from_request
+from synapse.http.server import DirectServeHtmlResource, respond_with_html
+from synapse.http.servlet import parse_string
+from synapse.types import UserID
+from synapse.util.templates import build_jinja_env
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
+logger = logging.getLogger(__name__)
+
+
+class NewUserConsentResource(DirectServeHtmlResource):
+    """A resource which collects consent to the server's terms from a new user
+
+    This resource gets mounted at /_synapse/client/new_user_consent, and is shown
+    when we are automatically creating a new user due to an SSO login.
+
+    It shows a template which prompts the user to go and read the Ts and Cs, and click
+    a clickybox if they have done so.
+    """
+
+    def __init__(self, hs: "HomeServer"):
+        super().__init__()
+        self._sso_handler = hs.get_sso_handler()
+        self._server_name = hs.hostname
+        self._consent_version = hs.config.consent.user_consent_version
+
+        def template_search_dirs():
+            if hs.config.sso.sso_template_dir:
+                yield hs.config.sso.sso_template_dir
+            yield hs.config.sso.default_template_dir
+
+        self._jinja_env = build_jinja_env(template_search_dirs(), hs.config)
+
+    async def _async_render_GET(self, request: Request) -> None:
+        try:
+            session_id = get_username_mapping_session_cookie_from_request(request)
+            session = self._sso_handler.get_mapping_session(session_id)
+        except SynapseError as e:
+            logger.warning("Error fetching session: %s", e)
+            self._sso_handler.render_error(request, "bad_session", e.msg, code=e.code)
+            return
+
+        user_id = UserID(session.chosen_localpart, self._server_name)
+        user_profile = {
+            "display_name": session.display_name,
+        }
+
+        template_params = {
+            "user_id": user_id.to_string(),
+            "user_profile": user_profile,
+            "consent_version": self._consent_version,
+            "terms_url": "/_matrix/consent?v=%s" % (self._consent_version,),
+        }
+
+        template = self._jinja_env.get_template("sso_new_user_consent.html")
+        html = template.render(template_params)
+        respond_with_html(request, 200, html)
+
+    async def _async_render_POST(self, request: Request):
+        try:
+            session_id = get_username_mapping_session_cookie_from_request(request)
+        except SynapseError as e:
+            logger.warning("Error fetching session cookie: %s", e)
+            self._sso_handler.render_error(request, "bad_session", e.msg, code=e.code)
+            return
+
+        try:
+            accepted_version = parse_string(request, "accepted_version", required=True)
+        except SynapseError as e:
+            self._sso_handler.render_error(request, "bad_param", e.msg, code=e.code)
+            return
+
+        await self._sso_handler.handle_terms_accepted(
+            request, session_id, accepted_version
+        )
diff --git a/synapse/rest/oidc/__init__.py b/synapse/rest/synapse/client/oidc/__init__.py
index d958dd65bb..64c0deb75d 100644
--- a/synapse/rest/oidc/__init__.py
+++ b/synapse/rest/synapse/client/oidc/__init__.py
@@ -12,11 +12,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 import logging
 
 from twisted.web.resource import Resource
 
-from synapse.rest.oidc.callback_resource import OIDCCallbackResource
+from synapse.rest.synapse.client.oidc.callback_resource import OIDCCallbackResource
 
 logger = logging.getLogger(__name__)
 
@@ -25,3 +26,6 @@ class OIDCResource(Resource):
     def __init__(self, hs):
         Resource.__init__(self)
         self.putChild(b"callback", OIDCCallbackResource(hs))
+
+
+__all__ = ["OIDCResource"]
diff --git a/synapse/rest/oidc/callback_resource.py b/synapse/rest/synapse/client/oidc/callback_resource.py
index f7a0bc4bdb..f7a0bc4bdb 100644
--- a/synapse/rest/oidc/callback_resource.py
+++ b/synapse/rest/synapse/client/oidc/callback_resource.py
diff --git a/synapse/rest/synapse/client/pick_username.py b/synapse/rest/synapse/client/pick_username.py
index d3b6803e65..96077cfcd1 100644
--- a/synapse/rest/synapse/client/pick_username.py
+++ b/synapse/rest/synapse/client/pick_username.py
@@ -12,42 +12,42 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from typing import TYPE_CHECKING
 
-import pkg_resources
+import logging
+from typing import TYPE_CHECKING, List
 
 from twisted.web.http import Request
 from twisted.web.resource import Resource
-from twisted.web.static import File
 
 from synapse.api.errors import SynapseError
-from synapse.handlers.sso import USERNAME_MAPPING_SESSION_COOKIE_NAME
-from synapse.http.server import DirectServeHtmlResource, DirectServeJsonResource
-from synapse.http.servlet import parse_string
+from synapse.handlers.sso import get_username_mapping_session_cookie_from_request
+from synapse.http.server import (
+    DirectServeHtmlResource,
+    DirectServeJsonResource,
+    respond_with_html,
+)
+from synapse.http.servlet import parse_boolean, parse_string
 from synapse.http.site import SynapseRequest
+from synapse.util.templates import build_jinja_env
 
 if TYPE_CHECKING:
     from synapse.server import HomeServer
 
+logger = logging.getLogger(__name__)
+
 
 def pick_username_resource(hs: "HomeServer") -> Resource:
     """Factory method to generate the username picker resource.
 
-    This resource gets mounted under /_synapse/client/pick_username. The top-level
-    resource is just a File resource which serves up the static files in the resources
-    "res" directory, but it has a couple of children:
-
-    * "submit", which does the mechanics of registering the new user, and redirects the
-      browser back to the client URL
+    This resource gets mounted under /_synapse/client/pick_username and has two
+    children:
 
-    * "check": checks if a userid is free.
+      * "account_details": renders the form and handles the POSTed response
+      * "check": a JSON endpoint which checks if a userid is free.
     """
 
-    # XXX should we make this path customisable so that admins can restyle it?
-    base_path = pkg_resources.resource_filename("synapse", "res/username_picker")
-
-    res = File(base_path)
-    res.putChild(b"submit", SubmitResource(hs))
+    res = Resource()
+    res.putChild(b"account_details", AccountDetailsResource(hs))
     res.putChild(b"check", AvailabilityCheckResource(hs))
 
     return res
@@ -61,28 +61,71 @@ class AvailabilityCheckResource(DirectServeJsonResource):
     async def _async_render_GET(self, request: Request):
         localpart = parse_string(request, "username", required=True)
 
-        session_id = request.getCookie(USERNAME_MAPPING_SESSION_COOKIE_NAME)
-        if not session_id:
-            raise SynapseError(code=400, msg="missing session_id")
+        session_id = get_username_mapping_session_cookie_from_request(request)
 
         is_available = await self._sso_handler.check_username_availability(
-            localpart, session_id.decode("ascii", errors="replace")
+            localpart, session_id
         )
         return 200, {"available": is_available}
 
 
-class SubmitResource(DirectServeHtmlResource):
+class AccountDetailsResource(DirectServeHtmlResource):
     def __init__(self, hs: "HomeServer"):
         super().__init__()
         self._sso_handler = hs.get_sso_handler()
 
-    async def _async_render_POST(self, request: SynapseRequest):
-        localpart = parse_string(request, "username", required=True)
+        def template_search_dirs():
+            if hs.config.sso.sso_template_dir:
+                yield hs.config.sso.sso_template_dir
+            yield hs.config.sso.default_template_dir
+
+        self._jinja_env = build_jinja_env(template_search_dirs(), hs.config)
+
+    async def _async_render_GET(self, request: Request) -> None:
+        try:
+            session_id = get_username_mapping_session_cookie_from_request(request)
+            session = self._sso_handler.get_mapping_session(session_id)
+        except SynapseError as e:
+            logger.warning("Error fetching session: %s", e)
+            self._sso_handler.render_error(request, "bad_session", e.msg, code=e.code)
+            return
+
+        idp_id = session.auth_provider_id
+        template_params = {
+            "idp": self._sso_handler.get_identity_providers()[idp_id],
+            "user_attributes": {
+                "display_name": session.display_name,
+                "emails": session.emails,
+            },
+        }
+
+        template = self._jinja_env.get_template("sso_auth_account_details.html")
+        html = template.render(template_params)
+        respond_with_html(request, 200, html)
 
-        session_id = request.getCookie(USERNAME_MAPPING_SESSION_COOKIE_NAME)
-        if not session_id:
-            raise SynapseError(code=400, msg="missing session_id")
+    async def _async_render_POST(self, request: SynapseRequest):
+        try:
+            session_id = get_username_mapping_session_cookie_from_request(request)
+        except SynapseError as e:
+            logger.warning("Error fetching session cookie: %s", e)
+            self._sso_handler.render_error(request, "bad_session", e.msg, code=e.code)
+            return
+
+        try:
+            localpart = parse_string(request, "username", required=True)
+            use_display_name = parse_boolean(request, "use_display_name", default=False)
+
+            try:
+                emails_to_use = [
+                    val.decode("utf-8") for val in request.args.get(b"use_email", [])
+                ]  # type: List[str]
+            except ValueError:
+                raise SynapseError(400, "Query parameter use_email must be utf-8")
+        except SynapseError as e:
+            logger.warning("[session %s] bad param: %s", session_id, e)
+            self._sso_handler.render_error(request, "bad_param", e.msg, code=e.code)
+            return
 
         await self._sso_handler.handle_submit_username_request(
-            request, localpart, session_id.decode("ascii", errors="replace")
+            request, session_id, localpart, use_display_name, emails_to_use
         )
diff --git a/synapse/rest/saml2/__init__.py b/synapse/rest/synapse/client/saml2/__init__.py
index 68da37ca6a..3e8235ee1e 100644
--- a/synapse/rest/saml2/__init__.py
+++ b/synapse/rest/synapse/client/saml2/__init__.py
@@ -12,12 +12,13 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 import logging
 
 from twisted.web.resource import Resource
 
-from synapse.rest.saml2.metadata_resource import SAML2MetadataResource
-from synapse.rest.saml2.response_resource import SAML2ResponseResource
+from synapse.rest.synapse.client.saml2.metadata_resource import SAML2MetadataResource
+from synapse.rest.synapse.client.saml2.response_resource import SAML2ResponseResource
 
 logger = logging.getLogger(__name__)
 
@@ -27,3 +28,6 @@ class SAML2Resource(Resource):
         Resource.__init__(self)
         self.putChild(b"metadata.xml", SAML2MetadataResource(hs))
         self.putChild(b"authn_response", SAML2ResponseResource(hs))
+
+
+__all__ = ["SAML2Resource"]
diff --git a/synapse/rest/saml2/metadata_resource.py b/synapse/rest/synapse/client/saml2/metadata_resource.py
index 1e8526e22e..1e8526e22e 100644
--- a/synapse/rest/saml2/metadata_resource.py
+++ b/synapse/rest/synapse/client/saml2/metadata_resource.py
diff --git a/synapse/rest/saml2/response_resource.py b/synapse/rest/synapse/client/saml2/response_resource.py
index f6668fb5e3..f6668fb5e3 100644
--- a/synapse/rest/saml2/response_resource.py
+++ b/synapse/rest/synapse/client/saml2/response_resource.py
diff --git a/synapse/rest/synapse/client/sso_register.py b/synapse/rest/synapse/client/sso_register.py
new file mode 100644
index 0000000000..dfefeb7796
--- /dev/null
+++ b/synapse/rest/synapse/client/sso_register.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from typing import TYPE_CHECKING
+
+from twisted.web.http import Request
+
+from synapse.api.errors import SynapseError
+from synapse.handlers.sso import get_username_mapping_session_cookie_from_request
+from synapse.http.server import DirectServeHtmlResource
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
+logger = logging.getLogger(__name__)
+
+
+class SsoRegisterResource(DirectServeHtmlResource):
+    """A resource which completes SSO registration
+
+    This resource gets mounted at /_synapse/client/sso_register, and is shown
+    after we collect a username and/or consent for a new SSO user. It (finally)
+    registers the user and confirms the redirect to the client.
+    """
+
+    def __init__(self, hs: "HomeServer"):
+        super().__init__()
+        self._sso_handler = hs.get_sso_handler()
+
+    async def _async_render_GET(self, request: Request) -> None:
+        try:
+            session_id = get_username_mapping_session_cookie_from_request(request)
+        except SynapseError as e:
+            logger.warning("Error fetching session cookie: %s", e)
+            self._sso_handler.render_error(request, "bad_session", e.msg, code=e.code)
+            return
+        await self._sso_handler.register_sso_user(request, session_id)
diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index d2ba4bd2fc..ae4bf1a54f 100644
--- a/synapse/storage/database.py
+++ b/synapse/storage/database.py
@@ -158,8 +158,8 @@ class LoggingDatabaseConnection:
     def commit(self) -> None:
         self.conn.commit()
 
-    def rollback(self, *args, **kwargs) -> None:
-        self.conn.rollback(*args, **kwargs)
+    def rollback(self) -> None:
+        self.conn.rollback()
 
     def __enter__(self) -> "Connection":
         self.conn.__enter__()
@@ -244,12 +244,15 @@ class LoggingTransaction:
         assert self.exception_callbacks is not None
         self.exception_callbacks.append((callback, args, kwargs))
 
+    def fetchone(self) -> Optional[Tuple]:
+        return self.txn.fetchone()
+
+    def fetchmany(self, size: Optional[int] = None) -> List[Tuple]:
+        return self.txn.fetchmany(size=size)
+
     def fetchall(self) -> List[Tuple]:
         return self.txn.fetchall()
 
-    def fetchone(self) -> Tuple:
-        return self.txn.fetchone()
-
     def __iter__(self) -> Iterator[Tuple]:
         return self.txn.__iter__()
 
@@ -754,6 +757,7 @@ class DatabasePool:
         Returns:
             A list of dicts where the key is the column header.
         """
+        assert cursor.description is not None, "cursor.description was None"
         col_headers = [intern(str(column[0])) for column in cursor.description]
         results = [dict(zip(col_headers, row)) for row in cursor]
         return results
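fetchone is now typed as returning Optional[Tuple], so callers must guard against None before unpacking a row, as the prepare_database change later in this diff does. A minimal, self-contained illustration of that pattern with the standard sqlite3 module:

    # Minimal illustration of the Optional[Tuple] contract: check fetchone()'s
    # result before unpacking it, instead of assuming a row always exists.
    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE schema_version (version INTEGER, upgraded BOOLEAN)")

    row = conn.execute("SELECT version, upgraded FROM schema_version").fetchone()
    if row is not None:
        version = int(row[0])
    else:
        version = None  # no row yet: treat the schema as uninitialised
    print(version)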
diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py
index c128889bf9..309f1e865b 100644
--- a/synapse/storage/databases/main/end_to_end_keys.py
+++ b/synapse/storage/databases/main/end_to_end_keys.py
@@ -634,7 +634,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore):
 
     async def get_e2e_cross_signing_keys_bulk(
         self, user_ids: List[str], from_user_id: Optional[str] = None
-    ) -> Dict[str, Dict[str, dict]]:
+    ) -> Dict[str, Optional[Dict[str, dict]]]:
         """Returns the cross-signing keys for a set of users.
 
         Args:
@@ -724,7 +724,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore):
 
     async def claim_e2e_one_time_keys(
         self, query_list: Iterable[Tuple[str, str, str]]
-    ) -> Dict[str, Dict[str, Dict[str, bytes]]]:
+    ) -> Dict[str, Dict[str, Dict[str, str]]]:
         """Take a list of one time keys out of the database.
 
         Args:
diff --git a/synapse/storage/databases/main/metrics.py b/synapse/storage/databases/main/metrics.py
index ab18cc4d79..92e65aa640 100644
--- a/synapse/storage/databases/main/metrics.py
+++ b/synapse/storage/databases/main/metrics.py
@@ -88,6 +88,62 @@ class ServerMetricsStore(EventPushActionsWorkerStore, SQLBaseStore):
             (x[0] - 1) * x[1] for x in res if x[1]
         )
 
+    async def count_daily_e2ee_messages(self):
+        """
+        Returns an estimate of the number of end-to-end encrypted messages
+        sent in the last day.
+        """
+
+        def _count_messages(txn):
+            sql = """
+                SELECT COALESCE(COUNT(*), 0) FROM events
+                WHERE type = 'm.room.encrypted'
+                AND stream_ordering > ?
+            """
+            txn.execute(sql, (self.stream_ordering_day_ago,))
+            (count,) = txn.fetchone()
+            return count
+
+        return await self.db_pool.runInteraction("count_e2ee_messages", _count_messages)
+
+    async def count_daily_sent_e2ee_messages(self):
+        def _count_messages(txn):
+            # This is good enough: if you have silly characters in your own
+            # hostname then that's your own fault.
+            like_clause = "%:" + self.hs.hostname
+
+            sql = """
+                SELECT COALESCE(COUNT(*), 0) FROM events
+                WHERE type = 'm.room.encrypted'
+                    AND sender LIKE ?
+                AND stream_ordering > ?
+            """
+
+            txn.execute(sql, (like_clause, self.stream_ordering_day_ago))
+            (count,) = txn.fetchone()
+            return count
+
+        return await self.db_pool.runInteraction(
+            "count_daily_sent_e2ee_messages", _count_messages
+        )
+
+    async def count_daily_active_e2ee_rooms(self):
+        def _count(txn):
+            sql = """
+                SELECT COALESCE(COUNT(DISTINCT room_id), 0) FROM events
+                WHERE type = 'm.room.encrypted'
+                AND stream_ordering > ?
+            """
+            txn.execute(sql, (self.stream_ordering_day_ago,))
+            (count,) = txn.fetchone()
+            return count
+
+        return await self.db_pool.runInteraction(
+            "count_daily_active_e2ee_rooms", _count
+        )
+
     async def count_daily_messages(self):
         """
         Returns an estimate of the number of messages sent in the last day.
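The three new counters above mirror the existing daily message stats for encrypted events. A rough sketch of gathering them into a stats payload follows; the dict keys are illustrative and the actual phone-home reporting wiring (in synapse/app/phone_stats_home.py) is not shown here.

    # Illustrative collection of the new E2EE counters into a stats payload.
    # Only the three store methods come from the diff above; key names are made up.
    async def collect_e2ee_stats(store) -> dict:
        return {
            "daily_e2ee_messages": await store.count_daily_e2ee_messages(),
            "daily_sent_e2ee_messages": await store.count_daily_sent_e2ee_messages(),
            "daily_active_e2ee_rooms": await store.count_daily_active_e2ee_rooms(),
        }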
diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py
index 0618b4387a..8405dd460f 100644
--- a/synapse/storage/databases/main/registration.py
+++ b/synapse/storage/databases/main/registration.py
@@ -472,6 +472,26 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
 
         return await self.db_pool.runInteraction("get_users_by_id_case_insensitive", f)
 
+    async def record_user_external_id(
+        self, auth_provider: str, external_id: str, user_id: str
+    ) -> None:
+        """Record a mapping from an external user id to a mxid
+
+        Args:
+            auth_provider: identifier for the remote auth provider
+            external_id: id on that system
+            user_id: complete mxid that it is mapped to
+        """
+        await self.db_pool.simple_insert(
+            table="user_external_ids",
+            values={
+                "auth_provider": auth_provider,
+                "external_id": external_id,
+                "user_id": user_id,
+            },
+            desc="record_user_external_id",
+        )
+
     async def get_user_by_external_id(
         self, auth_provider: str, external_id: str
     ) -> Optional[str]:
@@ -1400,26 +1420,6 @@ class RegistrationStore(StatsStore, RegistrationBackgroundUpdateStore):
 
         self._invalidate_cache_and_stream(txn, self.get_user_by_id, (user_id,))
 
-    async def record_user_external_id(
-        self, auth_provider: str, external_id: str, user_id: str
-    ) -> None:
-        """Record a mapping from an external user id to a mxid
-
-        Args:
-            auth_provider: identifier for the remote auth provider
-            external_id: id on that system
-            user_id: complete mxid that it is mapped to
-        """
-        await self.db_pool.simple_insert(
-            table="user_external_ids",
-            values={
-                "auth_provider": auth_provider,
-                "external_id": external_id,
-                "user_id": user_id,
-            },
-            desc="record_user_external_id",
-        )
-
     async def user_set_password_hash(
         self, user_id: str, password_hash: Optional[str]
     ) -> None:
diff --git a/synapse/storage/engines/__init__.py b/synapse/storage/engines/__init__.py
index 035f9ea6e9..d15ccfacde 100644
--- a/synapse/storage/engines/__init__.py
+++ b/synapse/storage/engines/__init__.py
@@ -12,7 +12,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import platform
 
 from ._base import BaseDatabaseEngine, IncorrectDatabaseSetup
 from .postgres import PostgresEngine
@@ -28,11 +27,8 @@ def create_engine(database_config) -> BaseDatabaseEngine:
         return Sqlite3Engine(sqlite3, database_config)
 
     if name == "psycopg2":
-        # pypy requires psycopg2cffi rather than psycopg2
-        if platform.python_implementation() == "PyPy":
-            import psycopg2cffi as psycopg2  # type: ignore
-        else:
-            import psycopg2  # type: ignore
+        # Note that psycopg2cffi-compat provides the psycopg2 module on pypy.
+        import psycopg2  # type: ignore
 
         return PostgresEngine(psycopg2, database_config)
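For context, the PyPy special-casing removed above is assumed to be handled by the psycopg2cffi compat shim mentioned in the comment. A rough sketch of that mechanism, using psycopg2cffi's documented compat.register() helper (the explicit register() call is illustrative; the psycopg2cffi-compat package is assumed to arrange something equivalent):

    import platform

    if platform.python_implementation() == "PyPy":
        from psycopg2cffi import compat

        # Registers psycopg2cffi under the name "psycopg2" in sys.modules.
        compat.register()

    import psycopg2  # the real psycopg2 on CPython, psycopg2cffi on PyPy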
 
diff --git a/synapse/storage/engines/sqlite.py b/synapse/storage/engines/sqlite.py
index 5db0f0b520..b3d1834efb 100644
--- a/synapse/storage/engines/sqlite.py
+++ b/synapse/storage/engines/sqlite.py
@@ -12,6 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import platform
 import struct
 import threading
 import typing
@@ -30,6 +31,11 @@ class Sqlite3Engine(BaseDatabaseEngine["sqlite3.Connection"]):
         database = database_config.get("args", {}).get("database")
         self._is_in_memory = database in (None, ":memory:",)
 
+        if platform.python_implementation() == "PyPy":
+            # pypy's sqlite3 module doesn't handle bytearrays, so convert them
+            # back to bytes.
+            database_module.register_adapter(bytearray, lambda array: bytes(array))
+
         # The current max state_group, or None if we haven't looked
         # in the DB yet.
         self._current_state_group_id = None
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index 566ea19bae..28bb2eb662 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -619,9 +619,9 @@ def _get_or_create_schema_state(
 
     txn.execute("SELECT version, upgraded FROM schema_version")
     row = txn.fetchone()
-    current_version = int(row[0]) if row else None
 
-    if current_version:
+    if row is not None:
+        current_version = int(row[0])
         txn.execute(
             "SELECT file FROM applied_schema_deltas WHERE version >= ?",
             (current_version,),
diff --git a/synapse/storage/types.py b/synapse/storage/types.py
index 9cadcba18f..17291c9d5e 100644
--- a/synapse/storage/types.py
+++ b/synapse/storage/types.py
@@ -12,7 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from typing import Any, Iterable, Iterator, List, Optional, Tuple
+from typing import Any, Iterator, List, Mapping, Optional, Sequence, Tuple, Union
 
 from typing_extensions import Protocol
 
@@ -20,23 +20,44 @@ from typing_extensions import Protocol
 Some very basic protocol definitions for the DB-API2 classes specified in PEP-249
 """
 
+_Parameters = Union[Sequence[Any], Mapping[str, Any]]
+
 
 class Cursor(Protocol):
-    def execute(self, sql: str, parameters: Iterable[Any] = ...) -> Any:
+    def execute(self, sql: str, parameters: _Parameters = ...) -> Any:
         ...
 
-    def executemany(self, sql: str, parameters: Iterable[Iterable[Any]]) -> Any:
+    def executemany(self, sql: str, parameters: Sequence[_Parameters]) -> Any:
         ...
 
-    def fetchall(self) -> List[Tuple]:
+    def fetchone(self) -> Optional[Tuple]:
+        ...
+
+    def fetchmany(self, size: Optional[int] = ...) -> List[Tuple]:
         ...
 
-    def fetchone(self) -> Tuple:
+    def fetchall(self) -> List[Tuple]:
         ...
 
     @property
-    def description(self) -> Any:
-        return None
+    def description(
+        self,
+    ) -> Optional[
+        Sequence[
+            # Note that this is an approximate typing based on sqlite3 and other
+            # drivers, and may not be entirely accurate.
+            Tuple[
+                str,
+                Optional[Any],
+                Optional[int],
+                Optional[int],
+                Optional[int],
+                Optional[int],
+                Optional[int],
+            ]
+        ]
+    ]:
+        ...
 
     @property
     def rowcount(self) -> int:
@@ -59,7 +80,7 @@ class Connection(Protocol):
     def commit(self) -> None:
         ...
 
-    def rollback(self, *args, **kwargs) -> None:
+    def rollback(self) -> None:
         ...
 
     def __enter__(self) -> "Connection":
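A minimal sketch of what the tightened Cursor typing implies for callers; the helper names below are illustrative rather than part of the codebase, and the schema_version query mirrors the prepare_database.py change above:

    from typing import List, Optional

    from synapse.storage.types import Cursor

    def get_schema_version(txn: Cursor) -> Optional[int]:
        # fetchone() is now typed as Optional[Tuple], so the no-row case has to
        # be handled explicitly (or ruled out with an assert, as in the
        # sequence.py change that follows).
        txn.execute("SELECT version FROM schema_version")
        row = txn.fetchone()
        return int(row[0]) if row is not None else None

    def column_names(txn: Cursor) -> List[str]:
        # description is a sequence of DB-API 7-tuples whose first element is
        # the column name; it is None before any query has been executed.
        assert txn.description is not None
        return [col[0] for col in txn.description]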
diff --git a/synapse/storage/util/sequence.py b/synapse/storage/util/sequence.py
index 0ec4dc2918..e2b316a218 100644
--- a/synapse/storage/util/sequence.py
+++ b/synapse/storage/util/sequence.py
@@ -106,7 +106,9 @@ class PostgresSequenceGenerator(SequenceGenerator):
 
     def get_next_id_txn(self, txn: Cursor) -> int:
         txn.execute("SELECT nextval(?)", (self._sequence_name,))
-        return txn.fetchone()[0]
+        fetch_res = txn.fetchone()
+        assert fetch_res is not None
+        return fetch_res[0]
 
     def get_next_mult_txn(self, txn: Cursor, n: int) -> List[int]:
         txn.execute(
@@ -147,7 +149,9 @@ class PostgresSequenceGenerator(SequenceGenerator):
         txn.execute(
             "SELECT last_value, is_called FROM %(seq)s" % {"seq": self._sequence_name}
         )
-        last_value, is_called = txn.fetchone()
+        fetch_res = txn.fetchone()
+        assert fetch_res is not None
+        last_value, is_called = fetch_res
 
         # If we have an associated stream check the stream_positions table.
         max_in_stream_positions = None
diff --git a/synapse/util/templates.py b/synapse/util/templates.py
new file mode 100644
index 0000000000..7e5109d206
--- /dev/null
+++ b/synapse/util/templates.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for dealing with jinja2 templates"""
+
+import time
+import urllib.parse
+from typing import TYPE_CHECKING, Callable, Iterable, Union
+
+import jinja2
+
+if TYPE_CHECKING:
+    from synapse.config.homeserver import HomeServerConfig
+
+
+def build_jinja_env(
+    template_search_directories: Iterable[str],
+    config: "HomeServerConfig",
+    autoescape: Union[bool, Callable[[str], bool], None] = None,
+) -> jinja2.Environment:
+    """Set up a Jinja2 environment to load templates from the given search path
+
+    The returned environment defines the following filters:
+        - format_ts: formats timestamps as strings in the server's local timezone
+             (XXX: why is that useful??)
+        - mxc_to_http: converts mxc: uris to http URIs. Args are:
+             (uri, width, height, resize_method="crop")
+
+    and the following global variables:
+        - server_name: matrix server name
+
+    Args:
+        template_search_directories: directories to search for templates
+
+        config: homeserver config, for things like `server_name` and `public_baseurl`
+
+        autoescape: whether template variables should be autoescaped. bool, or
+           a function mapping from template name to bool. Defaults to escaping templates
+           whose names end in .html, .xml or .htm.
+
+    Returns:
+        jinja environment
+    """
+
+    if autoescape is None:
+        autoescape = jinja2.select_autoescape()
+
+    loader = jinja2.FileSystemLoader(template_search_directories)
+    env = jinja2.Environment(loader=loader, autoescape=autoescape)
+
+    # Update the environment with our custom filters
+    env.filters.update(
+        {
+            "format_ts": _format_ts_filter,
+            "mxc_to_http": _create_mxc_to_http_filter(config.public_baseurl),
+        }
+    )
+
+    # common variables for all templates
+    env.globals.update({"server_name": config.server_name})
+
+    return env
+
+
+def _create_mxc_to_http_filter(public_baseurl: str) -> Callable:
+    """Create and return a jinja2 filter that converts MXC urls to HTTP
+
+    Args:
+        public_baseurl: The public, accessible base URL of the homeserver
+    """
+
+    def mxc_to_http_filter(value, width, height, resize_method="crop"):
+        if value[0:6] != "mxc://":
+            return ""
+
+        server_and_media_id = value[6:]
+        fragment = None
+        if "#" in server_and_media_id:
+            server_and_media_id, fragment = server_and_media_id.split("#", 1)
+            fragment = "#" + fragment
+
+        params = {"width": width, "height": height, "method": resize_method}
+        return "%s_matrix/media/v1/thumbnail/%s?%s%s" % (
+            public_baseurl,
+            server_and_media_id,
+            urllib.parse.urlencode(params),
+            fragment or "",
+        )
+
+    return mxc_to_http_filter
+
+
+def _format_ts_filter(value: int, format: str):
+    return time.strftime(format, time.localtime(value / 1000))
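A short usage sketch of the new helper; the search directory, template name and `hs.config` object are illustrative stand-ins, while build_jinja_env, the format_ts and mxc_to_http filters, and the server_name global are as defined above:

    from synapse.util.templates import build_jinja_env

    # hypothetical template "notice.html" on the search path:
    #   <p>{{ server_name }}, {{ ts | format_ts("%Y-%m-%d") }}:
    #      <img src="{{ avatar_url | mxc_to_http(32, 32) }}"></p>
    env = build_jinja_env(["/path/to/templates"], hs.config)
    template = env.get_template("notice.html")
    html = template.render(
        ts=1612345678000,  # milliseconds, as format_ts expects
        avatar_url="mxc://example.com/abc123",
    )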
diff --git a/synapse/visibility.py b/synapse/visibility.py
index 2109e2f37f..e39d02602a 100644
--- a/synapse/visibility.py
+++ b/synapse/visibility.py
@@ -234,7 +234,7 @@ async def filter_events_for_client(
 
         elif visibility == HistoryVisibility.SHARED and is_peeking:
             # if the visibility is shared, users cannot see the event unless
-            # they have *subequently* joined the room (or were members at the
+            # they have *subsequently* joined the room (or were members at the
             # time, of course)
             #
             # XXX: if the user has subsequently joined and then left again,
diff --git a/tests/config/test_server.py b/tests/config/test_server.py
index a10d017120..98af7aa675 100644
--- a/tests/config/test_server.py
+++ b/tests/config/test_server.py
@@ -15,7 +15,8 @@
 
 import yaml
 
-from synapse.config.server import ServerConfig, is_threepid_reserved
+from synapse.config._base import ConfigError
+from synapse.config.server import ServerConfig, generate_ip_set, is_threepid_reserved
 
 from tests import unittest
 
@@ -128,3 +129,61 @@ class ServerConfigTestCase(unittest.TestCase):
         )
 
         self.assertEqual(conf["listeners"], expected_listeners)
+
+
+class GenerateIpSetTestCase(unittest.TestCase):
+    def test_empty(self):
+        ip_set = generate_ip_set(())
+        self.assertFalse(ip_set)
+
+        ip_set = generate_ip_set((), ())
+        self.assertFalse(ip_set)
+
+    def test_generate(self):
+        """Check adding IPv4 and IPv6 addresses."""
+        # IPv4 address
+        ip_set = generate_ip_set(("1.2.3.4",))
+        self.assertEqual(len(ip_set.iter_cidrs()), 4)
+
+        # IPv4 CIDR
+        ip_set = generate_ip_set(("1.2.3.4/24",))
+        self.assertEqual(len(ip_set.iter_cidrs()), 4)
+
+        # IPv6 address
+        ip_set = generate_ip_set(("2001:db8::8a2e:370:7334",))
+        self.assertEqual(len(ip_set.iter_cidrs()), 1)
+
+        # IPv6 CIDR
+        ip_set = generate_ip_set(("2001:db8::/104",))
+        self.assertEqual(len(ip_set.iter_cidrs()), 1)
+
+        # The addresses can overlap OK.
+        ip_set = generate_ip_set(("1.2.3.4", "::1.2.3.4"))
+        self.assertEqual(len(ip_set.iter_cidrs()), 4)
+
+    def test_extra(self):
+        """Extra IP addresses are treated the same."""
+        ip_set = generate_ip_set((), ("1.2.3.4",))
+        self.assertEqual(len(ip_set.iter_cidrs()), 4)
+
+        ip_set = generate_ip_set(("1.1.1.1",), ("1.2.3.4",))
+        self.assertEqual(len(ip_set.iter_cidrs()), 8)
+
+        # They can duplicate without error.
+        ip_set = generate_ip_set(("1.2.3.4",), ("1.2.3.4",))
+        self.assertEqual(len(ip_set.iter_cidrs()), 4)
+
+    def test_bad_value(self):
+        """An error should be raised if a bad value is passed in."""
+        with self.assertRaises(ConfigError):
+            generate_ip_set(("not-an-ip",))
+
+        with self.assertRaises(ConfigError):
+            generate_ip_set(("1.2.3.4/128",))
+
+        with self.assertRaises(ConfigError):
+            generate_ip_set((":::",))
+
+        # The following get treated as empty data.
+        self.assertFalse(generate_ip_set(None))
+        self.assertFalse(generate_ip_set({}))
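For context, a minimal usage sketch of the function under test; the ranges are illustrative and the membership checks assume the result behaves like a netaddr IPSet:

    from netaddr import IPAddress

    from synapse.config.server import generate_ip_set

    # Build a blocked-address set from configured ranges plus some extras.
    ip_blacklist = generate_ip_set(["10.0.0.0/8"], ["::1"])

    assert IPAddress("10.1.2.3") in ip_blacklist
    assert IPAddress("1.2.3.4") not in ip_blacklist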
diff --git a/tests/handlers/test_cas.py b/tests/handlers/test_cas.py
index c37bb6440e..7baf224f7e 100644
--- a/tests/handlers/test_cas.py
+++ b/tests/handlers/test_cas.py
@@ -62,7 +62,7 @@ class CasHandlerTestCase(HomeserverTestCase):
 
         # check that the auth handler got called as expected
         auth_handler.complete_sso_login.assert_called_once_with(
-            "@test_user:test", request, "redirect_uri", None
+            "@test_user:test", request, "redirect_uri", None, new_user=True
         )
 
     def test_map_cas_user_to_existing_user(self):
@@ -85,7 +85,7 @@ class CasHandlerTestCase(HomeserverTestCase):
 
         # check that the auth handler got called as expected
         auth_handler.complete_sso_login.assert_called_once_with(
-            "@test_user:test", request, "redirect_uri", None
+            "@test_user:test", request, "redirect_uri", None, new_user=False
         )
 
         # Subsequent calls should map to the same mxid.
@@ -94,7 +94,7 @@ class CasHandlerTestCase(HomeserverTestCase):
             self.handler._handle_cas_response(request, cas_response, "redirect_uri", "")
         )
         auth_handler.complete_sso_login.assert_called_once_with(
-            "@test_user:test", request, "redirect_uri", None
+            "@test_user:test", request, "redirect_uri", None, new_user=False
         )
 
     def test_map_cas_user_to_invalid_localpart(self):
@@ -112,7 +112,7 @@ class CasHandlerTestCase(HomeserverTestCase):
 
         # check that the auth handler got called as expected
         auth_handler.complete_sso_login.assert_called_once_with(
-            "@f=c3=b6=c3=b6:test", request, "redirect_uri", None
+            "@f=c3=b6=c3=b6:test", request, "redirect_uri", None, new_user=True
         )
 
 
diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py
index 0b24b89a2e..983e368592 100644
--- a/tests/handlers/test_federation.py
+++ b/tests/handlers/test_federation.py
@@ -16,7 +16,7 @@ import logging
 from unittest import TestCase
 
 from synapse.api.constants import EventTypes
-from synapse.api.errors import AuthError, Codes, SynapseError
+from synapse.api.errors import AuthError, Codes, LimitExceededError, SynapseError
 from synapse.api.room_versions import RoomVersions
 from synapse.events import EventBase
 from synapse.federation.federation_base import event_from_pdu_json
@@ -191,6 +191,50 @@ class FederationTestCase(unittest.HomeserverTestCase):
 
         self.assertEqual(sg, sg2)
 
+    @unittest.override_config(
+        {"rc_invites": {"per_user": {"per_second": 0.5, "burst_count": 3}}}
+    )
+    def test_invite_by_user_ratelimit(self):
+        """Tests that invites from federation to a particular user are
+        actually rate-limited.
+        """
+        other_server = "otherserver"
+        other_user = "@otheruser:" + other_server
+
+        # create the room
+        user_id = self.register_user("kermit", "test")
+        tok = self.login("kermit", "test")
+
+        def create_invite():
+            room_id = self.helper.create_room_as(room_creator=user_id, tok=tok)
+            room_version = self.get_success(self.store.get_room_version(room_id))
+            return event_from_pdu_json(
+                {
+                    "type": EventTypes.Member,
+                    "content": {"membership": "invite"},
+                    "room_id": room_id,
+                    "sender": other_user,
+                    "state_key": "@user:test",
+                    "depth": 32,
+                    "prev_events": [],
+                    "auth_events": [],
+                    "origin_server_ts": self.clock.time_msec(),
+                },
+                room_version,
+            )
+
+        for i in range(3):
+            event = create_invite()
+            self.get_success(
+                self.handler.on_invite_request(other_server, event, event.room_version,)
+            )
+
+        event = create_invite()
+        self.get_failure(
+            self.handler.on_invite_request(other_server, event, event.room_version,),
+            exc=LimitExceededError,
+        )
+
     def _build_and_send_join_event(self, other_server, other_user, room_id):
         join_event = self.get_success(
             self.handler.on_make_join_request(other_server, room_id, other_user)
diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py
index b3dfa40d25..ad20400b1d 100644
--- a/tests/handlers/test_oidc.py
+++ b/tests/handlers/test_oidc.py
@@ -40,7 +40,7 @@ ISSUER = "https://issuer/"
 CLIENT_ID = "test-client-id"
 CLIENT_SECRET = "test-client-secret"
 BASE_URL = "https://synapse/"
-CALLBACK_URL = BASE_URL + "_synapse/oidc/callback"
+CALLBACK_URL = BASE_URL + "_synapse/client/oidc/callback"
 SCOPES = ["openid"]
 
 AUTHORIZATION_ENDPOINT = ISSUER + "authorize"
@@ -58,12 +58,6 @@ COMMON_CONFIG = {
 }
 
 
-# The cookie name and path don't really matter, just that it has to be coherent
-# between the callback & redirect handlers.
-COOKIE_NAME = b"oidc_session"
-COOKIE_PATH = "/_synapse/oidc"
-
-
 class TestMappingProvider:
     @staticmethod
     def parse_config(config):
@@ -340,8 +334,11 @@ class OidcHandlerTestCase(HomeserverTestCase):
         # For some reason, call.args does not work with python3.5
         args = calls[0][0]
         kwargs = calls[0][1]
-        self.assertEqual(args[0], COOKIE_NAME)
-        self.assertEqual(kwargs["path"], COOKIE_PATH)
+
+        # The cookie name and path don't really matter; they just have to be consistent
+        # between the redirect and callback handlers.
+        self.assertEqual(args[0], b"oidc_session")
+        self.assertEqual(kwargs["path"], "/_synapse/client/oidc")
         cookie = args[1]
 
         macaroon = pymacaroons.Macaroon.deserialize(cookie)
@@ -419,7 +416,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
         self.get_success(self.handler.handle_oidc_callback(request))
 
         auth_handler.complete_sso_login.assert_called_once_with(
-            expected_user_id, request, client_redirect_url, None,
+            expected_user_id, request, client_redirect_url, None, new_user=True
         )
         self.provider._exchange_code.assert_called_once_with(code)
         self.provider._parse_id_token.assert_called_once_with(token, nonce=nonce)
@@ -450,7 +447,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
         self.get_success(self.handler.handle_oidc_callback(request))
 
         auth_handler.complete_sso_login.assert_called_once_with(
-            expected_user_id, request, client_redirect_url, None,
+            expected_user_id, request, client_redirect_url, None, new_user=False
         )
         self.provider._exchange_code.assert_called_once_with(code)
         self.provider._parse_id_token.assert_not_called()
@@ -623,7 +620,11 @@ class OidcHandlerTestCase(HomeserverTestCase):
         self.get_success(self.handler.handle_oidc_callback(request))
 
         auth_handler.complete_sso_login.assert_called_once_with(
-            "@foo:test", request, client_redirect_url, {"phone": "1234567"},
+            "@foo:test",
+            request,
+            client_redirect_url,
+            {"phone": "1234567"},
+            new_user=True,
         )
 
     def test_map_userinfo_to_user(self):
@@ -637,7 +638,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
         }
         self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
         auth_handler.complete_sso_login.assert_called_once_with(
-            "@test_user:test", ANY, ANY, None,
+            "@test_user:test", ANY, ANY, None, new_user=True
         )
         auth_handler.complete_sso_login.reset_mock()
 
@@ -648,7 +649,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
         }
         self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
         auth_handler.complete_sso_login.assert_called_once_with(
-            "@test_user_2:test", ANY, ANY, None,
+            "@test_user_2:test", ANY, ANY, None, new_user=True
         )
         auth_handler.complete_sso_login.reset_mock()
 
@@ -685,14 +686,14 @@ class OidcHandlerTestCase(HomeserverTestCase):
         }
         self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
         auth_handler.complete_sso_login.assert_called_once_with(
-            user.to_string(), ANY, ANY, None,
+            user.to_string(), ANY, ANY, None, new_user=False
         )
         auth_handler.complete_sso_login.reset_mock()
 
         # Subsequent calls should map to the same mxid.
         self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
         auth_handler.complete_sso_login.assert_called_once_with(
-            user.to_string(), ANY, ANY, None,
+            user.to_string(), ANY, ANY, None, new_user=False
         )
         auth_handler.complete_sso_login.reset_mock()
 
@@ -707,7 +708,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
         }
         self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
         auth_handler.complete_sso_login.assert_called_once_with(
-            user.to_string(), ANY, ANY, None,
+            user.to_string(), ANY, ANY, None, new_user=False
         )
         auth_handler.complete_sso_login.reset_mock()
 
@@ -743,7 +744,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
 
         self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
         auth_handler.complete_sso_login.assert_called_once_with(
-            "@TEST_USER_2:test", ANY, ANY, None,
+            "@TEST_USER_2:test", ANY, ANY, None, new_user=False
         )
 
     def test_map_userinfo_to_invalid_localpart(self):
@@ -779,7 +780,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
 
         # test_user is already taken, so test_user1 gets registered instead.
         auth_handler.complete_sso_login.assert_called_once_with(
-            "@test_user1:test", ANY, ANY, None,
+            "@test_user1:test", ANY, ANY, None, new_user=True
         )
         auth_handler.complete_sso_login.reset_mock()
 
diff --git a/tests/handlers/test_saml.py b/tests/handlers/test_saml.py
index 261c7083d1..a8d6c0f617 100644
--- a/tests/handlers/test_saml.py
+++ b/tests/handlers/test_saml.py
@@ -131,7 +131,7 @@ class SamlHandlerTestCase(HomeserverTestCase):
 
         # check that the auth handler got called as expected
         auth_handler.complete_sso_login.assert_called_once_with(
-            "@test_user:test", request, "redirect_uri", None
+            "@test_user:test", request, "redirect_uri", None, new_user=True
         )
 
     @override_config({"saml2_config": {"grandfathered_mxid_source_attribute": "mxid"}})
@@ -157,7 +157,7 @@ class SamlHandlerTestCase(HomeserverTestCase):
 
         # check that the auth handler got called as expected
         auth_handler.complete_sso_login.assert_called_once_with(
-            "@test_user:test", request, "", None
+            "@test_user:test", request, "", None, new_user=False
         )
 
         # Subsequent calls should map to the same mxid.
@@ -166,7 +166,7 @@ class SamlHandlerTestCase(HomeserverTestCase):
             self.handler._handle_authn_response(request, saml_response, "")
         )
         auth_handler.complete_sso_login.assert_called_once_with(
-            "@test_user:test", request, "", None
+            "@test_user:test", request, "", None, new_user=False
         )
 
     def test_map_saml_response_to_invalid_localpart(self):
@@ -214,7 +214,7 @@ class SamlHandlerTestCase(HomeserverTestCase):
 
         # test_user is already taken, so test_user1 gets registered instead.
         auth_handler.complete_sso_login.assert_called_once_with(
-            "@test_user1:test", request, "", None
+            "@test_user1:test", request, "", None, new_user=True
         )
         auth_handler.complete_sso_login.reset_mock()
 
diff --git a/tests/push/test_email.py b/tests/push/test_email.py
index 961bf09de9..22f452ec24 100644
--- a/tests/push/test_email.py
+++ b/tests/push/test_email.py
@@ -124,13 +124,18 @@ class EmailPusherTests(HomeserverTestCase):
         )
         self.helper.join(room=room, user=self.others[0].id, tok=self.others[0].token)
 
-        # The other user sends some messages
+        # The other user sends a single message.
         self.helper.send(room, body="Hi!", tok=self.others[0].token)
-        self.helper.send(room, body="There!", tok=self.others[0].token)
 
         # We should get emailed about that message
         self._check_for_mail()
 
+        # The other user sends multiple messages.
+        self.helper.send(room, body="Hi!", tok=self.others[0].token)
+        self.helper.send(room, body="There!", tok=self.others[0].token)
+
+        self._check_for_mail()
+
     def test_invite_sends_email(self):
         # Create a room and invite the user to it
         room = self.helper.create_room_as(self.others[0].id, tok=self.others[0].token)
@@ -187,6 +192,75 @@ class EmailPusherTests(HomeserverTestCase):
         # We should get emailed about those messages
         self._check_for_mail()
 
+    def test_multiple_rooms(self):
+        # We want to test multiple notifications from multiple rooms, so we pause
+        # processing of push while we send messages.
+        self.pusher._pause_processing()
+
+        # Create two rooms, each shared with a different other user
+        rooms = [
+            self.helper.create_room_as(self.user_id, tok=self.access_token),
+            self.helper.create_room_as(self.user_id, tok=self.access_token),
+        ]
+
+        for r, other in zip(rooms, self.others):
+            self.helper.invite(
+                room=r, src=self.user_id, tok=self.access_token, targ=other.id
+            )
+            self.helper.join(room=r, user=other.id, tok=other.token)
+
+        # The other users send some messages
+        self.helper.send(rooms[0], body="Hi!", tok=self.others[0].token)
+        self.helper.send(rooms[1], body="There!", tok=self.others[1].token)
+        self.helper.send(rooms[1], body="There!", tok=self.others[1].token)
+
+        # Nothing should have happened yet, as we're paused.
+        assert not self.email_attempts
+
+        self.pusher._resume_processing()
+
+        # We should get emailed about those messages
+        self._check_for_mail()
+
+    def test_empty_room(self):
+        """All users leaving a room shouldn't cause the pusher to break."""
+        # Create a simple room with two users
+        room = self.helper.create_room_as(self.user_id, tok=self.access_token)
+        self.helper.invite(
+            room=room, src=self.user_id, tok=self.access_token, targ=self.others[0].id
+        )
+        self.helper.join(room=room, user=self.others[0].id, tok=self.others[0].token)
+
+        # The other user sends a single message.
+        self.helper.send(room, body="Hi!", tok=self.others[0].token)
+
+        # Leave the room before the message is processed.
+        self.helper.leave(room, self.user_id, tok=self.access_token)
+        self.helper.leave(room, self.others[0].id, tok=self.others[0].token)
+
+        # We should get emailed about that message
+        self._check_for_mail()
+
+    def test_empty_room_multiple_messages(self):
+        """All users leaving a room shouldn't cause the pusher to break."""
+        # Create a simple room with two users
+        room = self.helper.create_room_as(self.user_id, tok=self.access_token)
+        self.helper.invite(
+            room=room, src=self.user_id, tok=self.access_token, targ=self.others[0].id
+        )
+        self.helper.join(room=room, user=self.others[0].id, tok=self.others[0].token)
+
+        # The other user sends multiple messages.
+        self.helper.send(room, body="Hi!", tok=self.others[0].token)
+        self.helper.send(room, body="There!", tok=self.others[0].token)
+
+        # Leave the room before the message is processed.
+        self.helper.leave(room, self.user_id, tok=self.access_token)
+        self.helper.leave(room, self.others[0].id, tok=self.others[0].token)
+
+        # We should get emailed about those messages
+        self._check_for_mail()
+
     def test_encrypted_message(self):
         room = self.helper.create_room_as(self.user_id, tok=self.access_token)
         self.helper.invite(
@@ -239,3 +313,6 @@ class EmailPusherTests(HomeserverTestCase):
         pushers = list(pushers)
         self.assertEqual(len(pushers), 1)
         self.assertTrue(pushers[0].last_stream_ordering > last_stream_ordering)
+
+        # Reset the attempts.
+        self.email_attempts = []
diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py
index f4f5569a89..2a217b1ce0 100644
--- a/tests/rest/admin/test_room.py
+++ b/tests/rest/admin/test_room.py
@@ -1180,6 +1180,21 @@ class RoomTestCase(unittest.HomeserverTestCase):
         )
         self.assertEqual(channel.json_body["total"], 3)
 
+    def test_room_state(self):
+        """Test that room state can be requested correctly"""
+        # Create a test room
+        room_id = self.helper.create_room_as(self.admin_user, tok=self.admin_user_tok)
+
+        url = "/_synapse/admin/v1/rooms/%s/state" % (room_id,)
+        channel = self.make_request(
+            "GET", url.encode("ascii"), access_token=self.admin_user_tok,
+        )
+        self.assertEqual(200, channel.code, msg=channel.json_body)
+        self.assertIn("state", channel.json_body)
+        # Testing that the state events match is painful and not done here. We assume
+        # that create_room already does the right thing, so there is no need to verify
+        # the state events it created.
+
 
 class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
 
diff --git a/tests/rest/client/v1/test_login.py b/tests/rest/client/v1/test_login.py
index e2bb945453..ceb4ad2366 100644
--- a/tests/rest/client/v1/test_login.py
+++ b/tests/rest/client/v1/test_login.py
@@ -15,7 +15,7 @@
 
 import time
 import urllib.parse
-from typing import Any, Dict, Union
+from typing import Any, Dict, List, Union
 from urllib.parse import urlencode
 
 from mock import Mock
@@ -29,8 +29,7 @@ from synapse.appservice import ApplicationService
 from synapse.rest.client.v1 import login, logout
 from synapse.rest.client.v2_alpha import devices, register
 from synapse.rest.client.v2_alpha.account import WhoamiRestServlet
-from synapse.rest.synapse.client.pick_idp import PickIdpResource
-from synapse.rest.synapse.client.pick_username import pick_username_resource
+from synapse.rest.synapse.client import build_synapse_client_resource_tree
 from synapse.types import create_requester
 
 from tests import unittest
@@ -423,11 +422,8 @@ class MultiSSOTestCase(unittest.HomeserverTestCase):
         return config
 
     def create_resource_dict(self) -> Dict[str, Resource]:
-        from synapse.rest.oidc import OIDCResource
-
         d = super().create_resource_dict()
-        d["/_synapse/client/pick_idp"] = PickIdpResource(self.hs)
-        d["/_synapse/oidc"] = OIDCResource(self.hs)
+        d.update(build_synapse_client_resource_tree(self.hs))
         return d
 
     def test_get_login_flows(self):
@@ -497,13 +493,21 @@ class MultiSSOTestCase(unittest.HomeserverTestCase):
         self.assertEqual(channel.code, 200, channel.result)
 
         # parse the form to check it has fields assumed elsewhere in this class
+        html = channel.result["body"].decode("utf-8")
         p = TestHtmlParser()
-        p.feed(channel.result["body"].decode("utf-8"))
+        p.feed(html)
         p.close()
 
-        self.assertCountEqual(p.radios["idp"], ["cas", "oidc", "oidc-idp1", "saml"])
+        # there should be a link for each IdP
+        returned_idps = []  # type: List[str]
+        for link in p.links:
+            path, query = link.split("?", 1)
+            self.assertEqual(path, "pick_idp")
+            params = urllib.parse.parse_qs(query)
+            self.assertEqual(params["redirectUrl"], [TEST_CLIENT_REDIRECT_URL])
+            returned_idps.append(params["idp"][0])
 
-        self.assertEqual(p.hiddens["redirectUrl"], TEST_CLIENT_REDIRECT_URL)
+        self.assertCountEqual(returned_idps, ["cas", "oidc", "oidc-idp1", "saml"])
 
     def test_multi_sso_redirect_to_cas(self):
         """If CAS is chosen, should redirect to the CAS server"""
@@ -1211,11 +1215,8 @@ class UsernamePickerTestCase(HomeserverTestCase):
         return config
 
     def create_resource_dict(self) -> Dict[str, Resource]:
-        from synapse.rest.oidc import OIDCResource
-
         d = super().create_resource_dict()
-        d["/_synapse/client/pick_username"] = pick_username_resource(self.hs)
-        d["/_synapse/oidc"] = OIDCResource(self.hs)
+        d.update(build_synapse_client_resource_tree(self.hs))
         return d
 
     def test_username_picker(self):
@@ -1229,7 +1230,7 @@ class UsernamePickerTestCase(HomeserverTestCase):
         # that should redirect to the username picker
         self.assertEqual(channel.code, 302, channel.result)
         picker_url = channel.headers.getRawHeaders("Location")[0]
-        self.assertEqual(picker_url, "/_synapse/client/pick_username")
+        self.assertEqual(picker_url, "/_synapse/client/pick_username/account_details")
 
         # ... with a username_mapping_session cookie
         cookies = {}  # type: Dict[str,str]
@@ -1253,12 +1254,11 @@ class UsernamePickerTestCase(HomeserverTestCase):
         self.assertApproximates(session.expiry_time_ms, expected_expiry, tolerance=1000)
 
         # Now, submit a username to the username picker, which should serve a redirect
-        # back to the client
-        submit_path = picker_url + "/submit"
+        # to the completion page
         content = urlencode({b"username": b"bobby"}).encode("utf8")
         chan = self.make_request(
             "POST",
-            path=submit_path,
+            path=picker_url,
             content=content,
             content_is_form=True,
             custom_headers=[
@@ -1270,6 +1270,16 @@ class UsernamePickerTestCase(HomeserverTestCase):
         )
         self.assertEqual(chan.code, 302, chan.result)
         location_headers = chan.headers.getRawHeaders("Location")
+
+        # send a request to the completion page, which should 302 to the client redirectUrl
+        chan = self.make_request(
+            "GET",
+            path=location_headers[0],
+            custom_headers=[("Cookie", "username_mapping_session=" + session_id)],
+        )
+        self.assertEqual(chan.code, 302, chan.result)
+        location_headers = chan.headers.getRawHeaders("Location")
+
         # ensure that the returned location matches the requested redirect URL
         path, query = location_headers[0].split("?", 1)
         self.assertEqual(path, "https://x")
diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py
index d4e3165436..2548b3a80c 100644
--- a/tests/rest/client/v1/test_rooms.py
+++ b/tests/rest/client/v1/test_rooms.py
@@ -616,6 +616,41 @@ class RoomMemberStateTestCase(RoomBase):
         self.assertEquals(json.loads(content), channel.json_body)
 
 
+class RoomInviteRatelimitTestCase(RoomBase):
+    user_id = "@sid1:red"
+
+    servlets = [
+        admin.register_servlets,
+        profile.register_servlets,
+        room.register_servlets,
+    ]
+
+    @unittest.override_config(
+        {"rc_invites": {"per_room": {"per_second": 0.5, "burst_count": 3}}}
+    )
+    def test_invites_by_rooms_ratelimit(self):
+        """Tests that invites in a room are actually rate-limited."""
+        room_id = self.helper.create_room_as(self.user_id)
+
+        for i in range(3):
+            self.helper.invite(room_id, self.user_id, "@user-%s:red" % (i,))
+
+        self.helper.invite(room_id, self.user_id, "@user-4:red", expect_code=429)
+
+    @unittest.override_config(
+        {"rc_invites": {"per_user": {"per_second": 0.5, "burst_count": 3}}}
+    )
+    def test_invites_by_users_ratelimit(self):
+        """Tests that invites to a specific user are actually rate-limited."""
+
+        for i in range(3):
+            room_id = self.helper.create_room_as(self.user_id)
+            self.helper.invite(room_id, self.user_id, "@other-users:red")
+
+        room_id = self.helper.create_room_as(self.user_id)
+        self.helper.invite(room_id, self.user_id, "@other-users:red", expect_code=429)
+
+
 class RoomJoinRatelimitTestCase(RoomBase):
     user_id = "@sid1:red"
 
diff --git a/tests/rest/client/v2_alpha/test_account.py b/tests/rest/client/v2_alpha/test_account.py
index cb87b80e33..177dc476da 100644
--- a/tests/rest/client/v2_alpha/test_account.py
+++ b/tests/rest/client/v2_alpha/test_account.py
@@ -24,7 +24,7 @@ import pkg_resources
 
 import synapse.rest.admin
 from synapse.api.constants import LoginType, Membership
-from synapse.api.errors import Codes
+from synapse.api.errors import Codes, HttpResponseException
 from synapse.rest.client.v1 import login, room
 from synapse.rest.client.v2_alpha import account, register
 from synapse.rest.synapse.client.password_reset import PasswordResetSubmitTokenResource
@@ -112,6 +112,56 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):
         # Assert we can't log in with the old password
         self.attempt_wrong_password_login("kermit", old_password)
 
+    @override_config({"rc_3pid_validation": {"burst_count": 3}})
+    def test_ratelimit_by_email(self):
+        """Test that we ratelimit /requestToken for the same email.
+        """
+        old_password = "monkey"
+        new_password = "kangeroo"
+
+        user_id = self.register_user("kermit", old_password)
+        self.login("kermit", old_password)
+
+        email = "test1@example.com"
+
+        # Add a threepid
+        self.get_success(
+            self.store.user_add_threepid(
+                user_id=user_id,
+                medium="email",
+                address=email,
+                validated_at=0,
+                added_at=0,
+            )
+        )
+
+        def reset(ip):
+            client_secret = "foobar"
+            session_id = self._request_token(email, client_secret, ip)
+
+            self.assertEquals(len(self.email_attempts), 1)
+            link = self._get_link_from_email()
+
+            self._validate_token(link)
+
+            self._reset_password(new_password, session_id, client_secret)
+
+            self.email_attempts.clear()
+
+        # We expect to be able to make three requests before getting rate
+        # limited.
+        #
+        # We change IPs to ensure that we're not being ratelimited due to the
+        # same IP
+        reset("127.0.0.1")
+        reset("127.0.0.2")
+        reset("127.0.0.3")
+
+        with self.assertRaises(HttpResponseException) as cm:
+            reset("127.0.0.4")
+
+        self.assertEqual(cm.exception.code, 429)
+
     def test_basic_password_reset_canonicalise_email(self):
         """Test basic password reset flow
         Request password reset with different spelling
@@ -239,13 +289,18 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):
 
         self.assertIsNotNone(session_id)
 
-    def _request_token(self, email, client_secret):
+    def _request_token(self, email, client_secret, ip="127.0.0.1"):
         channel = self.make_request(
             "POST",
             b"account/password/email/requestToken",
             {"client_secret": client_secret, "email": email, "send_attempt": 1},
+            client_ip=ip,
         )
-        self.assertEquals(200, channel.code, channel.result)
+
+        if channel.code != 200:
+            raise HttpResponseException(
+                channel.code, channel.result["reason"], channel.result["body"],
+            )
 
         return channel.json_body["sid"]
 
@@ -509,6 +564,21 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
     def test_address_trim(self):
         self.get_success(self._add_email(" foo@test.bar ", "foo@test.bar"))
 
+    @override_config({"rc_3pid_validation": {"burst_count": 3}})
+    def test_ratelimit_by_ip(self):
+        """Tests that adding emails is ratelimited by IP
+        """
+
+        # We expect to be able to set three emails before getting ratelimited.
+        self.get_success(self._add_email("foo1@test.bar", "foo1@test.bar"))
+        self.get_success(self._add_email("foo2@test.bar", "foo2@test.bar"))
+        self.get_success(self._add_email("foo3@test.bar", "foo3@test.bar"))
+
+        with self.assertRaises(HttpResponseException) as cm:
+            self.get_success(self._add_email("foo4@test.bar", "foo4@test.bar"))
+
+        self.assertEqual(cm.exception.code, 429)
+
     def test_add_email_if_disabled(self):
         """Test adding email to profile when doing so is disallowed
         """
@@ -777,7 +847,11 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
             body["next_link"] = next_link
 
         channel = self.make_request("POST", b"account/3pid/email/requestToken", body,)
-        self.assertEquals(expect_code, channel.code, channel.result)
+
+        if channel.code != expect_code:
+            raise HttpResponseException(
+                channel.code, channel.result["reason"], channel.result["body"],
+            )
 
         return channel.json_body.get("sid")
 
@@ -823,10 +897,12 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
     def _add_email(self, request_email, expected_email):
         """Test adding an email to profile
         """
+        previous_email_attempts = len(self.email_attempts)
+
         client_secret = "foobar"
         session_id = self._request_token(request_email, client_secret)
 
-        self.assertEquals(len(self.email_attempts), 1)
+        self.assertEquals(len(self.email_attempts) - previous_email_attempts, 1)
         link = self._get_link_from_email()
 
         self._validate_token(link)
@@ -855,4 +931,6 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
 
         self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
         self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
-        self.assertEqual(expected_email, channel.json_body["threepids"][0]["address"])
+
+        threepids = {threepid["address"] for threepid in channel.json_body["threepids"]}
+        self.assertIn(expected_email, threepids)
diff --git a/tests/rest/client/v2_alpha/test_auth.py b/tests/rest/client/v2_alpha/test_auth.py
index a6488a3d29..3f50c56745 100644
--- a/tests/rest/client/v2_alpha/test_auth.py
+++ b/tests/rest/client/v2_alpha/test_auth.py
@@ -22,7 +22,7 @@ from synapse.api.constants import LoginType
 from synapse.handlers.ui_auth.checkers import UserInteractiveAuthChecker
 from synapse.rest.client.v1 import login
 from synapse.rest.client.v2_alpha import auth, devices, register
-from synapse.rest.oidc import OIDCResource
+from synapse.rest.synapse.client import build_synapse_client_resource_tree
 from synapse.types import JsonDict, UserID
 
 from tests import unittest
@@ -173,9 +173,7 @@ class UIAuthTests(unittest.HomeserverTestCase):
 
     def create_resource_dict(self):
         resource_dict = super().create_resource_dict()
-        if HAS_OIDC:
-            # mount the OIDC resource at /_synapse/oidc
-            resource_dict["/_synapse/oidc"] = OIDCResource(self.hs)
+        resource_dict.update(build_synapse_client_resource_tree(self.hs))
         return resource_dict
 
     def prepare(self, reactor, clock, hs):
diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py
index a6c6985173..c279eb49e3 100644
--- a/tests/rest/media/v1/test_media_storage.py
+++ b/tests/rest/media/v1/test_media_storage.py
@@ -30,6 +30,8 @@ from twisted.internet import defer
 from twisted.internet.defer import Deferred
 
 from synapse.logging.context import make_deferred_yieldable
+from synapse.rest import admin
+from synapse.rest.client.v1 import login
 from synapse.rest.media.v1._base import FileInfo
 from synapse.rest.media.v1.filepath import MediaFilePaths
 from synapse.rest.media.v1.media_storage import MediaStorage
@@ -37,6 +39,7 @@ from synapse.rest.media.v1.storage_provider import FileStorageProviderBackend
 
 from tests import unittest
 from tests.server import FakeSite, make_request
+from tests.utils import default_config
 
 
 class MediaStorageTests(unittest.HomeserverTestCase):
@@ -398,3 +401,94 @@ class MediaRepoTests(unittest.HomeserverTestCase):
             headers.getRawHeaders(b"X-Robots-Tag"),
             [b"noindex, nofollow, noarchive, noimageindex"],
         )
+
+
+class TestSpamChecker:
+    """A spam checker module that rejects all media that includes the bytes
+    `evil`.
+    """
+
+    def __init__(self, config, api):
+        self.config = config
+        self.api = api
+
+    def parse_config(config):
+        return config
+
+    async def check_event_for_spam(self, foo):
+        return False  # allow all events
+
+    async def user_may_invite(self, inviter_userid, invitee_userid, room_id):
+        return True  # allow all invites
+
+    async def user_may_create_room(self, userid):
+        return True  # allow all room creations
+
+    async def user_may_create_room_alias(self, userid, room_alias):
+        return True  # allow all room aliases
+
+    async def user_may_publish_room(self, userid, room_id):
+        return True  # allow publishing of all rooms
+
+    async def check_media_file_for_spam(self, file_wrapper, file_info) -> bool:
+        buf = BytesIO()
+        await file_wrapper.write_chunks_to(buf.write)
+
+        return b"evil" in buf.getvalue()
+
+
+class SpamCheckerTestCase(unittest.HomeserverTestCase):
+    servlets = [
+        login.register_servlets,
+        admin.register_servlets,
+    ]
+
+    def prepare(self, reactor, clock, hs):
+        self.user = self.register_user("user", "pass")
+        self.tok = self.login("user", "pass")
+
+        # Allow for uploading and downloading to/from the media repo
+        self.media_repo = hs.get_media_repository_resource()
+        self.download_resource = self.media_repo.children[b"download"]
+        self.upload_resource = self.media_repo.children[b"upload"]
+
+    def default_config(self):
+        config = default_config("test")
+
+        config.update(
+            {
+                "spam_checker": [
+                    {
+                        "module": TestSpamChecker.__module__ + ".TestSpamChecker",
+                        "config": {},
+                    }
+                ]
+            }
+        )
+
+        return config
+
+    def test_upload_innocent(self):
+        """Attempt to upload some innocent data that should be allowed.
+        """
+
+        image_data = unhexlify(
+            b"89504e470d0a1a0a0000000d4948445200000001000000010806"
+            b"0000001f15c4890000000a49444154789c63000100000500010d"
+            b"0a2db40000000049454e44ae426082"
+        )
+
+        self.helper.upload_media(
+            self.upload_resource, image_data, tok=self.tok, expect_code=200
+        )
+
+    def test_upload_ban(self):
+        """Attempt to upload some data that includes bytes "evil", which should
+        get rejected by the spam checker.
+        """
+
+        data = b"Some evil data"
+
+        self.helper.upload_media(
+            self.upload_resource, data, tok=self.tok, expect_code=400
+        )
diff --git a/tests/server.py b/tests/server.py
index 5a85d5fe7f..6419c445ec 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -47,6 +47,7 @@ class FakeChannel:
     site = attr.ib(type=Site)
     _reactor = attr.ib()
     result = attr.ib(type=dict, default=attr.Factory(dict))
+    _ip = attr.ib(type=str, default="127.0.0.1")
     _producer = None
 
     @property
@@ -120,7 +121,7 @@ class FakeChannel:
     def getPeer(self):
         # We give an address so that getClientIP returns a non null entry,
         # causing us to record the MAU
-        return address.IPv4Address("TCP", "127.0.0.1", 3423)
+        return address.IPv4Address("TCP", self._ip, 3423)
 
     def getHost(self):
         return None
@@ -196,6 +197,7 @@ def make_request(
     custom_headers: Optional[
         Iterable[Tuple[Union[bytes, str], Union[bytes, str]]]
     ] = None,
+    client_ip: str = "127.0.0.1",
 ) -> FakeChannel:
     """
     Make a web request using the given method, path and content, and render it
@@ -223,6 +225,9 @@ def make_request(
              will pump the reactor until the renderer tells the channel the request
              is finished.
 
+        client_ip: The IP to use as the requesting IP. Useful for testing
+            ratelimiting.
+
     Returns:
         channel
     """
@@ -250,7 +255,7 @@ def make_request(
     if isinstance(content, str):
         content = content.encode("utf8")
 
-    channel = FakeChannel(site, reactor)
+    channel = FakeChannel(site, reactor, ip=client_ip)
 
     req = request(channel)
     req.content = BytesIO(content)
diff --git a/tests/unittest.py b/tests/unittest.py
index bbd295687c..767d5d6077 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -386,6 +386,7 @@ class HomeserverTestCase(TestCase):
         custom_headers: Optional[
             Iterable[Tuple[Union[bytes, str], Union[bytes, str]]]
         ] = None,
+        client_ip: str = "127.0.0.1",
     ) -> FakeChannel:
         """
         Create a SynapseRequest at the path using the method and containing the
@@ -410,6 +411,9 @@ class HomeserverTestCase(TestCase):
 
             custom_headers: (name, value) pairs to add as request headers
 
+            client_ip: The IP to use as the requesting IP. Useful for testing
+                ratelimiting.
+
         Returns:
             The FakeChannel object which stores the result of the request.
         """
@@ -426,6 +430,7 @@ class HomeserverTestCase(TestCase):
             content_is_form,
             await_result,
             custom_headers,
+            client_ip,
         )
 
     def setup_test_homeserver(self, *args, **kwargs):
diff --git a/tests/utils.py b/tests/utils.py
index 022223cf24..68033d7535 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -157,6 +157,7 @@ def default_config(name, parse=False):
             "local": {"per_second": 10000, "burst_count": 10000},
             "remote": {"per_second": 10000, "burst_count": 10000},
         },
+        "rc_3pid_validation": {"per_second": 10000, "burst_count": 10000},
         "saml2_enabled": False,
         "default_identity_server": None,
         "key_refresh_interval": 24 * 60 * 60 * 1000,