author     Brendan Abolivier <babolivier@matrix.org>  2019-07-08 15:49:23 +0100
committer  Brendan Abolivier <babolivier@matrix.org>  2019-07-08 15:49:23 +0100
commit     57eacee4f4cff3a663a6e22834a82f9c5783a56d (patch)
tree       b5714f78b8c129ad8aca1689e2d7ddecb20c3bed
parent     Changelog (diff)
parent     Update ModuleApi to avoid register(generate_token=True) (#5640) (diff)
download   synapse-57eacee4f4cff3a663a6e22834a82f9c5783a56d.tar.xz
Merge branch 'develop' into babolivier/invite-json
-rw-r--r--  .buildkite/pipeline.yml | 9
-rw-r--r--  .buildkite/synapse_sytest.sh | 145
-rw-r--r--  .github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md | 5
-rw-r--r--  .github/SUPPORT.md | 6
-rw-r--r--  CHANGES.md | 134
-rw-r--r--  MANIFEST.in | 1
-rw-r--r--  UPGRADE.rst | 8
-rw-r--r--  changelog.d/4276.misc | 1
-rw-r--r--  changelog.d/5015.misc | 1
-rw-r--r--  changelog.d/5042.bugfix | 1
-rw-r--r--  changelog.d/5051.bugfix | 1
-rw-r--r--  changelog.d/5252.feature | 1
-rw-r--r--  changelog.d/5313.misc | 1
-rw-r--r--  changelog.d/5325.bugfix | 1
-rw-r--r--  changelog.d/5363.feature | 1
-rw-r--r--  changelog.d/5378.misc | 1
-rw-r--r--  changelog.d/5381.misc | 1
-rw-r--r--  changelog.d/5382.misc | 1
-rw-r--r--  changelog.d/5383.misc | 1
-rw-r--r--  changelog.d/5384.feature | 1
-rw-r--r--  changelog.d/5386.misc | 1
-rw-r--r--  changelog.d/5387.bugfix | 1
-rw-r--r--  changelog.d/5388.bugfix | 1
-rw-r--r--  changelog.d/5389.bugfix | 1
-rw-r--r--  changelog.d/5390.bugfix | 1
-rw-r--r--  changelog.d/5394.bugfix | 1
-rw-r--r--  changelog.d/5412.feature | 1
-rw-r--r--  changelog.d/5425.removal | 1
-rw-r--r--  changelog.d/5440.feature | 1
-rw-r--r--  changelog.d/5446.misc | 1
-rw-r--r--  changelog.d/5447.misc | 1
-rw-r--r--  changelog.d/5448.removal | 1
-rw-r--r--  changelog.d/5458.feature | 1
-rw-r--r--  changelog.d/5459.misc | 1
-rw-r--r--  changelog.d/5460.misc | 1
-rw-r--r--  changelog.d/5461.feature | 1
-rw-r--r--  changelog.d/5464.bugfix | 1
-rw-r--r--  changelog.d/5465.misc | 2
-rw-r--r--  changelog.d/5474.feature | 1
-rw-r--r--  changelog.d/5476.misc | 1
-rw-r--r--  changelog.d/5477.feature | 1
-rw-r--r--  changelog.d/5478.misc | 1
-rw-r--r--  changelog.d/5480.misc | 1
-rw-r--r--  changelog.d/5482.misc | 1
-rw-r--r--  changelog.d/5490.bugfix | 1
-rw-r--r--  changelog.d/5493.misc | 1
-rw-r--r--  changelog.d/5498.bugfix | 1
-rw-r--r--  changelog.d/5499.misc | 1
-rw-r--r--  changelog.d/5500.bugfix | 1
-rw-r--r--  changelog.d/5502.misc | 1
-rw-r--r--  changelog.d/5505.feature | 1
-rw-r--r--  changelog.d/5509.misc | 1
-rw-r--r--  changelog.d/5510.misc | 1
-rw-r--r--  changelog.d/5511.misc | 1
-rw-r--r--  changelog.d/5512.feature | 1
-rw-r--r--  changelog.d/5513.feature | 1
-rw-r--r--  changelog.d/5514.bugfix | 1
-rw-r--r--  changelog.d/5516.feature | 1
-rw-r--r--  changelog.d/5521.feature | 1
-rw-r--r--  changelog.d/5522.feature | 1
-rw-r--r--  changelog.d/5523.bugfix | 1
-rw-r--r--  changelog.d/5524.feature | 1
-rw-r--r--  changelog.d/5525.removal | 1
-rw-r--r--  changelog.d/5531.feature | 1
-rw-r--r--  changelog.d/5534.feature | 1
-rw-r--r--  changelog.d/5537.misc | 1
-rw-r--r--  changelog.d/5545.misc | 1
-rw-r--r--  changelog.d/5546.feature | 1
-rw-r--r--  changelog.d/5547.feature | 1
-rw-r--r--  changelog.d/5548.misc | 1
-rw-r--r--  changelog.d/5555.bugfix | 1
-rw-r--r--  changelog.d/5558.misc | 1
-rw-r--r--  changelog.d/5561.feature | 1
-rw-r--r--  changelog.d/5562.feature | 1
-rw-r--r--  changelog.d/5563.bugfix | 1
-rw-r--r--  changelog.d/5564.misc | 1
-rw-r--r--  changelog.d/5565.feature | 1
-rw-r--r--  changelog.d/5566.feature | 1
-rw-r--r--  changelog.d/5567.feature | 1
-rw-r--r--  changelog.d/5568.feature | 1
-rw-r--r--  changelog.d/5570.misc | 1
-rw-r--r--  changelog.d/5606.misc | 1
-rw-r--r--  changelog.d/5609.bugfix | 1
-rw-r--r--  changelog.d/5611.misc | 1
-rw-r--r--  changelog.d/5613.feature | 1
-rw-r--r--  changelog.d/5616.misc | 1
-rw-r--r--  changelog.d/5617.misc | 1
-rw-r--r--  changelog.d/5621.bugfix | 1
-rw-r--r--  changelog.d/5622.misc | 1
-rw-r--r--  changelog.d/5623.feature | 1
-rw-r--r--  changelog.d/5625.removal | 1
-rw-r--r--  changelog.d/5628.misc | 1
-rw-r--r--  changelog.d/5630.misc | 1
-rw-r--r--  changelog.d/5639.misc | 1
-rw-r--r--  changelog.d/5640.misc | 1
-rw-r--r--  contrib/example_log_config.yaml | 8
-rw-r--r--  contrib/experiments/test_messaging.py | 2
-rw-r--r--  contrib/systemd/log_config.yaml | 2
-rwxr-xr-x  debian/build_virtualenv | 2
-rw-r--r--  debian/changelog | 17
-rw-r--r--  debian/log.yaml | 2
-rw-r--r--  docker/Dockerfile | 1
-rw-r--r--  docker/README.md | 2
-rw-r--r--  docker/conf/homeserver.yaml | 19
-rw-r--r--  docker/conf/log.config | 2
-rw-r--r--  docs/log_contexts.rst | 38
-rw-r--r--  docs/sample_config.yaml | 47
-rw-r--r--  pyproject.toml | 2
-rwxr-xr-x  scripts/generate_config | 2
-rw-r--r--  synapse/__init__.py | 2
-rw-r--r--  synapse/app/_base.py | 59
-rw-r--r--  synapse/app/appservice.py | 2
-rw-r--r--  synapse/app/client_reader.py | 2
-rw-r--r--  synapse/app/event_creator.py | 2
-rw-r--r--  synapse/app/federation_reader.py | 2
-rw-r--r--  synapse/app/federation_sender.py | 2
-rw-r--r--  synapse/app/frontend_proxy.py | 2
-rwxr-xr-x  synapse/app/homeserver.py | 2
-rw-r--r--  synapse/app/media_repository.py | 2
-rw-r--r--  synapse/app/pusher.py | 2
-rw-r--r--  synapse/app/synchrotron.py | 2
-rw-r--r--  synapse/app/user_dir.py | 2
-rw-r--r--  synapse/appservice/scheduler.py | 2
-rw-r--r--  synapse/config/emailconfig.py | 29
-rw-r--r--  synapse/config/logger.py | 4
-rw-r--r--  synapse/config/password.py | 7
-rw-r--r--  synapse/config/ratelimiting.py | 4
-rw-r--r--  synapse/config/registration.py | 5
-rw-r--r--  synapse/config/saml2_config.py | 33
-rw-r--r--  synapse/config/tls.py | 32
-rw-r--r--  synapse/crypto/context_factory.py | 39
-rw-r--r--  synapse/crypto/keyring.py | 15
-rw-r--r--  synapse/events/snapshot.py | 2
-rw-r--r--  synapse/events/utils.py | 6
-rw-r--r--  synapse/federation/federation_base.py | 24
-rw-r--r--  synapse/federation/federation_client.py | 8
-rw-r--r--  synapse/federation/federation_server.py | 4
-rw-r--r--  synapse/federation/persistence.py | 34
-rw-r--r--  synapse/federation/sender/__init__.py | 12
-rw-r--r--  synapse/federation/sender/transaction_manager.py | 9
-rw-r--r--  synapse/federation/transport/client.py | 2
-rw-r--r--  synapse/federation/transport/server.py | 2
-rw-r--r--  synapse/groups/attestations.py | 2
-rw-r--r--  synapse/handlers/account_validity.py | 12
-rw-r--r--  synapse/handlers/appservice.py | 2
-rw-r--r--  synapse/handlers/auth.py | 8
-rw-r--r--  synapse/handlers/device.py | 14
-rw-r--r--  synapse/handlers/e2e_keys.py | 2
-rw-r--r--  synapse/handlers/events.py | 2
-rw-r--r--  synapse/handlers/federation.py | 43
-rw-r--r--  synapse/handlers/initial_sync.py | 2
-rw-r--r--  synapse/handlers/message.py | 2
-rw-r--r--  synapse/handlers/pagination.py | 2
-rw-r--r--  synapse/handlers/presence.py | 25
-rw-r--r--  synapse/handlers/register.py | 81
-rw-r--r--  synapse/handlers/room_member.py | 40
-rw-r--r--  synapse/handlers/room_member_worker.py | 12
-rw-r--r--  synapse/handlers/saml_handler.py | 123
-rw-r--r--  synapse/handlers/set_password.py | 3
-rw-r--r--  synapse/handlers/sync.py | 72
-rw-r--r--  synapse/handlers/typing.py | 2
-rw-r--r--  synapse/http/client.py | 2
-rw-r--r--  synapse/http/federation/matrix_federation_agent.py | 2
-rw-r--r--  synapse/http/federation/srv_resolver.py | 2
-rw-r--r--  synapse/http/matrixfederationclient.py | 2
-rw-r--r--  synapse/http/request_metrics.py | 2
-rw-r--r--  synapse/http/server.py | 89
-rw-r--r--  synapse/http/site.py | 2
-rw-r--r--  synapse/logging/__init__.py | 0
-rw-r--r--  synapse/logging/context.py | 693
-rw-r--r--  synapse/logging/formatter.py | 53
-rw-r--r--  synapse/logging/utils.py (renamed from synapse/util/logutils.py) | 0
-rw-r--r--  synapse/metrics/__init__.py | 5
-rw-r--r--  synapse/metrics/background_process_metrics.py | 2
-rw-r--r--  synapse/module_api/__init__.py | 64
-rw-r--r--  synapse/notifier.py | 4
-rw-r--r--  synapse/push/baserules.py | 13
-rw-r--r--  synapse/push/mailer.py | 2
-rw-r--r--  synapse/replication/http/membership.py | 65
-rw-r--r--  synapse/replication/tcp/protocol.py | 2
-rw-r--r--  synapse/rest/client/transactions.py | 2
-rw-r--r--  synapse/rest/client/v1/login.py | 104
-rw-r--r--  synapse/rest/consent/consent_resource.py | 33
-rw-r--r--  synapse/rest/key/v2/remote_key_resource.py | 28
-rw-r--r--  synapse/rest/media/v1/_base.py | 6
-rw-r--r--  synapse/rest/media/v1/config_resource.py | 21
-rw-r--r--  synapse/rest/media/v1/download_resource.py | 26
-rw-r--r--  synapse/rest/media/v1/media_repository.py | 12
-rw-r--r--  synapse/rest/media/v1/media_storage.py | 5
-rw-r--r--  synapse/rest/media/v1/preview_url_resource.py | 21
-rw-r--r--  synapse/rest/media/v1/storage_provider.py | 5
-rw-r--r--  synapse/rest/media/v1/thumbnail_resource.py | 27
-rw-r--r--  synapse/rest/media/v1/upload_resource.py | 23
-rw-r--r--  synapse/rest/saml2/response_resource.py | 52
-rw-r--r--  synapse/server.py | 6
-rw-r--r--  synapse/state/__init__.py | 2
-rw-r--r--  synapse/storage/_base.py | 2
-rw-r--r--  synapse/storage/devices.py | 50
-rw-r--r--  synapse/storage/events.py | 13
-rw-r--r--  synapse/storage/events_worker.py | 6
-rw-r--r--  synapse/storage/registration.py | 60
-rw-r--r--  synapse/storage/stream.py | 2
-rw-r--r--  synapse/storage/transactions.py | 28
-rw-r--r--  synapse/util/__init__.py | 16
-rw-r--r--  synapse/util/async_helpers.py | 9
-rw-r--r--  synapse/util/caches/descriptors.py | 11
-rw-r--r--  synapse/util/caches/response_cache.py | 4
-rw-r--r--  synapse/util/distributor.py | 2
-rw-r--r--  synapse/util/file_consumer.py | 2
-rw-r--r--  synapse/util/logcontext.py | 688
-rw-r--r--  synapse/util/logformatter.py | 44
-rw-r--r--  synapse/util/metrics.py | 2
-rw-r--r--  synapse/util/ratelimitutils.py | 18
-rw-r--r--  synapse/util/retryutils.py | 4
-rwxr-xr-x  synctl | 12
-rw-r--r--  sytest-blacklist | 35
-rw-r--r--  tests/appservice/test_scheduler.py | 2
-rw-r--r--  tests/config/test_ratelimiting.py | 40
-rw-r--r--  tests/config/test_tls.py | 115
-rw-r--r--  tests/crypto/test_keyring.py | 13
-rw-r--r--  tests/handlers/test_register.py | 68
-rw-r--r--  tests/http/federation/test_matrix_federation_agent.py | 2
-rw-r--r--  tests/http/federation/test_srv_resolver.py | 2
-rw-r--r--  tests/http/test_fedclient.py | 2
-rw-r--r--  tests/patch_inline_callbacks.py | 2
-rw-r--r--  tests/push/test_http.py | 2
-rw-r--r--  tests/rest/client/test_transactions.py | 2
-rw-r--r--  tests/rest/client/v2_alpha/test_relations.py | 24
-rw-r--r--  tests/rest/media/v1/test_media_storage.py | 27
-rw-r--r--  tests/rest/media/v1/test_url_preview.py | 12
-rw-r--r--  tests/test_federation.py | 2
-rw-r--r--  tests/test_server.py | 2
-rw-r--r--  tests/test_utils/logging_setup.py | 2
-rw-r--r--  tests/unittest.py | 42
-rw-r--r--  tests/util/caches/test_descriptors.py | 49
-rw-r--r--  tests/util/test_async_utils.py | 7
-rw-r--r--  tests/util/test_linearizer.py | 9
-rw-r--r--  tests/util/test_logcontext.py | 55
-rw-r--r--  tests/util/test_logformatter.py | 2
-rw-r--r--  tests/util/test_ratelimitutils.py | 97
-rw-r--r--  tests/utils.py | 8
241 files changed, 2403 insertions, 1918 deletions
diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml
index 513eb3bde9..dd0f98cba0 100644
--- a/.buildkite/pipeline.yml
+++ b/.buildkite/pipeline.yml
@@ -173,11 +173,12 @@ steps:
       queue: "medium"
     command:
       - "bash .buildkite/merge_base_branch.sh"
-      - "bash .buildkite/synapse_sytest.sh"
+      - "bash /synapse_sytest.sh"
     plugins:
       - docker#v3.0.1:
           image: "matrixdotorg/sytest-synapse:py35"
           propagate-environment: true
+          always-pull: true
     retry:
       automatic:
         - exit_status: -1
@@ -192,11 +193,12 @@ steps:
       POSTGRES: "1"
     command:
       - "bash .buildkite/merge_base_branch.sh"
-      - "bash .buildkite/synapse_sytest.sh"
+      - "bash /synapse_sytest.sh"
     plugins:
       - docker#v3.0.1:
           image: "matrixdotorg/sytest-synapse:py35"
           propagate-environment: true
+          always-pull: true
     retry:
       automatic:
         - exit_status: -1
@@ -212,11 +214,12 @@ steps:
       WORKERS: "1"
     command:
       - "bash .buildkite/merge_base_branch.sh"
-      - "bash .buildkite/synapse_sytest.sh"
+      - "bash /synapse_sytest.sh"
     plugins:
       - docker#v3.0.1:
           image: "matrixdotorg/sytest-synapse:py35"
           propagate-environment: true
+          always-pull: true
     soft_fail: true
     retry:
       automatic:
diff --git a/.buildkite/synapse_sytest.sh b/.buildkite/synapse_sytest.sh
deleted file mode 100644
index 3011b88bb7..0000000000
--- a/.buildkite/synapse_sytest.sh
+++ /dev/null
@@ -1,145 +0,0 @@
-#!/bin/bash
-#
-# Fetch sytest, and then run the tests for synapse. The entrypoint for the
-# sytest-synapse docker images.
-
-set -ex
-
-if [ -n "$BUILDKITE" ]
-then
-    SYNAPSE_DIR=`pwd`
-else
-    SYNAPSE_DIR="/src"
-fi
-
-# Attempt to find a sytest to use.
-# If /sytest exists, it means that a SyTest checkout has been mounted into the Docker image.
-if [ -d "/sytest" ]; then
-    # If the user has mounted in a SyTest checkout, use that.
-    echo "Using local sytests..."
-
-    # create ourselves a working directory and dos2unix some scripts therein
-    mkdir -p /work/jenkins
-    for i in install-deps.pl run-tests.pl tap-to-junit-xml.pl jenkins/prep_sytest_for_postgres.sh; do
-        dos2unix -n "/sytest/$i" "/work/$i"
-    done
-    ln -sf /sytest/tests /work
-    ln -sf /sytest/keys /work
-    SYTEST_LIB="/sytest/lib"
-else
-    if [ -n "BUILDKITE_BRANCH" ]
-    then
-        branch_name=$BUILDKITE_BRANCH
-    else
-        # Otherwise, try and find out what the branch that the Synapse checkout is using. Fall back to develop if it's not a branch.
-        branch_name="$(git --git-dir=/src/.git symbolic-ref HEAD 2>/dev/null)" || branch_name="develop"
-    fi
-
-    # Try and fetch the branch
-    echo "Trying to get same-named sytest branch..."
-    wget -q https://github.com/matrix-org/sytest/archive/$branch_name.tar.gz -O sytest.tar.gz || {
-        # Probably a 404, fall back to develop
-        echo "Using develop instead..."
-        wget -q https://github.com/matrix-org/sytest/archive/develop.tar.gz -O sytest.tar.gz
-    }
-
-    mkdir -p /work
-    tar -C /work --strip-components=1 -xf sytest.tar.gz
-    SYTEST_LIB="/work/lib"
-fi
-
-cd /work
-
-# PostgreSQL setup
-if [ -n "$POSTGRES" ]
-then
-    export PGUSER=postgres
-    export POSTGRES_DB_1=pg1
-    export POSTGRES_DB_2=pg2
-
-    # Start the database
-    su -c 'eatmydata /usr/lib/postgresql/9.6/bin/pg_ctl -w -D /var/lib/postgresql/data start' postgres
-
-    # Use the Jenkins script to write out the configuration for a PostgreSQL using Synapse
-    jenkins/prep_sytest_for_postgres.sh
-
-    # Make the test databases for the two Synapse servers that will be spun up
-    su -c 'psql -c "CREATE DATABASE pg1;"' postgres
-    su -c 'psql -c "CREATE DATABASE pg2;"' postgres
-
-fi
-
-if [ -n "$OFFLINE" ]; then
-    # if we're in offline mode, just put synapse into the virtualenv, and
-    # hope that the deps are up-to-date.
-    #
-    # (`pip install -e` likes to reinstall setuptools even if it's already installed,
-    # so we just run setup.py explicitly.)
-    #
-    (cd $SYNAPSE_DIR && /venv/bin/python setup.py -q develop)
-else
-    # We've already created the virtualenv, but lets double check we have all
-    # deps.
-    /venv/bin/pip install -q --upgrade --no-cache-dir -e $SYNAPSE_DIR
-    /venv/bin/pip install -q --upgrade --no-cache-dir \
-        lxml psycopg2 coverage codecov tap.py
-
-    # Make sure all Perl deps are installed -- this is done in the docker build
-    # so will only install packages added since the last Docker build
-    ./install-deps.pl
-fi
-
-
-# Run the tests
->&2 echo "+++ Running tests"
-
-RUN_TESTS=(
-    perl -I "$SYTEST_LIB" ./run-tests.pl --python=/venv/bin/python --synapse-directory=$SYNAPSE_DIR --coverage -O tap --all
-)
-
-TEST_STATUS=0
-
-if [ -n "$WORKERS" ]; then
-    RUN_TESTS+=(-I Synapse::ViaHaproxy --dendron-binary=/pydron.py)
-else
-    RUN_TESTS+=(-I Synapse)
-fi
-
-"${RUN_TESTS[@]}" "$@" > results.tap || TEST_STATUS=$?
-
-if [ $TEST_STATUS -ne 0 ]; then
-    >&2 echo -e "run-tests \e[31mFAILED\e[0m: exit code $TEST_STATUS"
-else
-    >&2 echo -e "run-tests \e[32mPASSED\e[0m"
-fi
-
->&2 echo "--- Copying assets"
-
-# Copy out the logs
-mkdir -p /logs
-cp results.tap /logs/results.tap
-rsync --ignore-missing-args  --min-size=1B -av server-0 server-1 /logs --include "*/" --include="*.log.*" --include="*.log" --exclude="*"
-
-# Upload coverage to codecov and upload files, if running on Buildkite
-if [ -n "$BUILDKITE" ]
-then
-    /venv/bin/coverage combine || true
-    /venv/bin/coverage xml || true
-    /venv/bin/codecov -X gcov -f coverage.xml
-
-    wget -O buildkite.tar.gz https://github.com/buildkite/agent/releases/download/v3.13.0/buildkite-agent-linux-amd64-3.13.0.tar.gz
-    tar xvf buildkite.tar.gz
-    chmod +x ./buildkite-agent
-
-    # Upload the files
-    ./buildkite-agent artifact upload "/logs/**/*.log*"
-    ./buildkite-agent artifact upload "/logs/results.tap"
-
-    if [ $TEST_STATUS -ne 0 ]; then
-        # Annotate, if failure
-        /venv/bin/python $SYNAPSE_DIR/.buildkite/format_tap.py /logs/results.tap "$BUILDKITE_LABEL" | ./buildkite-agent annotate --style="error" --context="$BUILDKITE_LABEL"
-    fi
-fi
-
-
-exit $TEST_STATUS
diff --git a/.github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md b/.github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md
index 77581596c4..64c06fe3ce 100644
--- a/.github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md
+++ b/.github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md
@@ -4,6 +4,7 @@ about: I need support for Synapse
 
 ---
 
-# Please ask for support in [**#matrix:matrix.org**](https://matrix.to/#/#matrix:matrix.org)
+Please don't file github issues asking for support.
 
-## Don't file an issue as a support request.
+Instead, please join [`#synapse:matrix.org`](https://matrix.to/#/#synapse:matrix.org)
+(from a matrix.org account if necessary), and ask there.
diff --git a/.github/SUPPORT.md b/.github/SUPPORT.md
index 7a4244f673..fad2dd2efe 100644
--- a/.github/SUPPORT.md
+++ b/.github/SUPPORT.md
@@ -1,3 +1,3 @@
-[**#matrix:matrix.org**](https://matrix.to/#/#matrix:matrix.org) is the official support room for Matrix, and can be accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html 
-
-It can also be access via IRC bridge at irc://irc.freenode.net/matrix or on the web here: https://webchat.freenode.net/?channels=matrix
+[**#synapse:matrix.org**](https://matrix.to/#/#synapse:matrix.org) is the official support room for
+Synapse, and can be accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html.
+Please ask for support there, rather than filing github issues.
diff --git a/CHANGES.md b/CHANGES.md
index 284e89b266..dc8c74fe58 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,137 @@
+Synapse 1.1.0 (2019-07-04)
+==========================
+
+As of v1.1.0, Synapse no longer supports Python 2, nor Postgres version 9.4.
+See the [upgrade notes](UPGRADE.rst#upgrading-to-v110) for more details.
+
+This release also deprecates the use of environment variables to configure the
+docker image. See the [docker README](https://github.com/matrix-org/synapse/blob/release-v1.1.0/docker/README.md#legacy-dynamic-configuration-file-support)
+for more details.
+
+No changes since 1.1.0rc2.
+
+
+Synapse 1.1.0rc2 (2019-07-03)
+=============================
+
+Bugfixes
+--------
+
+- Fix regression in 1.1rc1 where OPTIONS requests to the media repo would fail. ([\#5593](https://github.com/matrix-org/synapse/issues/5593))
+- Removed the `SYNAPSE_SMTP_*` docker container environment variables. Using these environment variables prevented the docker container from starting in Synapse v1.0, even though they didn't actually allow any functionality anyway. ([\#5596](https://github.com/matrix-org/synapse/issues/5596))
+- Fix a number of "Starting txn from sentinel context" warnings. ([\#5605](https://github.com/matrix-org/synapse/issues/5605))
+
+
+Internal Changes
+----------------
+
+- Update github templates. ([\#5552](https://github.com/matrix-org/synapse/issues/5552))
+
+
+Synapse 1.1.0rc1 (2019-07-02)
+=============================
+
+As of v1.1.0, Synapse no longer supports Python 2, nor Postgres version 9.4.
+See the [upgrade notes](UPGRADE.rst#upgrading-to-v110) for more details.
+
+Features
+--------
+
+- Added the possibility to disable local password authentication. Contributed by Daniel Hoffend. ([\#5092](https://github.com/matrix-org/synapse/issues/5092))
+- Add monthly active users to phonehome stats. ([\#5252](https://github.com/matrix-org/synapse/issues/5252))
+- Allow expired users to trigger renewal email sending manually. ([\#5363](https://github.com/matrix-org/synapse/issues/5363))
+- Statistics on forward extremities per room are now exposed via Prometheus. ([\#5384](https://github.com/matrix-org/synapse/issues/5384), [\#5458](https://github.com/matrix-org/synapse/issues/5458), [\#5461](https://github.com/matrix-org/synapse/issues/5461))
+- Add --no-daemonize option to run synapse in the foreground, per issue #4130. Contributed by Soham Gumaste. ([\#5412](https://github.com/matrix-org/synapse/issues/5412), [\#5587](https://github.com/matrix-org/synapse/issues/5587))
+- Fully support SAML2 authentication. Contributed by [Alexander Trost](https://github.com/galexrt) - thank you! ([\#5422](https://github.com/matrix-org/synapse/issues/5422))
+- Allow server admins to define implementations of extra rules for allowing or denying incoming events. ([\#5440](https://github.com/matrix-org/synapse/issues/5440), [\#5474](https://github.com/matrix-org/synapse/issues/5474), [\#5477](https://github.com/matrix-org/synapse/issues/5477))
+- Add support for handling pagination APIs on client reader worker. ([\#5505](https://github.com/matrix-org/synapse/issues/5505), [\#5513](https://github.com/matrix-org/synapse/issues/5513), [\#5531](https://github.com/matrix-org/synapse/issues/5531))
+- Improve help and cmdline option names for --generate-config options. ([\#5512](https://github.com/matrix-org/synapse/issues/5512))
+- Allow configuration of the path used for ACME account keys. ([\#5516](https://github.com/matrix-org/synapse/issues/5516), [\#5521](https://github.com/matrix-org/synapse/issues/5521), [\#5522](https://github.com/matrix-org/synapse/issues/5522))
+- Add --data-dir and --open-private-ports options. ([\#5524](https://github.com/matrix-org/synapse/issues/5524))
+- Split public rooms directory auth config into two settings, in order to manage client auth independently from the federation part of it. Obsoletes the "restrict_public_rooms_to_local_users" configuration setting. If "restrict_public_rooms_to_local_users" is set in the config, Synapse will act as if both new options are enabled, i.e. require authentication through the client API and deny federation requests. ([\#5534](https://github.com/matrix-org/synapse/issues/5534))
+- The minimum TLS version used for outgoing federation requests can now be set with `federation_client_minimum_tls_version`. ([\#5550](https://github.com/matrix-org/synapse/issues/5550))
+- Optimise devices changed query to not pull unnecessary rows from the database, reducing database load. ([\#5559](https://github.com/matrix-org/synapse/issues/5559))
+- Add new metrics for number of forward extremities being persisted and number of state groups involved in resolution. ([\#5476](https://github.com/matrix-org/synapse/issues/5476))
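
For context, a minimal sketch of how the new options above might sit in homeserver.yaml. Only `federation_client_minimum_tls_version` is named in these entries; the `account_key_file` key for the ACME account path and the `allow_public_rooms_without_auth` / `allow_public_rooms_over_federation` names for the split directory-auth settings are assumptions, so check the docs/sample_config.yaml shipped with this release before relying on them:

    # Hedged sketch of new 1.1.0rc1 options; some option names are assumed.
    federation_client_minimum_tls_version: 1.2   # named in the changelog entry above

    acme:
      enabled: true
      account_key_file: /data/acme_account.key   # assumed name for the ACME account key path

    # Assumed names for the two settings replacing restrict_public_rooms_to_local_users:
    allow_public_rooms_without_auth: false       # require client-API authentication
    allow_public_rooms_over_federation: false    # deny directory requests over federation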
+
+Bugfixes
+--------
+
+- Fix bug processing incoming events over federation if call to `/get_missing_events` fails. ([\#5042](https://github.com/matrix-org/synapse/issues/5042))
+- Prevent more than one room upgrade happening simultaneously on the same room. ([\#5051](https://github.com/matrix-org/synapse/issues/5051))
+- Fix a bug where running synapse_port_db would cause the account validity feature to fail because it didn't set the type of the email_sent column to boolean. ([\#5325](https://github.com/matrix-org/synapse/issues/5325))
+- Warn about disabling email-based password resets when a reset occurs, and remove warning when someone attempts a phone-based reset. ([\#5387](https://github.com/matrix-org/synapse/issues/5387))
+- Fix email notifications for unnamed rooms with multiple people. ([\#5388](https://github.com/matrix-org/synapse/issues/5388))
+- Fix exceptions in federation reader worker caused by attempting to renew attestations, which should only happen on master worker. ([\#5389](https://github.com/matrix-org/synapse/issues/5389))
+- Fix handling of failures fetching remote content to not log failures as exceptions. ([\#5390](https://github.com/matrix-org/synapse/issues/5390))
+- Fix a bug where deactivated users could receive renewal emails if the account validity feature is on. ([\#5394](https://github.com/matrix-org/synapse/issues/5394))
+- Fix missing invite state after exchanging 3PID invites over federation. ([\#5464](https://github.com/matrix-org/synapse/issues/5464))
+- Fix intermittent exceptions on Apple hardware. Also fix bug that caused database activity times to be under-reported in log lines. ([\#5498](https://github.com/matrix-org/synapse/issues/5498))
+- Fix logging error when a tampered event is detected. ([\#5500](https://github.com/matrix-org/synapse/issues/5500))
+- Fix bug where clients could tight loop calling `/sync` for a period. ([\#5507](https://github.com/matrix-org/synapse/issues/5507))
+- Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`. ([\#5514](https://github.com/matrix-org/synapse/issues/5514))
+- Fix a regression where homeservers on private IP addresses were incorrectly blacklisted. ([\#5523](https://github.com/matrix-org/synapse/issues/5523))
+- Fixed m.login.jwt using an unregistered user_id, and added pyjwt>=1.6.4 as a conditional dependency for JWT support. Contributed by Pau Rodriguez-Estivill. ([\#5555](https://github.com/matrix-org/synapse/issues/5555), [\#5586](https://github.com/matrix-org/synapse/issues/5586))
+- Fix a bug that would cause invited users to receive several emails for a single 3PID invite in case the inviter is rate limited. ([\#5576](https://github.com/matrix-org/synapse/issues/5576))
+
+
+Updates to the Docker image
+---------------------------
+- Add ability to change Docker containers [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) with the `TZ` variable. ([\#5383](https://github.com/matrix-org/synapse/issues/5383))
+- Update docker image to use Python 3.7. ([\#5546](https://github.com/matrix-org/synapse/issues/5546))
+- Deprecate the use of environment variables for configuration, and make the use of a static configuration the default. ([\#5561](https://github.com/matrix-org/synapse/issues/5561), [\#5562](https://github.com/matrix-org/synapse/issues/5562), [\#5566](https://github.com/matrix-org/synapse/issues/5566), [\#5567](https://github.com/matrix-org/synapse/issues/5567))
+- Increase default log level for docker image to INFO. It can still be changed by editing the generated log.config file. ([\#5547](https://github.com/matrix-org/synapse/issues/5547))
+- Send synapse logs to the docker logging system, by default. ([\#5565](https://github.com/matrix-org/synapse/issues/5565))
+- Open the non-TLS port by default. ([\#5568](https://github.com/matrix-org/synapse/issues/5568))
+- Fix failure to start under docker with SAML support enabled. ([\#5490](https://github.com/matrix-org/synapse/issues/5490))
+- Use a sensible location for data files when generating a config file. ([\#5563](https://github.com/matrix-org/synapse/issues/5563))
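
To illustrate the `TZ` variable and the new INFO default mentioned above, a hedged docker-compose fragment; the compose layout, image tag, and timezone value are illustrative assumptions:

    # Hedged sketch: running the Docker image with an explicit timezone.
    services:
      synapse:
        image: matrixdotorg/synapse:v1.1.0   # tag assumed for illustration
        environment:
          TZ: "Europe/London"                # defaults to UTC if unset

The log level can be lowered again by editing the generated log.config, e.g. setting `level: DEBUG` under `root:`.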
+
+
+Deprecations and Removals
+-------------------------
+
+- Python 2.7 is no longer a supported platform. Synapse now requires Python 3.5+ to run. ([\#5425](https://github.com/matrix-org/synapse/issues/5425))
+- PostgreSQL 9.4 is no longer supported. Synapse requires Postgres 9.5 or above for Postgres support. ([\#5448](https://github.com/matrix-org/synapse/issues/5448))
+- Remove support for cpu_affinity setting. ([\#5525](https://github.com/matrix-org/synapse/issues/5525))
+
+
+Improved Documentation
+----------------------
+- Improve README section on performance troubleshooting. ([\#4276](https://github.com/matrix-org/synapse/issues/4276))
+- Add information about how to install and run `black` on the codebase to code_style.rst. ([\#5537](https://github.com/matrix-org/synapse/issues/5537))
+- Improve install docs on choosing server_name. ([\#5558](https://github.com/matrix-org/synapse/issues/5558))
+
+
+Internal Changes
+----------------
+
+- Add logging to 3pid invite signature verification. ([\#5015](https://github.com/matrix-org/synapse/issues/5015))
+- Update example haproxy config to a more compatible setup. ([\#5313](https://github.com/matrix-org/synapse/issues/5313))
+- Track deactivated accounts in the database. ([\#5378](https://github.com/matrix-org/synapse/issues/5378), [\#5465](https://github.com/matrix-org/synapse/issues/5465), [\#5493](https://github.com/matrix-org/synapse/issues/5493))
+- Clean up code for sending federation EDUs. ([\#5381](https://github.com/matrix-org/synapse/issues/5381))
+- Add a sponsor button to the repo. ([\#5382](https://github.com/matrix-org/synapse/issues/5382), [\#5386](https://github.com/matrix-org/synapse/issues/5386))
+- Don't log non-200 responses from federation queries as exceptions. ([\#5383](https://github.com/matrix-org/synapse/issues/5383))
+- Update Python syntax in contrib/ to Python 3. ([\#5446](https://github.com/matrix-org/synapse/issues/5446))
+- Update federation_client dev script to support `.well-known` and work with python3. ([\#5447](https://github.com/matrix-org/synapse/issues/5447))
+- SyTest has been moved to Buildkite. ([\#5459](https://github.com/matrix-org/synapse/issues/5459))
+- Demo script now uses python3. ([\#5460](https://github.com/matrix-org/synapse/issues/5460))
+- Synapse can now handle RestServlets that return coroutines. ([\#5475](https://github.com/matrix-org/synapse/issues/5475), [\#5585](https://github.com/matrix-org/synapse/issues/5585))
+- The demo servers talk to each other again. ([\#5478](https://github.com/matrix-org/synapse/issues/5478))
+- Add an EXPERIMENTAL config option to try and periodically clean up extremities by sending dummy events. ([\#5480](https://github.com/matrix-org/synapse/issues/5480))
+- Synapse's codebase is now formatted by `black`. ([\#5482](https://github.com/matrix-org/synapse/issues/5482))
+- Some cleanups and sanity-checking in the CPU and database metrics. ([\#5499](https://github.com/matrix-org/synapse/issues/5499))
+- Improve email notification logging. ([\#5502](https://github.com/matrix-org/synapse/issues/5502))
+- Fix "Unexpected entry in 'full_schemas'" log warning. ([\#5509](https://github.com/matrix-org/synapse/issues/5509))
+- Improve logging when generating config files. ([\#5510](https://github.com/matrix-org/synapse/issues/5510))
+- Refactor and clean up Config parser for maintainability. ([\#5511](https://github.com/matrix-org/synapse/issues/5511))
+- Clarify in the config that email.template_dir is relative to Synapse's root directory, not the `synapse/` folder within it. ([\#5543](https://github.com/matrix-org/synapse/issues/5543))
+- Update v1.0.0 release changelog to include more information about changes to password resets. ([\#5545](https://github.com/matrix-org/synapse/issues/5545))
+- Remove non-functioning check_event_hash.py dev script. ([\#5548](https://github.com/matrix-org/synapse/issues/5548))
+- Synapse will now only allow TLS v1.2 connections when serving federation, if it terminates TLS. As Synapse's allowed ciphers were only able to be used in TLSv1.2 before, this does not change behaviour. ([\#5550](https://github.com/matrix-org/synapse/issues/5550))
+- Logging when running GC collection on generation 0 is now at the DEBUG level, not INFO. ([\#5557](https://github.com/matrix-org/synapse/issues/5557))
+- Reduce the amount of stuff we send in the docker context. ([\#5564](https://github.com/matrix-org/synapse/issues/5564))
+- Point the reverse links in the Purge History contrib scripts at the intended location. ([\#5570](https://github.com/matrix-org/synapse/issues/5570))
+
+
 Synapse 1.0.0 (2019-06-11)
 ==========================
 
diff --git a/MANIFEST.in b/MANIFEST.in
index 2c59c7bdc2..834ddfad39 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -7,6 +7,7 @@ include demo/README
 include demo/demo.tls.dh
 include demo/*.py
 include demo/*.sh
+include sytest-blacklist
 
 recursive-include synapse/storage/schema *.sql
 recursive-include synapse/storage/schema *.sql.postgres
diff --git a/UPGRADE.rst b/UPGRADE.rst
index 1fb109a218..72064accf3 100644
--- a/UPGRADE.rst
+++ b/UPGRADE.rst
@@ -49,16 +49,16 @@ returned by the Client-Server API:
     # configured on port 443.
     curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"
 
-Upgrading to v1.1
-=================
+Upgrading to v1.1.0
+===================
 
-Synapse 1.1 removes support for older Python and PostgreSQL versions, as
+Synapse v1.1.0 removes support for older Python and PostgreSQL versions, as
 outlined in `our deprecation notice <https://matrix.org/blog/2019/04/08/synapse-deprecating-postgres-9-4-and-python-2-x>`_.
 
 Minimum Python Version
 ----------------------
 
-Synapse v1.1 has a minimum Python requirement of Python 3.5. Python 3.6 or
+Synapse v1.1.0 has a minimum Python requirement of Python 3.5. Python 3.6 or
 Python 3.7 are recommended as they have improved internal string handling,
 significantly reducing memory usage.
 
diff --git a/changelog.d/4276.misc b/changelog.d/4276.misc
deleted file mode 100644
index 285939a4b8..0000000000
--- a/changelog.d/4276.misc
+++ /dev/null
@@ -1 +0,0 @@
-Improve README section on performance troubleshooting.
diff --git a/changelog.d/5015.misc b/changelog.d/5015.misc
deleted file mode 100644
index eeec85b92c..0000000000
--- a/changelog.d/5015.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add logging to 3pid invite signature verification.
diff --git a/changelog.d/5042.bugfix b/changelog.d/5042.bugfix
deleted file mode 100644
index 736b07c790..0000000000
--- a/changelog.d/5042.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix bug processing incoming events over federation if call to `/get_missing_events` fails.
diff --git a/changelog.d/5051.bugfix b/changelog.d/5051.bugfix
deleted file mode 100644
index bfa22cc759..0000000000
--- a/changelog.d/5051.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Prevent more than one room upgrade happening simultaneously on the same room.
diff --git a/changelog.d/5252.feature b/changelog.d/5252.feature
deleted file mode 100644
index 44115b0382..0000000000
--- a/changelog.d/5252.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add monthly active users to phonehome stats.
diff --git a/changelog.d/5313.misc b/changelog.d/5313.misc
deleted file mode 100644
index 2ea01cb9d3..0000000000
--- a/changelog.d/5313.misc
+++ /dev/null
@@ -1 +0,0 @@
-Update example haproxy config to a more compatible setup.
diff --git a/changelog.d/5325.bugfix b/changelog.d/5325.bugfix
deleted file mode 100644
index b9413388f5..0000000000
--- a/changelog.d/5325.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a bug where running synapse_port_db would cause the account validity feature to fail because it didn't set the type of the email_sent column to boolean.
diff --git a/changelog.d/5363.feature b/changelog.d/5363.feature
deleted file mode 100644
index 803fe3fc37..0000000000
--- a/changelog.d/5363.feature
+++ /dev/null
@@ -1 +0,0 @@
-Allow expired users to trigger renewal email sending manually.
diff --git a/changelog.d/5378.misc b/changelog.d/5378.misc
deleted file mode 100644
index 365e49d634..0000000000
--- a/changelog.d/5378.misc
+++ /dev/null
@@ -1 +0,0 @@
-Track deactivated accounts in the database.
diff --git a/changelog.d/5381.misc b/changelog.d/5381.misc
deleted file mode 100644
index bbf70a0445..0000000000
--- a/changelog.d/5381.misc
+++ /dev/null
@@ -1 +0,0 @@
-Clean up code for sending federation EDUs.
diff --git a/changelog.d/5382.misc b/changelog.d/5382.misc
deleted file mode 100644
index 060cbba2a9..0000000000
--- a/changelog.d/5382.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add a sponsor button to the repo.
diff --git a/changelog.d/5383.misc b/changelog.d/5383.misc
deleted file mode 100644
index 9dd5d1df93..0000000000
--- a/changelog.d/5383.misc
+++ /dev/null
@@ -1 +0,0 @@
-Don't log non-200 responses from federation queries as exceptions.
diff --git a/changelog.d/5384.feature b/changelog.d/5384.feature
deleted file mode 100644
index 9497f521c8..0000000000
--- a/changelog.d/5384.feature
+++ /dev/null
@@ -1 +0,0 @@
-Statistics on forward extremities per room are now exposed via Prometheus.
diff --git a/changelog.d/5386.misc b/changelog.d/5386.misc
deleted file mode 100644
index 060cbba2a9..0000000000
--- a/changelog.d/5386.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add a sponsor button to the repo.
diff --git a/changelog.d/5387.bugfix b/changelog.d/5387.bugfix
deleted file mode 100644
index 2c6c94efc4..0000000000
--- a/changelog.d/5387.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Warn about disabling email-based password resets when a reset occurs, and remove warning when someone attempts a phone-based reset.
diff --git a/changelog.d/5388.bugfix b/changelog.d/5388.bugfix
deleted file mode 100644
index 503e830915..0000000000
--- a/changelog.d/5388.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix email notifications for unnamed rooms with multiple people.
diff --git a/changelog.d/5389.bugfix b/changelog.d/5389.bugfix
deleted file mode 100644
index dd648e26c8..0000000000
--- a/changelog.d/5389.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix exceptions in federation reader worker caused by attempting to renew attestations, which should only happen on master worker.
diff --git a/changelog.d/5390.bugfix b/changelog.d/5390.bugfix
deleted file mode 100644
index e7b7483cf2..0000000000
--- a/changelog.d/5390.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix handling of failures fetching remote content to not log failures as exceptions.
diff --git a/changelog.d/5394.bugfix b/changelog.d/5394.bugfix
deleted file mode 100644
index 2ad9fbe82c..0000000000
--- a/changelog.d/5394.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a bug where deactivated users could receive renewal emails if the account validity feature is on.
diff --git a/changelog.d/5412.feature b/changelog.d/5412.feature
deleted file mode 100644
index ec1503860a..0000000000
--- a/changelog.d/5412.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add --no-daemonize option to run synapse in the foreground, per issue #4130. Contributed by Soham Gumaste.
\ No newline at end of file
diff --git a/changelog.d/5425.removal b/changelog.d/5425.removal
deleted file mode 100644
index 30022ee63d..0000000000
--- a/changelog.d/5425.removal
+++ /dev/null
@@ -1 +0,0 @@
-Python 2.7 is no longer a supported platform. Synapse now requires Python 3.5+ to run.
diff --git a/changelog.d/5440.feature b/changelog.d/5440.feature
deleted file mode 100644
index 63d9b58734..0000000000
--- a/changelog.d/5440.feature
+++ /dev/null
@@ -1 +0,0 @@
-Allow server admins to define implementations of extra rules for allowing or denying incoming events.
diff --git a/changelog.d/5446.misc b/changelog.d/5446.misc
deleted file mode 100644
index e5209be0a6..0000000000
--- a/changelog.d/5446.misc
+++ /dev/null
@@ -1 +0,0 @@
-Update Python syntax in contrib/ to Python 3. 
diff --git a/changelog.d/5447.misc b/changelog.d/5447.misc
deleted file mode 100644
index dd52068404..0000000000
--- a/changelog.d/5447.misc
+++ /dev/null
@@ -1 +0,0 @@
-Update federation_client dev script to support `.well-known` and work with python3.
diff --git a/changelog.d/5448.removal b/changelog.d/5448.removal
deleted file mode 100644
index 33b9859dae..0000000000
--- a/changelog.d/5448.removal
+++ /dev/null
@@ -1 +0,0 @@
-PostgreSQL 9.4 is no longer supported. Synapse requires Postgres 9.5 or above for Postgres support.
diff --git a/changelog.d/5458.feature b/changelog.d/5458.feature
deleted file mode 100644
index 9497f521c8..0000000000
--- a/changelog.d/5458.feature
+++ /dev/null
@@ -1 +0,0 @@
-Statistics on forward extremities per room are now exposed via Prometheus.
diff --git a/changelog.d/5459.misc b/changelog.d/5459.misc
deleted file mode 100644
index 904e45f66b..0000000000
--- a/changelog.d/5459.misc
+++ /dev/null
@@ -1 +0,0 @@
-SyTest has been moved to Buildkite.
diff --git a/changelog.d/5460.misc b/changelog.d/5460.misc
deleted file mode 100644
index badc8bb79a..0000000000
--- a/changelog.d/5460.misc
+++ /dev/null
@@ -1 +0,0 @@
-Demo script now uses python3.
diff --git a/changelog.d/5461.feature b/changelog.d/5461.feature
deleted file mode 100644
index 9497f521c8..0000000000
--- a/changelog.d/5461.feature
+++ /dev/null
@@ -1 +0,0 @@
-Statistics on forward extremities per room are now exposed via Prometheus.
diff --git a/changelog.d/5464.bugfix b/changelog.d/5464.bugfix
deleted file mode 100644
index 8278d1bce9..0000000000
--- a/changelog.d/5464.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix missing invite state after exchanging 3PID invites over federation.
diff --git a/changelog.d/5465.misc b/changelog.d/5465.misc
deleted file mode 100644
index af5f0f8f45..0000000000
--- a/changelog.d/5465.misc
+++ /dev/null
@@ -1,2 +0,0 @@
-Track deactivated accounts in the database.
-
diff --git a/changelog.d/5474.feature b/changelog.d/5474.feature
deleted file mode 100644
index 63d9b58734..0000000000
--- a/changelog.d/5474.feature
+++ /dev/null
@@ -1 +0,0 @@
-Allow server admins to define implementations of extra rules for allowing or denying incoming events.
diff --git a/changelog.d/5476.misc b/changelog.d/5476.misc
deleted file mode 100644
index 7955c14744..0000000000
--- a/changelog.d/5476.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add new metrics for number of forward extremities being persisted and number of state groups involved in resolution.
diff --git a/changelog.d/5477.feature b/changelog.d/5477.feature
deleted file mode 100644
index 63d9b58734..0000000000
--- a/changelog.d/5477.feature
+++ /dev/null
@@ -1 +0,0 @@
-Allow server admins to define implementations of extra rules for allowing or denying incoming events.
diff --git a/changelog.d/5478.misc b/changelog.d/5478.misc
deleted file mode 100644
index 829bb1e521..0000000000
--- a/changelog.d/5478.misc
+++ /dev/null
@@ -1 +0,0 @@
-The demo servers talk to each other again.
diff --git a/changelog.d/5480.misc b/changelog.d/5480.misc
deleted file mode 100644
index 3001bcc1fe..0000000000
--- a/changelog.d/5480.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add an EXPERIMENTAL config option to try and periodically clean up extremities by sending dummy events.
diff --git a/changelog.d/5482.misc b/changelog.d/5482.misc
deleted file mode 100644
index 0332d1133b..0000000000
--- a/changelog.d/5482.misc
+++ /dev/null
@@ -1 +0,0 @@
-Synapse's codebase is now formatted by `black`.
diff --git a/changelog.d/5490.bugfix b/changelog.d/5490.bugfix
deleted file mode 100644
index 4242254c53..0000000000
--- a/changelog.d/5490.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix failure to start under docker with SAML support enabled.
\ No newline at end of file
diff --git a/changelog.d/5493.misc b/changelog.d/5493.misc
deleted file mode 100644
index 365e49d634..0000000000
--- a/changelog.d/5493.misc
+++ /dev/null
@@ -1 +0,0 @@
-Track deactivated accounts in the database.
diff --git a/changelog.d/5498.bugfix b/changelog.d/5498.bugfix
deleted file mode 100644
index 6ef298d20c..0000000000
--- a/changelog.d/5498.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix intermittent exceptions on Apple hardware. Also fix bug that caused database activity times to be under-reported in log lines.
diff --git a/changelog.d/5499.misc b/changelog.d/5499.misc
deleted file mode 100644
index 84de1f2dae..0000000000
--- a/changelog.d/5499.misc
+++ /dev/null
@@ -1 +0,0 @@
-Some cleanups and sanity-checking in the CPU and database metrics.
\ No newline at end of file
diff --git a/changelog.d/5500.bugfix b/changelog.d/5500.bugfix
deleted file mode 100644
index 624c678435..0000000000
--- a/changelog.d/5500.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix logging error when a tampered event is detected.
diff --git a/changelog.d/5502.misc b/changelog.d/5502.misc
deleted file mode 100644
index d515323eae..0000000000
--- a/changelog.d/5502.misc
+++ /dev/null
@@ -1 +0,0 @@
-Improve email notification logging.
diff --git a/changelog.d/5505.feature b/changelog.d/5505.feature
deleted file mode 100644
index 5c6bab2c31..0000000000
--- a/changelog.d/5505.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add support for handling pagination APIs on client reader worker.
diff --git a/changelog.d/5509.misc b/changelog.d/5509.misc
deleted file mode 100644
index cc27cf2940..0000000000
--- a/changelog.d/5509.misc
+++ /dev/null
@@ -1 +0,0 @@
-Fix "Unexpected entry in 'full_schemas'" log warning.
diff --git a/changelog.d/5510.misc b/changelog.d/5510.misc
deleted file mode 100644
index 4591a63d9d..0000000000
--- a/changelog.d/5510.misc
+++ /dev/null
@@ -1 +0,0 @@
-Improve logging when generating config files.
diff --git a/changelog.d/5511.misc b/changelog.d/5511.misc
deleted file mode 100644
index c1f679b287..0000000000
--- a/changelog.d/5511.misc
+++ /dev/null
@@ -1 +0,0 @@
-Refactor and clean up Config parser for maintainability.
diff --git a/changelog.d/5512.feature b/changelog.d/5512.feature
deleted file mode 100644
index 712878901b..0000000000
--- a/changelog.d/5512.feature
+++ /dev/null
@@ -1 +0,0 @@
-Improve help and cmdline option names for --generate-config options.
diff --git a/changelog.d/5513.feature b/changelog.d/5513.feature
deleted file mode 100644
index 5c6bab2c31..0000000000
--- a/changelog.d/5513.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add support for handling pagination APIs on client reader worker.
diff --git a/changelog.d/5514.bugfix b/changelog.d/5514.bugfix
deleted file mode 100644
index c3a76a854a..0000000000
--- a/changelog.d/5514.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`.
diff --git a/changelog.d/5516.feature b/changelog.d/5516.feature
deleted file mode 100644
index fdf91c35e4..0000000000
--- a/changelog.d/5516.feature
+++ /dev/null
@@ -1 +0,0 @@
-Allow configuration of the path used for ACME account keys.
diff --git a/changelog.d/5521.feature b/changelog.d/5521.feature
deleted file mode 100644
index fdf91c35e4..0000000000
--- a/changelog.d/5521.feature
+++ /dev/null
@@ -1 +0,0 @@
-Allow configuration of the path used for ACME account keys.
diff --git a/changelog.d/5522.feature b/changelog.d/5522.feature
deleted file mode 100644
index fdf91c35e4..0000000000
--- a/changelog.d/5522.feature
+++ /dev/null
@@ -1 +0,0 @@
-Allow configuration of the path used for ACME account keys.
diff --git a/changelog.d/5523.bugfix b/changelog.d/5523.bugfix
deleted file mode 100644
index 26acd367a8..0000000000
--- a/changelog.d/5523.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a regression where homeservers on private IP addresses were incorrectly blacklisted.
\ No newline at end of file
diff --git a/changelog.d/5524.feature b/changelog.d/5524.feature
deleted file mode 100644
index 6ba211c3cc..0000000000
--- a/changelog.d/5524.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add --data-dir and --open-private-ports options.
\ No newline at end of file
diff --git a/changelog.d/5525.removal b/changelog.d/5525.removal
deleted file mode 100644
index af71560f36..0000000000
--- a/changelog.d/5525.removal
+++ /dev/null
@@ -1 +0,0 @@
-Remove support for cpu_affinity setting.
diff --git a/changelog.d/5531.feature b/changelog.d/5531.feature
deleted file mode 100644
index 5c6bab2c31..0000000000
--- a/changelog.d/5531.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add support for handling pagination APIs on client reader worker.
diff --git a/changelog.d/5534.feature b/changelog.d/5534.feature
deleted file mode 100644
index 2e279c9b77..0000000000
--- a/changelog.d/5534.feature
+++ /dev/null
@@ -1 +0,0 @@
-Split public rooms directory auth config into two settings, in order to manage client auth independently from the federation part of it. Obsoletes the "restrict_public_rooms_to_local_users" configuration setting. If "restrict_public_rooms_to_local_users" is set in the config, Synapse will act as if both new options are enabled, i.e. require authentication through the client API and deny federation requests.
diff --git a/changelog.d/5537.misc b/changelog.d/5537.misc
deleted file mode 100644
index 870a5ff18b..0000000000
--- a/changelog.d/5537.misc
+++ /dev/null
@@ -1 +0,0 @@
-Add information about how to install and run `black` on the codebase to code_style.rst.
diff --git a/changelog.d/5545.misc b/changelog.d/5545.misc
deleted file mode 100644
index b738eef4bd..0000000000
--- a/changelog.d/5545.misc
+++ /dev/null
@@ -1 +0,0 @@
-Update v1.0.0 release changelog to include more information about changes to password resets.
diff --git a/changelog.d/5546.feature b/changelog.d/5546.feature
deleted file mode 100644
index 16952b62fc..0000000000
--- a/changelog.d/5546.feature
+++ /dev/null
@@ -1 +0,0 @@
-Update docker image to use Python 3.7.
diff --git a/changelog.d/5547.feature b/changelog.d/5547.feature
deleted file mode 100644
index 509e36c7ea..0000000000
--- a/changelog.d/5547.feature
+++ /dev/null
@@ -1 +0,0 @@
-Increase default log level for docker image to INFO. It can still be changed by editing the generated log.config file.
diff --git a/changelog.d/5548.misc b/changelog.d/5548.misc
deleted file mode 100644
index f35939cfe9..0000000000
--- a/changelog.d/5548.misc
+++ /dev/null
@@ -1 +0,0 @@
-Remove non-functioning check_event_hash.py dev script.
diff --git a/changelog.d/5555.bugfix b/changelog.d/5555.bugfix
deleted file mode 100644
index c0b1ecf81a..0000000000
--- a/changelog.d/5555.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fixed m.login.jwt using an unregistered user_id, and added pyjwt>=1.6.4 as a conditional dependency for JWT support. Contributed by Pau Rodriguez-Estivill.
diff --git a/changelog.d/5558.misc b/changelog.d/5558.misc
deleted file mode 100644
index 9ce3555d45..0000000000
--- a/changelog.d/5558.misc
+++ /dev/null
@@ -1 +0,0 @@
-Improve install docs on choosing server_name.
diff --git a/changelog.d/5561.feature b/changelog.d/5561.feature
deleted file mode 100644
index 85380bc517..0000000000
--- a/changelog.d/5561.feature
+++ /dev/null
@@ -1 +0,0 @@
-Update Docker image to deprecate the use of environment variables for configuration, and make the use of a static configuration the default.
diff --git a/changelog.d/5562.feature b/changelog.d/5562.feature
deleted file mode 100644
index 85380bc517..0000000000
--- a/changelog.d/5562.feature
+++ /dev/null
@@ -1 +0,0 @@
-Update Docker image to deprecate the use of environment variables for configuration, and make the use of a static configuration the default.
diff --git a/changelog.d/5563.bugfix b/changelog.d/5563.bugfix
deleted file mode 100644
index 09c4381a23..0000000000
--- a/changelog.d/5563.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Docker: Use a sensible location for data files when generating a config file.
\ No newline at end of file
diff --git a/changelog.d/5564.misc b/changelog.d/5564.misc
deleted file mode 100644
index e209cdcc29..0000000000
--- a/changelog.d/5564.misc
+++ /dev/null
@@ -1 +0,0 @@
-Reduce the amount of stuff we send in the docker context.
diff --git a/changelog.d/5565.feature b/changelog.d/5565.feature
deleted file mode 100644
index 4b0665af03..0000000000
--- a/changelog.d/5565.feature
+++ /dev/null
@@ -1 +0,0 @@
-Docker: Send synapse logs to the docker logging system, by default.
diff --git a/changelog.d/5566.feature b/changelog.d/5566.feature
deleted file mode 100644
index 85380bc517..0000000000
--- a/changelog.d/5566.feature
+++ /dev/null
@@ -1 +0,0 @@
-Update Docker image to deprecate the use of environment variables for configuration, and make the use of a static configuration the default.
diff --git a/changelog.d/5567.feature b/changelog.d/5567.feature
deleted file mode 100644
index 85380bc517..0000000000
--- a/changelog.d/5567.feature
+++ /dev/null
@@ -1 +0,0 @@
-Update Docker image to deprecate the use of environment variables for configuration, and make the use of a static configuration the default.
diff --git a/changelog.d/5568.feature b/changelog.d/5568.feature
deleted file mode 100644
index 59b9e5f96d..0000000000
--- a/changelog.d/5568.feature
+++ /dev/null
@@ -1 +0,0 @@
-Docker image: open the non-TLS port by default.
diff --git a/changelog.d/5570.misc b/changelog.d/5570.misc
deleted file mode 100644
index dfb1d7e58b..0000000000
--- a/changelog.d/5570.misc
+++ /dev/null
@@ -1 +0,0 @@
-Point the reverse links in the Purge History contrib scripts at the intended location.
diff --git a/changelog.d/5606.misc b/changelog.d/5606.misc
new file mode 100644
index 0000000000..bb3c028167
--- /dev/null
+++ b/changelog.d/5606.misc
@@ -0,0 +1 @@
+Move logging code out of `synapse.util` and into `synapse.logging`.
diff --git a/changelog.d/5609.bugfix b/changelog.d/5609.bugfix
new file mode 100644
index 0000000000..534ee22a1b
--- /dev/null
+++ b/changelog.d/5609.bugfix
@@ -0,0 +1 @@
+Fix 'utime went backwards' errors on daemonization.
diff --git a/changelog.d/5611.misc b/changelog.d/5611.misc
new file mode 100644
index 0000000000..a2d1695139
--- /dev/null
+++ b/changelog.d/5611.misc
@@ -0,0 +1 @@
+Add a blacklist file to the repo to stop certain sytests from failing CI.
diff --git a/changelog.d/5613.feature b/changelog.d/5613.feature
new file mode 100644
index 0000000000..4b7bb2745c
--- /dev/null
+++ b/changelog.d/5613.feature
@@ -0,0 +1 @@
+Add `sender` and `origin_server_ts` fields to `m.replace`.
diff --git a/changelog.d/5616.misc b/changelog.d/5616.misc
new file mode 100644
index 0000000000..9f94be6778
--- /dev/null
+++ b/changelog.d/5616.misc
@@ -0,0 +1 @@
+Make runtime errors surrounding password reset emails much clearer.
diff --git a/changelog.d/5617.misc b/changelog.d/5617.misc
new file mode 100644
index 0000000000..bb3c028167
--- /dev/null
+++ b/changelog.d/5617.misc
@@ -0,0 +1 @@
+Move logging code out of `synapse.util` and into `synapse.logging`.
diff --git a/changelog.d/5621.bugfix b/changelog.d/5621.bugfix
new file mode 100644
index 0000000000..f1a2851f45
--- /dev/null
+++ b/changelog.d/5621.bugfix
@@ -0,0 +1 @@
+Various minor fixes to the federation request rate limiter.
diff --git a/changelog.d/5622.misc b/changelog.d/5622.misc
new file mode 100644
index 0000000000..9f0a87311c
--- /dev/null
+++ b/changelog.d/5622.misc
@@ -0,0 +1 @@
+Remove dead code for persisting outgoing federation transactions.
diff --git a/changelog.d/5623.feature b/changelog.d/5623.feature
new file mode 100644
index 0000000000..b73080e88d
--- /dev/null
+++ b/changelog.d/5623.feature
@@ -0,0 +1 @@
+Add default push rule to ignore reactions.
diff --git a/changelog.d/5625.removal b/changelog.d/5625.removal
new file mode 100644
index 0000000000..d33a778d69
--- /dev/null
+++ b/changelog.d/5625.removal
@@ -0,0 +1 @@
+Remove support for the `invite_3pid_guest` configuration setting.
diff --git a/changelog.d/5628.misc b/changelog.d/5628.misc
new file mode 100644
index 0000000000..fec8446793
--- /dev/null
+++ b/changelog.d/5628.misc
@@ -0,0 +1 @@
+Move RegistrationHandler.get_or_create_user to test code.
diff --git a/changelog.d/5630.misc b/changelog.d/5630.misc
new file mode 100644
index 0000000000..f112d873eb
--- /dev/null
+++ b/changelog.d/5630.misc
@@ -0,0 +1 @@
+Add some more common python virtual-environment paths to the black exclusion list.
diff --git a/changelog.d/5639.misc b/changelog.d/5639.misc
new file mode 100644
index 0000000000..413b13128c
--- /dev/null
+++ b/changelog.d/5639.misc
@@ -0,0 +1 @@
+Factor out some redundant code in the login implementation.
diff --git a/changelog.d/5640.misc b/changelog.d/5640.misc
new file mode 100644
index 0000000000..7d69a1b3b6
--- /dev/null
+++ b/changelog.d/5640.misc
@@ -0,0 +1 @@
+Update ModuleApi to avoid register(generate_token=True).
diff --git a/contrib/example_log_config.yaml b/contrib/example_log_config.yaml
index 06592963da..3a76a7a33f 100644
--- a/contrib/example_log_config.yaml
+++ b/contrib/example_log_config.yaml
@@ -1,7 +1,7 @@
-# Example log_config file for synapse. To enable, point `log_config` to it in 
+# Example log_config file for synapse. To enable, point `log_config` to it in
 # `homeserver.yaml`, and restart synapse.
 #
-# This configuration will produce similar results to the defaults within 
+# This configuration will produce similar results to the defaults within
 # synapse, but can be edited to give more flexibility.
 
 version: 1
@@ -12,7 +12,7 @@ formatters:
 
 filters:
   context:
-    (): synapse.util.logcontext.LoggingContextFilter
+    (): synapse.logging.context.LoggingContextFilter
     request: ""
 
 handlers:
@@ -35,7 +35,7 @@ handlers:
 root:
     level: INFO
     handlers: [console] # to use file handler instead, switch to [file]
-    
+
 loggers:
     synapse:
         level: INFO
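
Existing deployments need the same one-line change in their own log configs; a minimal sketch of the migrated filter block, matching the config diffs in this commit:

    filters:
      context:
        # was: (): synapse.util.logcontext.LoggingContextFilter
        (): synapse.logging.context.LoggingContextFilter
        request: ""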
diff --git a/contrib/experiments/test_messaging.py b/contrib/experiments/test_messaging.py
index c7e55d8aa7..5ef140ae48 100644
--- a/contrib/experiments/test_messaging.py
+++ b/contrib/experiments/test_messaging.py
@@ -36,7 +36,7 @@ from synapse.util import origin_from_ucid
 
 from synapse.app.homeserver import SynapseHomeServer
 
-# from synapse.util.logutils import log_function
+# from synapse.logging.utils import log_function
 
 from twisted.internet import reactor, defer
 from twisted.python import log
diff --git a/contrib/systemd/log_config.yaml b/contrib/systemd/log_config.yaml
index d85bdd1208..22f67a50ce 100644
--- a/contrib/systemd/log_config.yaml
+++ b/contrib/systemd/log_config.yaml
@@ -8,7 +8,7 @@ formatters:
 
 filters:
     context:
-        (): synapse.util.logcontext.LoggingContextFilter
+        (): synapse.logging.context.LoggingContextFilter
         request: ""
 
 handlers:
diff --git a/debian/build_virtualenv b/debian/build_virtualenv
index bead3ebc6e..2791896052 100755
--- a/debian/build_virtualenv
+++ b/debian/build_virtualenv
@@ -43,7 +43,7 @@ dh_virtualenv \
     --preinstall="mock" \
     --extra-pip-arg="--no-cache-dir" \
     --extra-pip-arg="--compile" \
-    --extras="all"
+    --extras="all,systemd"
 
 PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
 VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
diff --git a/debian/changelog b/debian/changelog
index ef4edd7ac0..b964cf90a2 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,20 @@
+matrix-synapse-py3 (1.1.0-1) UNRELEASED; urgency=medium
+
+  [ Amber Brown ]
+  * Update logging config defaults to match API changes in Synapse.
+
+ -- Erik Johnston <erikj@rae>  Thu, 04 Jul 2019 13:59:02 +0100
+
+matrix-synapse-py3 (1.1.0) stable; urgency=medium
+
+  [ Silke Hofstra ]
+  * Include systemd-python to allow logging to the systemd journal.
+
+  [ Synapse Packaging team ]
+  * New synapse release 1.1.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 04 Jul 2019 11:43:41 +0100
+
 matrix-synapse-py3 (1.0.0) stable; urgency=medium
 
   * New synapse release 1.0.0.
diff --git a/debian/log.yaml b/debian/log.yaml
index 206b65f1ac..95b655dd35 100644
--- a/debian/log.yaml
+++ b/debian/log.yaml
@@ -7,7 +7,7 @@ formatters:
 
 filters:
   context:
-    (): synapse.util.logcontext.LoggingContextFilter
+    (): synapse.logging.context.LoggingContextFilter
     request: ""
 
 handlers:
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 0939cadf39..79276209f6 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -66,6 +66,7 @@ RUN apk add --no-cache --virtual .runtime_deps \
         libpq \
         zlib \
         su-exec \
+        tzdata \
         xmlsec
 
 COPY --from=builder /install /usr/local
diff --git a/docker/README.md b/docker/README.md
index b62417c281..46bb9d2d99 100644
--- a/docker/README.md
+++ b/docker/README.md
@@ -63,7 +63,6 @@ The following environment variables are supported in `generate` mode:
 * `UID`, `GID`: the user id and group id to use for creating the data
   directories. Defaults to `991`, `991`.
 
-
 ## Running synapse
 
 Once you have a valid configuration file, you can start synapse as follows:
@@ -91,6 +90,7 @@ The following environment variables are supported in run mode:
 * `SYNAPSE_CONFIG_PATH`: path to the config file. Defaults to
   `<SYNAPSE_CONFIG_DIR>/homeserver.yaml`.
 * `UID`, `GID`: the user and group id to run Synapse as. Defaults to `991`, `991`.
+* `TZ`: the [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) the container will run with. Defaults to `UTC`.
 
 ## TLS support
 
diff --git a/docker/conf/homeserver.yaml b/docker/conf/homeserver.yaml
index b0267b1c60..c1110f0f53 100644
--- a/docker/conf/homeserver.yaml
+++ b/docker/conf/homeserver.yaml
@@ -207,22 +207,3 @@ perspectives:
 
 password_config:
    enabled: true
-
-{% if SYNAPSE_SMTP_HOST %}
-email:
-   enable_notifs: false
-   smtp_host: "{{ SYNAPSE_SMTP_HOST }}"
-   smtp_port: {{ SYNAPSE_SMTP_PORT or "25" }}
-   smtp_user: "{{ SYNAPSE_SMTP_USER }}"
-   smtp_pass: "{{ SYNAPSE_SMTP_PASSWORD }}"
-   require_transport_security: False
-   notif_from: "{{ SYNAPSE_SMTP_FROM or "hostmaster@" + SYNAPSE_SERVER_NAME }}"
-   app_name: Matrix
-   # if template_dir is unset, uses the example templates that are part of
-   # the Synapse distribution.
-   #template_dir: res/templates
-   notif_template_html: notif_mail.html
-   notif_template_text: notif_mail.txt
-   notif_for_new_users: True
-   riot_base_url: "https://{{ SYNAPSE_SERVER_NAME }}"
-{% endif %}
diff --git a/docker/conf/log.config b/docker/conf/log.config
index 895e45d20b..ea5ccfd68b 100644
--- a/docker/conf/log.config
+++ b/docker/conf/log.config
@@ -6,7 +6,7 @@ formatters:
 
 filters:
   context:
-    (): synapse.util.logcontext.LoggingContextFilter
+    (): synapse.logging.context.LoggingContextFilter
     request: ""
 
 handlers:
diff --git a/docs/log_contexts.rst b/docs/log_contexts.rst
index 27cde11cf7..f5cd5de8ab 100644
--- a/docs/log_contexts.rst
+++ b/docs/log_contexts.rst
@@ -1,4 +1,4 @@
-Log contexts
+Log Contexts
 ============
 
 .. contents::
@@ -12,7 +12,7 @@ record.
 Logcontexts are also used for CPU and database accounting, so that we can track
 which requests were responsible for high CPU use or database activity.
 
-The ``synapse.util.logcontext`` module provides a facilities for managing the
+The ``synapse.logging.context`` module provides facilities for managing the
 current log context (as well as providing the ``LoggingContextFilter`` class).
 
 Deferreds make the whole thing complicated, so this document describes how it
@@ -27,19 +27,19 @@ found them:
 
 .. code:: python
 
-    from synapse.util import logcontext         # omitted from future snippets
+    from synapse.logging import context         # omitted from future snippets
 
     def handle_request(request_id):
-        request_context = logcontext.LoggingContext()
+        request_context = context.LoggingContext()
 
-        calling_context = logcontext.LoggingContext.current_context()
-        logcontext.LoggingContext.set_current_context(request_context)
+        calling_context = context.LoggingContext.current_context()
+        context.LoggingContext.set_current_context(request_context)
         try:
             request_context.request = request_id
             do_request_handling()
             logger.debug("finished")
         finally:
-            logcontext.LoggingContext.set_current_context(calling_context)
+            context.LoggingContext.set_current_context(calling_context)
 
     def do_request_handling():
         logger.debug("phew")  # this will be logged against request_id
@@ -51,7 +51,7 @@ written much more succinctly as:
 .. code:: python
 
     def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
             request_context.request = request_id
             do_request_handling()
             logger.debug("finished")
@@ -74,7 +74,7 @@ blocking operation, and returns a deferred:
 
     @defer.inlineCallbacks
     def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
             request_context.request = request_id
             yield do_request_handling()
             logger.debug("finished")
@@ -179,7 +179,7 @@ though, we need to make up a new Deferred, or we get a Deferred back from
 external code. We need to make it follow our rules.
 
 The easy way to do it is with a combination of ``defer.inlineCallbacks``, and
-``logcontext.PreserveLoggingContext``. Suppose we want to implement ``sleep``,
+``context.PreserveLoggingContext``. Suppose we want to implement ``sleep``,
 which returns a deferred which will run its callbacks after a given number of
 seconds. That might look like:
 
@@ -204,13 +204,13 @@ That doesn't follow the rules, but we can fix it by wrapping it with
 This technique works equally for external functions which return deferreds,
 or deferreds we have made ourselves.
 
-You can also use ``logcontext.make_deferred_yieldable``, which just does the
+You can also use ``context.make_deferred_yieldable``, which just does the
 boilerplate for you, so the above could be written:
 
 .. code:: python
 
     def sleep(seconds):
-        return logcontext.make_deferred_yieldable(get_sleep_deferred(seconds))
+        return context.make_deferred_yieldable(get_sleep_deferred(seconds))
 
 
 Fire-and-forget
@@ -279,7 +279,7 @@ Obviously that option means that the operations done in
 that might be fixed by setting a different logcontext via a ``with
 LoggingContext(...)`` in ``background_operation``).
 
-The second option is to use ``logcontext.run_in_background``, which wraps a
+The second option is to use ``context.run_in_background``, which wraps a
 function so that it doesn't reset the logcontext even when it returns an
 incomplete deferred, and adds a callback to the returned deferred to reset the
 logcontext. In other words, it turns a function that follows the Synapse rules
@@ -293,7 +293,7 @@ It can be used like this:
     def do_request_handling():
         yield foreground_operation()
 
-        logcontext.run_in_background(background_operation)
+        context.run_in_background(background_operation)
 
         # this will now be logged against the request context
         logger.debug("Request handling complete")
@@ -332,7 +332,7 @@ gathered:
             result = yield defer.gatherResults([d1, d2])
 
 In this case particularly, though, option two, of using
-``logcontext.preserve_fn`` almost certainly makes more sense, so that
+``context.preserve_fn`` almost certainly makes more sense, so that
 ``operation1`` and ``operation2`` are both logged against the original
 logcontext. This looks like:
 
@@ -340,8 +340,8 @@ logcontext. This looks like:
 
     @defer.inlineCallbacks
     def do_request_handling():
-        d1 = logcontext.preserve_fn(operation1)()
-        d2 = logcontext.preserve_fn(operation2)()
+        d1 = context.preserve_fn(operation1)()
+        d2 = context.preserve_fn(operation2)()
 
         with PreserveLoggingContext():
             result = yield defer.gatherResults([d1, d2])
@@ -381,7 +381,7 @@ off the background process, and then leave the ``with`` block to wait for it:
 .. code:: python
 
     def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
             request_context.request = request_id
             d = do_request_handling()
 
@@ -414,7 +414,7 @@ runs its callbacks in the original logcontext, all is happy.
 
 The business of a Deferred which runs its callbacks in the original logcontext
 isn't hard to achieve — we have it today, in the shape of
-``logcontext._PreservingContextDeferred``:
+``context._PreservingContextDeferred``:
 
 .. code:: python
 
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml
index da10788e96..7fe7c94ac4 100644
--- a/docs/sample_config.yaml
+++ b/docs/sample_config.yaml
@@ -317,6 +317,15 @@ listeners:
 #
 #federation_verify_certificates: false
 
+# The minimum TLS version that will be used for outbound federation requests.
+#
+# Defaults to `1`. Configurable to `1`, `1.1`, `1.2`, or `1.3`. Note
+# that setting this value higher than `1.2` will prevent federation with most
+# of the public Matrix network: only configure it to `1.3` if you have an
+# entirely private federation setup and you can ensure TLS 1.3 support.
+#
+#federation_client_minimum_tls_version: 1.2
+
 # Skip federation certificate verification on the following whitelist
 # of domains.
 #
@@ -988,6 +997,12 @@ signing_key_path: "CONFDIR/SERVERNAME.signing.key"
 # so it is not normally necessary to specify them unless you need to
 # override them.
 #
+# Once SAML support is enabled, a metadata file will be exposed at
+# https://<server>:<port>/_matrix/saml2/metadata.xml, which you may be able to
+# use to configure your SAML IdP with. Alternatively, you can manually configure
+# the IdP to use an ACS location of
+# https://<server>:<port>/_matrix/saml2/authn_response.
+#
 #saml2_config:
 #  sp_config:
 #    # point this to the IdP's metadata. You can use either a local file or
@@ -997,7 +1012,15 @@ signing_key_path: "CONFDIR/SERVERNAME.signing.key"
 #      remote:
 #        - url: https://our_idp/metadata.xml
 #
-#    # The rest of sp_config is just used to generate our metadata xml, and you
+#    # By default, the user has to go to our login page first. If you'd like to
+#    # allow IdP-initiated login, set 'allow_unsolicited: True' in a
+#    # 'service.sp' section:
+#    #
+#    #service:
+#    #  sp:
+#    #    allow_unsolicited: True
+#
+#    # The examples below are just used to generate our metadata xml, and you
 #    # may well not need it, depending on your setup. Alternatively you
 #    # may need a whole lot more detail - see the pysaml2 docs!
 #
@@ -1020,6 +1043,12 @@ signing_key_path: "CONFDIR/SERVERNAME.signing.key"
 #  # separate pysaml2 configuration file:
 #  #
 #  config_path: "CONFDIR/sp_conf.py"
+#
+#  # the lifetime of a SAML session. This defines how long a user has to
+#  # complete the authentication process, if allow_unsolicited is unset.
+#  # The default is 5 minutes.
+#  #
+#  # saml_session_lifetime: 5m
 
 
 
@@ -1046,6 +1075,12 @@ password_config:
    #
    #enabled: false
 
+   # Uncomment to disable authentication against the local password
+   # database. This is ignored if `enabled` is false, and is only useful
+   # if you have other password_providers.
+   #
+   #localdb_enabled: false
+
    # Uncomment and change to a secret random string for extra security.
    # DO NOT CHANGE THIS AFTER INITIAL SETUP!
    #
@@ -1070,11 +1105,13 @@ password_config:
 #   app_name: Matrix
 #
 #   # Enable email notifications by default
+#   #
 #   notif_for_new_users: True
 #
 #   # Defining a custom URL for Riot is only needed if email notifications
 #   # should contain links to a self-hosted installation of Riot; when set
 #   # the "app_name" setting is ignored
+#   #
 #   riot_base_url: "http://localhost/riot"
 #
 #   # Enable sending password reset emails via the configured, trusted
@@ -1087,16 +1124,22 @@ password_config:
 #   #
 #   # If this option is set to false and SMTP options have not been
 #   # configured, resetting user passwords via email will be disabled
+#   #
 #   #trust_identity_server_for_password_resets: false
 #
 #   # Configure the time that a validation email or text message code
 #   # will expire after sending
 #   #
 #   # This is currently used for password resets
+#   #
 #   #validation_token_lifetime: 1h
 #
 #   # Template directory. All template files should be stored within this
-#   # directory
+#   # directory. If not set, default templates from within the Synapse
+#   # package will be used
+#   #
+#   # For the list of default templates, please see
+#   # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
 #   #
 #   #template_dir: res/templates
 #
diff --git a/pyproject.toml b/pyproject.toml
index ec23258da8..ef329aab41 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -39,6 +39,8 @@ exclude = '''
     | \.git          # root of the project
     | \.tox
     | \.venv
+    | \.env
+    | env
     | _build
     | _trial_temp.*
     | build
diff --git a/scripts/generate_config b/scripts/generate_config
index 93b6406992..771cbf8d95 100755
--- a/scripts/generate_config
+++ b/scripts/generate_config
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 import argparse
 import shutil
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 119359be68..cf22fabd61 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -35,4 +35,4 @@ try:
 except ImportError:
     pass
 
-__version__ = "1.0.0"
+__version__ = "1.1.0"
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index d50a9840d4..1ebb7ae539 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -27,7 +27,7 @@ from twisted.protocols.tls import TLSMemoryBIOFactory
 import synapse
 from synapse.app import check_bind_error
 from synapse.crypto import context_factory
-from synapse.util import PreserveLoggingContext
+from synapse.logging.context import PreserveLoggingContext
 from synapse.util.async_helpers import Linearizer
 from synapse.util.rlimit import change_resource_limit
 from synapse.util.versionstring import get_version_string
@@ -93,33 +93,36 @@ def start_reactor(
     install_dns_limiter(reactor)
 
     def run():
-        # make sure that we run the reactor with the sentinel log context,
-        # otherwise other PreserveLoggingContext instances will get confused
-        # and complain when they see the logcontext arbitrarily swapping
-        # between the sentinel and `run` logcontexts.
-        with PreserveLoggingContext():
-            logger.info("Running")
-
-            change_resource_limit(soft_file_limit)
-            if gc_thresholds:
-                gc.set_threshold(*gc_thresholds)
-            reactor.run()
-
-    if daemonize:
-        if print_pidfile:
-            print(pid_file)
-
-        daemon = Daemonize(
-            app=appname,
-            pid=pid_file,
-            action=run,
-            auto_close_fds=False,
-            verbose=True,
-            logger=logger,
-        )
-        daemon.start()
-    else:
-        run()
+        logger.info("Running")
+        change_resource_limit(soft_file_limit)
+        if gc_thresholds:
+            gc.set_threshold(*gc_thresholds)
+        reactor.run()
+
+    # make sure that we run the reactor with the sentinel log context,
+    # otherwise other PreserveLoggingContext instances will get confused
+    # and complain when they see the logcontext arbitrarily swapping
+    # between the sentinel and `run` logcontexts.
+    #
+    # We also need to drop the logcontext before forking if we're daemonizing,
+    # otherwise the cputime metrics get confused about the per-thread resource usage
+    # appearing to go backwards.
+    with PreserveLoggingContext():
+        if daemonize:
+            if print_pidfile:
+                print(pid_file)
+
+            daemon = Daemonize(
+                app=appname,
+                pid=pid_file,
+                action=run,
+                auto_close_fds=False,
+                verbose=True,
+                logger=logger,
+            )
+            daemon.start()
+        else:
+            run()
 
 
 def quit_with_error(error_string):
diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py
index 9120bdb143..be44249ed6 100644
--- a/synapse/app/appservice.py
+++ b/synapse/app/appservice.py
@@ -26,6 +26,7 @@ from synapse.config._base import ConfigError
 from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
@@ -36,7 +37,6 @@ from synapse.replication.tcp.client import ReplicationClientHandler
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py
index 90bc79cdda..ff11beca82 100644
--- a/synapse/app/client_reader.py
+++ b/synapse/app/client_reader.py
@@ -27,6 +27,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -64,7 +65,6 @@ from synapse.rest.client.versions import VersionsRestServlet
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py
index ff522e4499..cacad25eac 100644
--- a/synapse/app/event_creator.py
+++ b/synapse/app/event_creator.py
@@ -27,6 +27,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -59,7 +60,6 @@ from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.storage.user_directory import UserDirectoryStore
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py
index 9421420930..11e80dbae0 100644
--- a/synapse/app/federation_reader.py
+++ b/synapse/app/federation_reader.py
@@ -28,6 +28,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.federation.transport.server import TransportLayerServer
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -48,7 +49,6 @@ from synapse.rest.key.v2 import KeyApiV2Resource
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py
index 969be58d0b..97da7bdcbf 100644
--- a/synapse/app/federation_sender.py
+++ b/synapse/app/federation_sender.py
@@ -27,6 +27,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.federation import send_queue
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
@@ -44,7 +45,6 @@ from synapse.storage.engines import create_engine
 from synapse.types import ReadReceipt
 from synapse.util.async_helpers import Linearizer
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py
index 2fd7d57ebf..417a10bbd2 100644
--- a/synapse/app/frontend_proxy.py
+++ b/synapse/app/frontend_proxy.py
@@ -29,6 +29,7 @@ from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.servlet import RestServlet, parse_json_object_from_request
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -41,7 +42,6 @@ from synapse.rest.client.v2_alpha._base import client_patterns
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 49da105cf6..639b1429c0 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -54,6 +54,7 @@ from synapse.federation.transport.server import TransportLayerServer
 from synapse.http.additional_resource import AdditionalResource
 from synapse.http.server import RootRedirect
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
@@ -72,7 +73,6 @@ from synapse.storage.engines import IncorrectDatabaseSetup, create_engine
 from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_database
 from synapse.util.caches import CACHE_SIZE_FACTOR
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.module_loader import load_module
 from synapse.util.rlimit import change_resource_limit
diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py
index cf0e2036c3..f23b9b6eda 100644
--- a/synapse/app/media_repository.py
+++ b/synapse/app/media_repository.py
@@ -27,6 +27,7 @@ from synapse.config._base import ConfigError
 from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -40,7 +41,6 @@ from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.storage.media_repository import MediaRepositoryStore
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py
index df29ea5ecb..4f929edf86 100644
--- a/synapse/app/pusher.py
+++ b/synapse/app/pusher.py
@@ -26,6 +26,7 @@ from synapse.config._base import ConfigError
 from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import __func__
@@ -38,7 +39,6 @@ from synapse.server import HomeServer
 from synapse.storage import DataStore
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py
index 858949910d..de4797fddc 100644
--- a/synapse/app/synchrotron.py
+++ b/synapse/app/synchrotron.py
@@ -31,6 +31,7 @@ from synapse.config.logger import setup_logging
 from synapse.handlers.presence import PresenceHandler, get_interested_parties
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore, __func__
@@ -57,7 +58,6 @@ from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.storage.presence import UserPresenceState
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.stringutils import random_string
 from synapse.util.versionstring import get_version_string
diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py
index 2d9d2e1bbc..1177ddd72e 100644
--- a/synapse/app/user_dir.py
+++ b/synapse/app/user_dir.py
@@ -28,6 +28,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -46,7 +47,6 @@ from synapse.storage.engines import create_engine
 from synapse.storage.user_directory import UserDirectoryStore
 from synapse.util.caches.stream_change_cache import StreamChangeCache
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py
index b54bf5411f..e5b36494f5 100644
--- a/synapse/appservice/scheduler.py
+++ b/synapse/appservice/scheduler.py
@@ -53,8 +53,8 @@ import logging
 from twisted.internet import defer
 
 from synapse.appservice import ApplicationServiceState
+from synapse.logging.context import run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util.logcontext import run_in_background
 
 logger = logging.getLogger(__name__)
 
diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py
index cf39936da7..8381b8eb29 100644
--- a/synapse/config/emailconfig.py
+++ b/synapse/config/emailconfig.py
@@ -112,13 +112,17 @@ class EmailConfig(Config):
             missing = []
             for k in required:
                 if k not in email_config:
-                    missing.append(k)
+                    missing.append("email." + k)
+
+            if config.get("public_baseurl") is None:
+                missing.append("public_base_url")
 
             if len(missing) > 0:
                 raise RuntimeError(
-                    "email.password_reset_behaviour is set to 'local' "
-                    "but required keys are missing: %s"
-                    % (", ".join(["email." + k for k in missing]),)
+                    "Password resets emails are configured to be sent from "
+                    "this homeserver due to a partial 'email' block. "
+                    "However, the following required keys are missing: %s"
+                    % (", ".join(missing),)
                 )
 
             # Templates for password reset emails
@@ -156,13 +160,6 @@ class EmailConfig(Config):
                 filepath, "email.password_reset_template_success_html"
             )
 
-            if config.get("public_baseurl") is None:
-                raise RuntimeError(
-                    "email.password_reset_behaviour is set to 'local' but no "
-                    "public_baseurl is set. This is necessary to generate password "
-                    "reset links"
-                )
-
         if self.email_enable_notifs:
             required = [
                 "smtp_host",
@@ -233,11 +230,13 @@ class EmailConfig(Config):
         #   app_name: Matrix
         #
         #   # Enable email notifications by default
+        #   #
         #   notif_for_new_users: True
         #
         #   # Defining a custom URL for Riot is only needed if email notifications
         #   # should contain links to a self-hosted installation of Riot; when set
         #   # the "app_name" setting is ignored
+        #   #
         #   riot_base_url: "http://localhost/riot"
         #
         #   # Enable sending password reset emails via the configured, trusted
@@ -250,16 +249,22 @@ class EmailConfig(Config):
         #   #
         #   # If this option is set to false and SMTP options have not been
         #   # configured, resetting user passwords via email will be disabled
+        #   #
         #   #trust_identity_server_for_password_resets: false
         #
         #   # Configure the time that a validation email or text message code
         #   # will expire after sending
         #   #
         #   # This is currently used for password resets
+        #   #
         #   #validation_token_lifetime: 1h
         #
         #   # Template directory. All template files should be stored within this
-        #   # directory
+        #   # directory. If not set, default templates from within the Synapse
+        #   # package will be used
+        #   #
+        #   # For the list of default templates, please see
+        #   # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
         #   #
         #   #template_dir: res/templates
         #
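The reordered validation above folds a missing ``public_baseurl`` into the same
error as missing ``email.*`` keys, rather than raising a separate RuntimeError
after the templates have been loaded. A minimal sketch of the combined check
(the required-key list here is a stand-in for the real one, not the full list):

.. code:: python

    # Hypothetical partial config: password resets handled locally, but with
    # no SMTP host and no public_baseurl set.
    config = {
        "email": {"password_reset_behaviour": "local"},
        # "public_baseurl" deliberately absent
    }

    missing = []
    for k in ("smtp_host",):  # stand-in for the real required-key list
        if k not in config["email"]:
            missing.append("email." + k)
    if config.get("public_baseurl") is None:
        missing.append("public_baseurl")

    # A single RuntimeError now reports everything at once:
    assert missing == ["email.smtp_host", "public_baseurl"]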
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index 931aec41c0..0f5554211c 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -24,7 +24,7 @@ from twisted.logger import STDLibLogObserver, globalLogBeginner
 
 import synapse
 from synapse.app import _base as appbase
-from synapse.util.logcontext import LoggingContextFilter
+from synapse.logging.context import LoggingContextFilter
 from synapse.util.versionstring import get_version_string
 
 from ._base import Config
@@ -40,7 +40,7 @@ formatters:
 
 filters:
     context:
-        (): synapse.util.logcontext.LoggingContextFilter
+        (): synapse.logging.context.LoggingContextFilter
         request: ""
 
 handlers:
diff --git a/synapse/config/password.py b/synapse/config/password.py
index 598f84fc0c..d5b5953f2f 100644
--- a/synapse/config/password.py
+++ b/synapse/config/password.py
@@ -26,6 +26,7 @@ class PasswordConfig(Config):
             password_config = {}
 
         self.password_enabled = password_config.get("enabled", True)
+        self.password_localdb_enabled = password_config.get("localdb_enabled", True)
         self.password_pepper = password_config.get("pepper", "")
 
     def generate_config_section(self, config_dir_path, server_name, **kwargs):
@@ -35,6 +36,12 @@ class PasswordConfig(Config):
            #
            #enabled: false
 
+           # Uncomment to disable authentication against the local password
+           # database. This is ignored if `enabled` is false, and is only useful
+           # if you have other password_providers.
+           #
+           #localdb_enabled: false
+
            # Uncomment and change to a secret random string for extra security.
            # DO NOT CHANGE THIS AFTER INITIAL SETUP!
            #
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index 8c587f3fd2..33f31cf213 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -23,7 +23,7 @@ class RateLimitConfig(object):
 
 class FederationRateLimitConfig(object):
     _items_and_default = {
-        "window_size": 10000,
+        "window_size": 1000,
         "sleep_limit": 10,
         "sleep_delay": 500,
         "reject_limit": 50,
@@ -54,7 +54,7 @@ class RatelimitConfig(Config):
 
         # Load the new-style federation config, if it exists. Otherwise, fall
         # back to the old method.
-        if "federation_rc" in config:
+        if "rc_federation" in config:
             self.rc_federation = FederationRateLimitConfig(**config["rc_federation"])
         else:
             self.rc_federation = FederationRateLimitConfig(
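For context, the corrected key name means a new-style block like the one below
is now actually honoured; the old code looked up ``federation_rc`` and silently
fell back to the defaults. A minimal sketch, using the keys shown in
``_items_and_default``:

.. code:: python

    from synapse.config.ratelimiting import FederationRateLimitConfig

    # New-style block, as it would appear at the top level of homeserver.yaml.
    config = {
        "rc_federation": {
            "window_size": 1000,  # ms; matches the new default above
            "sleep_limit": 10,
            "sleep_delay": 500,   # ms
            "reject_limit": 50,
        }
    }

    # Mirrors the corrected check: previously this tested for "federation_rc",
    # so new-style settings were ignored.
    if "rc_federation" in config:
        rc_federation = FederationRateLimitConfig(**config["rc_federation"])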
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index 4a59e6ec90..b895c4e9f4 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -71,9 +71,8 @@ class RegistrationConfig(Config):
         self.default_identity_server = config.get("default_identity_server")
         self.allow_guest_access = config.get("allow_guest_access", False)
 
-        self.invite_3pid_guest = self.allow_guest_access and config.get(
-            "invite_3pid_guest", False
-        )
+        if config.get("invite_3pid_guest", False):
+            raise ConfigError("invite_3pid_guest is no longer supported")
 
         self.auto_join_rooms = config.get("auto_join_rooms", [])
         for room_alias in self.auto_join_rooms:
diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py
index 872a1ba934..6a8161547a 100644
--- a/synapse/config/saml2_config.py
+++ b/synapse/config/saml2_config.py
@@ -12,6 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from synapse.python_dependencies import DependencyException, check_requirements
 
 from ._base import Config, ConfigError
 
@@ -25,6 +26,11 @@ class SAML2Config(Config):
         if not saml2_config or not saml2_config.get("enabled", True):
             return
 
+        try:
+            check_requirements("saml2")
+        except DependencyException as e:
+            raise ConfigError(e.message)
+
         self.saml2_enabled = True
 
         import saml2.config
@@ -37,6 +43,11 @@ class SAML2Config(Config):
         if config_path is not None:
             self.saml2_sp_config.load_file(config_path)
 
+        # session lifetime: in milliseconds
+        self.saml2_session_lifetime = self.parse_duration(
+            saml2_config.get("saml_session_lifetime", "5m")
+        )
+
     def _default_saml_config_dict(self):
         import saml2
 
@@ -72,6 +83,12 @@ class SAML2Config(Config):
         # so it is not normally necessary to specify them unless you need to
         # override them.
         #
+        # Once SAML support is enabled, a metadata file will be exposed at
+        # https://<server>:<port>/_matrix/saml2/metadata.xml, which you may be able to
+        # use to configure your SAML IdP with. Alternatively, you can manually configure
+        # the IdP to use an ACS location of
+        # https://<server>:<port>/_matrix/saml2/authn_response.
+        #
         #saml2_config:
         #  sp_config:
         #    # point this to the IdP's metadata. You can use either a local file or
@@ -81,7 +98,15 @@ class SAML2Config(Config):
         #      remote:
         #        - url: https://our_idp/metadata.xml
         #
-        #    # The rest of sp_config is just used to generate our metadata xml, and you
+        #    # By default, the user has to go to our login page first. If you'd like to
+        #    # allow IdP-initiated login, set 'allow_unsolicited: True' in a
+        #    # 'service.sp' section:
+        #    #
+        #    #service:
+        #    #  sp:
+        #    #    allow_unsolicited: True
+        #
+        #    # The examples below are just used to generate our metadata xml, and you
         #    # may well not need it, depending on your setup. Alternatively you
         #    # may need a whole lot more detail - see the pysaml2 docs!
         #
@@ -104,6 +129,12 @@ class SAML2Config(Config):
         #  # separate pysaml2 configuration file:
         #  #
         #  config_path: "%(config_dir_path)s/sp_conf.py"
+        #
+        #  # the lifetime of a SAML session. This defines how long a user has to
+        #  # complete the authentication process, if allow_unsolicited is unset.
+        #  # The default is 5 minutes.
+        #  #
+        #  # saml_session_lifetime: 5m
         """ % {
             "config_dir_path": config_dir_path
         }
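The new ``saml_session_lifetime`` option is fed through ``Config.parse_duration``,
so the stored value is in milliseconds. A toy sketch of the unit handling (the
converter below is illustrative only, not the real parser):

.. code:: python

    # Illustrative stand-in for Config.parse_duration, which turns suffixed
    # strings such as "30s", "5m", "1h" into milliseconds.
    UNITS_MS = {"s": 1000, "m": 60 * 1000, "h": 60 * 60 * 1000}

    def toy_parse_duration(value: str) -> int:
        return int(value[:-1]) * UNITS_MS[value[-1]]

    # The "5m" default therefore stores 300000 ms.
    assert toy_parse_duration("5m") == 300000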
diff --git a/synapse/config/tls.py b/synapse/config/tls.py
index 8fcf801418..ca508a224f 100644
--- a/synapse/config/tls.py
+++ b/synapse/config/tls.py
@@ -23,7 +23,7 @@ import six
 
 from unpaddedbase64 import encode_base64
 
-from OpenSSL import crypto
+from OpenSSL import SSL, crypto
 from twisted.internet._sslverify import Certificate, trustRootFromCertificates
 
 from synapse.config._base import Config, ConfigError
@@ -81,6 +81,27 @@ class TlsConfig(Config):
             "federation_verify_certificates", True
         )
 
+        # Minimum TLS version to use for outbound federation traffic
+        self.federation_client_minimum_tls_version = str(
+            config.get("federation_client_minimum_tls_version", 1)
+        )
+
+        if self.federation_client_minimum_tls_version not in ["1", "1.1", "1.2", "1.3"]:
+            raise ConfigError(
+                "federation_client_minimum_tls_version must be one of: 1, 1.1, 1.2, 1.3"
+            )
+
+        # Prevent people shooting themselves in the foot here by setting it to
+        # the biggest number blindly
+        if self.federation_client_minimum_tls_version == "1.3":
+            if getattr(SSL, "OP_NO_TLSv1_3", None) is None:
+                raise ConfigError(
+                    (
+                        "federation_client_minimum_tls_version cannot be 1.3, "
+                        "your OpenSSL does not support it"
+                    )
+                )
+
         # Whitelist of domains to not verify certificates for
         fed_whitelist_entries = config.get(
             "federation_certificate_verification_whitelist", []
@@ -261,6 +282,15 @@ class TlsConfig(Config):
         #
         #federation_verify_certificates: false
 
+        # The minimum TLS version that will be used for outbound federation requests.
+        #
+        # Defaults to `1`. Configurable to `1`, `1.1`, `1.2`, or `1.3`. Note
+        # that setting this value higher than `1.2` will prevent federation with most
+        # of the public Matrix network: only configure it to `1.3` if you have an
+        # entirely private federation setup and you can ensure TLS 1.3 support.
+        #
+        #federation_client_minimum_tls_version: 1.2
+
         # Skip federation certificate verification on the following whitelist
         # of domains.
         #
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py
index 2bc5cc3807..4f48e8e88d 100644
--- a/synapse/crypto/context_factory.py
+++ b/synapse/crypto/context_factory.py
@@ -24,12 +24,25 @@ from OpenSSL import SSL, crypto
 from twisted.internet._sslverify import _defaultCurveName
 from twisted.internet.abstract import isIPAddress, isIPv6Address
 from twisted.internet.interfaces import IOpenSSLClientConnectionCreator
-from twisted.internet.ssl import CertificateOptions, ContextFactory, platformTrust
+from twisted.internet.ssl import (
+    CertificateOptions,
+    ContextFactory,
+    TLSVersion,
+    platformTrust,
+)
 from twisted.python.failure import Failure
 
 logger = logging.getLogger(__name__)
 
 
+_TLS_VERSION_MAP = {
+    "1": TLSVersion.TLSv1_0,
+    "1.1": TLSVersion.TLSv1_1,
+    "1.2": TLSVersion.TLSv1_2,
+    "1.3": TLSVersion.TLSv1_3,
+}
+
+
 class ServerContextFactory(ContextFactory):
     """Factory for PyOpenSSL SSL contexts that are used to handle incoming
     connections."""
@@ -43,16 +56,18 @@ class ServerContextFactory(ContextFactory):
         try:
             _ecCurve = crypto.get_elliptic_curve(_defaultCurveName)
             context.set_tmp_ecdh(_ecCurve)
-
         except Exception:
             logger.exception("Failed to enable elliptic curve for TLS")
-        context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
+
+        context.set_options(
+            SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3 | SSL.OP_NO_TLSv1 | SSL.OP_NO_TLSv1_1
+        )
         context.use_certificate_chain_file(config.tls_certificate_file)
         context.use_privatekey(config.tls_private_key)
 
         # https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
         context.set_cipher_list(
-            "ECDH+AESGCM:ECDH+CHACHA20:ECDH+AES256:ECDH+AES128:!aNULL:!SHA1"
+            "ECDH+AESGCM:ECDH+CHACHA20:ECDH+AES256:ECDH+AES128:!aNULL:!SHA1:!AESCCM"
         )
 
     def getContext(self):
@@ -79,10 +94,22 @@ class ClientTLSOptionsFactory(object):
             # Use CA root certs provided by OpenSSL
             trust_root = platformTrust()
 
-        self._verify_ssl_context = CertificateOptions(trustRoot=trust_root).getContext()
+        # "insecurelyLowerMinimumTo" is the argument that will go lower than
+        # Twisted's default, which is why it is marked as "insecure" (since
+        # Twisted's defaults are reasonably secure). But, since Twisted is
+        # moving to TLS 1.2 by default, we want to respect the config option if
+        # it is set to 1.0 (which the alternate option, raiseMinimumTo, will not
+        # let us do).
+        minTLS = _TLS_VERSION_MAP[config.federation_client_minimum_tls_version]
+
+        self._verify_ssl = CertificateOptions(
+            trustRoot=trust_root, insecurelyLowerMinimumTo=minTLS
+        )
+        self._verify_ssl_context = self._verify_ssl.getContext()
         self._verify_ssl_context.set_info_callback(self._context_info_cb)
 
-        self._no_verify_ssl_context = CertificateOptions().getContext()
+        self._no_verify_ssl = CertificateOptions(insecurelyLowerMinimumTo=minTLS)
+        self._no_verify_ssl_context = self._no_verify_ssl.getContext()
         self._no_verify_ssl_context.set_info_callback(self._context_info_cb)
 
     def get_options(self, host):
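Taken together, the minimum-TLS plumbing looks up the config string in
``_TLS_VERSION_MAP`` and hands the result to Twisted's ``CertificateOptions``.
A self-contained sketch, assuming a Twisted release that ships the
``TLSVersion`` API (17.5 or later):

.. code:: python

    from twisted.internet.ssl import CertificateOptions, TLSVersion, platformTrust

    _TLS_VERSION_MAP = {
        "1": TLSVersion.TLSv1_0,
        "1.1": TLSVersion.TLSv1_1,
        "1.2": TLSVersion.TLSv1_2,
        "1.3": TLSVersion.TLSv1_3,
    }

    # e.g. federation_client_minimum_tls_version: 1.2 in homeserver.yaml
    min_tls = _TLS_VERSION_MAP["1.2"]

    # insecurelyLowerMinimumTo also accepts values *below* Twisted's default
    # minimum, which is why it is used here instead of raiseMinimumTo.
    options = CertificateOptions(
        trustRoot=platformTrust(), insecurelyLowerMinimumTo=min_tls
    )
    context = options.getContext()  # OpenSSL context for outbound federation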
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index 10c2eb7f0f..341c863152 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -44,15 +44,16 @@ from synapse.api.errors import (
     RequestSendFailed,
     SynapseError,
 )
-from synapse.storage.keys import FetchKeyResult
-from synapse.util import logcontext, unwrapFirstError
-from synapse.util.async_helpers import yieldable_gather_results
-from synapse.util.logcontext import (
+from synapse.logging.context import (
     LoggingContext,
     PreserveLoggingContext,
+    make_deferred_yieldable,
     preserve_fn,
     run_in_background,
 )
+from synapse.storage.keys import FetchKeyResult
+from synapse.util import unwrapFirstError
+from synapse.util.async_helpers import yieldable_gather_results
 from synapse.util.metrics import Measure
 from synapse.util.retryutils import NotRetryingDestination
 
@@ -140,7 +141,7 @@ class Keyring(object):
         """
         req = VerifyJsonRequest(server_name, json_object, validity_time, request_name)
         requests = (req,)
-        return logcontext.make_deferred_yieldable(self._verify_objects(requests)[0])
+        return make_deferred_yieldable(self._verify_objects(requests)[0])
 
     def verify_json_objects_for_server(self, server_and_json):
         """Bulk verifies signatures of json objects, bulk fetching keys as
@@ -557,7 +558,7 @@ class BaseV2KeyFetcher(object):
 
         signed_key_json_bytes = encode_canonical_json(signed_key_json)
 
-        yield logcontext.make_deferred_yieldable(
+        yield make_deferred_yieldable(
             defer.gatherResults(
                 [
                     run_in_background(
@@ -612,7 +613,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
 
             defer.returnValue({})
 
-        results = yield logcontext.make_deferred_yieldable(
+        results = yield make_deferred_yieldable(
             defer.gatherResults(
                 [run_in_background(get_key, server) for server in self.key_servers],
                 consumeErrors=True,
diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py
index a96cdada3d..a9545e6c1b 100644
--- a/synapse/events/snapshot.py
+++ b/synapse/events/snapshot.py
@@ -19,7 +19,7 @@ from frozendict import frozendict
 
 from twisted.internet import defer
 
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 
 
 class EventContext(object):
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index f24f0c16f0..987de5cab7 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -392,7 +392,11 @@ class EventClientSerializer(object):
                     serialized_event["content"].pop("m.relates_to", None)
 
                 r = serialized_event["unsigned"].setdefault("m.relations", {})
-                r[RelationTypes.REPLACE] = {"event_id": edit.event_id}
+                r[RelationTypes.REPLACE] = {
+                    "event_id": edit.event_id,
+                    "origin_server_ts": edit.origin_server_ts,
+                    "sender": edit.sender,
+                }
 
         defer.returnValue(serialized_event)
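With ``origin_server_ts`` and ``sender`` bundled alongside the edit's event ID,
clients can render an edited message's metadata without fetching the
replacement event itself. A sketch of the resulting structure (event values
are hypothetical; ``m.replace`` is the value of ``RelationTypes.REPLACE``):

.. code:: python

    serialized_event = {
        "type": "m.room.message",
        "content": {"body": "corrected text", "msgtype": "m.text"},
        "unsigned": {
            "m.relations": {
                "m.replace": {
                    "event_id": "$edit_event_id",       # hypothetical
                    "origin_server_ts": 1562587000000,  # taken from the edit
                    "sender": "@alice:example.org",     # hypothetical
                },
            },
        },
    }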
 
diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py
index 1e925b19e7..f7bb806ae7 100644
--- a/synapse/federation/federation_base.py
+++ b/synapse/federation/federation_base.py
@@ -27,8 +27,14 @@ from synapse.crypto.event_signing import check_event_content_hash
 from synapse.events import event_type_from_format_version
 from synapse.events.utils import prune_event
 from synapse.http.servlet import assert_params_in_dict
+from synapse.logging.context import (
+    LoggingContext,
+    PreserveLoggingContext,
+    make_deferred_yieldable,
+    preserve_fn,
+)
 from synapse.types import get_domain_from_id
-from synapse.util import logcontext, unwrapFirstError
+from synapse.util import unwrapFirstError
 
 logger = logging.getLogger(__name__)
 
@@ -73,7 +79,7 @@ class FederationBase(object):
         @defer.inlineCallbacks
         def handle_check_result(pdu, deferred):
             try:
-                res = yield logcontext.make_deferred_yieldable(deferred)
+                res = yield make_deferred_yieldable(deferred)
             except SynapseError:
                 res = None
 
@@ -102,10 +108,10 @@ class FederationBase(object):
 
             defer.returnValue(res)
 
-        handle = logcontext.preserve_fn(handle_check_result)
+        handle = preserve_fn(handle_check_result)
         deferreds2 = [handle(pdu, deferred) for pdu, deferred in zip(pdus, deferreds)]
 
-        valid_pdus = yield logcontext.make_deferred_yieldable(
+        valid_pdus = yield make_deferred_yieldable(
             defer.gatherResults(deferreds2, consumeErrors=True)
         ).addErrback(unwrapFirstError)
 
@@ -115,7 +121,7 @@ class FederationBase(object):
             defer.returnValue([p for p in valid_pdus if p])
 
     def _check_sigs_and_hash(self, room_version, pdu):
-        return logcontext.make_deferred_yieldable(
+        return make_deferred_yieldable(
             self._check_sigs_and_hashes(room_version, [pdu])[0]
         )
 
@@ -133,14 +139,14 @@ class FederationBase(object):
               * returns a redacted version of the event (if the signature
                 matched but the hash did not)
               * throws a SynapseError if the signature check failed.
-            The deferreds run their callbacks in the sentinel logcontext.
+            The deferreds run their callbacks in the sentinel logcontext.
         """
         deferreds = _check_sigs_on_pdus(self.keyring, room_version, pdus)
 
-        ctx = logcontext.LoggingContext.current_context()
+        ctx = LoggingContext.current_context()
 
         def callback(_, pdu):
-            with logcontext.PreserveLoggingContext(ctx):
+            with PreserveLoggingContext(ctx):
                 if not check_event_content_hash(pdu):
                     # let's try to distinguish between failures because the event was
                     # redacted (which are somewhat expected) vs actual ball-tampering
@@ -178,7 +184,7 @@ class FederationBase(object):
 
         def errback(failure, pdu):
             failure.trap(SynapseError)
-            with logcontext.PreserveLoggingContext(ctx):
+            with PreserveLoggingContext(ctx):
                 logger.warn(
                     "Signature check failed for %s: %s",
                     pdu.event_id,
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index 3883eb525e..3cb4b94420 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -39,10 +39,10 @@ from synapse.api.room_versions import (
 )
 from synapse.events import builder, room_version_to_event_format
 from synapse.federation.federation_base import FederationBase, event_from_pdu_json
-from synapse.util import logcontext, unwrapFirstError
+from synapse.logging.context import make_deferred_yieldable, run_in_background
+from synapse.logging.utils import log_function
+from synapse.util import unwrapFirstError
 from synapse.util.caches.expiringcache import ExpiringCache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
-from synapse.util.logutils import log_function
 from synapse.util.retryutils import NotRetryingDestination
 
 logger = logging.getLogger(__name__)
@@ -207,7 +207,7 @@ class FederationClient(FederationBase):
         ]
 
         # FIXME: We should handle signature failures more gracefully.
-        pdus[:] = yield logcontext.make_deferred_yieldable(
+        pdus[:] = yield make_deferred_yieldable(
             defer.gatherResults(
                 self._check_sigs_and_hashes(room_version, pdus), consumeErrors=True
             ).addErrback(unwrapFirstError)
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index 2e0cebb638..8c0a18b120 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -42,6 +42,8 @@ from synapse.federation.federation_base import FederationBase, event_from_pdu_js
 from synapse.federation.persistence import TransactionActions
 from synapse.federation.units import Edu, Transaction
 from synapse.http.endpoint import parse_server_name
+from synapse.logging.context import nested_logging_context
+from synapse.logging.utils import log_function
 from synapse.replication.http.federation import (
     ReplicationFederationSendEduRestServlet,
     ReplicationGetQueryRestServlet,
@@ -50,8 +52,6 @@ from synapse.types import get_domain_from_id
 from synapse.util import glob_to_regex
 from synapse.util.async_helpers import Linearizer, concurrently_execute
 from synapse.util.caches.response_cache import ResponseCache
-from synapse.util.logcontext import nested_logging_context
-from synapse.util.logutils import log_function
 
 # when processing incoming transactions, we try to handle multiple rooms in
 # parallel, up to this limit.
diff --git a/synapse/federation/persistence.py b/synapse/federation/persistence.py
index 7535f79203..44edcabed4 100644
--- a/synapse/federation/persistence.py
+++ b/synapse/federation/persistence.py
@@ -21,9 +21,7 @@ These actions are mostly only used by the :py:mod:`.replication` module.
 
 import logging
 
-from twisted.internet import defer
-
-from synapse.util.logutils import log_function
+from synapse.logging.utils import log_function
 
 logger = logging.getLogger(__name__)
 
@@ -63,33 +61,3 @@ class TransactionActions(object):
         return self.store.set_received_txn_response(
             transaction.transaction_id, origin, code, response
         )
-
-    @defer.inlineCallbacks
-    @log_function
-    def prepare_to_send(self, transaction):
-        """ Persists the `Transaction` we are about to send and works out the
-        correct value for the `prev_ids` key.
-
-        Returns:
-            Deferred
-        """
-        transaction.prev_ids = yield self.store.prep_send_transaction(
-            transaction.transaction_id,
-            transaction.destination,
-            transaction.origin_server_ts,
-        )
-
-    @log_function
-    def delivered(self, transaction, response_code, response_dict):
-        """ Marks the given `Transaction` as having been successfully
-        delivered to the remote homeserver, and what the response was.
-
-        Returns:
-            Deferred
-        """
-        return self.store.delivered_txn(
-            transaction.transaction_id,
-            transaction.destination,
-            response_code,
-            response_dict,
-        )
diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py
index 766c5a37cd..d46f4aaeb1 100644
--- a/synapse/federation/sender/__init__.py
+++ b/synapse/federation/sender/__init__.py
@@ -26,6 +26,11 @@ from synapse.federation.sender.per_destination_queue import PerDestinationQueue
 from synapse.federation.sender.transaction_manager import TransactionManager
 from synapse.federation.units import Edu
 from synapse.handlers.presence import get_interested_remotes
+from synapse.logging.context import (
+    make_deferred_yieldable,
+    preserve_fn,
+    run_in_background,
+)
 from synapse.metrics import (
     LaterGauge,
     event_processing_loop_counter,
@@ -33,7 +38,6 @@ from synapse.metrics import (
     events_processed_counter,
 )
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util import logcontext
 from synapse.util.metrics import measure_func
 
 logger = logging.getLogger(__name__)
@@ -210,10 +214,10 @@ class FederationSender(object):
                 for event in events:
                     events_by_room.setdefault(event.room_id, []).append(event)
 
-                yield logcontext.make_deferred_yieldable(
+                yield make_deferred_yieldable(
                     defer.gatherResults(
                         [
-                            logcontext.run_in_background(handle_room_events, evs)
+                            run_in_background(handle_room_events, evs)
                             for evs in itervalues(events_by_room)
                         ],
                         consumeErrors=True,
@@ -360,7 +364,7 @@ class FederationSender(object):
         for queue in queues:
             queue.flush_read_receipts_for_room(room_id)
 
-    @logcontext.preserve_fn  # the caller should not yield on this
+    @preserve_fn  # the caller should not yield on this
     @defer.inlineCallbacks
     def send_presence(self, states):
         """Send the new presence states to the appropriate destinations.
diff --git a/synapse/federation/sender/transaction_manager.py b/synapse/federation/sender/transaction_manager.py
index c987bb9a0d..0460a8c4ac 100644
--- a/synapse/federation/sender/transaction_manager.py
+++ b/synapse/federation/sender/transaction_manager.py
@@ -63,8 +63,6 @@ class TransactionManager(object):
             len(edus),
         )
 
-        logger.debug("TX [%s] Persisting transaction...", destination)
-
         transaction = Transaction.create_new(
             origin_server_ts=int(self.clock.time_msec()),
             transaction_id=txn_id,
@@ -76,9 +74,6 @@ class TransactionManager(object):
 
         self._next_txn_id += 1
 
-        yield self._transaction_actions.prepare_to_send(transaction)
-
-        logger.debug("TX [%s] Persisted transaction", destination)
         logger.info(
             "TX [%s] {%s} Sending transaction [%s]," " (PDUs: %d, EDUs: %d)",
             destination,
@@ -118,10 +113,6 @@ class TransactionManager(object):
 
         logger.info("TX [%s] {%s} got %d response", destination, txn_id, code)
 
-        yield self._transaction_actions.delivered(transaction, code, response)
-
-        logger.debug("TX [%s] {%s} Marked as delivered", destination, txn_id)
-
         if code == 200:
             for e_id, r in response.get("pdus", {}).items():
                 if "error" in r:
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index aecd142309..1aae9ec9e7 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -22,7 +22,7 @@ from twisted.internet import defer
 
 from synapse.api.constants import Membership
 from synapse.api.urls import FEDERATION_V1_PREFIX, FEDERATION_V2_PREFIX
-from synapse.util.logutils import log_function
+from synapse.logging.utils import log_function
 
 logger = logging.getLogger(__name__)
 
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 955f0f4308..2efdcff7ef 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -36,8 +36,8 @@ from synapse.http.servlet import (
     parse_json_object_from_request,
     parse_string_from_args,
 )
+from synapse.logging.context import run_in_background
 from synapse.types import ThirdPartyInstanceID, get_domain_from_id
-from synapse.util.logcontext import run_in_background
 from synapse.util.ratelimitutils import FederationRateLimiter
 from synapse.util.versionstring import get_version_string
 
diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py
index e73757570c..f497711133 100644
--- a/synapse/groups/attestations.py
+++ b/synapse/groups/attestations.py
@@ -43,9 +43,9 @@ from signedjson.sign import sign_json
 from twisted.internet import defer
 
 from synapse.api.errors import HttpResponseException, RequestSendFailed, SynapseError
+from synapse.logging.context import run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.types import get_domain_from_id
-from synapse.util.logcontext import run_in_background
 
 logger = logging.getLogger(__name__)
 
diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py
index 0719da3ab7..1f1708ba7d 100644
--- a/synapse/handlers/account_validity.py
+++ b/synapse/handlers/account_validity.py
@@ -22,9 +22,10 @@ from email.mime.text import MIMEText
 from twisted.internet import defer
 
 from synapse.api.errors import StoreError
+from synapse.logging.context import make_deferred_yieldable
+from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.types import UserID
 from synapse.util import stringutils
-from synapse.util.logcontext import make_deferred_yieldable
 
 try:
     from synapse.push.mailer import load_jinja2_templates
@@ -67,7 +68,14 @@ class AccountValidityHandler(object):
             )
 
             # Check the renewal emails to send and send them every 30min.
-            self.clock.looping_call(self.send_renewal_emails, 30 * 60 * 1000)
+            def send_emails():
+                # run as a background process to make sure that the database transactions
+                # have a logcontext to report to
+                return run_as_background_process(
+                    "send_renewals", self.send_renewal_emails
+                )
+
+            self.clock.looping_call(send_emails, 30 * 60 * 1000)
 
     @defer.inlineCallbacks
     def send_renewal_emails(self):
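
The wrapper added above exists because clock.looping_call fires its callback
outside any logcontext, so database transactions made during the run would
have nothing to report against. A sketch of the general pattern, assuming an
illustrative do_work callable and job name:

    from synapse.metrics.background_process_metrics import (
        run_as_background_process,
    )

    def start_periodic_job(clock, do_work):
        def wrapped():
            # run_as_background_process creates a fresh logcontext for
            # each run, so work inside do_work is attributed to the
            # "do_work" background process.
            return run_as_background_process("do_work", do_work)

        # looping_call intervals are in milliseconds
        clock.looping_call(wrapped, 30 * 60 * 1000)
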
diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py
index 5cc89d43f6..8f089f0e33 100644
--- a/synapse/handlers/appservice.py
+++ b/synapse/handlers/appservice.py
@@ -23,13 +23,13 @@ from twisted.internet import defer
 
 import synapse
 from synapse.api.constants import EventTypes
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.metrics import (
     event_processing_loop_counter,
     event_processing_loop_room_count,
 )
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.util import log_failure
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 97b21c4093..ef5585aa99 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -36,9 +36,9 @@ from synapse.api.errors import (
     SynapseError,
 )
 from synapse.api.ratelimiting import Ratelimiter
+from synapse.logging.context import defer_to_thread
 from synapse.module_api import ModuleApi
 from synapse.types import UserID
-from synapse.util import logcontext
 from synapse.util.caches.expiringcache import ExpiringCache
 
 from ._base import BaseHandler
@@ -743,7 +743,7 @@ class AuthHandler(BaseHandler):
                     result = (result, None)
                 defer.returnValue(result)
 
-        if login_type == LoginType.PASSWORD:
+        if login_type == LoginType.PASSWORD and self.hs.config.password_localdb_enabled:
             known_login_type = True
 
             canonical_user_id = yield self._check_local_password(
@@ -987,7 +987,7 @@ class AuthHandler(BaseHandler):
                 bcrypt.gensalt(self.bcrypt_rounds),
             ).decode("ascii")
 
-        return logcontext.defer_to_thread(self.hs.get_reactor(), _do_hash)
+        return defer_to_thread(self.hs.get_reactor(), _do_hash)
 
     def validate_hash(self, password, stored_hash):
         """Validates that self.hash(password) == stored_hash.
@@ -1013,7 +1013,7 @@ class AuthHandler(BaseHandler):
             if not isinstance(stored_hash, bytes):
                 stored_hash = stored_hash.encode("ascii")
 
-            return logcontext.defer_to_thread(self.hs.get_reactor(), _do_validate_hash)
+            return defer_to_thread(self.hs.get_reactor(), _do_validate_hash)
         else:
             return defer.succeed(False)
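
Both call sites above push the bcrypt work onto the reactor's thread pool:
hashing is CPU-bound and would otherwise block the event loop. The idiom in
isolation (a sketch; real Synapse also mixes a configured pepper into the
password before hashing):

    import bcrypt

    from synapse.logging.context import defer_to_thread

    def hash_password(reactor, password, rounds=12):
        def _do_hash():
            # CPU-heavy, so this runs on a threadpool thread
            return bcrypt.hashpw(
                password.encode("utf8"), bcrypt.gensalt(rounds)
            ).decode("ascii")

        # defer_to_thread runs _do_hash on the reactor's threadpool while
        # preserving the caller's logcontext
        return defer_to_thread(reactor, _do_hash)
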
 
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index f59d0479b5..99e8413092 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -101,9 +101,13 @@ class DeviceWorkerHandler(BaseHandler):
 
         room_ids = yield self.store.get_rooms_for_user(user_id)
 
-        # First we check if any devices have changed
-        changed = yield self.store.get_user_whose_devices_changed(
-            from_token.device_list_key
+        # First we check if any devices have changed for users that we share
+        # rooms with.
+        users_who_share_room = yield self.store.get_users_who_share_room_with_user(
+            user_id
+        )
+        changed = yield self.store.get_users_whose_devices_changed(
+            from_token.device_list_key, users_who_share_room
         )
 
         # Then work out if any users have since joined
@@ -188,10 +192,6 @@ class DeviceWorkerHandler(BaseHandler):
                         break
 
         if possibly_changed or possibly_left:
-            users_who_share_room = yield self.store.get_users_who_share_room_with_user(
-                user_id
-            )
-
             # Take the intersection of the users whose devices may have changed
             # and those that actually still share a room with the user
             possibly_joined = possibly_changed & users_who_share_room
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index 807900fe52..55b4ab3a1a 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -23,8 +23,8 @@ from canonicaljson import encode_canonical_json, json
 from twisted.internet import defer
 
 from synapse.api.errors import CodeMessageException, FederationDeniedError, SynapseError
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.types import UserID, get_domain_from_id
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.retryutils import NotRetryingDestination
 
 logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index 5836d3c639..6a38328af3 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -21,8 +21,8 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError, SynapseError
 from synapse.events import EventBase
+from synapse.logging.utils import log_function
 from synapse.types import UserID
-from synapse.util.logutils import log_function
 from synapse.visibility import filter_events_for_client
 
 from ._base import BaseHandler
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 02d397c498..57be968c67 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -45,6 +45,13 @@ from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions
 from synapse.crypto.event_signing import compute_event_signature
 from synapse.event_auth import auth_types_for_event
 from synapse.events.validator import EventValidator
+from synapse.logging.context import (
+    make_deferred_yieldable,
+    nested_logging_context,
+    preserve_fn,
+    run_in_background,
+)
+from synapse.logging.utils import log_function
 from synapse.replication.http.federation import (
     ReplicationCleanRoomRestServlet,
     ReplicationFederationSendEventsRestServlet,
@@ -52,10 +59,9 @@ from synapse.replication.http.federation import (
 from synapse.replication.http.membership import ReplicationUserJoinedLeftRoomRestServlet
 from synapse.state import StateResolutionStore, resolve_events_with_store
 from synapse.types import UserID, get_domain_from_id
-from synapse.util import logcontext, unwrapFirstError
+from synapse.util import unwrapFirstError
 from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_joined_room
-from synapse.util.logutils import log_function
 from synapse.util.retryutils import NotRetryingDestination
 from synapse.visibility import filter_events_for_server
 
@@ -338,7 +344,7 @@ class FederationHandler(BaseHandler):
 
                         room_version = yield self.store.get_room_version(room_id)
 
-                        with logcontext.nested_logging_context(p):
+                        with nested_logging_context(p):
                             # note that if any of the missing prevs share missing state or
                             # auth events, the requests to fetch those events are deduped
                             # by the get_pdu_cache in federation_client.
@@ -532,7 +538,7 @@ class FederationHandler(BaseHandler):
                 event_id,
                 ev.event_id,
             )
-            with logcontext.nested_logging_context(ev.event_id):
+            with nested_logging_context(ev.event_id):
                 try:
                     yield self.on_receive_pdu(origin, ev, sent_to_us_directly=False)
                 except FederationError as e:
@@ -725,10 +731,10 @@ class FederationHandler(BaseHandler):
                     missing_auth - failed_to_fetch,
                 )
 
-                results = yield logcontext.make_deferred_yieldable(
+                results = yield make_deferred_yieldable(
                     defer.gatherResults(
                         [
-                            logcontext.run_in_background(
+                            run_in_background(
                                 self.federation_client.get_pdu,
                                 [dest],
                                 event_id,
@@ -994,10 +1000,8 @@ class FederationHandler(BaseHandler):
         event_ids = list(extremities.keys())
 
         logger.debug("calling resolve_state_groups in _maybe_backfill")
-        resolve = logcontext.preserve_fn(
-            self.state_handler.resolve_state_groups_for_events
-        )
-        states = yield logcontext.make_deferred_yieldable(
+        resolve = preserve_fn(self.state_handler.resolve_state_groups_for_events)
+        states = yield make_deferred_yieldable(
             defer.gatherResults(
                 [resolve(room_id, [e]) for e in event_ids], consumeErrors=True
             )
@@ -1171,7 +1175,7 @@ class FederationHandler(BaseHandler):
             # lots of requests for missing prev_events which we do actually
             # have. Hence we fire off the deferred, but don't wait for it.
 
-            logcontext.run_in_background(self._handle_queued_pdus, room_queue)
+            run_in_background(self._handle_queued_pdus, room_queue)
 
         defer.returnValue(True)
 
@@ -1191,7 +1195,7 @@ class FederationHandler(BaseHandler):
                     p.event_id,
                     p.room_id,
                 )
-                with logcontext.nested_logging_context(p.event_id):
+                with nested_logging_context(p.event_id):
                     yield self.on_receive_pdu(origin, p, sent_to_us_directly=True)
             except Exception as e:
                 logger.warn(
@@ -1610,7 +1614,7 @@ class FederationHandler(BaseHandler):
             success = True
         finally:
             if not success:
-                logcontext.run_in_background(
+                run_in_background(
                     self.store.remove_push_actions_from_staging, event.event_id
                 )
 
@@ -1629,7 +1633,7 @@ class FederationHandler(BaseHandler):
         @defer.inlineCallbacks
         def prep(ev_info):
             event = ev_info["event"]
-            with logcontext.nested_logging_context(suffix=event.event_id):
+            with nested_logging_context(suffix=event.event_id):
                 res = yield self._prep_event(
                     origin,
                     event,
@@ -1639,12 +1643,9 @@ class FederationHandler(BaseHandler):
                 )
             defer.returnValue(res)
 
-        contexts = yield logcontext.make_deferred_yieldable(
+        contexts = yield make_deferred_yieldable(
             defer.gatherResults(
-                [
-                    logcontext.run_in_background(prep, ev_info)
-                    for ev_info in event_infos
-                ],
+                [run_in_background(prep, ev_info) for ev_info in event_infos],
                 consumeErrors=True,
             )
         )
@@ -2106,10 +2107,10 @@ class FederationHandler(BaseHandler):
 
         room_version = yield self.store.get_room_version(event.room_id)
 
-        different_events = yield logcontext.make_deferred_yieldable(
+        different_events = yield make_deferred_yieldable(
             defer.gatherResults(
                 [
-                    logcontext.run_in_background(
+                    run_in_background(
                         self.store.get_event, d, allow_none=True, allow_rejected=False
                     )
                     for d in different_auth
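
The rewritten call sites in this file all follow the same parallel-fetch
idiom: start each unit of work with run_in_background so it runs under its
own logcontext, gather the deferreds, then wrap the result in
make_deferred_yieldable so the caller can safely yield on it. Condensed into
a sketch (fetch_one and items are illustrative):

    from twisted.internet import defer

    from synapse.logging.context import (
        make_deferred_yieldable,
        run_in_background,
    )

    def fetch_all(fetch_one, items):
        # run_in_background starts each fetch immediately;
        # make_deferred_yieldable restores the caller's logcontext when
        # the gathered deferred eventually fires.
        return make_deferred_yieldable(
            defer.gatherResults(
                [run_in_background(fetch_one, item) for item in items],
                consumeErrors=True,
            )
        )
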
diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py
index a1fe9d116f..54c966c8a6 100644
--- a/synapse/handlers/initial_sync.py
+++ b/synapse/handlers/initial_sync.py
@@ -21,12 +21,12 @@ from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError, Codes, SynapseError
 from synapse.events.validator import EventValidator
 from synapse.handlers.presence import format_user_presence_state
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.streams.config import PaginationConfig
 from synapse.types import StreamToken, UserID
 from synapse.util import unwrapFirstError
 from synapse.util.async_helpers import concurrently_execute
 from synapse.util.caches.snapshot_cache import SnapshotCache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.visibility import filter_events_for_client
 
 from ._base import BaseHandler
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 683da6bf32..eaeda7a5cb 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -34,13 +34,13 @@ from synapse.api.errors import (
 from synapse.api.room_versions import RoomVersions
 from synapse.api.urls import ConsentURIBuilder
 from synapse.events.validator import EventValidator
+from synapse.logging.context import run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.replication.http.send_event import ReplicationSendEventRestServlet
 from synapse.storage.state import StateFilter
 from synapse.types import RoomAlias, UserID, create_requester
 from synapse.util.async_helpers import Linearizer
 from synapse.util.frozenutils import frozendict_json_encoder
-from synapse.util.logcontext import run_in_background
 from synapse.util.metrics import measure_func
 from synapse.visibility import filter_events_for_client
 
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index 76ee97ddd3..20bcfed334 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -20,10 +20,10 @@ from twisted.python.failure import Failure
 
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import SynapseError
+from synapse.logging.context import run_in_background
 from synapse.storage.state import StateFilter
 from synapse.types import RoomStreamToken
 from synapse.util.async_helpers import ReadWriteLock
-from synapse.util.logcontext import run_in_background
 from synapse.util.stringutils import random_string
 from synapse.visibility import filter_events_for_client
 
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index 5204073a38..6f3537e435 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -34,14 +34,14 @@ from twisted.internet import defer
 import synapse.metrics
 from synapse.api.constants import EventTypes, Membership, PresenceState
 from synapse.api.errors import SynapseError
+from synapse.logging.context import run_in_background
+from synapse.logging.utils import log_function
 from synapse.metrics import LaterGauge
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage.presence import UserPresenceState
 from synapse.types import UserID, get_domain_from_id
 from synapse.util.async_helpers import Linearizer
 from synapse.util.caches.descriptors import cachedInlineCallbacks
-from synapse.util.logcontext import run_in_background
-from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
 from synapse.util.wheel_timer import WheelTimer
 
@@ -1017,11 +1017,28 @@ class PresenceEventSource(object):
             if from_key is not None:
                 from_key = int(from_key)
 
+            max_token = self.store.get_current_presence_token()
+            if from_key == max_token:
+                # This is necessary because, due to the way stream ID
+                # generators work, we may get updates with a stream ID
+                # greater than the max token (e.g. max_token is N, but the
+                # stream generator may return results for N+2 because N+1
+                # has not finished being persisted yet).
+                #
+                # This is usually fine, as it just means that we may send down
+                # some presence updates multiple times. However, we need to be
+                # careful that the sync stream either makes real progress or
+                # doesn't return at all, otherwise clients will end up tight
+                # looping on /sync because it immediately returns the same
+                # token repeatedly.
+                #
+                # Hence this guard, where we just return nothing so that the
+                # sync doesn't return. Cf. #5503.
+                defer.returnValue(([], max_token))
+
             presence = self.get_presence_handler()
             stream_change_cache = self.store.presence_stream_cache
 
-            max_token = self.store.get_current_presence_token()
-
             users_interested_in = yield self._get_interested_in(user, explicit_room_id)
 
             user_ids_changed = set()
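
The guard above, reduced to its essentials: when the client's token has
already caught up with the stream, return an empty result at the same token
rather than querying, since the store could hand back rows beyond max_token.
A sketch in which the store methods are illustrative stand-ins:

    def get_presence_updates(store, from_key):
        max_token = store.get_current_presence_token()
        if from_key == max_token:
            # Already caught up: return no rows and the unchanged token,
            # so the caller treats this as "no progress" instead of
            # repeatedly sending duplicate updates down to the client.
            return [], max_token

        # hypothetical query for rows in the (from_key, max_token] window
        updates = store.get_presence_updates_between(from_key, max_token)
        return updates, max_token
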
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index e487b90c08..853020180b 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -506,87 +506,6 @@ class RegistrationHandler(BaseHandler):
         defer.returnValue(data)
 
     @defer.inlineCallbacks
-    def get_or_create_user(self, requester, localpart, displayname, password_hash=None):
-        """Creates a new user if the user does not exist,
-        else revokes all previous access tokens and generates a new one.
-
-        Args:
-            localpart : The local part of the user ID to register. If None,
-              one will be randomly generated.
-        Returns:
-            A tuple of (user_id, access_token).
-        Raises:
-            RegistrationError if there was a problem registering.
-
-        NB this is only used in tests. TODO: move it to the test package!
-        """
-        if localpart is None:
-            raise SynapseError(400, "Request must include user id")
-        yield self.auth.check_auth_blocking()
-        need_register = True
-
-        try:
-            yield self.check_username(localpart)
-        except SynapseError as e:
-            if e.errcode == Codes.USER_IN_USE:
-                need_register = False
-            else:
-                raise
-
-        user = UserID(localpart, self.hs.hostname)
-        user_id = user.to_string()
-        token = self.macaroon_gen.generate_access_token(user_id)
-
-        if need_register:
-            yield self.register_with_store(
-                user_id=user_id,
-                token=token,
-                password_hash=password_hash,
-                create_profile_with_displayname=user.localpart,
-            )
-        else:
-            yield self._auth_handler.delete_access_tokens_for_user(user_id)
-            yield self.store.add_access_token_to_user(user_id=user_id, token=token)
-
-        if displayname is not None:
-            logger.info("setting user display name: %s -> %s", user_id, displayname)
-            yield self.profile_handler.set_displayname(
-                user, requester, displayname, by_admin=True
-            )
-
-        defer.returnValue((user_id, token))
-
-    @defer.inlineCallbacks
-    def get_or_register_3pid_guest(self, medium, address, inviter_user_id):
-        """Get a guest access token for a 3PID, creating a guest account if
-        one doesn't already exist.
-
-        Args:
-            medium (str)
-            address (str)
-            inviter_user_id (str): The user ID who is trying to invite the
-                3PID
-
-        Returns:
-            Deferred[(str, str)]: A 2-tuple of `(user_id, access_token)` of the
-            3PID guest account.
-        """
-        access_token = yield self.store.get_3pid_guest_access_token(medium, address)
-        if access_token:
-            user_info = yield self.auth.get_user_by_access_token(access_token)
-
-            defer.returnValue((user_info["user"].to_string(), access_token))
-
-        user_id, access_token = yield self.register(
-            generate_token=True, make_guest=True
-        )
-        access_token = yield self.store.save_or_get_3pid_guest_access_token(
-            medium, address, access_token, inviter_user_id
-        )
-
-        defer.returnValue((user_id, access_token))
-
-    @defer.inlineCallbacks
     def _join_user_to_room(self, requester, room_identifier):
         room_id = None
         room_member_handler = self.hs.get_room_member_handler()
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index c3420b4b22..e0196ef83e 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -119,24 +119,6 @@ class RoomMemberHandler(object):
         raise NotImplementedError()
 
     @abc.abstractmethod
-    def get_or_register_3pid_guest(self, requester, medium, address, inviter_user_id):
-        """Get a guest access token for a 3PID, creating a guest account if
-        one doesn't already exist.
-
-        Args:
-            requester (Requester)
-            medium (str)
-            address (str)
-            inviter_user_id (str): The user ID who is trying to invite the
-                3PID
-
-        Returns:
-            Deferred[(str, str)]: A 2-tuple of `(user_id, access_token)` of the
-            3PID guest account.
-        """
-        raise NotImplementedError()
-
-    @abc.abstractmethod
     def _user_joined_room(self, target, room_id):
         """Notifies distributor on master process that the user has joined the
         room.
@@ -823,6 +805,7 @@ class RoomMemberHandler(object):
                 "sender": user.to_string(),
                 "state_key": token,
             },
+            ratelimit=False,
             txn_id=txn_id,
         )
 
@@ -889,21 +872,6 @@ class RoomMemberHandler(object):
             "sender_avatar_url": inviter_avatar_url,
         }
 
-        if self.config.invite_3pid_guest:
-            guest_user_id, guest_access_token = yield self.get_or_register_3pid_guest(
-                requester=requester,
-                medium=medium,
-                address=address,
-                inviter_user_id=inviter_user_id,
-            )
-
-            invite_config.update(
-                {
-                    "guest_access_token": guest_access_token,
-                    "guest_user_id": guest_user_id,
-                }
-            )
-
         try:
             data = yield self.simple_http_client.post_json_get_json(
                 is_url, invite_config
@@ -1023,12 +991,6 @@ class RoomMemberMasterHandler(RoomMemberHandler):
             yield self.store.locally_reject_invite(target.to_string(), room_id)
             defer.returnValue({})
 
-    def get_or_register_3pid_guest(self, requester, medium, address, inviter_user_id):
-        """Implements RoomMemberHandler.get_or_register_3pid_guest
-        """
-        rg = self.registration_handler
-        return rg.get_or_register_3pid_guest(medium, address, inviter_user_id)
-
     def _user_joined_room(self, target, room_id):
         """Implements RoomMemberHandler._user_joined_room
         """
diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py
index da501f38c0..fc873a3ba6 100644
--- a/synapse/handlers/room_member_worker.py
+++ b/synapse/handlers/room_member_worker.py
@@ -20,7 +20,6 @@ from twisted.internet import defer
 from synapse.api.errors import SynapseError
 from synapse.handlers.room_member import RoomMemberHandler
 from synapse.replication.http.membership import (
-    ReplicationRegister3PIDGuestRestServlet as Repl3PID,
     ReplicationRemoteJoinRestServlet as ReplRemoteJoin,
     ReplicationRemoteRejectInviteRestServlet as ReplRejectInvite,
     ReplicationUserJoinedLeftRoomRestServlet as ReplJoinedLeft,
@@ -33,7 +32,6 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
     def __init__(self, hs):
         super(RoomMemberWorkerHandler, self).__init__(hs)
 
-        self._get_register_3pid_client = Repl3PID.make_client(hs)
         self._remote_join_client = ReplRemoteJoin.make_client(hs)
         self._remote_reject_client = ReplRejectInvite.make_client(hs)
         self._notify_change_client = ReplJoinedLeft.make_client(hs)
@@ -80,13 +78,3 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
         return self._notify_change_client(
             user_id=target.to_string(), room_id=room_id, change="left"
         )
-
-    def get_or_register_3pid_guest(self, requester, medium, address, inviter_user_id):
-        """Implements RoomMemberHandler.get_or_register_3pid_guest
-        """
-        return self._get_register_3pid_client(
-            requester=requester,
-            medium=medium,
-            address=address,
-            inviter_user_id=inviter_user_id,
-        )
diff --git a/synapse/handlers/saml_handler.py b/synapse/handlers/saml_handler.py
new file mode 100644
index 0000000000..a1ce6929cf
--- /dev/null
+++ b/synapse/handlers/saml_handler.py
@@ -0,0 +1,123 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+import attr
+import saml2
+from saml2.client import Saml2Client
+
+from synapse.api.errors import SynapseError
+from synapse.http.servlet import parse_string
+from synapse.rest.client.v1.login import SSOAuthHandler
+
+logger = logging.getLogger(__name__)
+
+
+class SamlHandler:
+    def __init__(self, hs):
+        self._saml_client = Saml2Client(hs.config.saml2_sp_config)
+        self._sso_auth_handler = SSOAuthHandler(hs)
+
+        # a map from saml session id to Saml2SessionData object
+        self._outstanding_requests_dict = {}
+
+        self._clock = hs.get_clock()
+        self._saml2_session_lifetime = hs.config.saml2_session_lifetime
+
+    def handle_redirect_request(self, client_redirect_url):
+        """Handle an incoming request to /login/sso/redirect
+
+        Args:
+            client_redirect_url (bytes): the URL that we should redirect the
+                client to when everything is done
+
+        Returns:
+            bytes: URL to redirect to
+        """
+        reqid, info = self._saml_client.prepare_for_authenticate(
+            relay_state=client_redirect_url
+        )
+
+        now = self._clock.time_msec()
+        self._outstanding_requests_dict[reqid] = Saml2SessionData(creation_time=now)
+
+        for key, value in info["headers"]:
+            if key == "Location":
+                return value
+
+        # this shouldn't happen!
+        raise Exception("prepare_for_authenticate didn't return a Location header")
+
+    def handle_saml_response(self, request):
+        """Handle an incoming request to /_matrix/saml2/authn_response
+
+        Args:
+            request (SynapseRequest): the incoming request from the browser. We'll
+                respond to it with a redirect.
+
+        Returns:
+            Deferred[None]: Completes once we have handled the request.
+        """
+        resp_bytes = parse_string(request, "SAMLResponse", required=True)
+        relay_state = parse_string(request, "RelayState", required=True)
+
+        # expire outstanding sessions before parse_authn_request_response checks
+        # the dict.
+        self.expire_sessions()
+
+        try:
+            saml2_auth = self._saml_client.parse_authn_request_response(
+                resp_bytes,
+                saml2.BINDING_HTTP_POST,
+                outstanding=self._outstanding_requests_dict,
+            )
+        except Exception as e:
+            logger.warning("Exception parsing SAML2 response: %s", e)
+            raise SynapseError(400, "Unable to parse SAML2 response: %s" % (e,))
+
+        if saml2_auth.not_signed:
+            logger.warning("SAML2 response was not signed")
+            raise SynapseError(400, "SAML2 response was not signed")
+
+        if "uid" not in saml2_auth.ava:
+            logger.warning("SAML2 response lacks a 'uid' attestation")
+            raise SynapseError(400, "uid not in SAML2 response")
+
+        self._outstanding_requests_dict.pop(saml2_auth.in_response_to, None)
+
+        username = saml2_auth.ava["uid"][0]
+        displayName = saml2_auth.ava.get("displayName", [None])[0]
+
+        return self._sso_auth_handler.on_successful_auth(
+            username, request, relay_state, user_display_name=displayName
+        )
+
+    def expire_sessions(self):
+        expire_before = self._clock.time_msec() - self._saml2_session_lifetime
+        to_expire = set()
+        for reqid, data in self._outstanding_requests_dict.items():
+            if data.creation_time < expire_before:
+                to_expire.add(reqid)
+        for reqid in to_expire:
+            logger.debug("Expiring session id %s", reqid)
+            del self._outstanding_requests_dict[reqid]
+
+
+@attr.s
+class Saml2SessionData:
+    """Data we track about SAML2 sessions"""
+
+    # time the session was created, in milliseconds
+    creation_time = attr.ib()
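
The session bookkeeping above is a plain dict mapping SAML request id to
Saml2SessionData, swept on every response before the outstanding dict is
consulted. The sweep in isolation, with millisecond timestamps (a sketch
mirroring expire_sessions above):

    def expire_sessions(outstanding, now_ms, lifetime_ms):
        expire_before = now_ms - lifetime_ms
        # collect first, then delete, to avoid mutating the dict while
        # iterating over it
        expired = [
            reqid
            for reqid, data in outstanding.items()
            if data.creation_time < expire_before
        ]
        for reqid in expired:
            del outstanding[reqid]
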
diff --git a/synapse/handlers/set_password.py b/synapse/handlers/set_password.py
index 5a0995d4fe..d90c9e0108 100644
--- a/synapse/handlers/set_password.py
+++ b/synapse/handlers/set_password.py
@@ -33,6 +33,9 @@ class SetPasswordHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def set_password(self, user_id, newpassword, requester=None):
+        if not self.hs.config.password_localdb_enabled:
+            raise SynapseError(403, "Password change disabled", errcode=Codes.FORBIDDEN)
+
         password_hash = yield self._auth_handler.hash(newpassword)
 
         except_device_id = requester.device_id if requester else None
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index c5188a1f8e..cd1ac0a27a 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -25,6 +25,7 @@ from prometheus_client import Counter
 from twisted.internet import defer
 
 from synapse.api.constants import EventTypes, Membership
+from synapse.logging.context import LoggingContext
 from synapse.push.clientformat import format_push_rules_for_user
 from synapse.storage.roommember import MemberSummary
 from synapse.storage.state import StateFilter
@@ -33,7 +34,6 @@ from synapse.util.async_helpers import concurrently_execute
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.caches.lrucache import LruCache
 from synapse.util.caches.response_cache import ResponseCache
-from synapse.util.logcontext import LoggingContext
 from synapse.util.metrics import Measure, measure_func
 from synapse.visibility import filter_events_for_client
 
@@ -1058,40 +1058,74 @@ class SyncHandler(object):
         newly_left_rooms,
         newly_left_users,
     ):
+        """Generate the DeviceLists section of sync
+
+        Args:
+            sync_result_builder (SyncResultBuilder)
+            newly_joined_rooms (set[str]): Set of rooms user has joined since
+                previous sync
+            newly_joined_or_invited_users (set[str]): Set of users that have
+                joined or been invited to a room since previous sync.
+            newly_left_rooms (set[str]): Set of rooms user has left since
+                previous sync
+            newly_left_users (set[str]): Set of users that have left a room
+                we're in since previous sync
+
+        Returns:
+            Deferred[DeviceLists]
+        """
+
         user_id = sync_result_builder.sync_config.user.to_string()
         since_token = sync_result_builder.since_token
 
+        # We're going to mutate these fields, so let's copy them rather than
+        # assume they won't get used later.
+        newly_joined_or_invited_users = set(newly_joined_or_invited_users)
+        newly_left_users = set(newly_left_users)
+
         if since_token and since_token.device_list_key:
-            changed = yield self.store.get_user_whose_devices_changed(
-                since_token.device_list_key
+            # We want to figure out what user IDs the client should refetch
+            # device keys for, and which users we aren't going to track changes
+            # for anymore.
+            #
+            # For the first step we check:
+            #   a. if any users we share a room with have updated their devices,
+            #      and
+            #   b. we also check if we've joined any new rooms, or if a user has
+            #      joined a room we're in.
+            #
+            # For the second step we just find any users we no longer share a
+            # room with by looking at all users that have left a room plus users
+            # that were in a room we've left.
+
+            users_who_share_room = yield self.store.get_users_who_share_room_with_user(
+                user_id
+            )
+
+            # Step 1a, check for changes in devices of users we share a room with
+            users_that_have_changed = yield self.store.get_users_whose_devices_changed(
+                since_token.device_list_key, users_who_share_room
             )
 
-            # TODO: Be more clever than this, i.e. remove users who we already
-            # share a room with?
+            # Step 1b, check for newly joined rooms
             for room_id in newly_joined_rooms:
                 joined_users = yield self.state.get_current_users_in_room(room_id)
                 newly_joined_or_invited_users.update(joined_users)
 
-            for room_id in newly_left_rooms:
-                left_users = yield self.state.get_current_users_in_room(room_id)
-                newly_left_users.update(left_users)
-
             # TODO: Check that these users are actually new, i.e. either they
             # weren't in the previous sync *or* they left and rejoined.
-            changed.update(newly_joined_or_invited_users)
+            users_that_have_changed.update(newly_joined_or_invited_users)
 
-            if not changed and not newly_left_users:
-                defer.returnValue(DeviceLists(changed=[], left=newly_left_users))
+            # Now find users that we no longer track
+            for room_id in newly_left_rooms:
+                left_users = yield self.state.get_current_users_in_room(room_id)
+                newly_left_users.update(left_users)
 
-            users_who_share_room = yield self.store.get_users_who_share_room_with_user(
-                user_id
-            )
+            # Remove any users that we still share a room with.
+            newly_left_users -= users_who_share_room
 
             defer.returnValue(
-                DeviceLists(
-                    changed=users_who_share_room & changed,
-                    left=set(newly_left_users) - users_who_share_room,
-                )
+                DeviceLists(changed=users_that_have_changed, left=newly_left_users)
             )
         else:
             defer.returnValue(DeviceLists(changed=[], left=[]))
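
The two steps spelled out in the comments above, condensed into set algebra
with plain sets standing in for the storage queries (the names here are
illustrative, not Synapse's storage API):

    def compute_device_lists(
        changed_for_shared_users,  # step 1a: already filtered by the store
        newly_joined_or_invited_users,  # step 1b
        newly_left_users,
        users_who_share_room,
    ):
        # Step 1: users whose device keys the client must refetch.
        changed = set(changed_for_shared_users)
        changed |= set(newly_joined_or_invited_users)

        # Step 2: users we stop tracking, unless another shared room
        # still keeps them visible to us.
        left = set(newly_left_users) - set(users_who_share_room)

        return changed, left
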
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index f8062c8671..c3e0c8fc7e 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -19,9 +19,9 @@ from collections import namedtuple
 from twisted.internet import defer
 
 from synapse.api.errors import AuthError, SynapseError
+from synapse.logging.context import run_in_background
 from synapse.types import UserID, get_domain_from_id
 from synapse.util.caches.stream_change_cache import StreamChangeCache
-from synapse.util.logcontext import run_in_background
 from synapse.util.metrics import Measure
 from synapse.util.wheel_timer import WheelTimer
 
diff --git a/synapse/http/client.py b/synapse/http/client.py
index 9bc7035c8d..45d5010952 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -45,9 +45,9 @@ from synapse.http import (
     cancelled_to_request_timed_out_error,
     redact_uri,
 )
+from synapse.logging.context import make_deferred_yieldable
 from synapse.util.async_helpers import timeout_deferred
 from synapse.util.caches import CACHE_SIZE_FACTOR
-from synapse.util.logcontext import make_deferred_yieldable
 
 logger = logging.getLogger(__name__)
 
diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py
index 414cde0777..054c321a20 100644
--- a/synapse/http/federation/matrix_federation_agent.py
+++ b/synapse/http/federation/matrix_federation_agent.py
@@ -30,9 +30,9 @@ from twisted.web.http_headers import Headers
 from twisted.web.iweb import IAgent
 
 from synapse.http.federation.srv_resolver import SrvResolver, pick_server_from_list
+from synapse.logging.context import make_deferred_yieldable
 from synapse.util import Clock
 from synapse.util.caches.ttlcache import TTLCache
-from synapse.util.logcontext import make_deferred_yieldable
 from synapse.util.metrics import Measure
 
 # period to cache .well-known results for by default
diff --git a/synapse/http/federation/srv_resolver.py b/synapse/http/federation/srv_resolver.py
index 1f22f78a75..ecc88f9b96 100644
--- a/synapse/http/federation/srv_resolver.py
+++ b/synapse/http/federation/srv_resolver.py
@@ -25,7 +25,7 @@ from twisted.internet.error import ConnectError
 from twisted.names import client, dns
 from twisted.names.error import DNSNameError, DomainError
 
-from synapse.util.logcontext import make_deferred_yieldable
+from synapse.logging.context import make_deferred_yieldable
 
 logger = logging.getLogger(__name__)
 
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index 5ef8bb60a3..dee3710f68 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -48,8 +48,8 @@ from synapse.api.errors import (
 from synapse.http import QuieterFileBodyProducer
 from synapse.http.client import BlacklistingAgentWrapper, IPBlacklistingResolver
 from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
+from synapse.logging.context import make_deferred_yieldable
 from synapse.util.async_helpers import timeout_deferred
-from synapse.util.logcontext import make_deferred_yieldable
 from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)
diff --git a/synapse/http/request_metrics.py b/synapse/http/request_metrics.py
index 62045a918b..46af27c8f6 100644
--- a/synapse/http/request_metrics.py
+++ b/synapse/http/request_metrics.py
@@ -19,8 +19,8 @@ import threading
 
 from prometheus_client.core import Counter, Histogram
 
+from synapse.logging.context import LoggingContext
 from synapse.metrics import LaterGauge
-from synapse.util.logcontext import LoggingContext
 
 logger = logging.getLogger(__name__)
 
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 6fd13e87d1..72a3d67eb6 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -16,10 +16,11 @@
 
 import cgi
 import collections
+import http.client
 import logging
-
-from six import PY3
-from six.moves import http_client, urllib
+import types
+import urllib
+from io import BytesIO
 
 from canonicaljson import encode_canonical_json, encode_pretty_printed_json, json
 
@@ -38,13 +39,8 @@ from synapse.api.errors import (
     SynapseError,
     UnrecognizedRequestError,
 )
+from synapse.logging.context import preserve_fn
 from synapse.util.caches import intern_dict
-from synapse.util.logcontext import preserve_fn
-
-if PY3:
-    from io import BytesIO
-else:
-    from cStringIO import StringIO as BytesIO
 
 logger = logging.getLogger(__name__)
 
@@ -69,16 +65,15 @@ def wrap_json_request_handler(h):
     The handler method must have a signature of "handle_foo(self, request)",
     where "request" must be a SynapseRequest.
 
-    The handler must return a deferred. If the deferred succeeds we assume that
-    a response has been sent. If the deferred fails with a SynapseError we use
+    The handler must return a deferred or a coroutine. If the deferred succeeds
+    we assume that a response has been sent. If the deferred fails with a SynapseError we use
     it to send a JSON response with the appropriate HTTP response code. If the
     deferred fails with any other type of error we send a 500 response.
     """
 
-    @defer.inlineCallbacks
-    def wrapped_request_handler(self, request):
+    async def wrapped_request_handler(self, request):
         try:
-            yield h(self, request)
+            await h(self, request)
         except SynapseError as e:
             code = e.code
             logger.info("%s SynapseError: %s - %s", request, code, e.msg)
@@ -142,10 +137,12 @@ def wrap_html_request_handler(h):
     where "request" must be a SynapseRequest.
     """
 
-    def wrapped_request_handler(self, request):
-        d = defer.maybeDeferred(h, self, request)
-        d.addErrback(_return_html_error, request)
-        return d
+    async def wrapped_request_handler(self, request):
+        try:
+            return await h(self, request)
+        except Exception:
+            f = failure.Failure()
+            return _return_html_error(f, request)
 
     return wrap_async_request_handler(wrapped_request_handler)
 
@@ -171,7 +168,7 @@ def _return_html_error(f, request):
                 exc_info=(f.type, f.value, f.getTracebackObject()),
             )
     else:
-        code = http_client.INTERNAL_SERVER_ERROR
+        code = http.client.INTERNAL_SERVER_ERROR
         msg = "Internal server error"
 
         logger.error(
@@ -201,10 +198,9 @@ def wrap_async_request_handler(h):
     logged until the deferred completes.
     """
 
-    @defer.inlineCallbacks
-    def wrapped_async_request_handler(self, request):
+    async def wrapped_async_request_handler(self, request):
         with request.processing():
-            yield h(self, request)
+            await h(self, request)
 
     # we need to preserve_fn here, because the synchronous render method won't yield for
     # us (obviously)
@@ -270,12 +266,11 @@ class JsonResource(HttpServer, resource.Resource):
     def render(self, request):
         """ This gets called by twisted every time someone sends us a request.
         """
-        self._async_render(request)
+        defer.ensureDeferred(self._async_render(request))
         return NOT_DONE_YET
 
     @wrap_json_request_handler
-    @defer.inlineCallbacks
-    def _async_render(self, request):
+    async def _async_render(self, request):
         """ This gets called from render() every time someone sends us a request.
             This checks if anyone has registered a callback for that method and
             path.
@@ -292,26 +287,19 @@ class JsonResource(HttpServer, resource.Resource):
         # Now trigger the callback. If it returns a response, we send it
         # here. If it throws an exception, that is handled by the wrapper
         # installed by @request_handler.
-
-        def _unquote(s):
-            if PY3:
-                # On Python 3, unquote is unicode -> unicode
-                return urllib.parse.unquote(s)
-            else:
-                # On Python 2, unquote is bytes -> bytes We need to encode the
-                # URL again (as it was decoded by _get_handler_for request), as
-                # ASCII because it's a URL, and then decode it to get the UTF-8
-                # characters that were quoted.
-                return urllib.parse.unquote(s.encode("ascii")).decode("utf8")
-
         kwargs = intern_dict(
             {
-                name: _unquote(value) if value else value
+                name: urllib.parse.unquote(value) if value else value
                 for name, value in group_dict.items()
             }
         )
 
-        callback_return = yield callback(request, **kwargs)
+        callback_return = callback(request, **kwargs)
+
+        # The callback may be synchronous, which we allow for now; only
+        # Deferreds and coroutines need to be awaited.
+        if isinstance(callback_return, (defer.Deferred, types.CoroutineType)):
+            callback_return = await callback_return
+
         if callback_return is not None:
             code, response = callback_return
             self._send_response(request, code, response)
@@ -360,6 +348,29 @@ class JsonResource(HttpServer, resource.Resource):
         )
 
 
+class DirectServeResource(resource.Resource):
+    def render(self, request):
+        """
+        Render the request, using an asynchronous render handler if it exists.
+        """
+        async_render_callback_name = "_async_render_" + request.method.decode("ascii")
+
+        # Try and get the async renderer
+        callback = getattr(self, async_render_callback_name, None)
+
+        # No async renderer for this request method.
+        if not callback:
+            return super().render(request)
+
+        resp = callback(request)
+
+        # If it's a coroutine, turn it into a Deferred
+        if isinstance(resp, types.CoroutineType):
+            defer.ensureDeferred(resp)
+
+        return NOT_DONE_YET
+
+
 def _options_handler(request):
     """Request handler for OPTIONS requests
 
diff --git a/synapse/http/site.py b/synapse/http/site.py
index 93f679ea48..df5274c177 100644
--- a/synapse/http/site.py
+++ b/synapse/http/site.py
@@ -19,7 +19,7 @@ from twisted.web.server import Request, Site
 
 from synapse.http import redact_uri
 from synapse.http.request_metrics import RequestMetrics, requests_counter
-from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
+from synapse.logging.context import LoggingContext, PreserveLoggingContext
 
 logger = logging.getLogger(__name__)
 
diff --git a/synapse/logging/__init__.py b/synapse/logging/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/synapse/logging/__init__.py
diff --git a/synapse/logging/context.py b/synapse/logging/context.py
new file mode 100644
index 0000000000..30dfa1d6b2
--- /dev/null
+++ b/synapse/logging/context.py
@@ -0,0 +1,693 @@
+# Copyright 2014-2016 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" Thread-local-alike tracking of log contexts within synapse
+
+This module provides objects and utilities for tracking contexts through
+synapse code, so that log lines can include a request identifier, and so that
+CPU and database activity can be accounted for against the request that caused
+them.
+
+See doc/log_contexts.rst for details on how this works.
+"""
+
+import logging
+import threading
+import types
+
+from twisted.internet import defer, threads
+
+logger = logging.getLogger(__name__)
+
+try:
+    import resource
+
+    # Python doesn't ship with a definition of RUSAGE_THREAD but it's defined
+    # to be 1 on Linux, so we hard-code it.
+    RUSAGE_THREAD = 1
+
+    # If the system doesn't support RUSAGE_THREAD then this should throw an
+    # exception.
+    resource.getrusage(RUSAGE_THREAD)
+
+    def get_thread_resource_usage():
+        return resource.getrusage(RUSAGE_THREAD)
+
+
+except Exception:
+    # If the system doesn't support resource.getrusage(RUSAGE_THREAD) then we
+    # can't track per-thread resource usage; return None instead.
+    def get_thread_resource_usage():
+        return None
+
+
+# get an id for the current thread.
+#
+# threading.get_ident doesn't actually return an OS-level tid, and annoyingly,
+# on Linux it returns the same value either side of a fork() call. However we
+# only fork in one place, so it's not worth the hoop-jumping to get a real tid.
+#
+get_thread_id = threading.get_ident
+
+
+class ContextResourceUsage(object):
+    """Object for tracking the resources used by a log context
+
+    Attributes:
+        ru_utime (float): user CPU time (in seconds)
+        ru_stime (float): system CPU time (in seconds)
+        db_txn_count (int): number of database transactions done
+        db_sched_duration_sec (float): amount of time spent waiting for a
+            database connection
+        db_txn_duration_sec (float): amount of time spent doing database
+            transactions (excluding scheduling time)
+        evt_db_fetch_count (int): number of events requested from the database
+    """
+
+    __slots__ = [
+        "ru_stime",
+        "ru_utime",
+        "db_txn_count",
+        "db_txn_duration_sec",
+        "db_sched_duration_sec",
+        "evt_db_fetch_count",
+    ]
+
+    def __init__(self, copy_from=None):
+        """Create a new ContextResourceUsage
+
+        Args:
+            copy_from (ContextResourceUsage|None): if not None, an object to
+                copy stats from
+        """
+        if copy_from is None:
+            self.reset()
+        else:
+            self.ru_utime = copy_from.ru_utime
+            self.ru_stime = copy_from.ru_stime
+            self.db_txn_count = copy_from.db_txn_count
+
+            self.db_txn_duration_sec = copy_from.db_txn_duration_sec
+            self.db_sched_duration_sec = copy_from.db_sched_duration_sec
+            self.evt_db_fetch_count = copy_from.evt_db_fetch_count
+
+    def copy(self):
+        return ContextResourceUsage(copy_from=self)
+
+    def reset(self):
+        self.ru_stime = 0.0
+        self.ru_utime = 0.0
+        self.db_txn_count = 0
+
+        self.db_txn_duration_sec = 0
+        self.db_sched_duration_sec = 0
+        self.evt_db_fetch_count = 0
+
+    def __repr__(self):
+        return (
+            "<ContextResourceUsage ru_stime='%r', ru_utime='%r', "
+            "db_txn_count='%r', db_txn_duration_sec='%r', "
+            "db_sched_duration_sec='%r', evt_db_fetch_count='%r'>"
+        ) % (
+            self.ru_stime,
+            self.ru_utime,
+            self.db_txn_count,
+            self.db_txn_duration_sec,
+            self.db_sched_duration_sec,
+            self.evt_db_fetch_count,
+        )
+
+    def __iadd__(self, other):
+        """Add another ContextResourceUsage's stats to this one's.
+
+        Args:
+            other (ContextResourceUsage): the other resource usage object
+        """
+        self.ru_utime += other.ru_utime
+        self.ru_stime += other.ru_stime
+        self.db_txn_count += other.db_txn_count
+        self.db_txn_duration_sec += other.db_txn_duration_sec
+        self.db_sched_duration_sec += other.db_sched_duration_sec
+        self.evt_db_fetch_count += other.evt_db_fetch_count
+        return self
+
+    def __isub__(self, other):
+        self.ru_utime -= other.ru_utime
+        self.ru_stime -= other.ru_stime
+        self.db_txn_count -= other.db_txn_count
+        self.db_txn_duration_sec -= other.db_txn_duration_sec
+        self.db_sched_duration_sec -= other.db_sched_duration_sec
+        self.evt_db_fetch_count -= other.evt_db_fetch_count
+        return self
+
+    def __add__(self, other):
+        res = ContextResourceUsage(copy_from=self)
+        res += other
+        return res
+
+    def __sub__(self, other):
+        res = ContextResourceUsage(copy_from=self)
+        res -= other
+        return res
+
+
+class LoggingContext(object):
+    """Additional context for log formatting. Contexts are scoped within a
+    "with" block.
+
+    If a parent is given when creating a new context, then:
+        - logging fields are copied from the parent to the new context on entry
+        - when the new context exits, the cpu usage stats are copied from the
+          child to the parent
+
+    Args:
+        name (str): Name for the context for debugging.
+        parent_context (LoggingContext|None): The parent of the new context
+    """
+
+    __slots__ = [
+        "previous_context",
+        "name",
+        "parent_context",
+        "_resource_usage",
+        "usage_start",
+        "main_thread",
+        "alive",
+        "request",
+        "tag",
+    ]
+
+    thread_local = threading.local()
+
+    class Sentinel(object):
+        """Sentinel to represent the root context"""
+
+        __slots__ = []
+
+        def __str__(self):
+            return "sentinel"
+
+        def copy_to(self, record):
+            pass
+
+        def start(self):
+            pass
+
+        def stop(self):
+            pass
+
+        def add_database_transaction(self, duration_sec):
+            pass
+
+        def add_database_scheduled(self, sched_sec):
+            pass
+
+        def record_event_fetch(self, event_count):
+            pass
+
+        def __nonzero__(self):
+            return False
+
+        __bool__ = __nonzero__  # python3
+
+    sentinel = Sentinel()
+
+    def __init__(self, name=None, parent_context=None, request=None):
+        self.previous_context = LoggingContext.current_context()
+        self.name = name
+
+        # track the resources used by this context so far
+        self._resource_usage = ContextResourceUsage()
+
+        # The thread resource usage when the logcontext last became active.
+        # None if the logcontext is not currently running.
+        self.usage_start = None
+
+        self.main_thread = get_thread_id()
+        self.request = None
+        self.tag = ""
+        self.alive = True
+
+        self.parent_context = parent_context
+
+        if self.parent_context is not None:
+            self.parent_context.copy_to(self)
+
+        if request is not None:
+            # the request param overrides the request from the parent context
+            self.request = request
+
+    def __str__(self):
+        if self.request:
+            return str(self.request)
+        return "%s@%x" % (self.name, id(self))
+
+    @classmethod
+    def current_context(cls):
+        """Get the current logging context from thread local storage
+
+        Returns:
+            LoggingContext: the current logging context
+        """
+        return getattr(cls.thread_local, "current_context", cls.sentinel)
+
+    @classmethod
+    def set_current_context(cls, context):
+        """Set the current logging context in thread local storage
+        Args:
+            context(LoggingContext): The context to activate.
+        Returns:
+            The context that was previously active
+        """
+        current = cls.current_context()
+
+        if current is not context:
+            current.stop()
+            cls.thread_local.current_context = context
+            context.start()
+        return current
+
+    def __enter__(self):
+        """Enters this logging context into thread local storage"""
+        old_context = self.set_current_context(self)
+        if self.previous_context != old_context:
+            logger.warning(
+                "Expected previous context %r, found %r",
+                self.previous_context,
+                old_context,
+            )
+        self.alive = True
+
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Restore the logging context in thread local storage to the state it
+        was before this context was entered.
+        Returns:
+            None to avoid suppressing any exceptions that were thrown.
+        """
+        current = self.set_current_context(self.previous_context)
+        if current is not self:
+            if current is self.sentinel:
+                logger.warning("Expected logging context %s was lost", self)
+            else:
+                logger.warning(
+                    "Expected logging context %s but found %s", self, current
+                )
+        self.previous_context = None
+        self.alive = False
+
+        # if we have a parent, pass our CPU usage stats on
+        if self.parent_context is not None and hasattr(
+            self.parent_context, "_resource_usage"
+        ):
+            self.parent_context._resource_usage += self._resource_usage
+
+            # reset them in case we get entered again
+            self._resource_usage.reset()
+
+    def copy_to(self, record):
+        """Copy logging fields from this context to a log record or
+        another LoggingContext
+        """
+
+        # 'request' is the only field we currently use in the logger, so that's
+        # all we need to copy
+        record.request = self.request
+
+    def start(self):
+        if get_thread_id() != self.main_thread:
+            logger.warning("Started logcontext %s on different thread", self)
+            return
+
+        # If we haven't already started, record the thread resource usage so
+        # far
+        if not self.usage_start:
+            self.usage_start = get_thread_resource_usage()
+
+    def stop(self):
+        if get_thread_id() != self.main_thread:
+            logger.warning("Stopped logcontext %s on different thread", self)
+            return
+
+        # When we stop, let's record the cpu used since we started
+        if not self.usage_start:
+            logger.warning("Called stop on logcontext %s without calling start", self)
+            return
+
+        utime_delta, stime_delta = self._get_cputime()
+        self._resource_usage.ru_utime += utime_delta
+        self._resource_usage.ru_stime += stime_delta
+
+        self.usage_start = None
+
+    def get_resource_usage(self):
+        """Get resources used by this logcontext so far.
+
+        Returns:
+            ContextResourceUsage: a *copy* of the object tracking resource
+                usage so far
+        """
+        # we always return a copy, for consistency
+        res = self._resource_usage.copy()
+
+        # If we are on the correct thread and we're currently running then we
+        # can include resource usage so far.
+        is_main_thread = get_thread_id() == self.main_thread
+        if self.alive and self.usage_start and is_main_thread:
+            utime_delta, stime_delta = self._get_cputime()
+            res.ru_utime += utime_delta
+            res.ru_stime += stime_delta
+
+        return res
+
+    def _get_cputime(self):
+        """Get the cpu usage time so far
+
+        Returns:
+            Tuple[float, float]: seconds in user mode, seconds in system mode
+        """
+        current = get_thread_resource_usage()
+
+        utime_delta = current.ru_utime - self.usage_start.ru_utime
+        stime_delta = current.ru_stime - self.usage_start.ru_stime
+
+        # sanity check
+        if utime_delta < 0:
+            logger.error(
+                "utime went backwards! %f < %f",
+                current.ru_utime,
+                self.usage_start.ru_utime,
+            )
+            utime_delta = 0
+
+        if stime_delta < 0:
+            logger.error(
+                "stime went backwards! %f < %f",
+                current.ru_stime,
+                self.usage_start.ru_stime,
+            )
+            stime_delta = 0
+
+        return utime_delta, stime_delta
+
+    def add_database_transaction(self, duration_sec):
+        if duration_sec < 0:
+            raise ValueError("DB txn time can only be non-negative")
+        self._resource_usage.db_txn_count += 1
+        self._resource_usage.db_txn_duration_sec += duration_sec
+
+    def add_database_scheduled(self, sched_sec):
+        """Record a use of the database pool
+
+        Args:
+            sched_sec (float): number of seconds it took us to get a
+                connection
+        """
+        if sched_sec < 0:
+            raise ValueError("DB scheduling time can only be non-negative")
+        self._resource_usage.db_sched_duration_sec += sched_sec
+
+    def record_event_fetch(self, event_count):
+        """Record a number of events being fetched from the db
+
+        Args:
+            event_count (int): number of events being fetched
+        """
+        self._resource_usage.evt_db_fetch_count += event_count
+
+
+class LoggingContextFilter(logging.Filter):
+    """Logging filter that adds values from the current logging context to each
+    record.
+
+    Args:
+        **defaults: Default values to avoid formatters complaining about
+            missing fields
+    """
+
+    def __init__(self, **defaults):
+        self.defaults = defaults
+
+    def filter(self, record):
+        """Add each fields from the logging contexts to the record.
+        Returns:
+            True to include the record in the log output.
+        """
+        context = LoggingContext.current_context()
+        for key, value in self.defaults.items():
+            setattr(record, key, value)
+
+        # context should never be None, but if it somehow ends up being, then
+        # we end up in a death spiral of infinite loops, so let's check, for
+        # robustness' sake.
+        if context is not None:
+            context.copy_to(record)
+
+        return True
+
+
+class PreserveLoggingContext(object):
+    """Captures the current logging context and restores it when the scope is
+    exited. Used to restore the context after a function using
+    @defer.inlineCallbacks is resumed by a callback from the reactor."""
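+
+    # A sketch: temporarily deactivate the current context (switching to the
+    # sentinel) around code that should not run under it:
+    #
+    #     with PreserveLoggingContext():
+    #         ...  # runs with no logcontext; the old one is restored on exit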
+
+    __slots__ = ["current_context", "new_context", "has_parent"]
+
+    def __init__(self, new_context=None):
+        if new_context is None:
+            new_context = LoggingContext.sentinel
+        self.new_context = new_context
+
+    def __enter__(self):
+        """Captures the current logging context"""
+        self.current_context = LoggingContext.set_current_context(self.new_context)
+
+        if self.current_context:
+            self.has_parent = self.current_context.previous_context is not None
+            if not self.current_context.alive:
+                logger.debug("Entering dead context: %s", self.current_context)
+
+    def __exit__(self, type, value, traceback):
+        """Restores the current logging context"""
+        context = LoggingContext.set_current_context(self.current_context)
+
+        if context != self.new_context:
+            if context is LoggingContext.sentinel:
+                logger.warning("Expected logging context %s was lost", self.new_context)
+            else:
+                logger.warning(
+                    "Expected logging context %s but found %s",
+                    self.new_context,
+                    context,
+                )
+
+        if self.current_context is not LoggingContext.sentinel:
+            if not self.current_context.alive:
+                logger.debug("Restoring dead context: %s", self.current_context)
+
+
+def nested_logging_context(suffix, parent_context=None):
+    """Creates a new logging context as a child of another.
+
+    The nested logging context will have a 'request' made up of the parent context's
+    request, plus the given suffix.
+
+    CPU/db usage stats will be added to the parent context's on exit.
+
+    Normal usage looks like:
+
+        with nested_logging_context(suffix):
+            # ... do stuff
+
+    Args:
+        suffix (str): suffix to add to the parent context's 'request'.
+        parent_context (LoggingContext|None): parent context. Will use the current context
+            if None.
+
+    Returns:
+        LoggingContext: new logging context.
+    """
+    if parent_context is None:
+        parent_context = LoggingContext.current_context()
+    return LoggingContext(
+        parent_context=parent_context, request=parent_context.request + "-" + suffix
+    )
+
+
+def preserve_fn(f):
+    """Function decorator which wraps the function with run_in_background"""
+
+    def g(*args, **kwargs):
+        return run_in_background(f, *args, **kwargs)
+
+    return g
+
+
+def run_in_background(f, *args, **kwargs):
+    """Calls a function, ensuring that the current context is restored after
+    return from the function, and that the sentinel context is set once the
+    deferred returned by the function completes.
+
+    Useful for wrapping functions that return a deferred or coroutine, which you don't
+    yield or await on (for instance because you want to pass it to
+    defer.gatherResults()).
+
+    Note that if you completely discard the result, you should make sure that
+    `f` doesn't raise any deferred exceptions, otherwise a scary-looking
+    CRITICAL error about an unhandled error will be logged without much
+    indication about where it came from.
+    """
+    current = LoggingContext.current_context()
+    try:
+        res = f(*args, **kwargs)
+    except:  # noqa: E722
+        # the assumption here is that the caller doesn't want to be disturbed
+        # by synchronous exceptions, so let's turn them into Failures.
+        return defer.fail()
+
+    if isinstance(res, types.CoroutineType):
+        res = defer.ensureDeferred(res)
+
+    if not isinstance(res, defer.Deferred):
+        return res
+
+    if res.called and not res.paused:
+        # The function should have maintained the logcontext, so we can
+        # optimise out the messing about
+        return res
+
+    # The function may have reset the context before returning, so
+    # we need to restore it now.
+    ctx = LoggingContext.set_current_context(current)
+
+    # The original context will be restored when the deferred
+    # completes, but there is nothing waiting for it, so it will
+    # get leaked into the reactor or some other function which
+    # wasn't expecting it. We therefore need to reset the context
+    # here.
+    #
+    # (If this feels asymmetric, consider it this way: we are
+    # effectively forking a new thread of execution. We are
+    # probably currently within a ``with LoggingContext()`` block,
+    # which is supposed to have a single entry and exit point. But
+    # by spawning off another deferred, we are effectively
+    # adding a new exit point.)
+    res.addBoth(_set_context_cb, ctx)
+    return res
+
+
+def make_deferred_yieldable(deferred):
+    """Given a deferred, make it follow the Synapse logcontext rules:
+
+    If the deferred has completed (or is not actually a Deferred), essentially
+    does nothing (just returns another completed deferred with the
+    result/failure).
+
+    If the deferred has not yet completed, resets the logcontext before
+    returning a deferred. Then, when the deferred completes, restores the
+    current logcontext before running callbacks/errbacks.
+
+    (This is more-or-less the opposite operation to run_in_background.)
+    """
+    if not isinstance(deferred, defer.Deferred):
+        return deferred
+
+    if deferred.called and not deferred.paused:
+        # it looks like this deferred is ready to run any callbacks we give it
+        # immediately. We may as well optimise out the logcontext faffery.
+        return deferred
+
+    # ok, we can't be sure that a yield won't block, so let's reset the
+    # logcontext, and add a callback to the deferred to restore it.
+    prev_context = LoggingContext.set_current_context(LoggingContext.sentinel)
+    deferred.addBoth(_set_context_cb, prev_context)
+    return deferred
+
+
+def _set_context_cb(result, context):
+    """A callback function which just sets the logging context"""
+    LoggingContext.set_current_context(context)
+    return result
+
+
+def defer_to_thread(reactor, f, *args, **kwargs):
+    """
+    Calls the function `f` using a thread from the reactor's default threadpool and
+    returns the result as a Deferred.
+
+    Creates a new logcontext for `f`, which is created as a child of the current
+    logcontext (so its CPU usage metrics will get attributed to the current
+    logcontext). `f` should preserve the logcontext it is given.
+
+    The result deferred follows the Synapse logcontext rules: you should `yield`
+    on it.
+
+    Args:
+        reactor (twisted.internet.base.ReactorBase): The reactor in whose main thread
+            the Deferred will be invoked, and whose threadpool we should use for the
+            function.
+
+            Normally this will be hs.get_reactor().
+
+        f (callable): The function to call.
+
+        args: positional arguments to pass to f.
+
+        kwargs: keyword arguments to pass to f.
+
+    Returns:
+        Deferred: A Deferred which fires a callback with the result of `f`, or an
+            errback if `f` throws an exception.
+    """
+    return defer_to_threadpool(reactor, reactor.getThreadPool(), f, *args, **kwargs)
+
+
+def defer_to_threadpool(reactor, threadpool, f, *args, **kwargs):
+    """
+    A wrapper for twisted.internet.threads.deferToThreadpool, which handles
+    logcontexts correctly.
+
+    Calls the function `f` using a thread from the given threadpool and returns
+    the result as a Deferred.
+
+    Creates a new logcontext for `f`, which is created as a child of the current
+    logcontext (so its CPU usage metrics will get attributed to the current
+    logcontext). `f` should preserve the logcontext it is given.
+
+    The result deferred follows the Synapse logcontext rules: you should `yield`
+    on it.
+
+    Args:
+        reactor (twisted.internet.base.ReactorBase): The reactor in whose main thread
+            the Deferred will be invoked. Normally this will be hs.get_reactor().
+
+        threadpool (twisted.python.threadpool.ThreadPool): The threadpool to use for
+            running `f`. Normally this will be hs.get_reactor().getThreadPool().
+
+        f (callable): The function to call.
+
+        args: positional arguments to pass to f.
+
+        kwargs: keyword arguments to pass to f.
+
+    Returns:
+        Deferred: A Deferred which fires a callback with the result of `f`, or an
+            errback if `f` throws an exception.
+    """
+    logcontext = LoggingContext.current_context()
+
+    def g():
+        with LoggingContext(parent_context=logcontext):
+            return f(*args, **kwargs)
+
+    return make_deferred_yieldable(threads.deferToThreadPool(reactor, threadpool, g))
diff --git a/synapse/logging/formatter.py b/synapse/logging/formatter.py
new file mode 100644
index 0000000000..fbf570c756
--- /dev/null
+++ b/synapse/logging/formatter.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2017 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import logging
+import traceback
+
+from six import StringIO
+
+
+class LogFormatter(logging.Formatter):
+    """Log formatter which gives more detail for exceptions
+
+    This is the same as the standard log formatter, except that when logging
+    exceptions [typically via log.foo("msg", exc_info=1)], it prints the
+    sequence that led up to the point at which the exception was caught.
+    (Normally only stack frames between the point the exception was raised and
+    where it was caught are logged).
+    """
+
+    def __init__(self, *args, **kwargs):
+        super(LogFormatter, self).__init__(*args, **kwargs)
+
+    def formatException(self, ei):
+        sio = StringIO()
+        (typ, val, tb) = ei
+
+        # log the stack above the exception capture point if possible, but
+        # check that we actually have an f_back attribute to work around
+        # https://twistedmatrix.com/trac/ticket/9305
+
+        if tb and hasattr(tb.tb_frame, "f_back"):
+            sio.write("Capture point (most recent call last):\n")
+            traceback.print_stack(tb.tb_frame.f_back, None, sio)
+
+        traceback.print_exception(typ, val, tb, None, sio)
+        s = sio.getvalue()
+        sio.close()
+        if s[-1:] == "\n":
+            s = s[:-1]
+        return s
diff --git a/synapse/util/logutils.py b/synapse/logging/utils.py
index 7df0fa6087..7df0fa6087 100644
--- a/synapse/util/logutils.py
+++ b/synapse/logging/utils.py
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index 1f30179b51..eaf0aaa86e 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -437,7 +437,10 @@ def runUntilCurrentTimer(func):
         counts = gc.get_count()
         for i in (2, 1, 0):
             if threshold[i] < counts[i]:
-                logger.info("Collecting gc %d", i)
+                if i == 0:
+                    logger.debug("Collecting gc %d", i)
+                else:
+                    logger.info("Collecting gc %d", i)
 
                 start = time.time()
                 unreachable = gc.collect(i)
diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py
index 167e2c068a..edd6b42db3 100644
--- a/synapse/metrics/background_process_metrics.py
+++ b/synapse/metrics/background_process_metrics.py
@@ -22,7 +22,7 @@ from prometheus_client.core import REGISTRY, Counter, GaugeMetricFamily
 
 from twisted.internet import defer
 
-from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
+from synapse.logging.context import LoggingContext, PreserveLoggingContext
 
 logger = logging.getLogger(__name__)
 
diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py
index bf43ca09be..a0be2c5ca3 100644
--- a/synapse/module_api/__init__.py
+++ b/synapse/module_api/__init__.py
@@ -12,10 +12,14 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import logging
+
 from twisted.internet import defer
 
 from synapse.types import UserID
 
+logger = logging.getLogger(__name__)
+
 
 class ModuleApi(object):
     """A proxy object that gets passed to password auth providers so they
@@ -76,8 +80,13 @@ class ModuleApi(object):
 
     @defer.inlineCallbacks
     def register(self, localpart, displayname=None, emails=[]):
-        """Registers a new user with given localpart and optional
-           displayname, emails.
+        """Registers a new user with given localpart and optional displayname, emails.
+
+        Also returns an access token for the new user.
+
+        Deprecated: avoid this, as it generates a new device with no way to
+        return that device to the user. Prefer separate calls to register_user and
+        register_device.
 
         Args:
             localpart (str): The localpart of the new user.
@@ -85,17 +94,56 @@ class ModuleApi(object):
             emails (List[str]): Emails to bind to the new user.
 
         Returns:
-            Deferred: a 2-tuple of (user_id, access_token)
+            Deferred[tuple[str, str]]: a 2-tuple of (user_id, access_token)
         """
-        # Register the user
-        reg = self.hs.get_registration_handler()
-        user_id, access_token = yield reg.register(
-            localpart=localpart, default_display_name=displayname, bind_emails=emails
+        logger.warning(
+            "Using deprecated ModuleApi.register which creates a dummy user device."
         )
-
+        user_id = yield self.register_user(localpart, displayname, emails)
+        _, access_token = yield self.register_device(user_id)
         defer.returnValue((user_id, access_token))
 
     @defer.inlineCallbacks
+    def register_user(self, localpart, displayname=None, emails=[]):
+        """Registers a new user with given localpart and optional displayname, emails.
+
+        Args:
+            localpart (str): The localpart of the new user.
+            displayname (str|None): The displayname of the new user.
+            emails (List[str]): Emails to bind to the new user.
+
+        Returns:
+            Deferred[str]: user_id
+        """
+        user_id, _ = yield self.hs.get_registration_handler().register(
+            localpart=localpart,
+            default_display_name=displayname,
+            bind_emails=emails,
+            generate_token=False,
+        )
+
+        defer.returnValue(user_id)
+
+    def register_device(self, user_id, device_id=None, initial_display_name=None):
+        """Register a device for a user and generate an access token.
+
+        Args:
+            user_id (str): full canonical @user:id
+            device_id (str|None): The device ID to check, or None to generate
+                a new one.
+            initial_display_name (str|None): An optional display name for the
+                device.
+
+        Returns:
+            defer.Deferred[tuple[str, str]]: Tuple of device ID and access token
+        """
+        return self.hs.get_registration_handler().register_device(
+            user_id=user_id,
+            device_id=device_id,
+            initial_display_name=initial_display_name,
+        )
+
+    @defer.inlineCallbacks
     def invalidate_access_token(self, access_token):
         """Invalidate an access token for a user
 
diff --git a/synapse/notifier.py b/synapse/notifier.py
index d398078eed..918ef64897 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -23,12 +23,12 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError
 from synapse.handlers.presence import format_user_presence_state
+from synapse.logging.context import PreserveLoggingContext
+from synapse.logging.utils import log_function
 from synapse.metrics import LaterGauge
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.types import StreamToken
 from synapse.util.async_helpers import ObservableDeferred, timeout_deferred
-from synapse.util.logcontext import PreserveLoggingContext
-from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
 from synapse.visibility import filter_events_for_client
 
diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py
index 96d087de22..134bf805eb 100644
--- a/synapse/push/baserules.py
+++ b/synapse/push/baserules.py
@@ -1,5 +1,6 @@
 # Copyright 2015, 2016 OpenMarket Ltd
 # Copyright 2017 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -248,6 +249,18 @@ BASE_APPEND_OVERRIDE_RULES = [
         ],
         "actions": ["notify", {"set_tweak": "highlight", "value": True}],
     },
+    {
+        "rule_id": "global/override/.m.rule.reaction",
+        "conditions": [
+            {
+                "kind": "event_match",
+                "key": "type",
+                "pattern": "m.reaction",
+                "_id": "_reaction",
+            }
+        ],
+        "actions": ["dont_notify"],
+    },
 ]
 
 
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index 809199fe88..521c6e2cd7 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -29,6 +29,7 @@ from twisted.internet import defer
 
 from synapse.api.constants import EventTypes
 from synapse.api.errors import StoreError
+from synapse.logging.context import make_deferred_yieldable
 from synapse.push.presentable_names import (
     calculate_room_name,
     descriptor_from_member_events,
@@ -36,7 +37,6 @@ from synapse.push.presentable_names import (
 )
 from synapse.types import UserID
 from synapse.util.async_helpers import concurrently_execute
-from synapse.util.logcontext import make_deferred_yieldable
 from synapse.visibility import filter_events_for_client
 
 logger = logging.getLogger(__name__)
diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py
index 0a76a3762f..2d9cbbaefc 100644
--- a/synapse/replication/http/membership.py
+++ b/synapse/replication/http/membership.py
@@ -156,70 +156,6 @@ class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint):
         defer.returnValue((200, ret))
 
 
-class ReplicationRegister3PIDGuestRestServlet(ReplicationEndpoint):
-    """Gets/creates a guest account for given 3PID.
-
-    Request format:
-
-        POST /_synapse/replication/get_or_register_3pid_guest/
-
-        {
-            "requester": ...,
-            "medium": ...,
-            "address": ...,
-            "inviter_user_id": ...
-        }
-    """
-
-    NAME = "get_or_register_3pid_guest"
-    PATH_ARGS = ()
-
-    def __init__(self, hs):
-        super(ReplicationRegister3PIDGuestRestServlet, self).__init__(hs)
-
-        self.registeration_handler = hs.get_registration_handler()
-        self.store = hs.get_datastore()
-        self.clock = hs.get_clock()
-
-    @staticmethod
-    def _serialize_payload(requester, medium, address, inviter_user_id):
-        """
-        Args:
-            requester(Requester)
-            medium (str)
-            address (str)
-            inviter_user_id (str): The user ID who is trying to invite the
-                3PID
-        """
-        return {
-            "requester": requester.serialize(),
-            "medium": medium,
-            "address": address,
-            "inviter_user_id": inviter_user_id,
-        }
-
-    @defer.inlineCallbacks
-    def _handle_request(self, request):
-        content = parse_json_object_from_request(request)
-
-        medium = content["medium"]
-        address = content["address"]
-        inviter_user_id = content["inviter_user_id"]
-
-        requester = Requester.deserialize(self.store, content["requester"])
-
-        if requester.user:
-            request.authenticated_entity = requester.user.to_string()
-
-        logger.info("get_or_register_3pid_guest: %r", content)
-
-        ret = yield self.registeration_handler.get_or_register_3pid_guest(
-            medium, address, inviter_user_id
-        )
-
-        defer.returnValue((200, ret))
-
-
 class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint):
     """Notifies that a user has joined or left the room
 
@@ -272,5 +208,4 @@ class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint):
 def register_servlets(hs, http_server):
     ReplicationRemoteJoinRestServlet(hs).register(http_server)
     ReplicationRemoteRejectInviteRestServlet(hs).register(http_server)
-    ReplicationRegister3PIDGuestRestServlet(hs).register(http_server)
     ReplicationUserJoinedLeftRoomRestServlet(hs).register(http_server)
diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py
index 97efb835ad..5ffdf2675d 100644
--- a/synapse/replication/tcp/protocol.py
+++ b/synapse/replication/tcp/protocol.py
@@ -62,9 +62,9 @@ from twisted.internet import defer
 from twisted.protocols.basic import LineOnlyReceiver
 from twisted.python.failure import Failure
 
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.metrics import LaterGauge
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.stringutils import random_string
 
 from .commands import (
diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py
index 36404b797d..6da71dc46f 100644
--- a/synapse/rest/client/transactions.py
+++ b/synapse/rest/client/transactions.py
@@ -17,8 +17,8 @@
 to ensure idempotency when performing PUTs using the REST API."""
 import logging
 
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.util.async_helpers import ObservableDeferred
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
 logger = logging.getLogger(__name__)
 
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index ede6bc8b1e..b13043cc64 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -86,6 +86,7 @@ class LoginRestServlet(RestServlet):
         self.jwt_enabled = hs.config.jwt_enabled
         self.jwt_secret = hs.config.jwt_secret
         self.jwt_algorithm = hs.config.jwt_algorithm
+        self.saml2_enabled = hs.config.saml2_enabled
         self.cas_enabled = hs.config.cas_enabled
         self.auth_handler = self.hs.get_auth_handler()
         self.registration_handler = hs.get_registration_handler()
@@ -97,6 +98,9 @@ class LoginRestServlet(RestServlet):
         flows = []
         if self.jwt_enabled:
             flows.append({"type": LoginRestServlet.JWT_TYPE})
+        if self.saml2_enabled:
+            flows.append({"type": LoginRestServlet.SSO_TYPE})
+            flows.append({"type": LoginRestServlet.TOKEN_TYPE})
         if self.cas_enabled:
             flows.append({"type": LoginRestServlet.SSO_TYPE})
 
@@ -279,19 +283,7 @@ class LoginRestServlet(RestServlet):
             yield auth_handler.validate_short_term_login_token_and_get_user_id(token)
         )
 
-        device_id = login_submission.get("device_id")
-        initial_display_name = login_submission.get("initial_device_display_name")
-        device_id, access_token = yield self.registration_handler.register_device(
-            user_id, device_id, initial_display_name
-        )
-
-        result = {
-            "user_id": user_id,  # may have changed
-            "access_token": access_token,
-            "home_server": self.hs.hostname,
-            "device_id": device_id,
-        }
-
+        result = yield self._register_device_with_callback(user_id, login_submission)
         defer.returnValue(result)
 
     @defer.inlineCallbacks
@@ -320,61 +312,63 @@ class LoginRestServlet(RestServlet):
 
         user_id = UserID(user, self.hs.hostname).to_string()
 
-        auth_handler = self.auth_handler
-        registered_user_id = yield auth_handler.check_user_exists(user_id)
-        if registered_user_id:
-            device_id = login_submission.get("device_id")
-            initial_display_name = login_submission.get("initial_device_display_name")
-            device_id, access_token = yield self.registration_handler.register_device(
-                registered_user_id, device_id, initial_display_name
+        registered_user_id = yield self.auth_handler.check_user_exists(user_id)
+        if not registered_user_id:
+            registered_user_id, _ = (
+                yield self.registration_handler.register(
+                    localpart=user, generate_token=False
+                )
             )
 
-            result = {
-                "user_id": registered_user_id,
-                "access_token": access_token,
-                "home_server": self.hs.hostname,
-            }
-        else:
-            user_id, access_token = (
-                yield self.registration_handler.register(localpart=user)
-            )
+        result = yield self._register_device_with_callback(
+            registered_user_id, login_submission
+        )
+        defer.returnValue(result)
 
-            device_id = login_submission.get("device_id")
-            initial_display_name = login_submission.get("initial_device_display_name")
-            device_id, access_token = yield self.registration_handler.register_device(
-                registered_user_id, device_id, initial_display_name
-            )
 
-            result = {
-                "user_id": user_id,  # may have changed
-                "access_token": access_token,
-                "home_server": self.hs.hostname,
-            }
+class BaseSSORedirectServlet(RestServlet):
+    """Common base class for /login/sso/redirect impls"""
 
-        defer.returnValue(result)
+    PATTERNS = client_patterns("/login/(cas|sso)/redirect", v1=True)
 
+    def on_GET(self, request):
+        args = request.args
+        if b"redirectUrl" not in args:
+            return 400, "Redirect URL not specified for SSO auth"
+        client_redirect_url = args[b"redirectUrl"][0]
+        sso_url = self.get_sso_url(client_redirect_url)
+        request.redirect(sso_url)
+        finish_request(request)
+
+    def get_sso_url(self, client_redirect_url):
+        """Get the URL to redirect to, to perform SSO auth
+
+        Args:
+            client_redirect_url (bytes): the URL that we should redirect the
+                client to when everything is done
+
+        Returns:
+            bytes: URL to redirect to
+        """
+        # to be implemented by subclasses
+        raise NotImplementedError()
 
-class CasRedirectServlet(RestServlet):
-    PATTERNS = client_patterns("/login/(cas|sso)/redirect", v1=True)
 
+class CasRedirectServlet(BaseSSORedirectServlet):
     def __init__(self, hs):
         super(CasRedirectServlet, self).__init__()
         self.cas_server_url = hs.config.cas_server_url.encode("ascii")
         self.cas_service_url = hs.config.cas_service_url.encode("ascii")
 
-    def on_GET(self, request):
-        args = request.args
-        if b"redirectUrl" not in args:
-            return (400, "Redirect URL not specified for CAS auth")
+    def get_sso_url(self, client_redirect_url):
         client_redirect_url_param = urllib.parse.urlencode(
-            {b"redirectUrl": args[b"redirectUrl"][0]}
+            {b"redirectUrl": client_redirect_url}
         ).encode("ascii")
         hs_redirect_url = self.cas_service_url + b"/_matrix/client/r0/login/cas/ticket"
         service_param = urllib.parse.urlencode(
             {b"service": b"%s?%s" % (hs_redirect_url, client_redirect_url_param)}
         ).encode("ascii")
-        request.redirect(b"%s/login?%s" % (self.cas_server_url, service_param))
-        finish_request(request)
+        return b"%s/login?%s" % (self.cas_server_url, service_param)
 
 
 class CasTicketServlet(RestServlet):
@@ -457,6 +451,16 @@ class CasTicketServlet(RestServlet):
         return user, attributes
 
 
+class SAMLRedirectServlet(BaseSSORedirectServlet):
+    PATTERNS = client_patterns("/login/sso/redirect", v1=True)
+
+    def __init__(self, hs):
+        self._saml_handler = hs.get_saml_handler()
+
+    def get_sso_url(self, client_redirect_url):
+        return self._saml_handler.handle_redirect_request(client_redirect_url)
+
+
 class SSOAuthHandler(object):
     """
     Utility class for Resources and Servlets which handle the response from a SSO
@@ -532,3 +536,5 @@ def register_servlets(hs, http_server):
     if hs.config.cas_enabled:
         CasRedirectServlet(hs).register(http_server)
         CasTicketServlet(hs).register(http_server)
+    elif hs.config.saml2_enabled:
+        SAMLRedirectServlet(hs).register(http_server)
diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py
index 9a32892d8b..1ddf9997ff 100644
--- a/synapse/rest/consent/consent_resource.py
+++ b/synapse/rest/consent/consent_resource.py
@@ -24,12 +24,14 @@ import jinja2
 from jinja2 import TemplateNotFound
 
 from twisted.internet import defer
-from twisted.web.resource import Resource
-from twisted.web.server import NOT_DONE_YET
 
 from synapse.api.errors import NotFoundError, StoreError, SynapseError
 from synapse.config import ConfigError
-from synapse.http.server import finish_request, wrap_html_request_handler
+from synapse.http.server import (
+    DirectServeResource,
+    finish_request,
+    wrap_html_request_handler,
+)
 from synapse.http.servlet import parse_string
 from synapse.types import UserID
 
@@ -47,7 +49,7 @@ else:
         return a == b
 
 
-class ConsentResource(Resource):
+class ConsentResource(DirectServeResource):
     """A twisted Resource to display a privacy policy and gather consent to it
 
     When accessed via GET, returns the privacy policy via a template.
@@ -87,7 +89,7 @@ class ConsentResource(Resource):
         Args:
             hs (synapse.server.HomeServer): homeserver
         """
-        Resource.__init__(self)
+        super().__init__()
 
         self.hs = hs
         self.store = hs.get_datastore()
@@ -118,18 +120,12 @@ class ConsentResource(Resource):
 
         self._hmac_secret = hs.config.form_secret.encode("utf-8")
 
-    def render_GET(self, request):
-        self._async_render_GET(request)
-        return NOT_DONE_YET
-
     @wrap_html_request_handler
-    @defer.inlineCallbacks
-    def _async_render_GET(self, request):
+    async def _async_render_GET(self, request):
         """
         Args:
             request (twisted.web.http.Request):
         """
-
         version = parse_string(request, "v", default=self._default_consent_version)
         username = parse_string(request, "u", required=False, default="")
         userhmac = None
@@ -145,7 +141,7 @@ class ConsentResource(Resource):
             else:
                 qualified_user_id = UserID(username, self.hs.hostname).to_string()
 
-            u = yield self.store.get_user_by_id(qualified_user_id)
+            u = await defer.maybeDeferred(self.store.get_user_by_id, qualified_user_id)
             if u is None:
                 raise NotFoundError("Unknown user")
 
@@ -165,13 +161,8 @@ class ConsentResource(Resource):
         except TemplateNotFound:
             raise NotFoundError("Unknown policy version")
 
-    def render_POST(self, request):
-        self._async_render_POST(request)
-        return NOT_DONE_YET
-
     @wrap_html_request_handler
-    @defer.inlineCallbacks
-    def _async_render_POST(self, request):
+    async def _async_render_POST(self, request):
         """
         Args:
             request (twisted.web.http.Request):
@@ -188,12 +179,12 @@ class ConsentResource(Resource):
             qualified_user_id = UserID(username, self.hs.hostname).to_string()
 
         try:
-            yield self.store.user_set_consent_version(qualified_user_id, version)
+            await self.store.user_set_consent_version(qualified_user_id, version)
         except StoreError as e:
             if e.code != 404:
                 raise
             raise NotFoundError("Unknown user")
-        yield self.registration_handler.post_consent_actions(qualified_user_id)
+        await self.registration_handler.post_consent_actions(qualified_user_id)
 
         try:
             self._render_template(request, "success.html")
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index ec8b9d7269..031a316693 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -16,18 +16,20 @@ import logging
 from io import BytesIO
 
 from twisted.internet import defer
-from twisted.web.resource import Resource
-from twisted.web.server import NOT_DONE_YET
 
 from synapse.api.errors import Codes, SynapseError
 from synapse.crypto.keyring import ServerKeyFetcher
-from synapse.http.server import respond_with_json_bytes, wrap_json_request_handler
+from synapse.http.server import (
+    DirectServeResource,
+    respond_with_json_bytes,
+    wrap_json_request_handler,
+)
 from synapse.http.servlet import parse_integer, parse_json_object_from_request
 
 logger = logging.getLogger(__name__)
 
 
-class RemoteKey(Resource):
+class RemoteKey(DirectServeResource):
     """HTTP resource for retreiving the TLS certificate and NACL signature
     verification keys for a collection of servers. Checks that the reported
     X.509 TLS certificate matches the one used in the HTTPS connection. Checks
@@ -94,13 +96,8 @@ class RemoteKey(Resource):
         self.clock = hs.get_clock()
         self.federation_domain_whitelist = hs.config.federation_domain_whitelist
 
-    def render_GET(self, request):
-        self.async_render_GET(request)
-        return NOT_DONE_YET
-
     @wrap_json_request_handler
-    @defer.inlineCallbacks
-    def async_render_GET(self, request):
+    async def _async_render_GET(self, request):
         if len(request.postpath) == 1:
             server, = request.postpath
             query = {server.decode("ascii"): {}}
@@ -114,20 +111,15 @@ class RemoteKey(Resource):
         else:
             raise SynapseError(404, "Not found %r" % request.postpath, Codes.NOT_FOUND)
 
-        yield self.query_keys(request, query, query_remote_on_cache_miss=True)
-
-    def render_POST(self, request):
-        self.async_render_POST(request)
-        return NOT_DONE_YET
+        await self.query_keys(request, query, query_remote_on_cache_miss=True)
 
     @wrap_json_request_handler
-    @defer.inlineCallbacks
-    def async_render_POST(self, request):
+    async def _async_render_POST(self, request):
         content = parse_json_object_from_request(request)
 
         query = content["server_keys"]
 
-        yield self.query_keys(request, query, query_remote_on_cache_miss=True)
+        await self.query_keys(request, query, query_remote_on_cache_miss=True)
 
     @defer.inlineCallbacks
     def query_keys(self, request, query, query_remote_on_cache_miss=False):
diff --git a/synapse/rest/media/v1/_base.py b/synapse/rest/media/v1/_base.py
index 3318638d3e..5fefee4dde 100644
--- a/synapse/rest/media/v1/_base.py
+++ b/synapse/rest/media/v1/_base.py
@@ -25,7 +25,7 @@ from twisted.protocols.basic import FileSender
 
 from synapse.api.errors import Codes, SynapseError, cs_error
 from synapse.http.server import finish_request, respond_with_json
-from synapse.util import logcontext
+from synapse.logging.context import make_deferred_yieldable
 from synapse.util.stringutils import is_ascii
 
 logger = logging.getLogger(__name__)
@@ -75,9 +75,7 @@ def respond_with_file(request, media_type, file_path, file_size=None, upload_nam
         add_file_headers(request, media_type, file_size, upload_name)
 
         with open(file_path, "rb") as f:
-            yield logcontext.make_deferred_yieldable(
-                FileSender().beginFileTransfer(f, request)
-            )
+            yield make_deferred_yieldable(FileSender().beginFileTransfer(f, request))
 
         finish_request(request)
     else:
diff --git a/synapse/rest/media/v1/config_resource.py b/synapse/rest/media/v1/config_resource.py
index fa3d6680fc..9f747de263 100644
--- a/synapse/rest/media/v1/config_resource.py
+++ b/synapse/rest/media/v1/config_resource.py
@@ -14,31 +14,28 @@
 # limitations under the License.
 #
 
-from twisted.internet import defer
-from twisted.web.resource import Resource
 from twisted.web.server import NOT_DONE_YET
 
-from synapse.http.server import respond_with_json, wrap_json_request_handler
+from synapse.http.server import (
+    DirectServeResource,
+    respond_with_json,
+    wrap_json_request_handler,
+)
 
 
-class MediaConfigResource(Resource):
+class MediaConfigResource(DirectServeResource):
     isLeaf = True
 
     def __init__(self, hs):
-        Resource.__init__(self)
+        super().__init__()
         config = hs.get_config()
         self.clock = hs.get_clock()
         self.auth = hs.get_auth()
         self.limits_dict = {"m.upload.size": config.max_upload_size}
 
-    def render_GET(self, request):
-        self._async_render_GET(request)
-        return NOT_DONE_YET
-
     @wrap_json_request_handler
-    @defer.inlineCallbacks
-    def _async_render_GET(self, request):
-        yield self.auth.get_user_by_req(request)
+    async def _async_render_GET(self, request):
+        await self.auth.get_user_by_req(request)
         respond_with_json(request, 200, self.limits_dict, send_cors=True)
 
     def render_OPTIONS(self, request):
diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py
index a21a35f843..66a01559e1 100644
--- a/synapse/rest/media/v1/download_resource.py
+++ b/synapse/rest/media/v1/download_resource.py
@@ -14,37 +14,31 @@
 # limitations under the License.
 import logging
 
-from twisted.internet import defer
-from twisted.web.resource import Resource
-from twisted.web.server import NOT_DONE_YET
-
 import synapse.http.servlet
-from synapse.http.server import set_cors_headers, wrap_json_request_handler
+from synapse.http.server import (
+    DirectServeResource,
+    set_cors_headers,
+    wrap_json_request_handler,
+)
 
 from ._base import parse_media_id, respond_404
 
 logger = logging.getLogger(__name__)
 
 
-class DownloadResource(Resource):
+class DownloadResource(DirectServeResource):
     isLeaf = True
 
     def __init__(self, hs, media_repo):
-        Resource.__init__(self)
-
+        super().__init__()
         self.media_repo = media_repo
         self.server_name = hs.hostname
 
         # this is expected by @wrap_json_request_handler
         self.clock = hs.get_clock()
 
-    def render_GET(self, request):
-        self._async_render_GET(request)
-        return NOT_DONE_YET
-
     @wrap_json_request_handler
-    @defer.inlineCallbacks
-    def _async_render_GET(self, request):
+    async def _async_render_GET(self, request):
         set_cors_headers(request)
         request.setHeader(
             b"Content-Security-Policy",
@@ -58,7 +52,7 @@ class DownloadResource(Resource):
         )
         server_name, media_id, name = parse_media_id(request)
         if server_name == self.server_name:
-            yield self.media_repo.get_local_media(request, media_id, name)
+            await self.media_repo.get_local_media(request, media_id, name)
         else:
             allow_remote = synapse.http.servlet.parse_boolean(
                 request, "allow_remote", default=True
@@ -72,4 +66,4 @@ class DownloadResource(Resource):
                 respond_404(request)
                 return
 
-            yield self.media_repo.get_remote_media(request, server_name, media_id, name)
+            await self.media_repo.get_remote_media(request, server_name, media_id, name)
diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py
index df3d985a38..65afffbb42 100644
--- a/synapse/rest/media/v1/media_repository.py
+++ b/synapse/rest/media/v1/media_repository.py
@@ -33,8 +33,8 @@ from synapse.api.errors import (
     RequestSendFailed,
     SynapseError,
 )
+from synapse.logging.context import defer_to_thread
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util import logcontext
 from synapse.util.async_helpers import Linearizer
 from synapse.util.retryutils import NotRetryingDestination
 from synapse.util.stringutils import random_string
@@ -463,7 +463,7 @@ class MediaRepository(object):
         )
 
         thumbnailer = Thumbnailer(input_path)
-        t_byte_source = yield logcontext.defer_to_thread(
+        t_byte_source = yield defer_to_thread(
             self.hs.get_reactor(),
             self._generate_thumbnail,
             thumbnailer,
@@ -511,7 +511,7 @@ class MediaRepository(object):
         )
 
         thumbnailer = Thumbnailer(input_path)
-        t_byte_source = yield logcontext.defer_to_thread(
+        t_byte_source = yield defer_to_thread(
             self.hs.get_reactor(),
             self._generate_thumbnail,
             thumbnailer,
@@ -596,7 +596,7 @@ class MediaRepository(object):
             return
 
         if thumbnailer.transpose_method is not None:
-            m_width, m_height = yield logcontext.defer_to_thread(
+            m_width, m_height = yield defer_to_thread(
                 self.hs.get_reactor(), thumbnailer.transpose
             )
 
@@ -616,11 +616,11 @@ class MediaRepository(object):
         for (t_width, t_height, t_type), t_method in iteritems(thumbnails):
             # Generate the thumbnail
             if t_method == "crop":
-                t_byte_source = yield logcontext.defer_to_thread(
+                t_byte_source = yield defer_to_thread(
                     self.hs.get_reactor(), thumbnailer.crop, t_width, t_height, t_type
                 )
             elif t_method == "scale":
-                t_byte_source = yield logcontext.defer_to_thread(
+                t_byte_source = yield defer_to_thread(
                     self.hs.get_reactor(), thumbnailer.scale, t_width, t_height, t_type
                 )
             else:
diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py
index eff86836fb..25e5ac2848 100644
--- a/synapse/rest/media/v1/media_storage.py
+++ b/synapse/rest/media/v1/media_storage.py
@@ -24,9 +24,8 @@ import six
 from twisted.internet import defer
 from twisted.protocols.basic import FileSender
 
-from synapse.util import logcontext
+from synapse.logging.context import defer_to_thread, make_deferred_yieldable
 from synapse.util.file_consumer import BackgroundFileConsumer
-from synapse.util.logcontext import make_deferred_yieldable
 
 from ._base import Responder
 
@@ -65,7 +64,7 @@ class MediaStorage(object):
 
         with self.store_into_file(file_info) as (f, fname, finish_cb):
             # Write to the main repository
-            yield logcontext.defer_to_thread(
+            yield defer_to_thread(
                 self.hs.get_reactor(), _write_file_synchronously, source, f
             )
             yield finish_cb()
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index de6f292ffb..5871737bfd 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -32,22 +32,21 @@ from canonicaljson import json
 
 from twisted.internet import defer
 from twisted.internet.error import DNSLookupError
-from twisted.web.resource import Resource
-from twisted.web.server import NOT_DONE_YET
 
 from synapse.api.errors import Codes, SynapseError
 from synapse.http.client import SimpleHttpClient
 from synapse.http.server import (
+    DirectServeResource,
     respond_with_json,
     respond_with_json_bytes,
     wrap_json_request_handler,
 )
 from synapse.http.servlet import parse_integer, parse_string
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.rest.media.v1._base import get_filename_from_headers
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches.expiringcache import ExpiringCache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.stringutils import random_string
 
 from ._base import FileInfo
@@ -58,11 +57,11 @@ _charset_match = re.compile(br"<\s*meta[^>]*charset\s*=\s*([a-z0-9-]+)", flags=r
 _content_type_match = re.compile(r'.*; *charset="?(.*?)"?(;|$)', flags=re.I)
 
 
-class PreviewUrlResource(Resource):
+class PreviewUrlResource(DirectServeResource):
     isLeaf = True
 
     def __init__(self, hs, media_repo, media_storage):
-        Resource.__init__(self)
+        super().__init__()
 
         self.auth = hs.get_auth()
         self.clock = hs.get_clock()
@@ -96,18 +95,14 @@ class PreviewUrlResource(Resource):
         )
 
     def render_OPTIONS(self, request):
+        request.setHeader(b"Allow", b"OPTIONS, GET")
         return respond_with_json(request, 200, {}, send_cors=True)
 
-    def render_GET(self, request):
-        self._async_render_GET(request)
-        return NOT_DONE_YET
-
     @wrap_json_request_handler
-    @defer.inlineCallbacks
-    def _async_render_GET(self, request):
+    async def _async_render_GET(self, request):
 
         # XXX: if get_user_by_req fails, what should we do in an async render?
-        requester = yield self.auth.get_user_by_req(request)
+        requester = await self.auth.get_user_by_req(request)
         url = parse_string(request, "url")
         if b"ts" in request.args:
             ts = parse_integer(request, "ts")
@@ -159,7 +154,7 @@ class PreviewUrlResource(Resource):
         else:
             logger.info("Returning cached response")
 
-        og = yield make_deferred_yieldable(observable.observe())
+        og = await make_deferred_yieldable(defer.maybeDeferred(observable.observe))
         respond_with_json_bytes(request, 200, og, send_cors=True)
 
     @defer.inlineCallbacks
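
The pattern above generalises: `DirectServeResource` (from `synapse.http.server`) dispatches each request to a coroutine named `_async_render_<METHOD>`, which removes the need for the old `render_GET`/`NOT_DONE_YET` boilerplate. A minimal sketch, with a hypothetical resource:

    from synapse.http.server import (
        DirectServeResource,
        respond_with_json,
        wrap_json_request_handler,
    )

    class PingResource(DirectServeResource):
        isLeaf = True

        @wrap_json_request_handler
        async def _async_render_GET(self, request):
            # DirectServeResource finds this method by name and keeps the
            # connection open until the response has been written
            respond_with_json(request, 200, {"pong": True}, send_cors=True)
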
diff --git a/synapse/rest/media/v1/storage_provider.py b/synapse/rest/media/v1/storage_provider.py
index 359b45ebfc..e8f559acc1 100644
--- a/synapse/rest/media/v1/storage_provider.py
+++ b/synapse/rest/media/v1/storage_provider.py
@@ -20,8 +20,7 @@ import shutil
 from twisted.internet import defer
 
 from synapse.config._base import Config
-from synapse.util import logcontext
-from synapse.util.logcontext import run_in_background
+from synapse.logging.context import defer_to_thread, run_in_background
 
 from .media_storage import FileResponder
 
@@ -125,7 +124,7 @@ class FileStorageProviderBackend(StorageProvider):
         if not os.path.exists(dirname):
             os.makedirs(dirname)
 
-        return logcontext.defer_to_thread(
+        return defer_to_thread(
             self.hs.get_reactor(), shutil.copyfile, primary_fname, backup_fname
         )
 
diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py
index ca84c9f139..08329884ac 100644
--- a/synapse/rest/media/v1/thumbnail_resource.py
+++ b/synapse/rest/media/v1/thumbnail_resource.py
@@ -17,10 +17,12 @@
 import logging
 
 from twisted.internet import defer
-from twisted.web.resource import Resource
-from twisted.web.server import NOT_DONE_YET
 
-from synapse.http.server import set_cors_headers, wrap_json_request_handler
+from synapse.http.server import (
+    DirectServeResource,
+    set_cors_headers,
+    wrap_json_request_handler,
+)
 from synapse.http.servlet import parse_integer, parse_string
 
 from ._base import (
@@ -34,11 +36,11 @@ from ._base import (
 logger = logging.getLogger(__name__)
 
 
-class ThumbnailResource(Resource):
+class ThumbnailResource(DirectServeResource):
     isLeaf = True
 
     def __init__(self, hs, media_repo, media_storage):
-        Resource.__init__(self)
+        super().__init__()
 
         self.store = hs.get_datastore()
         self.media_repo = media_repo
@@ -47,13 +49,8 @@ class ThumbnailResource(Resource):
         self.server_name = hs.hostname
         self.clock = hs.get_clock()
 
-    def render_GET(self, request):
-        self._async_render_GET(request)
-        return NOT_DONE_YET
-
     @wrap_json_request_handler
-    @defer.inlineCallbacks
-    def _async_render_GET(self, request):
+    async def _async_render_GET(self, request):
         set_cors_headers(request)
         server_name, media_id, _ = parse_media_id(request)
         width = parse_integer(request, "width", required=True)
@@ -63,21 +60,21 @@ class ThumbnailResource(Resource):
 
         if server_name == self.server_name:
             if self.dynamic_thumbnails:
-                yield self._select_or_generate_local_thumbnail(
+                await self._select_or_generate_local_thumbnail(
                     request, media_id, width, height, method, m_type
                 )
             else:
-                yield self._respond_local_thumbnail(
+                await self._respond_local_thumbnail(
                     request, media_id, width, height, method, m_type
                 )
             self.media_repo.mark_recently_accessed(None, media_id)
         else:
             if self.dynamic_thumbnails:
-                yield self._select_or_generate_remote_thumbnail(
+                await self._select_or_generate_remote_thumbnail(
                     request, server_name, media_id, width, height, method, m_type
                 )
             else:
-                yield self._respond_remote_thumbnail(
+                await self._respond_remote_thumbnail(
                     request, server_name, media_id, width, height, method, m_type
                 )
             self.media_repo.mark_recently_accessed(server_name, media_id)
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index d1d7e959f0..5d76bbdf68 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -15,22 +15,24 @@
 
 import logging
 
-from twisted.internet import defer
-from twisted.web.resource import Resource
 from twisted.web.server import NOT_DONE_YET
 
 from synapse.api.errors import SynapseError
-from synapse.http.server import respond_with_json, wrap_json_request_handler
+from synapse.http.server import (
+    DirectServeResource,
+    respond_with_json,
+    wrap_json_request_handler,
+)
 from synapse.http.servlet import parse_string
 
 logger = logging.getLogger(__name__)
 
 
-class UploadResource(Resource):
+class UploadResource(DirectServeResource):
     isLeaf = True
 
     def __init__(self, hs, media_repo):
-        Resource.__init__(self)
+        super().__init__()
 
         self.media_repo = media_repo
         self.filepaths = media_repo.filepaths
@@ -41,18 +43,13 @@ class UploadResource(Resource):
         self.max_upload_size = hs.config.max_upload_size
         self.clock = hs.get_clock()
 
-    def render_POST(self, request):
-        self._async_render_POST(request)
-        return NOT_DONE_YET
-
     def render_OPTIONS(self, request):
         respond_with_json(request, 200, {}, send_cors=True)
         return NOT_DONE_YET
 
     @wrap_json_request_handler
-    @defer.inlineCallbacks
-    def _async_render_POST(self, request):
-        requester = yield self.auth.get_user_by_req(request)
+    async def _async_render_POST(self, request):
+        requester = await self.auth.get_user_by_req(request)
         # TODO: The checks here are a bit late. The content will have
         # already been uploaded to a tmp file at this point
         content_length = request.getHeader(b"Content-Length").decode("ascii")
@@ -81,7 +78,7 @@ class UploadResource(Resource):
         #     disposition = headers.getRawHeaders(b"Content-Disposition")[0]
         # TODO(markjh): parse content-disposition
 
-        content_uri = yield self.media_repo.create_content(
+        content_uri = await self.media_repo.create_content(
             media_type, upload_name, request.content, content_length, requester.user
         )
 
diff --git a/synapse/rest/saml2/response_resource.py b/synapse/rest/saml2/response_resource.py
index ab14b70675..69ecc5e4b4 100644
--- a/synapse/rest/saml2/response_resource.py
+++ b/synapse/rest/saml2/response_resource.py
@@ -13,59 +13,19 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import logging
 
-import saml2
-from saml2.client import Saml2Client
+from synapse.http.server import DirectServeResource, wrap_html_request_handler
 
-from twisted.web.resource import Resource
-from twisted.web.server import NOT_DONE_YET
 
-from synapse.api.errors import CodeMessageException
-from synapse.http.server import wrap_html_request_handler
-from synapse.http.servlet import parse_string
-from synapse.rest.client.v1.login import SSOAuthHandler
-
-logger = logging.getLogger(__name__)
-
-
-class SAML2ResponseResource(Resource):
+class SAML2ResponseResource(DirectServeResource):
     """A Twisted web resource which handles the SAML response"""
 
     isLeaf = 1
 
     def __init__(self, hs):
-        Resource.__init__(self)
-
-        self._saml_client = Saml2Client(hs.config.saml2_sp_config)
-        self._sso_auth_handler = SSOAuthHandler(hs)
-
-    def render_POST(self, request):
-        self._async_render_POST(request)
-        return NOT_DONE_YET
+        super().__init__()
+        self._saml_handler = hs.get_saml_handler()
 
     @wrap_html_request_handler
-    def _async_render_POST(self, request):
-        resp_bytes = parse_string(request, "SAMLResponse", required=True)
-        relay_state = parse_string(request, "RelayState", required=True)
-
-        try:
-            saml2_auth = self._saml_client.parse_authn_request_response(
-                resp_bytes, saml2.BINDING_HTTP_POST
-            )
-        except Exception as e:
-            logger.warning("Exception parsing SAML2 response", exc_info=1)
-            raise CodeMessageException(400, "Unable to parse SAML2 response: %s" % (e,))
-
-        if saml2_auth.not_signed:
-            raise CodeMessageException(400, "SAML2 response was not signed")
-
-        if "uid" not in saml2_auth.ava:
-            raise CodeMessageException(400, "uid not in SAML2 response")
-
-        username = saml2_auth.ava["uid"][0]
-
-        displayName = saml2_auth.ava.get("displayName", [None])[0]
-        return self._sso_auth_handler.on_successful_auth(
-            username, request, relay_state, user_display_name=displayName
-        )
+    async def _async_render_POST(self, request):
+        return await self._saml_handler.handle_saml_response(request)
diff --git a/synapse/server.py b/synapse/server.py
index a9592c396c..9e28dba2b1 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -194,6 +194,7 @@ class HomeServer(object):
         "sendmail",
         "registration_handler",
         "account_validity_handler",
+        "saml_handler",
         "event_client_serializer",
     ]
 
@@ -524,6 +525,11 @@ class HomeServer(object):
     def build_account_validity_handler(self):
         return AccountValidityHandler(self)
 
+    def build_saml_handler(self):
+        from synapse.handlers.saml_handler import SamlHandler
+
+        return SamlHandler(self)
+
     def build_event_client_serializer(self):
         return EventClientSerializer(self)
 
diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py
index 1b454a56a1..9f708fa205 100644
--- a/synapse/state/__init__.py
+++ b/synapse/state/__init__.py
@@ -28,11 +28,11 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes
 from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, StateResolutionVersions
 from synapse.events.snapshot import EventContext
+from synapse.logging.utils import log_function
 from synapse.state import v1, v2
 from synapse.util.async_helpers import Linearizer
 from synapse.util.caches import get_cache_factor_for
 from synapse.util.caches.expiringcache import ExpiringCache
-from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 29589853c6..2f940dbae6 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -30,12 +30,12 @@ from prometheus_client import Histogram
 from twisted.internet import defer
 
 from synapse.api.errors import StoreError
+from synapse.logging.context import LoggingContext, PreserveLoggingContext
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage.engines import PostgresEngine, Sqlite3Engine
 from synapse.types import get_domain_from_id
 from synapse.util import batch_iter
 from synapse.util.caches.descriptors import Cache
-from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
 from synapse.util.stringutils import exception_to_unicode
 
 # import a function which will return a monotonic time, in seconds
diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py
index 3413a46675..d2b113a4e7 100644
--- a/synapse/storage/devices.py
+++ b/synapse/storage/devices.py
@@ -24,6 +24,7 @@ from synapse.api.errors import StoreError
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage._base import Cache, SQLBaseStore, db_to_json
 from synapse.storage.background_updates import BackgroundUpdateStore
+from synapse.util import batch_iter
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList
 
 logger = logging.getLogger(__name__)
@@ -391,22 +392,47 @@ class DeviceWorkerStore(SQLBaseStore):
 
         return now_stream_id, []
 
-    @defer.inlineCallbacks
-    def get_user_whose_devices_changed(self, from_key):
-        """Get set of users whose devices have changed since `from_key`.
+    def get_users_whose_devices_changed(self, from_key, user_ids):
+        """Get the set of users in the given list of user_ids whose devices
+        have changed since `from_key`.
+
+        Args:
+            from_key (str): The device lists stream token
+            user_ids (Iterable[str])
+
+        Returns:
+            Deferred[set[str]]: The set of user_ids whose devices have changed
+            since `from_key`
         """
         from_key = int(from_key)
-        changed = self._device_list_stream_cache.get_all_entities_changed(from_key)
-        if changed is not None:
-            defer.returnValue(set(changed))
 
-        sql = """
-            SELECT DISTINCT user_id FROM device_lists_stream WHERE stream_id > ?
-        """
-        rows = yield self._execute(
-            "get_user_whose_devices_changed", None, sql, from_key
+        # Get set of users who *may* have changed. Users not in the returned
+        # list have definitely not changed.
+        to_check = list(
+            self._device_list_stream_cache.get_entities_changed(user_ids, from_key)
+        )
+
+        if not to_check:
+            return defer.succeed(set())
+
+        def _get_users_whose_devices_changed_txn(txn):
+            changes = set()
+
+            sql = """
+                SELECT DISTINCT user_id FROM device_lists_stream
+                WHERE stream_id > ?
+                AND user_id IN (%s)
+            """
+
+            for chunk in batch_iter(to_check, 100):
+                txn.execute(sql % (",".join("?" for _ in chunk),), (from_key,) + chunk)
+                changes.update(user_id for user_id, in txn)
+
+            return changes
+
+        return self.runInteraction(
+            "get_users_whose_devices_changed", _get_users_whose_devices_changed_txn
         )
-        defer.returnValue(set(row[0] for row in rows))
 
     def get_all_device_list_changes_for_remotes(self, from_key, to_key):
         """Return a list of `(stream_id, user_id, destination)` which is the
diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index fefba39ea1..b486ca50eb 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -33,6 +33,8 @@ from synapse.api.constants import EventTypes
 from synapse.api.errors import SynapseError
 from synapse.events import EventBase  # noqa: F401
 from synapse.events.snapshot import EventContext  # noqa: F401
+from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable
+from synapse.logging.utils import log_function
 from synapse.metrics import BucketCollector
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.state import StateResolutionStore
@@ -45,8 +47,6 @@ from synapse.util import batch_iter
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 from synapse.util.frozenutils import frozendict_json_encoder
-from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable
-from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)
@@ -253,7 +253,14 @@ class EventsStore(
         )
 
         # Read the extrems every 60 minutes
-        hs.get_clock().looping_call(self._read_forward_extremities, 60 * 60 * 1000)
+        def read_forward_extremities():
+            # run as a background process to make sure that the database transactions
+            # have a logcontext to report to
+            return run_as_background_process(
+                "read_forward_extremities", self._read_forward_extremities
+            )
+
+        hs.get_clock().looping_call(read_forward_extremities, 60 * 60 * 1000)
 
     @defer.inlineCallbacks
     def _read_forward_extremities(self):
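
The wrapper above illustrates a general rule (see also the `Clock.looping_call` docstring change below): `looping_call` fires its callback with no logcontext, so any callback that touches the database should be wrapped in `run_as_background_process`. A sketch of the pattern, with hypothetical names:

    from synapse.metrics.background_process_metrics import run_as_background_process

    def start_periodic_job(hs, store):
        def do_job():
            # give the database transactions a logcontext to report to
            return run_as_background_process("periodic_job", store.run_job)

        hs.get_clock().looping_call(do_job, 60 * 1000)  # every minute
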
diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py
index 6d680d405a..09db872511 100644
--- a/synapse/storage/events_worker.py
+++ b/synapse/storage/events_worker.py
@@ -29,14 +29,14 @@ from synapse.api.room_versions import EventFormatVersions
 from synapse.events import FrozenEvent, event_type_from_format_version  # noqa: F401
 from synapse.events.snapshot import EventContext  # noqa: F401
 from synapse.events.utils import prune_event
-from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.types import get_domain_from_id
-from synapse.util.logcontext import (
+from synapse.logging.context import (
     LoggingContext,
     PreserveLoggingContext,
     make_deferred_yieldable,
     run_in_background,
 )
+from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.types import get_domain_from_id
 from synapse.util.metrics import Measure
 
 from ._base import SQLBaseStore
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index 983ce13291..aea5b3276b 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -25,6 +25,7 @@ from twisted.internet import defer
 
 from synapse.api.constants import UserTypes
 from synapse.api.errors import Codes, StoreError, ThreepidValidationError
+from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage import background_updates
 from synapse.storage._base import SQLBaseStore
 from synapse.types import UserID
@@ -432,19 +433,6 @@ class RegistrationWorkerStore(SQLBaseStore):
         )
 
     @defer.inlineCallbacks
-    def get_3pid_guest_access_token(self, medium, address):
-        ret = yield self._simple_select_one(
-            "threepid_guest_access_tokens",
-            {"medium": medium, "address": address},
-            ["guest_access_token"],
-            True,
-            "get_3pid_guest_access_token",
-        )
-        if ret:
-            defer.returnValue(ret["guest_access_token"])
-        defer.returnValue(None)
-
-    @defer.inlineCallbacks
     def get_user_id_by_threepid(self, medium, address, require_verified=False):
         """Returns user id from threepid
 
@@ -619,9 +607,15 @@ class RegistrationStore(
         )
 
         # Create a background job for culling expired 3PID validity tokens
-        hs.get_clock().looping_call(
-            self.cull_expired_threepid_validation_tokens, THIRTY_MINUTES_IN_MS
-        )
+        def start_cull():
+            # run as a background process to make sure that the database transactions
+            # have a logcontext to report to
+            return run_as_background_process(
+                "cull_expired_threepid_validation_tokens",
+                self.cull_expired_threepid_validation_tokens,
+            )
+
+        hs.get_clock().looping_call(start_cull, THIRTY_MINUTES_IN_MS)
 
     @defer.inlineCallbacks
     def _backgroud_update_set_deactivated_flag(self, progress, batch_size):
@@ -972,40 +966,6 @@ class RegistrationStore(
 
         defer.returnValue(res if res else False)
 
-    @defer.inlineCallbacks
-    def save_or_get_3pid_guest_access_token(
-        self, medium, address, access_token, inviter_user_id
-    ):
-        """
-        Gets the 3pid's guest access token if exists, else saves access_token.
-
-        Args:
-            medium (str): Medium of the 3pid. Must be "email".
-            address (str): 3pid address.
-            access_token (str): The access token to persist if none is
-                already persisted.
-            inviter_user_id (str): User ID of the inviter.
-
-        Returns:
-            deferred str: Whichever access token is persisted at the end
-            of this function call.
-        """
-
-        def insert(txn):
-            txn.execute(
-                "INSERT INTO threepid_guest_access_tokens "
-                "(medium, address, guest_access_token, first_inviter) "
-                "VALUES (?, ?, ?, ?)",
-                (medium, address, access_token, inviter_user_id),
-            )
-
-        try:
-            yield self.runInteraction("save_3pid_guest_access_token", insert)
-            defer.returnValue(access_token)
-        except self.database_engine.module.IntegrityError:
-            ret = yield self.get_3pid_guest_access_token(medium, address)
-            defer.returnValue(ret)
-
     def add_user_pending_deactivation(self, user_id):
         """
         Adds a user to the table of users who need to be parted from all the rooms they're
diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py
index d9482a3843..386a9dbe14 100644
--- a/synapse/storage/stream.py
+++ b/synapse/storage/stream.py
@@ -41,12 +41,12 @@ from six.moves import range
 
 from twisted.internet import defer
 
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.engines import PostgresEngine
 from synapse.storage.events_worker import EventsWorkerStore
 from synapse.types import RoomStreamToken
 from synapse.util.caches.stream_change_cache import StreamChangeCache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
 logger = logging.getLogger(__name__)
 
diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py
index b1188f6bcb..fd18619178 100644
--- a/synapse/storage/transactions.py
+++ b/synapse/storage/transactions.py
@@ -133,34 +133,6 @@ class TransactionStore(SQLBaseStore):
             desc="set_received_txn_response",
         )
 
-    def prep_send_transaction(self, transaction_id, destination, origin_server_ts):
-        """Persists an outgoing transaction and calculates the values for the
-        previous transaction id list.
-
-        This should be called before sending the transaction so that it has the
-        correct value for the `prev_ids` key.
-
-        Args:
-            transaction_id (str)
-            destination (str)
-            origin_server_ts (int)
-
-        Returns:
-            list: A list of previous transaction ids.
-        """
-        return defer.succeed([])
-
-    def delivered_txn(self, transaction_id, destination, code, response_dict):
-        """Persists the response for an outgoing transaction.
-
-        Args:
-            transaction_id (str)
-            destination (str)
-            code (int)
-            response_json (str)
-        """
-        pass
-
     @defer.inlineCallbacks
     def get_destination_retry_timings(self, destination):
         """Gets the current retry timings (if any) for a given destination.
diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py
index dcc747cac1..f506b2a695 100644
--- a/synapse/util/__init__.py
+++ b/synapse/util/__init__.py
@@ -21,7 +21,7 @@ import attr
 
 from twisted.internet import defer, task
 
-from synapse.util.logcontext import PreserveLoggingContext
+from synapse.logging import context
 
 logger = logging.getLogger(__name__)
 
@@ -46,7 +46,7 @@ class Clock(object):
     @defer.inlineCallbacks
     def sleep(self, seconds):
         d = defer.Deferred()
-        with PreserveLoggingContext():
+        with context.PreserveLoggingContext():
             self._reactor.callLater(seconds, d.callback, seconds)
             res = yield d
         defer.returnValue(res)
@@ -62,7 +62,10 @@ class Clock(object):
     def looping_call(self, f, msec):
         """Call a function repeatedly.
 
-         Waits `msec` initially before calling `f` for the first time.
+        Waits `msec` initially before calling `f` for the first time.
+
+        Note that the function will be called with no logcontext, so if it is anything
+        other than trivial, you probably want to wrap it in run_as_background_process.
 
         Args:
             f(function): The function to call repeatedly.
@@ -77,6 +80,9 @@ class Clock(object):
     def call_later(self, delay, callback, *args, **kwargs):
         """Call something later
 
+        Note that the function will be called with no logcontext, so if it is anything
+        other than trivial, you probably want to wrap it in run_as_background_process.
+
         Args:
             delay(float): How long to wait in seconds.
             callback(function): Function to call
@@ -85,10 +91,10 @@ class Clock(object):
         """
 
         def wrapped_callback(*args, **kwargs):
-            with PreserveLoggingContext():
+            with context.PreserveLoggingContext():
                 callback(*args, **kwargs)
 
-        with PreserveLoggingContext():
+        with context.PreserveLoggingContext():
             return self._reactor.callLater(delay, wrapped_callback, *args, **kwargs)
 
     def cancel_call_later(self, timer, ignore_errs=False):
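
`call_later` carries the same caveat as `looping_call`: the callback runs with no logcontext. A minimal sketch of the recommended wrapping, with illustrative names:

    from synapse.metrics.background_process_metrics import run_as_background_process

    def schedule_expiry(hs, expire_fn):
        def do_expire():
            # anything non-trivial should run as a background process so
            # its resource usage is tracked against a logcontext
            return run_as_background_process("expiry", expire_fn)

        hs.get_clock().call_later(30.0, do_expire)
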
diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py
index 7757b8708a..58a6b8764f 100644
--- a/synapse/util/async_helpers.py
+++ b/synapse/util/async_helpers.py
@@ -23,13 +23,12 @@ from twisted.internet import defer
 from twisted.internet.defer import CancelledError
 from twisted.python import failure
 
-from synapse.util import Clock, logcontext, unwrapFirstError
-
-from .logcontext import (
+from synapse.logging.context import (
     PreserveLoggingContext,
     make_deferred_yieldable,
     run_in_background,
 )
+from synapse.util import Clock, unwrapFirstError
 
 logger = logging.getLogger(__name__)
 
@@ -153,7 +152,7 @@ def concurrently_execute(func, args, limit):
         except StopIteration:
             pass
 
-    return logcontext.make_deferred_yieldable(
+    return make_deferred_yieldable(
         defer.gatherResults(
             [run_in_background(_concurrently_execute_inner) for _ in range(limit)],
             consumeErrors=True,
@@ -174,7 +173,7 @@ def yieldable_gather_results(func, iter, *args, **kwargs):
         Deferred[list]: Resolved when all functions have been invoked, or errors if
         one of the function calls fails.
     """
-    return logcontext.make_deferred_yieldable(
+    return make_deferred_yieldable(
         defer.gatherResults(
             [run_in_background(func, item, *args, **kwargs) for item in iter],
             consumeErrors=True,
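
`yieldable_gather_results` is the logcontext-safe analogue of `defer.gatherResults`: each call runs in its own background context, and the combined deferred can be yielded on directly. A usage sketch (the fetch function is hypothetical):

    from twisted.internet import defer

    from synapse.util.async_helpers import yieldable_gather_results

    @defer.inlineCallbacks
    def fetch_all(fetch_one, urls):
        # invokes fetch_one(url) for each url, concurrently
        results = yield yieldable_gather_results(fetch_one, urls)
        defer.returnValue(results)
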
diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py
index d2f25063aa..675db2f448 100644
--- a/synapse/util/caches/descriptors.py
+++ b/synapse/util/caches/descriptors.py
@@ -24,7 +24,8 @@ from six import itervalues, string_types
 
 from twisted.internet import defer
 
-from synapse.util import logcontext, unwrapFirstError
+from synapse.logging.context import make_deferred_yieldable, preserve_fn
+from synapse.util import unwrapFirstError
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches import get_cache_factor_for
 from synapse.util.caches.lrucache import LruCache
@@ -388,7 +389,7 @@ class CacheDescriptor(_CacheDescriptorBase):
 
             except KeyError:
                 ret = defer.maybeDeferred(
-                    logcontext.preserve_fn(self.function_to_call), obj, *args, **kwargs
+                    preserve_fn(self.function_to_call), obj, *args, **kwargs
                 )
 
                 def onErr(f):
@@ -408,7 +409,7 @@ class CacheDescriptor(_CacheDescriptorBase):
                 observer = result_d.observe()
 
             if isinstance(observer, defer.Deferred):
-                return logcontext.make_deferred_yieldable(observer)
+                return make_deferred_yieldable(observer)
             else:
                 return observer
 
@@ -563,7 +564,7 @@ class CacheListDescriptor(_CacheDescriptorBase):
 
                 cached_defers.append(
                     defer.maybeDeferred(
-                        logcontext.preserve_fn(self.function_to_call), **args_to_call
+                        preserve_fn(self.function_to_call), **args_to_call
                     ).addCallbacks(complete_all, errback)
                 )
 
@@ -571,7 +572,7 @@ class CacheListDescriptor(_CacheDescriptorBase):
                 d = defer.gatherResults(cached_defers, consumeErrors=True).addCallbacks(
                     lambda _: results, unwrapFirstError
                 )
-                return logcontext.make_deferred_yieldable(d)
+                return make_deferred_yieldable(d)
             else:
                 return results
 
diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py
index cbe54d45dd..d6908e169d 100644
--- a/synapse/util/caches/response_cache.py
+++ b/synapse/util/caches/response_cache.py
@@ -16,9 +16,9 @@ import logging
 
 from twisted.internet import defer
 
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches import register_cache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
 logger = logging.getLogger(__name__)
 
@@ -78,7 +78,7 @@ class ResponseCache(object):
 
         *deferred* should run its callbacks in the sentinel logcontext (ie,
         you should wrap normal synapse deferreds with
-        logcontext.run_in_background).
+        synapse.logging.context.run_in_background).
 
         Can return either a new Deferred (which also doesn't follow the synapse
         logcontext rules), or, if *deferred* was already complete, the actual
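
To illustrate the contract described in that docstring (the surrounding helper is illustrative): the deferred handed to `ResponseCache.set` should already run its callbacks in the sentinel logcontext, which `run_in_background` arranges.

    from synapse.logging.context import run_in_background

    def get_or_compute(cache, key, compute):
        result = cache.get(key)
        if result is None:
            # run_in_background resets to the sentinel context once the
            # deferred completes, as ResponseCache.set expects
            result = cache.set(key, run_in_background(compute))
        return result
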
diff --git a/synapse/util/distributor.py b/synapse/util/distributor.py
index 5a79db821c..45af8d3eeb 100644
--- a/synapse/util/distributor.py
+++ b/synapse/util/distributor.py
@@ -17,8 +17,8 @@ import logging
 
 from twisted.internet import defer
 
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
 logger = logging.getLogger(__name__)
 
diff --git a/synapse/util/file_consumer.py b/synapse/util/file_consumer.py
index 629ed44149..8b17d1c8b8 100644
--- a/synapse/util/file_consumer.py
+++ b/synapse/util/file_consumer.py
@@ -17,7 +17,7 @@ from six.moves import queue
 
 from twisted.internet import threads
 
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 
 
 class BackgroundFileConsumer(object):
diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py
index 6b0d2deea0..40e5c10a49 100644
--- a/synapse/util/logcontext.py
+++ b/synapse/util/logcontext.py
@@ -1,4 +1,4 @@
-# Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,668 +12,28 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-""" Thread-local-alike tracking of log contexts within synapse
-
-This module provides objects and utilities for tracking contexts through
-synapse code, so that log lines can include a request identifier, and so that
-CPU and database activity can be accounted for against the request that caused
-them.
-
-See doc/log_contexts.rst for details on how this works.
+"""
+Backwards compatibility re-exports of ``synapse.logging.context`` functionality.
 """
 
-import logging
-import threading
-
-from twisted.internet import defer, threads
-
-logger = logging.getLogger(__name__)
-
-try:
-    import resource
-
-    # Python doesn't ship with a definition of RUSAGE_THREAD but it's defined
-    # to be 1 on linux so we hard code it.
-    RUSAGE_THREAD = 1
-
-    # If the system doesn't support RUSAGE_THREAD then this should throw an
-    # exception.
-    resource.getrusage(RUSAGE_THREAD)
-
-    def get_thread_resource_usage():
-        return resource.getrusage(RUSAGE_THREAD)
-
-
-except Exception:
-    # If the system doesn't support resource.getrusage(RUSAGE_THREAD) then we
-    # won't track resource usage by returning None.
-    def get_thread_resource_usage():
-        return None
-
-
-class ContextResourceUsage(object):
-    """Object for tracking the resources used by a log context
-
-    Attributes:
-        ru_utime (float): user CPU time (in seconds)
-        ru_stime (float): system CPU time (in seconds)
-        db_txn_count (int): number of database transactions done
-        db_sched_duration_sec (float): amount of time spent waiting for a
-            database connection
-        db_txn_duration_sec (float): amount of time spent doing database
-            transactions (excluding scheduling time)
-        evt_db_fetch_count (int): number of events requested from the database
-    """
-
-    __slots__ = [
-        "ru_stime",
-        "ru_utime",
-        "db_txn_count",
-        "db_txn_duration_sec",
-        "db_sched_duration_sec",
-        "evt_db_fetch_count",
-    ]
-
-    def __init__(self, copy_from=None):
-        """Create a new ContextResourceUsage
-
-        Args:
-            copy_from (ContextResourceUsage|None): if not None, an object to
-                copy stats from
-        """
-        if copy_from is None:
-            self.reset()
-        else:
-            self.ru_utime = copy_from.ru_utime
-            self.ru_stime = copy_from.ru_stime
-            self.db_txn_count = copy_from.db_txn_count
-
-            self.db_txn_duration_sec = copy_from.db_txn_duration_sec
-            self.db_sched_duration_sec = copy_from.db_sched_duration_sec
-            self.evt_db_fetch_count = copy_from.evt_db_fetch_count
-
-    def copy(self):
-        return ContextResourceUsage(copy_from=self)
-
-    def reset(self):
-        self.ru_stime = 0.0
-        self.ru_utime = 0.0
-        self.db_txn_count = 0
-
-        self.db_txn_duration_sec = 0
-        self.db_sched_duration_sec = 0
-        self.evt_db_fetch_count = 0
-
-    def __repr__(self):
-        return (
-            "<ContextResourceUsage ru_stime='%r', ru_utime='%r', "
-            "db_txn_count='%r', db_txn_duration_sec='%r', "
-            "db_sched_duration_sec='%r', evt_db_fetch_count='%r'>"
-        ) % (
-            self.ru_stime,
-            self.ru_utime,
-            self.db_txn_count,
-            self.db_txn_duration_sec,
-            self.db_sched_duration_sec,
-            self.evt_db_fetch_count,
-        )
-
-    def __iadd__(self, other):
-        """Add another ContextResourceUsage's stats to this one's.
-
-        Args:
-            other (ContextResourceUsage): the other resource usage object
-        """
-        self.ru_utime += other.ru_utime
-        self.ru_stime += other.ru_stime
-        self.db_txn_count += other.db_txn_count
-        self.db_txn_duration_sec += other.db_txn_duration_sec
-        self.db_sched_duration_sec += other.db_sched_duration_sec
-        self.evt_db_fetch_count += other.evt_db_fetch_count
-        return self
-
-    def __isub__(self, other):
-        self.ru_utime -= other.ru_utime
-        self.ru_stime -= other.ru_stime
-        self.db_txn_count -= other.db_txn_count
-        self.db_txn_duration_sec -= other.db_txn_duration_sec
-        self.db_sched_duration_sec -= other.db_sched_duration_sec
-        self.evt_db_fetch_count -= other.evt_db_fetch_count
-        return self
-
-    def __add__(self, other):
-        res = ContextResourceUsage(copy_from=self)
-        res += other
-        return res
-
-    def __sub__(self, other):
-        res = ContextResourceUsage(copy_from=self)
-        res -= other
-        return res
-
-
-class LoggingContext(object):
-    """Additional context for log formatting. Contexts are scoped within a
-    "with" block.
-
-    If a parent is given when creating a new context, then:
-        - logging fields are copied from the parent to the new context on entry
-        - when the new context exits, the cpu usage stats are copied from the
-          child to the parent
-
-    Args:
-        name (str): Name for the context for debugging.
-        parent_context (LoggingContext|None): The parent of the new context
-    """
-
-    __slots__ = [
-        "previous_context",
-        "name",
-        "parent_context",
-        "_resource_usage",
-        "usage_start",
-        "main_thread",
-        "alive",
-        "request",
-        "tag",
-    ]
-
-    thread_local = threading.local()
-
-    class Sentinel(object):
-        """Sentinel to represent the root context"""
-
-        __slots__ = []
-
-        def __str__(self):
-            return "sentinel"
-
-        def copy_to(self, record):
-            pass
-
-        def start(self):
-            pass
-
-        def stop(self):
-            pass
-
-        def add_database_transaction(self, duration_sec):
-            pass
-
-        def add_database_scheduled(self, sched_sec):
-            pass
-
-        def record_event_fetch(self, event_count):
-            pass
-
-        def __nonzero__(self):
-            return False
-
-        __bool__ = __nonzero__  # python3
-
-    sentinel = Sentinel()
-
-    def __init__(self, name=None, parent_context=None, request=None):
-        self.previous_context = LoggingContext.current_context()
-        self.name = name
-
-        # track the resources used by this context so far
-        self._resource_usage = ContextResourceUsage()
-
-        # If alive has the thread resource usage when the logcontext last
-        # became active.
-        self.usage_start = None
-
-        self.main_thread = threading.current_thread()
-        self.request = None
-        self.tag = ""
-        self.alive = True
-
-        self.parent_context = parent_context
-
-        if self.parent_context is not None:
-            self.parent_context.copy_to(self)
-
-        if request is not None:
-            # the request param overrides the request from the parent context
-            self.request = request
-
-    def __str__(self):
-        if self.request:
-            return str(self.request)
-        return "%s@%x" % (self.name, id(self))
-
-    @classmethod
-    def current_context(cls):
-        """Get the current logging context from thread local storage
-
-        Returns:
-            LoggingContext: the current logging context
-        """
-        return getattr(cls.thread_local, "current_context", cls.sentinel)
-
-    @classmethod
-    def set_current_context(cls, context):
-        """Set the current logging context in thread local storage
-        Args:
-            context(LoggingContext): The context to activate.
-        Returns:
-            The context that was previously active
-        """
-        current = cls.current_context()
-
-        if current is not context:
-            current.stop()
-            cls.thread_local.current_context = context
-            context.start()
-        return current
-
-    def __enter__(self):
-        """Enters this logging context into thread local storage"""
-        old_context = self.set_current_context(self)
-        if self.previous_context != old_context:
-            logger.warn(
-                "Expected previous context %r, found %r",
-                self.previous_context,
-                old_context,
-            )
-        self.alive = True
-
-        return self
-
-    def __exit__(self, type, value, traceback):
-        """Restore the logging context in thread local storage to the state it
-        was before this context was entered.
-        Returns:
-            None to avoid suppressing any exceptions that were thrown.
-        """
-        current = self.set_current_context(self.previous_context)
-        if current is not self:
-            if current is self.sentinel:
-                logger.warning("Expected logging context %s was lost", self)
-            else:
-                logger.warning(
-                    "Expected logging context %s but found %s", self, current
-                )
-        self.previous_context = None
-        self.alive = False
-
-        # if we have a parent, pass our CPU usage stats on
-        if self.parent_context is not None and hasattr(
-            self.parent_context, "_resource_usage"
-        ):
-            self.parent_context._resource_usage += self._resource_usage
-
-            # reset them in case we get entered again
-            self._resource_usage.reset()
-
-    def copy_to(self, record):
-        """Copy logging fields from this context to a log record or
-        another LoggingContext
-        """
-
-        # 'request' is the only field we currently use in the logger, so that's
-        # all we need to copy
-        record.request = self.request
-
-    def start(self):
-        if threading.current_thread() is not self.main_thread:
-            logger.warning("Started logcontext %s on different thread", self)
-            return
-
-        # If we haven't already started record the thread resource usage so
-        # far
-        if not self.usage_start:
-            self.usage_start = get_thread_resource_usage()
-
-    def stop(self):
-        if threading.current_thread() is not self.main_thread:
-            logger.warning("Stopped logcontext %s on different thread", self)
-            return
-
-        # When we stop, let's record the cpu used since we started
-        if not self.usage_start:
-            logger.warning("Called stop on logcontext %s without calling start", self)
-            return
-
-        utime_delta, stime_delta = self._get_cputime()
-        self._resource_usage.ru_utime += utime_delta
-        self._resource_usage.ru_stime += stime_delta
-
-        self.usage_start = None
-
-    def get_resource_usage(self):
-        """Get resources used by this logcontext so far.
-
-        Returns:
-            ContextResourceUsage: a *copy* of the object tracking resource
-                usage so far
-        """
-        # we always return a copy, for consistency
-        res = self._resource_usage.copy()
-
-        # If we are on the correct thread and we're currently running then we
-        # can include resource usage so far.
-        is_main_thread = threading.current_thread() is self.main_thread
-        if self.alive and self.usage_start and is_main_thread:
-            utime_delta, stime_delta = self._get_cputime()
-            res.ru_utime += utime_delta
-            res.ru_stime += stime_delta
-
-        return res
-
-    def _get_cputime(self):
-        """Get the cpu usage time so far
-
-        Returns: Tuple[float, float]: seconds in user mode, seconds in system mode
-        """
-        current = get_thread_resource_usage()
-
-        utime_delta = current.ru_utime - self.usage_start.ru_utime
-        stime_delta = current.ru_stime - self.usage_start.ru_stime
-
-        # sanity check
-        if utime_delta < 0:
-            logger.error(
-                "utime went backwards! %f < %f",
-                current.ru_utime,
-                self.usage_start.ru_utime,
-            )
-            utime_delta = 0
-
-        if stime_delta < 0:
-            logger.error(
-                "stime went backwards! %f < %f",
-                current.ru_stime,
-                self.usage_start.ru_stime,
-            )
-            stime_delta = 0
-
-        return utime_delta, stime_delta
-
-    def add_database_transaction(self, duration_sec):
-        if duration_sec < 0:
-            raise ValueError("DB txn time can only be non-negative")
-        self._resource_usage.db_txn_count += 1
-        self._resource_usage.db_txn_duration_sec += duration_sec
-
-    def add_database_scheduled(self, sched_sec):
-        """Record a use of the database pool
-
-        Args:
-            sched_sec (float): number of seconds it took us to get a
-                connection
-        """
-        if sched_sec < 0:
-            raise ValueError("DB scheduling time can only be non-negative")
-        self._resource_usage.db_sched_duration_sec += sched_sec
-
-    def record_event_fetch(self, event_count):
-        """Record a number of events being fetched from the db
-
-        Args:
-            event_count (int): number of events being fetched
-        """
-        self._resource_usage.evt_db_fetch_count += event_count
-
-
-class LoggingContextFilter(logging.Filter):
-    """Logging filter that adds values from the current logging context to each
-    record.
-    Args:
-        **defaults: Default values to avoid formatters complaining about
-            missing fields
-    """
-
-    def __init__(self, **defaults):
-        self.defaults = defaults
-
-    def filter(self, record):
-        """Add each fields from the logging contexts to the record.
-        Returns:
-            True to include the record in the log output.
-        """
-        context = LoggingContext.current_context()
-        for key, value in self.defaults.items():
-            setattr(record, key, value)
-
-        # context should never be None, but if it somehow ends up being, then
-        # we end up in a death spiral of infinite loops, so let's check, for
-        # robustness' sake.
-        if context is not None:
-            context.copy_to(record)
-
-        return True
-
-
-class PreserveLoggingContext(object):
-    """Captures the current logging context and restores it when the scope is
-    exited. Used to restore the context after a function using
-    @defer.inlineCallbacks is resumed by a callback from the reactor."""
-
-    __slots__ = ["current_context", "new_context", "has_parent"]
-
-    def __init__(self, new_context=None):
-        if new_context is None:
-            new_context = LoggingContext.sentinel
-        self.new_context = new_context
-
-    def __enter__(self):
-        """Captures the current logging context"""
-        self.current_context = LoggingContext.set_current_context(self.new_context)
-
-        if self.current_context:
-            self.has_parent = self.current_context.previous_context is not None
-            if not self.current_context.alive:
-                logger.debug("Entering dead context: %s", self.current_context)
-
-    def __exit__(self, type, value, traceback):
-        """Restores the current logging context"""
-        context = LoggingContext.set_current_context(self.current_context)
-
-        if context != self.new_context:
-            if context is LoggingContext.sentinel:
-                logger.warning("Expected logging context %s was lost", self.new_context)
-            else:
-                logger.warning(
-                    "Expected logging context %s but found %s",
-                    self.new_context,
-                    context,
-                )
-
-        if self.current_context is not LoggingContext.sentinel:
-            if not self.current_context.alive:
-                logger.debug("Restoring dead context: %s", self.current_context)
-
-
-def nested_logging_context(suffix, parent_context=None):
-    """Creates a new logging context as a child of another.
-
-    The nested logging context will have a 'request' made up of the parent context's
-    request, plus the given suffix.
-
-    CPU/db usage stats will be added to the parent context's on exit.
-
-    Normal usage looks like:
-
-        with nested_logging_context(suffix):
-            # ... do stuff
-
-    Args:
-        suffix (str): suffix to add to the parent context's 'request'.
-        parent_context (LoggingContext|None): parent context. Will use the current context
-            if None.
-
-    Returns:
-        LoggingContext: new logging context.
-    """
-    if parent_context is None:
-        parent_context = LoggingContext.current_context()
-    return LoggingContext(
-        parent_context=parent_context, request=parent_context.request + "-" + suffix
-    )
-
-
-def preserve_fn(f):
-    """Function decorator which wraps the function with run_in_background"""
-
-    def g(*args, **kwargs):
-        return run_in_background(f, *args, **kwargs)
-
-    return g
-
-
-def run_in_background(f, *args, **kwargs):
-    """Calls a function, ensuring that the current context is restored after
-    return from the function, and that the sentinel context is set once the
-    deferred returned by the function completes.
-
-    Useful for wrapping functions that return a deferred which you don't yield
-    on (for instance because you want to pass it to deferred.gatherResults()).
-
-    Note that if you completely discard the result, you should make sure that
-    `f` doesn't raise any deferred exceptions, otherwise a scary-looking
-    CRITICAL error about an unhandled error will be logged without much
-    indication about where it came from.
-    """
-    current = LoggingContext.current_context()
-    try:
-        res = f(*args, **kwargs)
-    except:  # noqa: E722
-        # the assumption here is that the caller doesn't want to be disturbed
-        # by synchronous exceptions, so let's turn them into Failures.
-        return defer.fail()
-
-    if not isinstance(res, defer.Deferred):
-        return res
-
-    if res.called and not res.paused:
-        # The function should have maintained the logcontext, so we can
-        # optimise out the messing about
-        return res
-
-    # The function may have reset the context before returning, so
-    # we need to restore it now.
-    ctx = LoggingContext.set_current_context(current)
-
-    # The original context will be restored when the deferred
-    # completes, but there is nothing waiting for it, so it will
-    # get leaked into the reactor or some other function which
-    # wasn't expecting it. We therefore need to reset the context
-    # here.
-    #
-    # (If this feels asymmetric, consider it this way: we are
-    # effectively forking a new thread of execution. We are
-    # probably currently within a ``with LoggingContext()`` block,
-    # which is supposed to have a single entry and exit point. But
-    # by spawning off another deferred, we are effectively
-    # adding a new exit point.)
-    res.addBoth(_set_context_cb, ctx)
-    return res
-
-
-def make_deferred_yieldable(deferred):
-    """Given a deferred, make it follow the Synapse logcontext rules:
-
-    If the deferred has completed (or is not actually a Deferred), essentially
-    does nothing (just returns another completed deferred with the
-    result/failure).
-
-    If the deferred has not yet completed, resets the logcontext before
-    returning a deferred. Then, when the deferred completes, restores the
-    current logcontext before running callbacks/errbacks.
-
-    (This is more-or-less the opposite operation to run_in_background.)
-    """
-    if not isinstance(deferred, defer.Deferred):
-        return deferred
-
-    if deferred.called and not deferred.paused:
-        # it looks like this deferred is ready to run any callbacks we give it
-        # immediately. We may as well optimise out the logcontext faffery.
-        return deferred
-
-    # ok, we can't be sure that a yield won't block, so let's reset the
-    # logcontext, and add a callback to the deferred to restore it.
-    prev_context = LoggingContext.set_current_context(LoggingContext.sentinel)
-    deferred.addBoth(_set_context_cb, prev_context)
-    return deferred
-
-
-def _set_context_cb(result, context):
-    """A callback function which just sets the logging context"""
-    LoggingContext.set_current_context(context)
-    return result
-
-
-def defer_to_thread(reactor, f, *args, **kwargs):
-    """
-    Calls the function `f` using a thread from the reactor's default threadpool and
-    returns the result as a Deferred.
-
-    Creates a new logcontext for `f`, which is created as a child of the current
-    logcontext (so its CPU usage metrics will get attributed to the current
-    logcontext). `f` should preserve the logcontext it is given.
-
-    The result deferred follows the Synapse logcontext rules: you should `yield`
-    on it.
-
-    Args:
-        reactor (twisted.internet.base.ReactorBase): The reactor in whose main thread
-            the Deferred will be invoked, and whose threadpool we should use for the
-            function.
-
-            Normally this will be hs.get_reactor().
-
-        f (callable): The function to call.
-
-        args: positional arguments to pass to f.
-
-        kwargs: keyword arguments to pass to f.
-
-    Returns:
-        Deferred: A Deferred which fires a callback with the result of `f`, or an
-            errback if `f` throws an exception.
-    """
-    return defer_to_threadpool(reactor, reactor.getThreadPool(), f, *args, **kwargs)
-
-
-def defer_to_threadpool(reactor, threadpool, f, *args, **kwargs):
-    """
-    A wrapper for twisted.internet.threads.deferToThreadpool, which handles
-    logcontexts correctly.
-
-    Calls the function `f` using a thread from the given threadpool and returns
-    the result as a Deferred.
-
-    Creates a new logcontext for `f`, which is created as a child of the current
-    logcontext (so its CPU usage metrics will get attributed to the current
-    logcontext). `f` should preserve the logcontext it is given.
-
-    The result deferred follows the Synapse logcontext rules: you should `yield`
-    on it.
-
-    Args:
-        reactor (twisted.internet.base.ReactorBase): The reactor in whose main thread
-            the Deferred will be invoked. Normally this will be hs.get_reactor().
-
-        threadpool (twisted.python.threadpool.ThreadPool): The threadpool to use for
-            running `f`. Normally this will be hs.get_reactor().getThreadPool().
-
-        f (callable): The function to call.
-
-        args: positional arguments to pass to f.
-
-        kwargs: keyword arguments to pass to f.
-
-    Returns:
-        Deferred: A Deferred which fires a callback with the result of `f`, or an
-            errback if `f` throws an exception.
-    """
-    logcontext = LoggingContext.current_context()
-
-    def g():
-        with LoggingContext(parent_context=logcontext):
-            return f(*args, **kwargs)
-
-    return make_deferred_yieldable(threads.deferToThreadPool(reactor, threadpool, g))
+from synapse.logging.context import (
+    LoggingContext,
+    LoggingContextFilter,
+    PreserveLoggingContext,
+    defer_to_thread,
+    make_deferred_yieldable,
+    nested_logging_context,
+    preserve_fn,
+    run_in_background,
+)
+
+__all__ = [
+    "defer_to_thread",
+    "LoggingContext",
+    "LoggingContextFilter",
+    "make_deferred_yieldable",
+    "nested_logging_context",
+    "preserve_fn",
+    "PreserveLoggingContext",
+    "run_in_background",
+]
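
The shim keeps old import paths working; the names resolve to the same objects, so a sketch like the following holds (illustrative only):

    # new code should import from synapse.logging.context directly...
    from synapse.logging.context import run_in_background

    # ...but the legacy path still resolves to the same function
    from synapse.util.logcontext import run_in_background as legacy

    assert legacy is run_in_background
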
diff --git a/synapse/util/logformatter.py b/synapse/util/logformatter.py
index fbf570c756..320e8f8174 100644
--- a/synapse/util/logformatter.py
+++ b/synapse/util/logformatter.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright 2017 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,41 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+"""
+Backwards compatibility re-exports of ``synapse.logging.formatter`` functionality.
+"""
 
-import logging
-import traceback
+from synapse.logging.formatter import LogFormatter
 
-from six import StringIO
-
-
-class LogFormatter(logging.Formatter):
-    """Log formatter which gives more detail for exceptions
-
-    This is the same as the standard log formatter, except that when logging
-    exceptions [typically via log.foo("msg", exc_info=1)], it prints the
-    sequence that led up to the point at which the exception was caught.
-    (Normally only stack frames between the point the exception was raised and
-    where it was caught are logged).
-    """
-
-    def __init__(self, *args, **kwargs):
-        super(LogFormatter, self).__init__(*args, **kwargs)
-
-    def formatException(self, ei):
-        sio = StringIO()
-        (typ, val, tb) = ei
-
-        # log the stack above the exception capture point if possible, but
-        # check that we actually have an f_back attribute to work around
-        # https://twistedmatrix.com/trac/ticket/9305
-
-        if tb and hasattr(tb.tb_frame, "f_back"):
-            sio.write("Capture point (most recent call last):\n")
-            traceback.print_stack(tb.tb_frame.f_back, None, sio)
-
-        traceback.print_exception(typ, val, tb, None, sio)
-        s = sio.getvalue()
-        sio.close()
-        if s[-1:] == "\n":
-            s = s[:-1]
-        return s
+__all__ = ["LogFormatter"]
diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py
index 01284d3cf8..c30b6de19c 100644
--- a/synapse/util/metrics.py
+++ b/synapse/util/metrics.py
@@ -20,8 +20,8 @@ from prometheus_client import Counter
 
 from twisted.internet import defer
 
+from synapse.logging.context import LoggingContext
 from synapse.metrics import InFlightGauge
-from synapse.util.logcontext import LoggingContext
 
 logger = logging.getLogger(__name__)
 
diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py
index 06defa8199..5ca4521ce3 100644
--- a/synapse/util/ratelimitutils.py
+++ b/synapse/util/ratelimitutils.py
@@ -20,7 +20,7 @@ import logging
 from twisted.internet import defer
 
 from synapse.api.errors import LimitExceededError
-from synapse.util.logcontext import (
+from synapse.logging.context import (
     PreserveLoggingContext,
     make_deferred_yieldable,
     run_in_background,
@@ -36,9 +36,12 @@ class FederationRateLimiter(object):
             clock (Clock)
             config (FederationRateLimitConfig)
         """
-        self.clock = clock
-        self._config = config
-        self.ratelimiters = {}
+
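+        # lazily create a separate _PerHostRatelimiter for each origin host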
+        def new_limiter():
+            return _PerHostRatelimiter(clock=clock, config=config)
+
+        self.ratelimiters = collections.defaultdict(new_limiter)
 
     def ratelimit(self, host):
         """Used to ratelimit an incoming request from given host
@@ -53,11 +55,9 @@ class FederationRateLimiter(object):
             host (str): Origin of incoming request.
 
         Returns:
-            _PerHostRatelimiter
+            a context manager which yields a Deferred that resolves when the request may proceed.
         """
-        return self.ratelimiters.setdefault(
-            host, _PerHostRatelimiter(clock=self.clock, config=self._config)
-        ).ratelimit()
+        return self.ratelimiters[host].ratelimit()
 
 
 class _PerHostRatelimiter(object):
@@ -122,7 +122,8 @@ class _PerHostRatelimiter(object):
         self.request_times.append(time_now)
 
         def queue_request():
-            if len(self.current_processing) > self.concurrent_requests:
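+            # ">=" rather than ">": queue the request once we are already at the limit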
+            if len(self.current_processing) >= self.concurrent_requests:
                 queue_defer = defer.Deferred()
                 self.ready_request_queue[request_id] = queue_defer
                 logger.info(
diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py
index 1a77456498..d8d0ceae51 100644
--- a/synapse/util/retryutils.py
+++ b/synapse/util/retryutils.py
@@ -17,7 +17,7 @@ import random
 
 from twisted.internet import defer
 
-import synapse.util.logcontext
+import synapse.logging.context
 from synapse.api.errors import CodeMessageException
 
 logger = logging.getLogger(__name__)
@@ -225,4 +225,4 @@ class RetryDestinationLimiter(object):
                 logger.exception("Failed to store destination_retry_timings")
 
         # we deliberately do this in the background.
-        synapse.util.logcontext.run_in_background(store_retry_timings)
+        synapse.logging.context.run_in_background(store_retry_timings)
diff --git a/synctl b/synctl
index 30d751236f..794de99ea3 100755
--- a/synctl
+++ b/synctl
@@ -150,8 +150,9 @@ def main():
     parser.add_argument(
         "--no-daemonize",
         action="store_false",
+        dest="daemonize",
         help="Run synapse in the foreground for debugging. "
-        "Will work only if the daemonize option is not set in the config."
+        "Will work only if the daemonize option is not set in the config.",
     )
 
     options = parser.parse_args()
@@ -159,7 +160,7 @@ def main():
     if options.worker and options.all_processes:
         write('Cannot use "--worker" with "--all-processes"', stream=sys.stderr)
         sys.exit(1)
-    if options.no_daemonize and options.all_processes:
+    if not options.daemonize and options.all_processes:
         write('Cannot use "--no-daemonize" with "--all-processes"', stream=sys.stderr)
         sys.exit(1)
 
@@ -169,9 +170,8 @@ def main():
         write(
             "No config file found\n"
             "To generate a config file, run '%s -c %s --generate-config"
-            " --server-name=<server name> --report-stats=<yes/no>'\n" % (
-                " ".join(SYNAPSE), options.configfile,
-            ),
+            " --server-name=<server name> --report-stats=<yes/no>'\n"
+            % (" ".join(SYNAPSE), options.configfile),
             stream=sys.stderr,
         )
         sys.exit(1)
@@ -289,7 +289,7 @@ def main():
             # Check if synapse is already running
             if os.path.exists(pidfile) and pid_running(int(open(pidfile).read())):
                 abort("synapse.app.homeserver already running")
-            start(configfile, bool(options.no_daemonize))
+            start(configfile, bool(options.daemonize))
 
         for worker in workers:
             env = os.environ.copy()
diff --git a/sytest-blacklist b/sytest-blacklist
new file mode 100644
index 0000000000..b760a48c57
--- /dev/null
+++ b/sytest-blacklist
@@ -0,0 +1,35 @@
+# This file serves as a blacklist for SyTest tests that we expect will fail in
+# Synapse.
+#
+# Each line of this file is scanned by sytest during a run and if the line
+# exactly matches the name of a test, it will be marked as "expected fail",
+# meaning the test will still run, but failure will not mark the entire test
+# suite as failing.
+#
+# Each test name should be accompanied by a link to the bug which explains
+# why the test has been excluded.
+
+# Blacklisted due to https://github.com/matrix-org/synapse/issues/1679
+Remote room members also see posted message events
+
+# Blacklisted due to https://github.com/matrix-org/synapse/issues/2065
+Guest users can accept invites to private rooms over federation
+
+# Blacklisted due to https://github.com/vector-im/riot-web/issues/7211
+The only membership state included in a gapped incremental sync is for senders in the timeline
+
+# Blacklisted due to https://github.com/matrix-org/synapse/issues/1658
+Newly created users see their own presence in /initialSync (SYT-34)
+
+# Blacklisted due to https://github.com/matrix-org/synapse/issues/1396
+Should reject keys claiming to belong to a different user
+
+# Blacklisted due to https://github.com/matrix-org/synapse/issues/2306
+Users appear/disappear from directory when join_rules are changed
+Users appear/disappear from directory when history_visibility are changed
+
+# Blacklisted due to https://github.com/matrix-org/synapse/issues/1531
+Enabling an unknown default rule fails with 404
+
+# Blacklisted due to https://github.com/matrix-org/synapse/issues/1663
+New federated private chats get full presence information (SYN-115)
diff --git a/tests/appservice/test_scheduler.py b/tests/appservice/test_scheduler.py
index db9f86bdac..04b8c2c07c 100644
--- a/tests/appservice/test_scheduler.py
+++ b/tests/appservice/test_scheduler.py
@@ -22,7 +22,7 @@ from synapse.appservice.scheduler import (
     _ServiceQueuer,
     _TransactionController,
 )
-from synapse.util.logcontext import make_deferred_yieldable
+from synapse.logging.context import make_deferred_yieldable
 
 from tests import unittest
 
diff --git a/tests/config/test_ratelimiting.py b/tests/config/test_ratelimiting.py
new file mode 100644
index 0000000000..13ab282384
--- /dev/null
+++ b/tests/config/test_ratelimiting.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from synapse.config.homeserver import HomeServerConfig
+
+from tests.unittest import TestCase
+from tests.utils import default_config
+
+
+class RatelimitConfigTestCase(TestCase):
+    def test_parse_rc_federation(self):
+        config_dict = default_config("test")
+        config_dict["rc_federation"] = {
+            "window_size": 20000,
+            "sleep_limit": 693,
+            "sleep_delay": 252,
+            "reject_limit": 198,
+            "concurrent": 7,
+        }
+
+        config = HomeServerConfig()
+        config.parse_config_dict(config_dict, "", "")
+        config_obj = config.rc_federation
+
+        self.assertEqual(config_obj.window_size, 20000)
+        self.assertEqual(config_obj.sleep_limit, 693)
+        self.assertEqual(config_obj.sleep_delay, 252)
+        self.assertEqual(config_obj.reject_limit, 198)
+        self.assertEqual(config_obj.concurrent, 7)
diff --git a/tests/config/test_tls.py b/tests/config/test_tls.py
index a5d88d644a..4f8a87a3df 100644
--- a/tests/config/test_tls.py
+++ b/tests/config/test_tls.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2019 New Vector Ltd
+# Copyright 2019 Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,7 +16,10 @@
 
 import os
 
-from synapse.config.tls import TlsConfig
+from OpenSSL import SSL
+
+from synapse.config.tls import ConfigError, TlsConfig
+from synapse.crypto.context_factory import ClientTLSOptionsFactory
 
 from tests.unittest import TestCase
 
@@ -78,3 +82,112 @@ s4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg=
                 "or use Synapse's ACME support to provision one."
             ),
         )
+
+    def test_tls_client_minimum_default(self):
+        """
+        The default minimum TLS version for federation client connections is 1.
+        """
+        config = {}
+        t = TestConfig()
+        t.read_config(config, config_dir_path="", data_dir_path="")
+
+        self.assertEqual(t.federation_client_minimum_tls_version, "1")
+
+    def test_tls_client_minimum_set(self):
+        """
+        The minimum client TLS version can be set to 1, 1.1, or 1.2.
+        """
+        config = {"federation_client_minimum_tls_version": 1}
+        t = TestConfig()
+        t.read_config(config, config_dir_path="", data_dir_path="")
+        self.assertEqual(t.federation_client_minimum_tls_version, "1")
+
+        config = {"federation_client_minimum_tls_version": 1.1}
+        t = TestConfig()
+        t.read_config(config, config_dir_path="", data_dir_path="")
+        self.assertEqual(t.federation_client_minimum_tls_version, "1.1")
+
+        config = {"federation_client_minimum_tls_version": 1.2}
+        t = TestConfig()
+        t.read_config(config, config_dir_path="", data_dir_path="")
+        self.assertEqual(t.federation_client_minimum_tls_version, "1.2")
+
+        # Also test a string version
+        config = {"federation_client_minimum_tls_version": "1"}
+        t = TestConfig()
+        t.read_config(config, config_dir_path="", data_dir_path="")
+        self.assertEqual(t.federation_client_minimum_tls_version, "1")
+
+        config = {"federation_client_minimum_tls_version": "1.2"}
+        t = TestConfig()
+        t.read_config(config, config_dir_path="", data_dir_path="")
+        self.assertEqual(t.federation_client_minimum_tls_version, "1.2")
+
+    def test_tls_client_minimum_1_point_3_missing(self):
+        """
+        If the local OpenSSL lacks TLS 1.3 support, configuring a minimum of
+        1.3 raises a ConfigError.
+        """
+        # simulate an OpenSSL build without TLS 1.3 by removing the flag from pyOpenSSL
+        if hasattr(SSL, "OP_NO_TLSv1_3"):
+            OP_NO_TLSv1_3 = SSL.OP_NO_TLSv1_3
+            delattr(SSL, "OP_NO_TLSv1_3")
+            self.addCleanup(setattr, SSL, "OP_NO_TLSv1_3", OP_NO_TLSv1_3)
+            assert not hasattr(SSL, "OP_NO_TLSv1_3")
+
+        config = {"federation_client_minimum_tls_version": 1.3}
+        t = TestConfig()
+        with self.assertRaises(ConfigError) as e:
+            t.read_config(config, config_dir_path="", data_dir_path="")
+        self.assertEqual(
+            e.exception.args[0],
+            (
+                "federation_client_minimum_tls_version cannot be 1.3, "
+                "your OpenSSL does not support it"
+            ),
+        )
+
+    def test_tls_client_minimum_1_point_3_exists(self):
+        """
+        If the local OpenSSL supports TLS 1.3, a minimum of 1.3 can be configured.
+        """
+        # simulate an OpenSSL build with TLS 1.3 by faking the flag on pyOpenSSL
+        if not hasattr(SSL, "OP_NO_TLSv1_3"):
+            SSL.OP_NO_TLSv1_3 = 0x00
+            self.addCleanup(lambda: delattr(SSL, "OP_NO_TLSv1_3"))
+            assert hasattr(SSL, "OP_NO_TLSv1_3")
+
+        config = {"federation_client_minimum_tls_version": 1.3}
+        t = TestConfig()
+        t.read_config(config, config_dir_path="", data_dir_path="")
+        self.assertEqual(t.federation_client_minimum_tls_version, "1.3")
+
+    def test_tls_client_minimum_set_passed_through_1_2(self):
+        """
+        A minimum TLS version of 1.2 disables TLS 1.0 and 1.1 in the ContextFactory.
+        """
+        config = {"federation_client_minimum_tls_version": 1.2}
+        t = TestConfig()
+        t.read_config(config, config_dir_path="", data_dir_path="")
+
+        cf = ClientTLSOptionsFactory(t)
+
+        # The context has had NO_TLSv1_1 and NO_TLSv1_0 set, but not NO_TLSv1_2
+        self.assertNotEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1, 0)
+        self.assertNotEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_1, 0)
+        self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_2, 0)
+
+    def test_tls_client_minimum_set_passed_through_1_0(self):
+        """
+        A minimum TLS version of 1 leaves every TLS version enabled in the ContextFactory.
+        """
+        config = {"federation_client_minimum_tls_version": 1}
+        t = TestConfig()
+        t.read_config(config, config_dir_path="", data_dir_path="")
+
+        cf = ClientTLSOptionsFactory(t)
+
+        # The context has not had any of the NO_TLSv1 options set.
+        self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1, 0)
+        self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_1, 0)
+        self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_2, 0)
diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py
index 5a355f00cc..795703967d 100644
--- a/tests/crypto/test_keyring.py
+++ b/tests/crypto/test_keyring.py
@@ -30,9 +30,12 @@ from synapse.crypto.keyring import (
     ServerKeyFetcher,
     StoreKeyFetcher,
 )
+from synapse.logging.context import (
+    LoggingContext,
+    PreserveLoggingContext,
+    make_deferred_yieldable,
+)
 from synapse.storage.keys import FetchKeyResult
-from synapse.util import logcontext
-from synapse.util.logcontext import LoggingContext
 
 from tests import unittest
 
@@ -131,7 +134,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         @defer.inlineCallbacks
         def get_perspectives(**kwargs):
             self.assertEquals(LoggingContext.current_context().request, "11")
-            with logcontext.PreserveLoggingContext():
+            with PreserveLoggingContext():
                 yield persp_deferred
             defer.returnValue(persp_resp)
 
@@ -158,7 +161,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
                 self.assertFalse(res_deferreds[0].called)
                 res_deferreds[0].addBoth(self.check_context, None)
 
-                yield logcontext.make_deferred_yieldable(res_deferreds[0])
+                yield make_deferred_yieldable(res_deferreds[0])
 
                 # let verify_json_objects_for_server finish its work before we kill the
                 # logcontext
@@ -184,7 +187,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
                     [("server10", json1, 0, "test")]
                 )
                 res_deferreds_2[0].addBoth(self.check_context, None)
-                yield logcontext.make_deferred_yieldable(res_deferreds_2[0])
+                yield make_deferred_yieldable(res_deferreds_2[0])
 
                 # let verify_json_objects_for_server finish its work before we kill the
                 # logcontext
diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py
index 4edce7af43..1c7ded7397 100644
--- a/tests/handlers/test_register.py
+++ b/tests/handlers/test_register.py
@@ -18,7 +18,7 @@ from mock import Mock
 from twisted.internet import defer
 
 from synapse.api.constants import UserTypes
-from synapse.api.errors import ResourceLimitError, SynapseError
+from synapse.api.errors import Codes, ResourceLimitError, SynapseError
 from synapse.handlers.register import RegistrationHandler
 from synapse.types import RoomAlias, UserID, create_requester
 
@@ -67,7 +67,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
         user_id = frank.to_string()
         requester = create_requester(user_id)
         result_user_id, result_token = self.get_success(
-            self.handler.get_or_create_user(requester, frank.localpart, "Frankie")
+            self.get_or_create_user(requester, frank.localpart, "Frankie")
         )
         self.assertEquals(result_user_id, user_id)
         self.assertTrue(result_token is not None)
@@ -87,7 +87,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
         user_id = frank.to_string()
         requester = create_requester(user_id)
         result_user_id, result_token = self.get_success(
-            self.handler.get_or_create_user(requester, local_part, None)
+            self.get_or_create_user(requester, local_part, None)
         )
         self.assertEquals(result_user_id, user_id)
         self.assertTrue(result_token is not None)
@@ -95,9 +95,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
     def test_mau_limits_when_disabled(self):
         self.hs.config.limit_usage_by_mau = False
         # Ensure does not throw exception
-        self.get_success(
-            self.handler.get_or_create_user(self.requester, "a", "display_name")
-        )
+        self.get_success(self.get_or_create_user(self.requester, "a", "display_name"))
 
     def test_get_or_create_user_mau_not_blocked(self):
         self.hs.config.limit_usage_by_mau = True
@@ -105,7 +103,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
             return_value=defer.succeed(self.hs.config.max_mau_value - 1)
         )
         # Ensure does not throw exception
-        self.get_success(self.handler.get_or_create_user(self.requester, "c", "User"))
+        self.get_success(self.get_or_create_user(self.requester, "c", "User"))
 
     def test_get_or_create_user_mau_blocked(self):
         self.hs.config.limit_usage_by_mau = True
@@ -113,7 +111,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
             return_value=defer.succeed(self.lots_of_users)
         )
         self.get_failure(
-            self.handler.get_or_create_user(self.requester, "b", "display_name"),
+            self.get_or_create_user(self.requester, "b", "display_name"),
             ResourceLimitError,
         )
 
@@ -121,7 +119,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
             return_value=defer.succeed(self.hs.config.max_mau_value)
         )
         self.get_failure(
-            self.handler.get_or_create_user(self.requester, "b", "display_name"),
+            self.get_or_create_user(self.requester, "b", "display_name"),
             ResourceLimitError,
         )
 
@@ -232,3 +230,56 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
     def test_invalid_user_id_length(self):
         invalid_user_id = "x" * 256
         self.get_failure(self.handler.register(localpart=invalid_user_id), SynapseError)
+
+    @defer.inlineCallbacks
+    def get_or_create_user(self, requester, localpart, displayname, password_hash=None):
+        """Creates a new user if the user does not exist,
+        else revokes all previous access tokens and generates a new one.
+
+        XXX: this used to be in the main codebase, but was only used by this file,
+        so got moved here. TODO: get rid of it, probably
+
+        Args:
+            localpart (str): The local part of the user ID to register. Must
+              not be None; a None localpart is rejected with a SynapseError.
+        Returns:
+            A tuple of (user_id, access_token).
+        Raises:
+            RegistrationError if there was a problem registering.
+        """
+        if localpart is None:
+            raise SynapseError(400, "Request must include user id")
+        yield self.hs.get_auth().check_auth_blocking()
+        need_register = True
+
+        try:
+            yield self.handler.check_username(localpart)
+        except SynapseError as e:
+            if e.errcode == Codes.USER_IN_USE:
+                need_register = False
+            else:
+                raise
+
+        user = UserID(localpart, self.hs.hostname)
+        user_id = user.to_string()
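+        # mint an access token up front; both the register and re-login paths use it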
+        token = self.macaroon_generator.generate_access_token(user_id)
+
+        if need_register:
+            yield self.handler.register_with_store(
+                user_id=user_id,
+                token=token,
+                password_hash=password_hash,
+                create_profile_with_displayname=user.localpart,
+            )
+        else:
+            yield self.hs.get_auth_handler().delete_access_tokens_for_user(user_id)
+            yield self.store.add_access_token_to_user(user_id=user_id, token=token)
+
+        if displayname is not None:
+            # logger.info("setting user display name: %s -> %s", user_id, displayname)
+            yield self.hs.get_profile_handler().set_displayname(
+                user, requester, displayname, by_admin=True
+            )
+
+        defer.returnValue((user_id, token))
diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py
index 417fda3ab2..a49f9b3224 100644
--- a/tests/http/federation/test_matrix_federation_agent.py
+++ b/tests/http/federation/test_matrix_federation_agent.py
@@ -36,8 +36,8 @@ from synapse.http.federation.matrix_federation_agent import (
     _cache_period_from_headers,
 )
 from synapse.http.federation.srv_resolver import Server
+from synapse.logging.context import LoggingContext
 from synapse.util.caches.ttlcache import TTLCache
-from synapse.util.logcontext import LoggingContext
 
 from tests.http import TestServerTLSConnectionFactory, get_test_ca_cert_file
 from tests.server import FakeTransport, ThreadedMemoryReactorClock
diff --git a/tests/http/federation/test_srv_resolver.py b/tests/http/federation/test_srv_resolver.py
index cf6c6e95b5..65b51dc981 100644
--- a/tests/http/federation/test_srv_resolver.py
+++ b/tests/http/federation/test_srv_resolver.py
@@ -22,7 +22,7 @@ from twisted.internet.error import ConnectError
 from twisted.names import dns, error
 
 from synapse.http.federation.srv_resolver import SrvResolver
-from synapse.util.logcontext import LoggingContext
+from synapse.logging.context import LoggingContext
 
 from tests import unittest
 from tests.utils import MockClock
diff --git a/tests/http/test_fedclient.py b/tests/http/test_fedclient.py
index c4c0d9b968..b9d6d7ad1c 100644
--- a/tests/http/test_fedclient.py
+++ b/tests/http/test_fedclient.py
@@ -29,7 +29,7 @@ from synapse.http.matrixfederationclient import (
     MatrixFederationHttpClient,
     MatrixFederationRequest,
 )
-from synapse.util.logcontext import LoggingContext
+from synapse.logging.context import LoggingContext
 
 from tests.server import FakeTransport
 from tests.unittest import HomeserverTestCase
diff --git a/tests/patch_inline_callbacks.py b/tests/patch_inline_callbacks.py
index ee0add3455..220884311c 100644
--- a/tests/patch_inline_callbacks.py
+++ b/tests/patch_inline_callbacks.py
@@ -28,7 +28,7 @@ def do_patch():
     Patch defer.inlineCallbacks so that it checks the state of the logcontext on exit
     """
 
-    from synapse.util.logcontext import LoggingContext
+    from synapse.logging.context import LoggingContext
 
     orig_inline_callbacks = defer.inlineCallbacks
 
diff --git a/tests/push/test_http.py b/tests/push/test_http.py
index 22c3f73ef3..8ce6bb62da 100644
--- a/tests/push/test_http.py
+++ b/tests/push/test_http.py
@@ -18,8 +18,8 @@ from mock import Mock
 from twisted.internet.defer import Deferred
 
 import synapse.rest.admin
+from synapse.logging.context import make_deferred_yieldable
 from synapse.rest.client.v1 import login, room
-from synapse.util.logcontext import make_deferred_yieldable
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py
index 708dc26e61..a8adc9a61d 100644
--- a/tests/rest/client/test_transactions.py
+++ b/tests/rest/client/test_transactions.py
@@ -2,9 +2,9 @@ from mock import Mock, call
 
 from twisted.internet import defer, reactor
 
+from synapse.logging.context import LoggingContext
 from synapse.rest.client.transactions import CLEANUP_PERIOD_MS, HttpTransactionCache
 from synapse.util import Clock
-from synapse.util.logcontext import LoggingContext
 
 from tests import unittest
 from tests.utils import MockClock
diff --git a/tests/rest/client/v2_alpha/test_relations.py b/tests/rest/client/v2_alpha/test_relations.py
index 3deeed3a70..6bb7d92638 100644
--- a/tests/rest/client/v2_alpha/test_relations.py
+++ b/tests/rest/client/v2_alpha/test_relations.py
@@ -466,9 +466,15 @@ class RelationsTestCase(unittest.HomeserverTestCase):
 
         self.assertEquals(channel.json_body["content"], new_body)
 
-        self.assertEquals(
-            channel.json_body["unsigned"].get("m.relations"),
-            {RelationTypes.REPLACE: {"event_id": edit_event_id}},
+        relations_dict = channel.json_body["unsigned"].get("m.relations")
+        self.assertIn(RelationTypes.REPLACE, relations_dict)
+
+        m_replace_dict = relations_dict[RelationTypes.REPLACE]
+        for key in ["event_id", "sender", "origin_server_ts"]:
+            self.assertIn(key, m_replace_dict)
+
+        self.assert_dict(
+            {"event_id": edit_event_id, "sender": self.user_id}, m_replace_dict
         )
 
     def test_multi_edit(self):
@@ -518,9 +524,15 @@ class RelationsTestCase(unittest.HomeserverTestCase):
 
         self.assertEquals(channel.json_body["content"], new_body)
 
-        self.assertEquals(
-            channel.json_body["unsigned"].get("m.relations"),
-            {RelationTypes.REPLACE: {"event_id": edit_event_id}},
+        relations_dict = channel.json_body["unsigned"].get("m.relations")
+        self.assertIn(RelationTypes.REPLACE, relations_dict)
+
+        m_replace_dict = relations_dict[RelationTypes.REPLACE]
+        for key in ["event_id", "sender", "origin_server_ts"]:
+            self.assertIn(key, m_replace_dict)
+
+        self.assert_dict(
+            {"event_id": edit_event_id, "sender": self.user_id}, m_replace_dict
         )
 
     def _send_relation(
diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py
index e2d418b1df..bc662b61db 100644
--- a/tests/rest/media/v1/test_media_storage.py
+++ b/tests/rest/media/v1/test_media_storage.py
@@ -22,27 +22,28 @@ from binascii import unhexlify
 from mock import Mock
 from six.moves.urllib import parse
 
-from twisted.internet import defer, reactor
 from twisted.internet.defer import Deferred
 
+from synapse.logging.context import make_deferred_yieldable
 from synapse.rest.media.v1._base import FileInfo
 from synapse.rest.media.v1.filepath import MediaFilePaths
 from synapse.rest.media.v1.media_storage import MediaStorage
 from synapse.rest.media.v1.storage_provider import FileStorageProviderBackend
-from synapse.util.logcontext import make_deferred_yieldable
 
 from tests import unittest
 
 
-class MediaStorageTests(unittest.TestCase):
-    def setUp(self):
+class MediaStorageTests(unittest.HomeserverTestCase):
+
+    needs_threadpool = True
+
+    def prepare(self, reactor, clock, hs):
         self.test_dir = tempfile.mkdtemp(prefix="synapse-tests-")
+        self.addCleanup(shutil.rmtree, self.test_dir)
 
         self.primary_base_path = os.path.join(self.test_dir, "primary")
         self.secondary_base_path = os.path.join(self.test_dir, "secondary")
 
-        hs = Mock()
-        hs.get_reactor = Mock(return_value=reactor)
         hs.config.media_store_path = self.primary_base_path
 
         storage_providers = [FileStorageProviderBackend(hs, self.secondary_base_path)]
@@ -52,10 +53,6 @@ class MediaStorageTests(unittest.TestCase):
             hs, self.primary_base_path, self.filepaths, storage_providers
         )
 
-    def tearDown(self):
-        shutil.rmtree(self.test_dir)
-
-    @defer.inlineCallbacks
     def test_ensure_media_is_in_local_cache(self):
         media_id = "some_media_id"
         test_body = "Test\n"
@@ -73,7 +70,15 @@ class MediaStorageTests(unittest.TestCase):
         # Now we run ensure_media_is_in_local_cache, which should copy the file
         # to the local cache.
         file_info = FileInfo(None, media_id)
-        local_path = yield self.media_storage.ensure_media_is_in_local_cache(file_info)
+
+        # This uses a real blocking threadpool so we have to wait for it to be
+        # actually done :/
+        x = self.media_storage.ensure_media_is_in_local_cache(file_info)
+
+        # Hotloop until the threadpool does its job...
+        self.wait_on_thread(x)
+
+        local_path = self.get_success(x)
 
         self.assertTrue(os.path.exists(local_path))
 
diff --git a/tests/rest/media/v1/test_url_preview.py b/tests/rest/media/v1/test_url_preview.py
index 8fe5961866..976652aee8 100644
--- a/tests/rest/media/v1/test_url_preview.py
+++ b/tests/rest/media/v1/test_url_preview.py
@@ -460,3 +460,15 @@ class URLPreviewTests(unittest.HomeserverTestCase):
                 "error": "DNS resolution failure during URL preview generation",
             },
         )
+
+    def test_OPTIONS(self):
+        """
+        An OPTIONS request to the url_preview endpoint returns 200 with an empty body.
+        """
+        request, channel = self.make_request(
+            "OPTIONS", "url_preview?url=http://example.com", shorthand=False
+        )
+        request.render(self.preview_url)
+        self.pump()
+        self.assertEqual(channel.code, 200)
+        self.assertEqual(channel.json_body, {})
diff --git a/tests/test_federation.py b/tests/test_federation.py
index 6a8339b561..a73f18f88e 100644
--- a/tests/test_federation.py
+++ b/tests/test_federation.py
@@ -3,9 +3,9 @@ from mock import Mock
 from twisted.internet.defer import maybeDeferred, succeed
 
 from synapse.events import FrozenEvent
+from synapse.logging.context import LoggingContext
 from synapse.types import Requester, UserID
 from synapse.util import Clock
-from synapse.util.logcontext import LoggingContext
 
 from tests import unittest
 from tests.server import ThreadedMemoryReactorClock, setup_test_homeserver
diff --git a/tests/test_server.py b/tests/test_server.py
index da29ae92ce..ba08483a4b 100644
--- a/tests/test_server.py
+++ b/tests/test_server.py
@@ -26,8 +26,8 @@ from twisted.web.server import NOT_DONE_YET
 from synapse.api.errors import Codes, SynapseError
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite, logger
+from synapse.logging.context import make_deferred_yieldable
 from synapse.util import Clock
-from synapse.util.logcontext import make_deferred_yieldable
 
 from tests import unittest
 from tests.server import (
diff --git a/tests/test_utils/logging_setup.py b/tests/test_utils/logging_setup.py
index 813f984199..2d96b0fa8d 100644
--- a/tests/test_utils/logging_setup.py
+++ b/tests/test_utils/logging_setup.py
@@ -17,7 +17,7 @@ import os
 
 import twisted.logger
 
-from synapse.util.logcontext import LoggingContextFilter
+from synapse.logging.context import LoggingContextFilter
 
 
 class ToTwistedHandler(logging.Handler):
diff --git a/tests/unittest.py b/tests/unittest.py
index 36df43c137..a09e76c7c2 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -17,6 +17,7 @@ import gc
 import hashlib
 import hmac
 import logging
+import time
 
 from mock import Mock
 
@@ -24,16 +25,17 @@ from canonicaljson import json
 
 import twisted
 import twisted.logger
-from twisted.internet.defer import Deferred
+from twisted.internet.defer import Deferred, succeed
+from twisted.python.threadpool import ThreadPool
 from twisted.trial import unittest
 
 from synapse.api.constants import EventTypes
 from synapse.config.homeserver import HomeServerConfig
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseRequest
+from synapse.logging.context import LoggingContext
 from synapse.server import HomeServer
 from synapse.types import Requester, UserID, create_requester
-from synapse.util.logcontext import LoggingContext
 
 from tests.server import get_clock, make_request, render, setup_test_homeserver
 from tests.test_utils.logging_setup import setup_logging
@@ -164,6 +166,7 @@ class HomeserverTestCase(TestCase):
 
     servlets = []
     hijack_auth = True
+    needs_threadpool = False
 
     def setUp(self):
         """
@@ -192,15 +195,19 @@ class HomeserverTestCase(TestCase):
             if self.hijack_auth:
 
                 def get_user_by_access_token(token=None, allow_guest=False):
-                    return {
-                        "user": UserID.from_string(self.helper.auth_user_id),
-                        "token_id": 1,
-                        "is_guest": False,
-                    }
+                    return succeed(
+                        {
+                            "user": UserID.from_string(self.helper.auth_user_id),
+                            "token_id": 1,
+                            "is_guest": False,
+                        }
+                    )
 
                 def get_user_by_req(request, allow_guest=False, rights="access"):
-                    return create_requester(
-                        UserID.from_string(self.helper.auth_user_id), 1, False, None
+                    return succeed(
+                        create_requester(
+                            UserID.from_string(self.helper.auth_user_id), 1, False, None
+                        )
                     )
 
                 self.hs.get_auth().get_user_by_req = get_user_by_req
@@ -209,9 +216,28 @@
                     return_value="1234"
                 )
 
+        if self.needs_threadpool:
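+            # attach a real ThreadPool to the fake reactor so work deferred to a
+            # thread actually runs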
+            self.reactor.threadpool = ThreadPool()
+            self.addCleanup(self.reactor.threadpool.stop)
+            self.reactor.threadpool.start()
+
         if hasattr(self, "prepare"):
             self.prepare(self.reactor, self.clock, self.hs)
 
+    def wait_on_thread(self, deferred, timeout=10):
+        """
+        Wait for a Deferred which is blocked on work in a real thread to complete.
+        """
+        start_time = time.time()
+
+        while not deferred.called:
+            if start_time + timeout < time.time():
+                raise ValueError("Timed out waiting for threadpool")
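+            # pump the fake reactor and briefly yield so the real thread can progress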
+            self.reactor.advance(0.01)
+            time.sleep(0.01)
+
     def make_homeserver(self, reactor, clock):
         """
         Make and return a homeserver.
diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py
index 6f8f52537c..7807328e2f 100644
--- a/tests/util/caches/test_descriptors.py
+++ b/tests/util/caches/test_descriptors.py
@@ -21,7 +21,11 @@ import mock
 from twisted.internet import defer, reactor
 
 from synapse.api.errors import SynapseError
-from synapse.util import logcontext
+from synapse.logging.context import (
+    LoggingContext,
+    PreserveLoggingContext,
+    make_deferred_yieldable,
+)
 from synapse.util.caches import descriptors
 
 from tests import unittest
@@ -32,7 +36,7 @@ logger = logging.getLogger(__name__)
 def run_on_reactor():
     d = defer.Deferred()
     reactor.callLater(0, d.callback, 0)
-    return logcontext.make_deferred_yieldable(d)
+    return make_deferred_yieldable(d)
 
 
 class CacheTestCase(unittest.TestCase):
@@ -153,7 +157,7 @@ class DescriptorTestCase(unittest.TestCase):
             def fn(self, arg1):
                 @defer.inlineCallbacks
                 def inner_fn():
-                    with logcontext.PreserveLoggingContext():
+                    with PreserveLoggingContext():
                         yield complete_lookup
                     defer.returnValue(1)
 
@@ -161,10 +165,10 @@ class DescriptorTestCase(unittest.TestCase):
 
         @defer.inlineCallbacks
         def do_lookup():
-            with logcontext.LoggingContext() as c1:
+            with LoggingContext() as c1:
                 c1.name = "c1"
                 r = yield obj.fn(1)
-                self.assertEqual(logcontext.LoggingContext.current_context(), c1)
+                self.assertEqual(LoggingContext.current_context(), c1)
             defer.returnValue(r)
 
         def check_result(r):
@@ -174,18 +178,12 @@ class DescriptorTestCase(unittest.TestCase):
 
         # set off a deferred which will do a cache lookup
         d1 = do_lookup()
-        self.assertEqual(
-            logcontext.LoggingContext.current_context(),
-            logcontext.LoggingContext.sentinel,
-        )
+        self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
         d1.addCallback(check_result)
 
         # and another
         d2 = do_lookup()
-        self.assertEqual(
-            logcontext.LoggingContext.current_context(),
-            logcontext.LoggingContext.sentinel,
-        )
+        self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
         d2.addCallback(check_result)
 
         # let the lookup complete
@@ -210,29 +208,25 @@ class DescriptorTestCase(unittest.TestCase):
 
         @defer.inlineCallbacks
         def do_lookup():
-            with logcontext.LoggingContext() as c1:
+            with LoggingContext() as c1:
                 c1.name = "c1"
                 try:
                     d = obj.fn(1)
                     self.assertEqual(
-                        logcontext.LoggingContext.current_context(),
-                        logcontext.LoggingContext.sentinel,
+                        LoggingContext.current_context(), LoggingContext.sentinel
                     )
                     yield d
                     self.fail("No exception thrown")
                 except SynapseError:
                     pass
 
-                self.assertEqual(logcontext.LoggingContext.current_context(), c1)
+                self.assertEqual(LoggingContext.current_context(), c1)
 
         obj = Cls()
 
         # set off a deferred which will do a cache lookup
         d1 = do_lookup()
-        self.assertEqual(
-            logcontext.LoggingContext.current_context(),
-            logcontext.LoggingContext.sentinel,
-        )
+        self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
 
         return d1
 
@@ -288,23 +282,20 @@ class CachedListDescriptorTestCase(unittest.TestCase):
 
             @descriptors.cachedList("fn", "args1", inlineCallbacks=True)
             def list_fn(self, args1, arg2):
-                assert logcontext.LoggingContext.current_context().request == "c1"
+                assert LoggingContext.current_context().request == "c1"
                 # we want this to behave like an asynchronous function
                 yield run_on_reactor()
-                assert logcontext.LoggingContext.current_context().request == "c1"
+                assert LoggingContext.current_context().request == "c1"
                 defer.returnValue(self.mock(args1, arg2))
 
-        with logcontext.LoggingContext() as c1:
+        with LoggingContext() as c1:
             c1.request = "c1"
             obj = Cls()
             obj.mock.return_value = {10: "fish", 20: "chips"}
             d1 = obj.list_fn([10, 20], 2)
-            self.assertEqual(
-                logcontext.LoggingContext.current_context(),
-                logcontext.LoggingContext.sentinel,
-            )
+            self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
             r = yield d1
-            self.assertEqual(logcontext.LoggingContext.current_context(), c1)
+            self.assertEqual(LoggingContext.current_context(), c1)
             obj.mock.assert_called_once_with([10, 20], 2)
             self.assertEqual(r, {10: "fish", 20: "chips"})
             obj.mock.reset_mock()
diff --git a/tests/util/test_async_utils.py b/tests/util/test_async_utils.py
index bf85d3b8ec..f60918069a 100644
--- a/tests/util/test_async_utils.py
+++ b/tests/util/test_async_utils.py
@@ -16,9 +16,8 @@ from twisted.internet import defer
 from twisted.internet.defer import CancelledError, Deferred
 from twisted.internet.task import Clock
 
-from synapse.util import logcontext
+from synapse.logging.context import LoggingContext, PreserveLoggingContext
 from synapse.util.async_helpers import timeout_deferred
-from synapse.util.logcontext import LoggingContext
 
 from tests.unittest import TestCase
 
@@ -69,14 +68,14 @@ class TimeoutDeferredTest(TestCase):
         @defer.inlineCallbacks
         def blocking():
             non_completing_d = Deferred()
-            with logcontext.PreserveLoggingContext():
+            with PreserveLoggingContext():
                 try:
                     yield non_completing_d
                 except CancelledError:
                     blocking_was_cancelled[0] = True
                     raise
 
-        with logcontext.LoggingContext("one") as context_one:
+        with LoggingContext("one") as context_one:
             # the errbacks should be run in the test logcontext
             def errback(res, deferred_name):
                 self.assertIs(
diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py
index ec7ba9719c..0ec8ef90ce 100644
--- a/tests/util/test_linearizer.py
+++ b/tests/util/test_linearizer.py
@@ -19,7 +19,8 @@ from six.moves import range
 from twisted.internet import defer, reactor
 from twisted.internet.defer import CancelledError
 
-from synapse.util import Clock, logcontext
+from synapse.logging.context import LoggingContext
+from synapse.util import Clock
 from synapse.util.async_helpers import Linearizer
 
 from tests import unittest
@@ -51,13 +52,13 @@ class LinearizerTestCase(unittest.TestCase):
 
         @defer.inlineCallbacks
         def func(i, sleep=False):
-            with logcontext.LoggingContext("func(%s)" % i) as lc:
+            with LoggingContext("func(%s)" % i) as lc:
                 with (yield linearizer.queue("")):
-                    self.assertEqual(logcontext.LoggingContext.current_context(), lc)
+                    self.assertEqual(LoggingContext.current_context(), lc)
                     if sleep:
                         yield Clock(reactor).sleep(0)
 
-                self.assertEqual(logcontext.LoggingContext.current_context(), lc)
+                self.assertEqual(LoggingContext.current_context(), lc)
 
         func(0, sleep=True)
         for i in range(1, 100):
diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py
index 8adaee3c8d..8b8455c8b7 100644
--- a/tests/util/test_logcontext.py
+++ b/tests/util/test_logcontext.py
@@ -1,8 +1,14 @@
 import twisted.python.failure
 from twisted.internet import defer, reactor
 
-from synapse.util import Clock, logcontext
-from synapse.util.logcontext import LoggingContext
+from synapse.logging.context import (
+    LoggingContext,
+    PreserveLoggingContext,
+    make_deferred_yieldable,
+    nested_logging_context,
+    run_in_background,
+)
+from synapse.util import Clock
 
 from .. import unittest
 
@@ -39,24 +45,17 @@ class LoggingContextTestCase(unittest.TestCase):
 
         callback_completed = [False]
 
-        def test():
+        with LoggingContext() as context_one:
             context_one.request = "one"
-            d = function()
+
+            # fire off function, but don't wait on it.
+            d2 = run_in_background(function)
 
             def cb(res):
-                self._check_test_key("one")
                 callback_completed[0] = True
                 return res
 
-            d.addCallback(cb)
-
-            return d
-
-        with LoggingContext() as context_one:
-            context_one.request = "one"
-
-            # fire off function, but don't wait on it.
-            logcontext.run_in_background(test)
+            d2.addCallback(cb)
 
             self._check_test_key("one")
 
@@ -92,7 +91,7 @@ class LoggingContextTestCase(unittest.TestCase):
     def test_run_in_background_with_non_blocking_fn(self):
         @defer.inlineCallbacks
         def nonblocking_function():
-            with logcontext.PreserveLoggingContext():
+            with PreserveLoggingContext():
                 yield defer.succeed(None)
 
         return self._test_run_in_background(nonblocking_function)
@@ -101,7 +100,23 @@ class LoggingContextTestCase(unittest.TestCase):
         # a function which returns a deferred which looks like it has been
         # called, but is actually paused
         def testfunc():
-            return logcontext.make_deferred_yieldable(_chained_deferred_function())
+            return make_deferred_yieldable(_chained_deferred_function())
+
+        return self._test_run_in_background(testfunc)
+
+    def test_run_in_background_with_coroutine(self):
+        async def testfunc():
+            self._check_test_key("one")
+            d = Clock(reactor).sleep(0)
+            self.assertIs(LoggingContext.current_context(), LoggingContext.sentinel)
+            await d
+            self._check_test_key("one")
+
+        return self._test_run_in_background(testfunc)
+
+    def test_run_in_background_with_nonblocking_coroutine(self):
+        async def testfunc():
+            self._check_test_key("one")
 
         return self._test_run_in_background(testfunc)
 
@@ -119,7 +134,7 @@ class LoggingContextTestCase(unittest.TestCase):
         with LoggingContext() as context_one:
             context_one.request = "one"
 
-            d1 = logcontext.make_deferred_yieldable(blocking_function())
+            d1 = make_deferred_yieldable(blocking_function())
             # make sure that the context was reset by make_deferred_yieldable
             self.assertIs(LoggingContext.current_context(), sentinel_context)
 
@@ -135,7 +150,7 @@ class LoggingContextTestCase(unittest.TestCase):
         with LoggingContext() as context_one:
             context_one.request = "one"
 
-            d1 = logcontext.make_deferred_yieldable(_chained_deferred_function())
+            d1 = make_deferred_yieldable(_chained_deferred_function())
             # make sure that the context was reset by make_deferred_yieldable
             self.assertIs(LoggingContext.current_context(), sentinel_context)
 
@@ -152,7 +167,7 @@ class LoggingContextTestCase(unittest.TestCase):
         with LoggingContext() as context_one:
             context_one.request = "one"
 
-            d1 = logcontext.make_deferred_yieldable("bum")
+            d1 = make_deferred_yieldable("bum")
             self._check_test_key("one")
 
             r = yield d1
@@ -161,7 +176,7 @@ class LoggingContextTestCase(unittest.TestCase):
 
     def test_nested_logging_context(self):
         with LoggingContext(request="foo"):
-            nested_context = logcontext.nested_logging_context(suffix="bar")
+            nested_context = nested_logging_context(suffix="bar")
             self.assertEqual(nested_context.request, "foo-bar")
 
 
diff --git a/tests/util/test_logformatter.py b/tests/util/test_logformatter.py
index 297aebbfbe..0fb60caacb 100644
--- a/tests/util/test_logformatter.py
+++ b/tests/util/test_logformatter.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 import sys
 
-from synapse.util.logformatter import LogFormatter
+from synapse.logging.formatter import LogFormatter
 
 from tests import unittest
 
diff --git a/tests/util/test_ratelimitutils.py b/tests/util/test_ratelimitutils.py
new file mode 100644
index 0000000000..4d1aee91d5
--- /dev/null
+++ b/tests/util/test_ratelimitutils.py
@@ -0,0 +1,98 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from synapse.config.homeserver import HomeServerConfig
+from synapse.util.ratelimitutils import FederationRateLimiter
+
+from tests.server import get_clock
+from tests.unittest import TestCase
+from tests.utils import default_config
+
+
+class FederationRateLimiterTestCase(TestCase):
+    def test_ratelimit(self):
+        """A simple test with the default values"""
+        reactor, clock = get_clock()
+        rc_config = build_rc_config()
+        ratelimiter = FederationRateLimiter(clock, rc_config)
+
+        with ratelimiter.ratelimit("testhost") as d1:
+            # shouldn't block
+            self.successResultOf(d1)
+
+    def test_concurrent_limit(self):
+        """Test what happens when we hit the concurrent limit"""
+        reactor, clock = get_clock()
+        rc_config = build_rc_config({"rc_federation": {"concurrent": 2}})
+        ratelimiter = FederationRateLimiter(clock, rc_config)
+
+        with ratelimiter.ratelimit("testhost") as d1:
+            # shouldn't block
+            self.successResultOf(d1)
+
+            cm2 = ratelimiter.ratelimit("testhost")
+            d2 = cm2.__enter__()
+            # also shouldn't block
+            self.successResultOf(d2)
+
+            cm3 = ratelimiter.ratelimit("testhost")
+            d3 = cm3.__enter__()
+            # this one should block, though ...
+            self.assertNoResult(d3)
+
+            # ... until we complete an earlier request
+            cm2.__exit__(None, None, None)
+            self.successResultOf(d3)
+
+    def test_sleep_limit(self):
+        """Test what happens when we hit the sleep limit"""
+        reactor, clock = get_clock()
+        rc_config = build_rc_config(
+            {"rc_federation": {"sleep_limit": 2, "sleep_delay": 500}}
+        )
+        ratelimiter = FederationRateLimiter(clock, rc_config)
+
+        with ratelimiter.ratelimit("testhost") as d1:
+            # shouldn't block
+            self.successResultOf(d1)
+
+        with ratelimiter.ratelimit("testhost") as d2:
+            # nor this
+            self.successResultOf(d2)
+
+        with ratelimiter.ratelimit("testhost") as d3:
+            # this one should block, though ...
+            self.assertNoResult(d3)
+            sleep_time = _await_resolution(reactor, d3)
+            self.assertAlmostEqual(sleep_time, 500, places=3)
+
+
+def _await_resolution(reactor, d):
+    """advance the clock until the deferred completes.
+
+    Returns the number of milliseconds it took to complete.
+    """
+    start_time = reactor.seconds()
+    while not d.called:
+        reactor.advance(0.01)
+    return (reactor.seconds() - start_time) * 1000
+
+
+def build_rc_config(settings={}):
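+    # parse a complete homeserver config, then return only the rc_federation section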
+    config_dict = default_config("test")
+    config_dict.update(settings)
+    config = HomeServerConfig()
+    config.parse_config_dict(config_dict, "", "")
+    return config.rc_federation
diff --git a/tests/utils.py b/tests/utils.py
index da43166f3a..8a94ce0b47 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -34,6 +34,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.server import DEFAULT_ROOM_VERSION
 from synapse.federation.transport import server as federation_server
 from synapse.http.server import HttpServer
+from synapse.logging.context import LoggingContext
 from synapse.server import HomeServer
 from synapse.storage import DataStore
 from synapse.storage.engines import PostgresEngine, create_engine
@@ -42,7 +43,6 @@ from synapse.storage.prepare_database import (
     _setup_new_database,
     prepare_database,
 )
-from synapse.util.logcontext import LoggingContext
 from synapse.util.ratelimitutils import FederationRateLimiter
 
 # set this to True to run the tests against postgres instead of sqlite.
@@ -152,12 +152,6 @@ def default_config(name, parse=False):
         "mau_stats_only": False,
         "mau_limits_reserved_threepids": [],
         "admin_contact": None,
-        "rc_federation": {
-            "reject_limit": 10,
-            "sleep_limit": 10,
-            "sleep_delay": 10,
-            "concurrent": 10,
-        },
         "rc_message": {"per_second": 10000, "burst_count": 10000},
         "rc_registration": {"per_second": 10000, "burst_count": 10000},
         "rc_login": {