-rw-r--r--  .circleci/config.yml | 48
-rw-r--r--  .dockerignore | 3
-rw-r--r--  .travis.yml | 10
-rw-r--r--  CHANGES.md | 84
-rw-r--r--  CONTRIBUTING.rst | 20
-rw-r--r--  MANIFEST.in | 1
-rw-r--r--  changelog.d/3659.feature | 1
-rw-r--r--  changelog.d/3673.misc | 1
-rw-r--r--  changelog.d/3680.feature | 1
-rw-r--r--  changelog.d/3722.bugfix | 1
-rw-r--r--  changelog.d/3724.feature | 1
-rw-r--r--  changelog.d/3726.misc | 1
-rw-r--r--  changelog.d/3727.misc | 1
-rw-r--r--  changelog.d/3734.misc | 1
-rw-r--r--  changelog.d/3735.misc | 1
-rw-r--r--  changelog.d/3746.misc | 1
-rw-r--r--  changelog.d/3747.bugfix | 1
-rw-r--r--  changelog.d/3749.feature | 1
-rw-r--r--  changelog.d/3751.feature | 1
-rw-r--r--  changelog.d/3753.bugfix | 1
-rw-r--r--  changelog.d/3754.bugfix | 1
-rw-r--r--  changelog.d/3755.bugfix | 1
-rw-r--r--  contrib/grafana/synapse.json | 24
-rw-r--r--  docker/Dockerfile | 2
-rw-r--r--  docs/admin_api/register_api.rst | 2
-rw-r--r--  docs/workers.rst | 24
-rw-r--r--  synapse/__init__.py | 2
-rw-r--r--  synapse/api/auth.py | 61
-rw-r--r--  synapse/api/constants.py | 18
-rw-r--r--  synapse/api/errors.py | 44
-rw-r--r--  synapse/api/ratelimiting.py | 2
-rw-r--r--  synapse/app/_base.py | 6
-rw-r--r--  synapse/app/appservice.py | 3
-rw-r--r--  synapse/app/client_reader.py | 6
-rw-r--r--  synapse/app/event_creator.py | 18
-rw-r--r--  synapse/app/federation_reader.py | 18
-rw-r--r--  synapse/app/federation_sender.py | 11
-rw-r--r--  synapse/app/frontend_proxy.py | 41
-rwxr-xr-x  synapse/app/homeserver.py | 20
-rw-r--r--  synapse/app/media_repository.py | 6
-rw-r--r--  synapse/app/pusher.py | 7
-rw-r--r--  synapse/app/synchrotron.py | 19
-rw-r--r--  synapse/app/user_dir.py | 5
-rw-r--r--  synapse/config/logger.py | 6
-rw-r--r--  synapse/config/server.py | 54
-rw-r--r--  synapse/crypto/context_factory.py | 86
-rw-r--r--  synapse/crypto/keyclient.py | 12
-rw-r--r--  synapse/crypto/keyring.py | 4
-rw-r--r--  synapse/event_auth.py | 10
-rw-r--r--  synapse/federation/federation_client.py | 38
-rw-r--r--  synapse/federation/federation_server.py | 83
-rw-r--r--  synapse/federation/transaction_queue.py | 14
-rw-r--r--  synapse/federation/transport/client.py | 7
-rw-r--r--  synapse/federation/transport/server.py | 77
-rw-r--r--  synapse/handlers/appservice.py | 10
-rw-r--r--  synapse/handlers/auth.py | 37
-rw-r--r--  synapse/handlers/deactivate_account.py | 13
-rw-r--r--  synapse/handlers/device.py | 2
-rw-r--r--  synapse/handlers/federation.py | 98
-rw-r--r--  synapse/handlers/identity.py | 32
-rw-r--r--  synapse/handlers/initial_sync.py | 6
-rw-r--r--  synapse/handlers/message.py | 114
-rw-r--r--  synapse/handlers/pagination.py | 37
-rw-r--r--  synapse/handlers/presence.py | 33
-rw-r--r--  synapse/handlers/profile.py | 31
-rw-r--r--  synapse/handlers/read_marker.py | 2
-rw-r--r--  synapse/handlers/receipts.py | 18
-rw-r--r--  synapse/handlers/register.py | 22
-rw-r--r--  synapse/handlers/room.py | 32
-rw-r--r--  synapse/handlers/room_list.py | 2
-rw-r--r--  synapse/handlers/room_member.py | 3
-rw-r--r--  synapse/handlers/room_member_worker.py | 41
-rw-r--r--  synapse/handlers/sync.py | 177
-rw-r--r--  synapse/handlers/user_directory.py | 4
-rw-r--r--  synapse/http/client.py | 2
-rw-r--r--  synapse/http/endpoint.py | 12
-rw-r--r--  synapse/http/matrixfederationclient.py | 15
-rw-r--r--  synapse/http/request_metrics.py | 13
-rw-r--r--  synapse/http/server.py | 87
-rw-r--r--  synapse/http/servlet.py | 56
-rw-r--r--  synapse/http/site.py | 211
-rw-r--r--  synapse/metrics/__init__.py | 13
-rw-r--r--  synapse/metrics/background_process_metrics.py | 26
-rw-r--r--  synapse/notifier.py | 2
-rw-r--r--  synapse/push/bulk_push_rule_evaluator.py | 2
-rw-r--r--  synapse/push/mailer.py | 2
-rw-r--r--  synapse/push/pusherpool.py | 17
-rw-r--r--  synapse/python_dependencies.py | 2
-rw-r--r--  synapse/replication/http/__init__.py | 3
-rw-r--r--  synapse/replication/http/_base.py | 215
-rw-r--r--  synapse/replication/http/federation.py | 259
-rw-r--r--  synapse/replication/http/membership.py | 292
-rw-r--r--  synapse/replication/http/send_event.py | 110
-rw-r--r--  synapse/replication/slave/storage/events.py | 2
-rw-r--r--  synapse/replication/slave/storage/transactions.py | 13
-rw-r--r--  synapse/replication/tcp/client.py | 4
-rw-r--r--  synapse/replication/tcp/commands.py | 12
-rw-r--r--  synapse/replication/tcp/protocol.py | 42
-rw-r--r--  synapse/rest/client/transactions.py | 4
-rw-r--r--  synapse/rest/client/v1/admin.py | 11
-rw-r--r--  synapse/rest/client/v1/presence.py | 3
-rw-r--r--  synapse/rest/client/v1/room.py | 34
-rw-r--r--  synapse/rest/client/v1_only/register.py | 54
-rw-r--r--  synapse/rest/client/v2_alpha/account.py | 22
-rw-r--r--  synapse/rest/client/v2_alpha/sync.py | 1
-rw-r--r--  synapse/rest/client/versions.py | 13
-rw-r--r--  synapse/rest/consent/consent_resource.py | 15
-rw-r--r--  synapse/rest/media/v1/config_resource.py | 48
-rw-r--r--  synapse/rest/media/v1/media_repository.py | 5
-rw-r--r--  synapse/rest/media/v1/preview_url_resource.py | 2
-rw-r--r--  synapse/rest/media/v1/upload_resource.py | 10
-rw-r--r--  synapse/secrets.py | 2
-rw-r--r--  synapse/server.py | 13
-rw-r--r--  synapse/server_notices/resource_limits_server_notices.py | 204
-rw-r--r--  synapse/server_notices/server_notices_manager.py | 41
-rw-r--r--  synapse/server_notices/server_notices_sender.py | 33
-rw-r--r--  synapse/state/__init__.py (renamed from synapse/state.py) | 324
-rw-r--r--  synapse/state/v1.py | 321
-rw-r--r--  synapse/static/client/register/index.html | 2
-rw-r--r--  synapse/static/client/register/js/recaptcha_ajax.js | 195
-rw-r--r--  synapse/storage/__init__.py | 28
-rw-r--r--  synapse/storage/_base.py | 7
-rw-r--r--  synapse/storage/client_ips.py | 9
-rw-r--r--  synapse/storage/events.py | 101
-rw-r--r--  synapse/storage/events_worker.py | 83
-rw-r--r--  synapse/storage/monthly_active_users.py | 222
-rw-r--r--  synapse/storage/prepare_database.py | 2
-rw-r--r--  synapse/storage/profile.py | 4
-rw-r--r--  synapse/storage/registration.py | 28
-rw-r--r--  synapse/storage/room.py | 90
-rw-r--r--  synapse/storage/roommember.py | 2
-rw-r--r--  synapse/storage/schema/delta/51/monthly_active_users.sql | 27
-rw-r--r--  synapse/storage/state.py | 262
-rw-r--r--  synapse/storage/stream.py | 4
-rw-r--r--  synapse/util/async_helpers.py (renamed from synapse/util/async.py) | 111
-rw-r--r--  synapse/util/caches/descriptors.py | 2
-rw-r--r--  synapse/util/caches/response_cache.py | 2
-rw-r--r--  synapse/util/caches/snapshot_cache.py | 2
-rw-r--r--  synapse/util/logcontext.py | 14
-rw-r--r--  synapse/util/logutils.py | 10
-rw-r--r--  synapse/util/stringutils.py | 15
-rw-r--r--  synapse/util/versionstring.py | 12
-rw-r--r--  tests/__init__.py | 3
-rw-r--r--  tests/api/test_auth.py | 126
-rw-r--r--  tests/api/test_filtering.py | 358
-rw-r--r--  tests/api/test_ratelimiting.py | 7
-rw-r--r--  tests/app/__init__.py | 0
-rw-r--r--  tests/app/test_frontend_proxy.py | 88
-rw-r--r--  tests/appservice/test_appservice.py | 86
-rw-r--r--  tests/appservice/test_scheduler.py | 28
-rw-r--r--  tests/config/test_generate.py | 38
-rw-r--r--  tests/config/test_load.py | 38
-rw-r--r--  tests/crypto/test_event_signing.py | 19
-rw-r--r--  tests/crypto/test_keyring.py | 49
-rw-r--r--  tests/events/test_utils.py | 135
-rw-r--r--  tests/federation/test_federation_server.py | 26
-rw-r--r--  tests/handlers/test_appservice.py | 39
-rw-r--r--  tests/handlers/test_auth.py | 70
-rw-r--r--  tests/handlers/test_device.py | 100
-rw-r--r--  tests/handlers/test_directory.py | 23
-rw-r--r--  tests/handlers/test_e2e_keys.py | 99
-rw-r--r--  tests/handlers/test_presence.py | 216
-rw-r--r--  tests/handlers/test_profile.py | 30
-rw-r--r--  tests/handlers/test_register.py | 105
-rw-r--r--  tests/handlers/test_sync.py | 71
-rw-r--r--  tests/handlers/test_typing.py | 230
-rw-r--r--  tests/http/test_endpoint.py | 6
-rw-r--r--  tests/replication/slave/storage/_base.py | 6
-rw-r--r--  tests/replication/slave/storage/test_account_data.py | 14
-rw-r--r--  tests/replication/slave/storage/test_events.py | 108
-rw-r--r--  tests/replication/slave/storage/test_receipts.py | 6
-rw-r--r--  tests/rest/client/test_transactions.py | 34
-rw-r--r--  tests/rest/client/v1/test_admin.py | 15
-rw-r--r--  tests/rest/client/v1/test_events.py | 34
-rw-r--r--  tests/rest/client/v1/test_presence.py | 72
-rw-r--r--  tests/rest/client/v1/test_profile.py | 43
-rw-r--r--  tests/rest/client/v1/test_register.py | 10
-rw-r--r--  tests/rest/client/v1/test_rooms.py | 1
-rw-r--r--  tests/rest/client/v1/test_typing.py | 100
-rw-r--r--  tests/rest/client/v1/utils.py | 84
-rw-r--r--  tests/rest/client/v2_alpha/test_filter.py | 47
-rw-r--r--  tests/rest/client/v2_alpha/test_register.py | 51
-rw-r--r--  tests/rest/client/v2_alpha/test_sync.py | 80
-rw-r--r--  tests/rest/media/v1/test_media_storage.py | 6
-rw-r--r--  tests/server.py | 70
-rw-r--r--  tests/server_notices/__init__.py | 0
-rw-r--r--  tests/server_notices/test_resource_limits_server_notices.py | 212
-rw-r--r--  tests/storage/test__base.py | 7
-rw-r--r--  tests/storage/test__init__.py | 65
-rw-r--r--  tests/storage/test_appservice.py | 175
-rw-r--r--  tests/storage/test_background_update.py | 21
-rw-r--r--  tests/storage/test_base.py | 46
-rw-r--r--  tests/storage/test_client_ips.py | 71
-rw-r--r--  tests/storage/test_devices.py | 71
-rw-r--r--  tests/storage/test_directory.py | 24
-rw-r--r--  tests/storage/test_end_to_end_keys.py | 64
-rw-r--r--  tests/storage/test_event_federation.py | 41
-rw-r--r--  tests/storage/test_event_push_actions.py | 80
-rw-r--r--  tests/storage/test_keys.py | 13
-rw-r--r--  tests/storage/test_monthly_active_users.py | 131
-rw-r--r--  tests/storage/test_presence.py | 71
-rw-r--r--  tests/storage/test_profile.py | 20
-rw-r--r--  tests/storage/test_redaction.py | 74
-rw-r--r--  tests/storage/test_registration.py | 41
-rw-r--r--  tests/storage/test_room.py | 51
-rw-r--r--  tests/storage/test_roommember.py | 35
-rw-r--r--  tests/storage/test_state.py | 342
-rw-r--r--  tests/storage/test_user_directory.py | 42
-rw-r--r--  tests/test_distributor.py | 10
-rw-r--r--  tests/test_dns.py | 22
-rw-r--r--  tests/test_event_auth.py | 98
-rw-r--r--  tests/test_federation.py | 5
-rw-r--r--  tests/test_mau.py | 217
-rw-r--r--  tests/test_preview.py | 55
-rw-r--r--  tests/test_server.py | 30
-rw-r--r--  tests/test_state.py | 248
-rw-r--r--  tests/test_test_utils.py | 5
-rw-r--r--  tests/test_types.py | 5
-rw-r--r--  tests/test_visibility.py | 151
-rw-r--r--  tests/unittest.py | 157
-rw-r--r--  tests/util/caches/test_descriptors.py | 46
-rw-r--r--  tests/util/test_dict_cache.py | 19
-rw-r--r--  tests/util/test_expiring_cache.py | 1
-rw-r--r--  tests/util/test_file_consumer.py | 5
-rw-r--r--  tests/util/test_linearizer.py | 9
-rw-r--r--  tests/util/test_logcontext.py | 14
-rw-r--r--  tests/util/test_lrucache.py | 2
-rw-r--r--  tests/util/test_rwlock.py | 11
-rw-r--r--  tests/util/test_snapshot_cache.py | 1
-rw-r--r--  tests/util/test_stream_change_cache.py | 13
-rw-r--r--  tests/utils.py | 260
-rw-r--r--  tox.ini | 20
232 files changed, 7197 insertions, 4107 deletions
diff --git a/.circleci/config.yml b/.circleci/config.yml
new file mode 100644
index 0000000000..e03f01b837
--- /dev/null
+++ b/.circleci/config.yml
@@ -0,0 +1,48 @@
+version: 2
+jobs:
+  sytestpy2:
+    machine: true
+    steps:
+      - checkout
+      - run: docker pull matrixdotorg/sytest-synapsepy2
+      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy2
+      - store_artifacts:
+          path: ~/project/logs
+          destination: logs
+  sytestpy2postgres:
+    machine: true
+    steps:
+      - checkout
+      - run: docker pull matrixdotorg/sytest-synapsepy2
+      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy2
+      - store_artifacts:
+          path: ~/project/logs
+          destination: logs
+  sytestpy3:
+    machine: true
+    steps:
+      - checkout
+      - run: docker pull matrixdotorg/sytest-synapsepy3
+      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy3
+      - store_artifacts:
+          path: ~/project/logs
+          destination: logs
+  sytestpy3postgres:
+    machine: true
+    steps:
+      - checkout
+      - run: docker pull matrixdotorg/sytest-synapsepy3
+      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy3
+      - store_artifacts:
+          path: ~/project/logs
+          destination: logs
+
+workflows:
+  version: 2
+  build:
+    jobs:
+      - sytestpy2
+      - sytestpy2postgres
+# Currently broken while the Python 3 port is incomplete
+#      - sytestpy3
+#      - sytestpy3postgres
diff --git a/.dockerignore b/.dockerignore
index f36f86fbb7..6cdb8532d3 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -3,3 +3,6 @@ Dockerfile
 .gitignore
 demo/etc
 tox.ini
+synctl
+.git/*
+.tox/*
diff --git a/.travis.yml b/.travis.yml
index b34b17af75..318701c9f8 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -8,6 +8,9 @@ before_script:
   - git remote set-branches --add origin develop
   - git fetch origin develop
 
+services:
+  - postgresql
+
 matrix:
   fast_finish: true
   include:
@@ -20,6 +23,9 @@ matrix:
   - python: 2.7
     env: TOX_ENV=py27
 
+  - python: 2.7
+    env: TOX_ENV=py27-postgres TRIAL_FLAGS="-j 4"
+
   - python: 3.6
     env: TOX_ENV=py36
 
@@ -29,6 +35,10 @@ matrix:
   - python: 3.6
     env: TOX_ENV=check-newsfragment
 
+  allow_failures:
+  - python: 2.7
+    env: TOX_ENV=py27-postgres TRIAL_FLAGS="-j 4"
+
 install:
   - pip install tox
 
diff --git a/CHANGES.md b/CHANGES.md
index a299110a6b..a35f5aebc7 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,85 @@
+Synapse 0.33.3 (2018-08-22)
+===========================
+
+Bugfixes
+--------
+
+- Fix bug introduced in v0.33.3rc1 which made the ToS give a 500 error ([\#3732](https://github.com/matrix-org/synapse/issues/3732))
+
+
+Synapse 0.33.3rc2 (2018-08-21)
+==============================
+
+Bugfixes
+--------
+
+- Fix bug in v0.33.3rc1 which caused infinite loops and OOMs ([\#3723](https://github.com/matrix-org/synapse/issues/3723))
+
+
+Synapse 0.33.3rc1 (2018-08-21)
+==============================
+
+Features
+--------
+
+- Add support for the SNI extension to federation TLS connections. Thanks to @vojeroen! ([\#3439](https://github.com/matrix-org/synapse/issues/3439))
+- Add /_media/r0/config ([\#3184](https://github.com/matrix-org/synapse/issues/3184))
+- speed up /members API and add `at` and `membership` params as per MSC1227 ([\#3568](https://github.com/matrix-org/synapse/issues/3568))
+- implement `summary` block in /sync response as per MSC688 ([\#3574](https://github.com/matrix-org/synapse/issues/3574))
+- Add lazy-loading support to /messages as per MSC1227 ([\#3589](https://github.com/matrix-org/synapse/issues/3589))
+- Add ability to limit number of monthly active users on the server ([\#3633](https://github.com/matrix-org/synapse/issues/3633))
+- Support more federation endpoints on workers ([\#3653](https://github.com/matrix-org/synapse/issues/3653))
+- Basic support for room versioning ([\#3654](https://github.com/matrix-org/synapse/issues/3654))
+- Ability to disable client/server Synapse via conf toggle ([\#3655](https://github.com/matrix-org/synapse/issues/3655))
+- Ability to whitelist specific threepids against monthly active user limiting ([\#3662](https://github.com/matrix-org/synapse/issues/3662))
+- Add some metrics for the appservice and federation event sending loops ([\#3664](https://github.com/matrix-org/synapse/issues/3664))
+- Where server is disabled, block ability for locked out users to read new messages ([\#3670](https://github.com/matrix-org/synapse/issues/3670))
+- set admin uri via config, to be used in error messages where the user should contact the administrator ([\#3687](https://github.com/matrix-org/synapse/issues/3687))
+- Synapse's presence functionality can now be disabled with the "use_presence" configuration option. ([\#3694](https://github.com/matrix-org/synapse/issues/3694))
+- For resource limit blocked users, prevent writing into rooms ([\#3708](https://github.com/matrix-org/synapse/issues/3708))
+
+
+Bugfixes
+--------
+
+- Fix occasional glitches in the synapse_event_persisted_position metric ([\#3658](https://github.com/matrix-org/synapse/issues/3658))
+- Fix bug on deleting 3pid when using identity servers that don't support unbind API ([\#3661](https://github.com/matrix-org/synapse/issues/3661))
+- Make the tests pass on Twisted < 18.7.0 ([\#3676](https://github.com/matrix-org/synapse/issues/3676))
+- Don’t ship recaptcha_ajax.js, use it directly from Google ([\#3677](https://github.com/matrix-org/synapse/issues/3677))
+- Fixes test_reap_monthly_active_users so it passes under postgres ([\#3681](https://github.com/matrix-org/synapse/issues/3681))
+- Fix mau blocking calculation bug on login ([\#3689](https://github.com/matrix-org/synapse/issues/3689))
+- Fix missing yield in synapse.storage.monthly_active_users.initialise_reserved_users ([\#3692](https://github.com/matrix-org/synapse/issues/3692))
+- Improve HTTP request logging to include all requests ([\#3700](https://github.com/matrix-org/synapse/issues/3700))
+- Avoid timing out requests while we are streaming back the response ([\#3701](https://github.com/matrix-org/synapse/issues/3701))
+- Support more federation endpoints on workers ([\#3705](https://github.com/matrix-org/synapse/issues/3705), [\#3713](https://github.com/matrix-org/synapse/issues/3713))
+- Fix "Starting db txn 'get_all_updated_receipts' from sentinel context" warning ([\#3710](https://github.com/matrix-org/synapse/issues/3710))
+- Fix bug where `state_cache` cache factor ignored environment variables ([\#3719](https://github.com/matrix-org/synapse/issues/3719))
+
+
+Deprecations and Removals
+-------------------------
+
+- The Shared-Secret registration method of the legacy v1/register REST endpoint has been removed. For a replacement, please see [the admin/register API documentation](https://github.com/matrix-org/synapse/blob/master/docs/admin_api/register_api.rst). ([\#3703](https://github.com/matrix-org/synapse/issues/3703))
+
+
+Internal Changes
+----------------
+
+- The test suite now can run under PostgreSQL. ([\#3423](https://github.com/matrix-org/synapse/issues/3423))
+- Refactor HTTP replication endpoints to reduce code duplication ([\#3632](https://github.com/matrix-org/synapse/issues/3632))
+- Tests now correctly execute on Python 3. ([\#3647](https://github.com/matrix-org/synapse/issues/3647))
+- Sytests can now be run inside a Docker container. ([\#3660](https://github.com/matrix-org/synapse/issues/3660))
+- Port over enough to Python 3 to allow the sytests to start. ([\#3668](https://github.com/matrix-org/synapse/issues/3668))
+- Update docker base image from alpine 3.7 to 3.8. ([\#3669](https://github.com/matrix-org/synapse/issues/3669))
+- Rename synapse.util.async to synapse.util.async_helpers to mitigate async becoming a keyword on Python 3.7. ([\#3678](https://github.com/matrix-org/synapse/issues/3678))
+- Synapse's tests are now formatted with the black autoformatter. ([\#3679](https://github.com/matrix-org/synapse/issues/3679))
+- Implemented a new testing base class to reduce test boilerplate. ([\#3684](https://github.com/matrix-org/synapse/issues/3684))
+- Rename MAU prometheus metrics ([\#3690](https://github.com/matrix-org/synapse/issues/3690))
+- add new error type ResourceLimit ([\#3707](https://github.com/matrix-org/synapse/issues/3707))
+- Logcontexts for replication command handlers ([\#3709](https://github.com/matrix-org/synapse/issues/3709))
+- Update admin register API documentation to reference a real user ID. ([\#3712](https://github.com/matrix-org/synapse/issues/3712))
+
+
 Synapse 0.33.2 (2018-08-09)
 ===========================
 
@@ -24,7 +106,7 @@ Features
 Bugfixes
 --------
 
-- Make /directory/list API return 404 for room not found instead of 400 ([\#2952](https://github.com/matrix-org/synapse/issues/2952))
+- Make /directory/list API return 404 for room not found instead of 400. Thanks to @fuzzmz! ([\#3620](https://github.com/matrix-org/synapse/issues/3620))
 - Default inviter_display_name to mxid for email invites ([\#3391](https://github.com/matrix-org/synapse/issues/3391))
 - Don't generate TURN credentials if no TURN config options are set ([\#3514](https://github.com/matrix-org/synapse/issues/3514))
 - Correctly announce deleted devices over federation ([\#3520](https://github.com/matrix-org/synapse/issues/3520))
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index aa2738eea3..3d75853aa7 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -30,11 +30,11 @@ use github's pull request workflow to review the contribution, and either ask
 you to make any refinements needed or merge it and make them ourselves. The
 changes will then land on master when we next do a release.
 
-We use `Jenkins <http://matrix.org/jenkins>`_ and 
+We use `Jenkins <http://matrix.org/jenkins>`_ and
 `Travis <https://travis-ci.org/matrix-org/synapse>`_ for continuous
-integration. All pull requests to synapse get automatically tested by Travis; 
-the Jenkins builds require an adminstrator to start them. If your change 
-breaks the build, this will be shown in github, so please keep an eye on the 
+integration. All pull requests to synapse get automatically tested by Travis;
+the Jenkins builds require an administrator to start them. If your change
+breaks the build, this will be shown in github, so please keep an eye on the
 pull request for feedback.
 
 Code style
@@ -56,17 +56,17 @@ entry. These are managed by Towncrier
 (https://github.com/hawkowl/towncrier).
 
 To create a changelog entry, make a new file in the ``changelog.d``
-file named in the format of ``issuenumberOrPR.type``. The type can be
+file named in the format of ``PRnumber.type``. The type can be
 one of ``feature``, ``bugfix``, ``removal`` (also used for
 deprecations), or ``misc`` (for internal-only changes). The content of
 the file is your changelog entry, which can contain RestructuredText
 formatting. A note of contributors is welcomed in changelogs for
 non-misc changes (the content of misc changes is not displayed).
 
-For example, a fix for a bug reported in #1234 would have its
-changelog entry in ``changelog.d/1234.bugfix``, and contain content
-like "The security levels of Florbs are now validated when
-recieved over federation. Contributed by Jane Matrix".
+For example, a fix in PR #1234 would have its changelog entry in
+``changelog.d/1234.bugfix``, and contain content like "The security levels of
+Florbs are now validated when received over federation. Contributed by Jane
+Matrix".
 
 Attribution
 ~~~~~~~~~~~
@@ -125,7 +125,7 @@ the contribution or otherwise have the right to contribute it to Matrix::
         personal information I submit with it, including my sign-off) is
         maintained indefinitely and may be redistributed consistent with
         this project or the open source license(s) involved.
-        
+
 If you agree to this for your contribution, then all that's needed is to
 include the line in your commit or pull request comment::
 
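As a concrete illustration of the changelog convention described earlier in this file's diff, here is a minimal sketch (Python is just one way to create the file) that writes the example entry for the hypothetical PR #1234; Towncrier identifies the entry purely by its file name:

    # A sketch only: create changelog.d/1234.bugfix for the example PR above.
    from pathlib import Path

    entry = Path("changelog.d") / "1234.bugfix"
    entry.write_text(
        "The security levels of Florbs are now validated when received "
        "over federation. Contributed by Jane Matrix.\n"
    )
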
diff --git a/MANIFEST.in b/MANIFEST.in
index 1ff98d95df..e0826ba544 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -36,3 +36,4 @@ recursive-include changelog.d *
 prune .github
 prune demo/etc
 prune docker
+prune .circleci
diff --git a/changelog.d/3659.feature b/changelog.d/3659.feature
new file mode 100644
index 0000000000..a5b4821c09
--- /dev/null
+++ b/changelog.d/3659.feature
@@ -0,0 +1 @@
+Support profile API endpoints on workers
diff --git a/changelog.d/3673.misc b/changelog.d/3673.misc
new file mode 100644
index 0000000000..d672111fb9
--- /dev/null
+++ b/changelog.d/3673.misc
@@ -0,0 +1 @@
+Refactor state module to support multiple room versions
diff --git a/changelog.d/3680.feature b/changelog.d/3680.feature
new file mode 100644
index 0000000000..4edaaf76a8
--- /dev/null
+++ b/changelog.d/3680.feature
@@ -0,0 +1 @@
+Server notices for resource limit blocking
diff --git a/changelog.d/3722.bugfix b/changelog.d/3722.bugfix
new file mode 100644
index 0000000000..16cbaf76cb
--- /dev/null
+++ b/changelog.d/3722.bugfix
@@ -0,0 +1 @@
+Fix error collecting prometheus metrics when run on dedicated thread due to threading concurrency issues
diff --git a/changelog.d/3724.feature b/changelog.d/3724.feature
new file mode 100644
index 0000000000..1b374ccf47
--- /dev/null
+++ b/changelog.d/3724.feature
@@ -0,0 +1 @@
+Allow guests to use /rooms/:roomId/event/:eventId
diff --git a/changelog.d/3726.misc b/changelog.d/3726.misc
new file mode 100644
index 0000000000..c4f66ec998
--- /dev/null
+++ b/changelog.d/3726.misc
@@ -0,0 +1 @@
+Split the state_group_cache into member and non-member state events (and so speed up LL /sync)
diff --git a/changelog.d/3727.misc b/changelog.d/3727.misc
new file mode 100644
index 0000000000..0b83220d90
--- /dev/null
+++ b/changelog.d/3727.misc
@@ -0,0 +1 @@
+Log failure to authenticate remote servers as warnings (without stack traces)
diff --git a/changelog.d/3734.misc b/changelog.d/3734.misc
new file mode 100644
index 0000000000..4f6e4b3848
--- /dev/null
+++ b/changelog.d/3734.misc
@@ -0,0 +1 @@
+Reference the need for an HTTP replication port when using the federation_reader worker
diff --git a/changelog.d/3735.misc b/changelog.d/3735.misc
new file mode 100644
index 0000000000..f17004be71
--- /dev/null
+++ b/changelog.d/3735.misc
@@ -0,0 +1 @@
+Fix minor spelling error in federation client documentation.
diff --git a/changelog.d/3746.misc b/changelog.d/3746.misc
new file mode 100644
index 0000000000..fc00ee773a
--- /dev/null
+++ b/changelog.d/3746.misc
@@ -0,0 +1 @@
+Fix MAU cache invalidation due to missing yield
diff --git a/changelog.d/3747.bugfix b/changelog.d/3747.bugfix
new file mode 100644
index 0000000000..c41e2a1213
--- /dev/null
+++ b/changelog.d/3747.bugfix
@@ -0,0 +1 @@
+Fix bug where we resent "limit exceeded" server notices repeatedly
diff --git a/changelog.d/3749.feature b/changelog.d/3749.feature
new file mode 100644
index 0000000000..9f8837b106
--- /dev/null
+++ b/changelog.d/3749.feature
@@ -0,0 +1 @@
+Add mau_trial_days config param, so that users only get counted as MAU after N days.
diff --git a/changelog.d/3751.feature b/changelog.d/3751.feature
new file mode 100644
index 0000000000..dc9742b15b
--- /dev/null
+++ b/changelog.d/3751.feature
@@ -0,0 +1 @@
+Require twisted 17.1 or later (fixes [#3741](https://github.com/matrix-org/synapse/issues/3741)).
diff --git a/changelog.d/3753.bugfix b/changelog.d/3753.bugfix
new file mode 100644
index 0000000000..b4301267df
--- /dev/null
+++ b/changelog.d/3753.bugfix
@@ -0,0 +1 @@
+Fix bug where we broke sync when using limit_usage_by_mau but hadn't configured server notices
diff --git a/changelog.d/3754.bugfix b/changelog.d/3754.bugfix
new file mode 100644
index 0000000000..6e3ec80194
--- /dev/null
+++ b/changelog.d/3754.bugfix
@@ -0,0 +1 @@
+Fix 'federation_domain_whitelist' such that an empty list correctly blocks all outbound federation traffic
diff --git a/changelog.d/3755.bugfix b/changelog.d/3755.bugfix
new file mode 100644
index 0000000000..6a1f83f0ce
--- /dev/null
+++ b/changelog.d/3755.bugfix
@@ -0,0 +1 @@
+Fix tagging of server notice rooms
diff --git a/contrib/grafana/synapse.json b/contrib/grafana/synapse.json
index 94a1de58f4..c58612594a 100644
--- a/contrib/grafana/synapse.json
+++ b/contrib/grafana/synapse.json
@@ -54,7 +54,7 @@
   "gnetId": null,
   "graphTooltip": 0,
   "id": null,
-  "iteration": 1533026624326,
+  "iteration": 1533598785368,
   "links": [
     {
       "asDropdown": true,
@@ -4629,7 +4629,7 @@
             "h": 9,
             "w": 12,
             "x": 0,
-            "y": 11
+            "y": 29
           },
           "id": 67,
           "legend": {
@@ -4655,11 +4655,11 @@
           "steppedLine": false,
           "targets": [
             {
-              "expr": " synapse_event_persisted_position{instance=\"$instance\"} - ignoring(index, job, name) group_right(instance) synapse_event_processing_positions{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}",
+              "expr": " synapse_event_persisted_position{instance=\"$instance\",job=\"synapse\"}  - ignoring(index, job, name) group_right() synapse_event_processing_positions{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}",
               "format": "time_series",
               "interval": "",
               "intervalFactor": 1,
-              "legendFormat": "{{job}}-{{index}}",
+              "legendFormat": "{{job}}-{{index}} ",
               "refId": "A"
             }
           ],
@@ -4697,7 +4697,11 @@
               "min": null,
               "show": true
             }
-          ]
+          ],
+          "yaxis": {
+            "align": false,
+            "alignLevel": null
+          }
         },
         {
           "aliasColors": {},
@@ -4710,7 +4714,7 @@
             "h": 9,
             "w": 12,
             "x": 12,
-            "y": 11
+            "y": 29
           },
           "id": 71,
           "legend": {
@@ -4778,7 +4782,11 @@
               "min": null,
               "show": true
             }
-          ]
+          ],
+          "yaxis": {
+            "align": false,
+            "alignLevel": null
+          }
         }
       ],
       "title": "Event processing loop positions",
@@ -4957,5 +4965,5 @@
   "timezone": "",
   "title": "Synapse",
   "uid": "000000012",
-  "version": 125
+  "version": 127
 }
\ No newline at end of file
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 26fb3a6bff..777976217d 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,4 +1,4 @@
-FROM docker.io/python:2-alpine3.7
+FROM docker.io/python:2-alpine3.8
 
 RUN apk add --no-cache --virtual .nacl_deps \
         build-base \
diff --git a/docs/admin_api/register_api.rst b/docs/admin_api/register_api.rst
index 209cd140fd..16d65c86b3 100644
--- a/docs/admin_api/register_api.rst
+++ b/docs/admin_api/register_api.rst
@@ -33,7 +33,7 @@ As an example::
 
   < {
      "access_token": "token_here",
-     "user_id": "@pepper_roni@test",
+     "user_id": "@pepper_roni:localhost",
      "home_server": "test",
      "device_id": "device_id_here"
     }
diff --git a/docs/workers.rst b/docs/workers.rst
index c5b37c3ded..101e950020 100644
--- a/docs/workers.rst
+++ b/docs/workers.rst
@@ -74,7 +74,7 @@ replication endpoints that it's talking to on the main synapse process.
 ``worker_replication_port`` should point to the TCP replication listener port and
 ``worker_replication_http_port`` should point to the HTTP replication port.
 
-Currently, only the ``event_creator`` worker requires specifying
+Currently, the ``event_creator`` and ``federation_reader`` workers require specifying
 ``worker_replication_http_port``.
 
 For instance::
@@ -173,10 +173,23 @@ endpoints matching the following regular expressions::
     ^/_matrix/federation/v1/backfill/
     ^/_matrix/federation/v1/get_missing_events/
     ^/_matrix/federation/v1/publicRooms
+    ^/_matrix/federation/v1/query/
+    ^/_matrix/federation/v1/make_join/
+    ^/_matrix/federation/v1/make_leave/
+    ^/_matrix/federation/v1/send_join/
+    ^/_matrix/federation/v1/send_leave/
+    ^/_matrix/federation/v1/invite/
+    ^/_matrix/federation/v1/query_auth/
+    ^/_matrix/federation/v1/event_auth/
+    ^/_matrix/federation/v1/exchange_third_party_invite/
+    ^/_matrix/federation/v1/send/
 
 The above endpoints should all be routed to the federation_reader worker by the
 reverse-proxy configuration.
 
+The ``^/_matrix/federation/v1/send/`` endpoint must only be handled by a single
+instance.
+
 ``synapse.app.federation_sender``
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -228,6 +241,14 @@ regular expressions::
 
     ^/_matrix/client/(api/v1|r0|unstable)/keys/upload
 
+If ``use_presence`` is False in the homeserver config, it can also handle REST
+endpoints matching the following regular expressions::
+
+    ^/_matrix/client/(api/v1|r0|unstable)/presence/[^/]+/status
+
+This "stub" presence handler will pass through ``GET`` requests but make the
+``PUT`` effectively a no-op.
+
 It will proxy any requests it cannot handle to the main synapse instance. It
 must therefore be configured with the location of the main instance, via
 the ``worker_main_http_uri`` setting in the frontend_proxy worker configuration
@@ -244,6 +265,7 @@ Handles some event creation. It can handle REST endpoints matching::
     ^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/send
     ^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$
     ^/_matrix/client/(api/v1|r0|unstable)/join/
+    ^/_matrix/client/(api/v1|r0|unstable)/profile/
 
 It will create events locally and then send them on to the main synapse
 instance to be persisted and handled.
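To illustrate how a reverse proxy is expected to apply the pattern lists above, here is a sketch (not Synapse code; the pattern list is abridged):

    import re

    # Abridged from the federation_reader list above. The patterns are
    # anchored prefixes, so any path underneath them matches.
    FEDERATION_READER_PATTERNS = [
        r"^/_matrix/federation/v1/send/",
        r"^/_matrix/federation/v1/make_join/",
    ]

    def routes_to_federation_reader(path):
        return any(re.match(p, path) for p in FEDERATION_READER_PATTERNS)

    assert routes_to_federation_reader("/_matrix/federation/v1/send/1234")
    assert not routes_to_federation_reader("/_matrix/client/r0/sync")
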
diff --git a/synapse/__init__.py b/synapse/__init__.py
index a14d578e36..e62901b761 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -17,4 +17,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
 
-__version__ = "0.33.2"
+__version__ = "0.33.3"
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 5bbbe8e2e7..8d2aa5870a 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -25,7 +25,7 @@ from twisted.internet import defer
 import synapse.types
 from synapse import event_auth
 from synapse.api.constants import EventTypes, JoinRules, Membership
-from synapse.api.errors import AuthError, Codes
+from synapse.api.errors import AuthError, Codes, ResourceLimitError
 from synapse.types import UserID
 from synapse.util.caches import CACHE_SIZE_FACTOR, register_cache
 from synapse.util.caches.lrucache import LruCache
@@ -211,9 +211,9 @@ class Auth(object):
             user_agent = request.requestHeaders.getRawHeaders(
                 b"User-Agent",
                 default=[b""]
-            )[0]
+            )[0].decode('ascii', 'surrogateescape')
             if user and access_token and ip_addr:
-                self.store.insert_client_ip(
+                yield self.store.insert_client_ip(
                     user_id=user.to_string(),
                     access_token=access_token,
                     ip=ip_addr,
@@ -682,7 +682,7 @@ class Auth(object):
         Returns:
             bool: False if no access_token was given, True otherwise.
         """
-        query_params = request.args.get("access_token")
+        query_params = request.args.get(b"access_token")
         auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
         return bool(query_params) or bool(auth_headers)
 
@@ -698,7 +698,7 @@ class Auth(object):
                 401 since some of the old clients depended on auth errors returning
                 403.
         Returns:
-            str: The access_token
+            unicode: The access_token
         Raises:
             AuthError: If there isn't an access_token in the request.
         """
@@ -720,9 +720,9 @@ class Auth(object):
                     "Too many Authorization headers.",
                     errcode=Codes.MISSING_TOKEN,
                 )
-            parts = auth_headers[0].split(" ")
-            if parts[0] == "Bearer" and len(parts) == 2:
-                return parts[1]
+            parts = auth_headers[0].split(b" ")
+            if parts[0] == b"Bearer" and len(parts) == 2:
+                return parts[1].decode('ascii')
             else:
                 raise AuthError(
                     token_not_found_http_status,
@@ -738,7 +738,7 @@ class Auth(object):
                     errcode=Codes.MISSING_TOKEN
                 )
 
-            return query_params[0]
+            return query_params[0].decode('ascii')
 
     @defer.inlineCallbacks
     def check_in_room_or_world_readable(self, room_id, user_id):
@@ -773,3 +773,46 @@ class Auth(object):
             raise AuthError(
                 403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
             )
+
+    @defer.inlineCallbacks
+    def check_auth_blocking(self, user_id=None):
+        """Checks if the user should be rejected for some external reason,
+        such as monthly active user limiting or global disable flag
+
+        Args:
+            user_id(str|None): If present, checks for presence against existing
+            MAU cohort
+        """
+
+        # Never fail an auth check for the server notices users
+        # This can be a problem where event creation is prohibited due to blocking
+        if user_id == self.hs.config.server_notices_mxid:
+            return
+
+        if self.hs.config.hs_disabled:
+            raise ResourceLimitError(
+                403, self.hs.config.hs_disabled_message,
+                errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
+                admin_uri=self.hs.config.admin_uri,
+                limit_type=self.hs.config.hs_disabled_limit_type
+            )
+        if self.hs.config.limit_usage_by_mau is True:
+            # If the user is already part of the MAU cohort or a trial user
+            if user_id:
+                timestamp = yield self.store.user_last_seen_monthly_active(user_id)
+                if timestamp:
+                    return
+
+                is_trial = yield self.store.is_trial_user(user_id)
+                if is_trial:
+                    return
+            # Else if there is no room in the MAU bucket, bail
+            current_mau = yield self.store.get_monthly_active_count()
+            if current_mau >= self.hs.config.max_mau_value:
+                raise ResourceLimitError(
+                    403, "Monthly Active User Limit Exceeded",
+
+                    admin_uri=self.hs.config.admin_uri,
+                    errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
+                    limit_type="monthly_active_user"
+                )
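For context, a hedged sketch of how a call site is expected to use the new check; the handler class below is hypothetical, for illustration only:

    from twisted.internet import defer

    class SomeHandler(object):
        # Hypothetical caller of Auth.check_auth_blocking.
        def __init__(self, hs):
            self.auth = hs.get_auth()

        @defer.inlineCallbacks
        def do_guarded_thing(self, requester_user_id):
            # Raises ResourceLimitError (M_RESOURCE_LIMIT_EXCEEDED) if the
            # homeserver is disabled, or if the MAU cap is reached and the
            # user is not already counted, not a trial user, and not the
            # server-notices user.
            yield self.auth.check_auth_blocking(requester_user_id)
            # ... proceed with the guarded operation ...
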
diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index 4df930c8d1..c2630c4c64 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 # Copyright 2017 Vector Creations Ltd
+# Copyright 2018 New Vector Ltd.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -77,6 +78,7 @@ class EventTypes(object):
     Name = "m.room.name"
 
     ServerACL = "m.room.server_acl"
+    Pinned = "m.room.pinned_events"
 
 
 class RejectedReason(object):
@@ -94,3 +96,19 @@ class RoomCreationPreset(object):
 class ThirdPartyEntityKind(object):
     USER = "user"
     LOCATION = "location"
+
+
+class RoomVersions(object):
+    V1 = "1"
+    VDH_TEST = "vdh-test-version"
+
+
+# the version we will give rooms which are created on this server
+DEFAULT_ROOM_VERSION = RoomVersions.V1
+
+# vdh-test-version is a placeholder to get room versioning support working and tested
+# until we have a working v2.
+KNOWN_ROOM_VERSIONS = {RoomVersions.V1, RoomVersions.VDH_TEST}
+
+ServerNoticeMsgType = "m.server_notice"
+ServerNoticeLimitReached = "m.server_notice.usage_limit_reached"
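A sketch of how these constants might be consulted when a room version is requested; the helper below is hypothetical and only mirrors what the comments above say about the default and the known set:

    from synapse.api.constants import DEFAULT_ROOM_VERSION, KNOWN_ROOM_VERSIONS

    def resolve_room_version(requested=None):
        # Hypothetical helper: fall back to the server default and reject
        # anything outside the known set.
        version = requested or DEFAULT_ROOM_VERSION
        if version not in KNOWN_ROOM_VERSIONS:
            raise ValueError("unknown room version: %r" % (version,))
        return version
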
diff --git a/synapse/api/errors.py b/synapse/api/errors.py
index d37bcb4082..3002c95dd1 100644
--- a/synapse/api/errors.py
+++ b/synapse/api/errors.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2018 New Vector Ltd.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -55,7 +56,9 @@ class Codes(object):
     SERVER_NOT_TRUSTED = "M_SERVER_NOT_TRUSTED"
     CONSENT_NOT_GIVEN = "M_CONSENT_NOT_GIVEN"
     CANNOT_LEAVE_SERVER_NOTICE_ROOM = "M_CANNOT_LEAVE_SERVER_NOTICE_ROOM"
-    MAU_LIMIT_EXCEEDED = "M_MAU_LIMIT_EXCEEDED"
+    RESOURCE_LIMIT_EXCEEDED = "M_RESOURCE_LIMIT_EXCEEDED"
+    UNSUPPORTED_ROOM_VERSION = "M_UNSUPPORTED_ROOM_VERSION"
+    INCOMPATIBLE_ROOM_VERSION = "M_INCOMPATIBLE_ROOM_VERSION"
     WRONG_ROOM_KEYS_VERSION = "M_WRONG_ROOM_KEYS_VERSION"
 
 
@@ -229,6 +232,30 @@ class AuthError(SynapseError):
         super(AuthError, self).__init__(*args, **kwargs)
 
 
+class ResourceLimitError(SynapseError):
+    """
+    Any error raised when there is a problem with resource usage.
+    For instance, the monthly active user limit for the server has been exceeded
+    """
+    def __init__(
+        self, code, msg,
+        errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
+        admin_uri=None,
+        limit_type=None,
+    ):
+        self.admin_uri = admin_uri
+        self.limit_type = limit_type
+        super(ResourceLimitError, self).__init__(code, msg, errcode=errcode)
+
+    def error_dict(self):
+        return cs_error(
+            self.msg,
+            self.errcode,
+            admin_uri=self.admin_uri,
+            limit_type=self.limit_type
+        )
+
+
 class EventSizeError(SynapseError):
     """An error raised when an event is too big."""
 
@@ -299,11 +326,24 @@ class RoomKeysVersionError(SynapseError):
         )
         self.current_version = current_version
 
+class IncompatibleRoomVersionError(SynapseError):
+    """A server is trying to join a room whose version it does not support."""
+
+    def __init__(self, room_version):
+        super(IncompatibleRoomVersionError, self).__init__(
+            code=400,
+            msg="Your homeserver does not support the features required to "
+                "join this room",
+            errcode=Codes.INCOMPATIBLE_ROOM_VERSION,
+        )
+
+        self._room_version = room_version
+
     def error_dict(self):
         return cs_error(
             self.msg,
             self.errcode,
-            current_version=self.current_version,
+            room_version=self._room_version,
         )
 
 
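Assuming cs_error(msg, code, **kwargs) folds its keyword arguments into the response body, as it does for the other error classes in this module, a client blocked by ResourceLimitError would receive a JSON error shaped roughly like this (the admin_uri value is illustrative):

    {
        "errcode": "M_RESOURCE_LIMIT_EXCEEDED",
        "error": "Monthly Active User Limit Exceeded",
        "admin_uri": "mailto:admin@example.com",
        "limit_type": "monthly_active_user"
    }
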
diff --git a/synapse/api/ratelimiting.py b/synapse/api/ratelimiting.py
index 06cc8d90b8..3bb5b3da37 100644
--- a/synapse/api/ratelimiting.py
+++ b/synapse/api/ratelimiting.py
@@ -72,7 +72,7 @@ class Ratelimiter(object):
         return allowed, time_allowed
 
     def prune_message_counts(self, time_now_s):
-        for user_id in self.message_counts.keys():
+        for user_id in list(self.message_counts.keys()):
             message_count, time_start, msg_rate_hz = (
                 self.message_counts[user_id]
             )
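The list(...) wrapper above matters on Python 3, where dict.keys() returns a live view of the dictionary and deleting entries during iteration raises RuntimeError; a minimal demonstration:

    # Values mirror the (message_count, time_start, msg_rate_hz) tuples
    # stored by the Ratelimiter above.
    counts = {"@alice:hs": (10, 0, 0.17), "@bob:hs": (2, 0, 0.17)}

    # Snapshot the keys first so entries can be pruned while looping.
    for user_id in list(counts.keys()):
        del counts[user_id]

    assert counts == {}
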
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 391bd14c5c..7c866e246a 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -140,7 +140,7 @@ def listen_metrics(bind_addresses, port):
         logger.info("Metrics now reporting on %s:%d", host, port)
 
 
-def listen_tcp(bind_addresses, port, factory, backlog=50):
+def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50):
     """
     Create a TCP socket for a port and several addresses
     """
@@ -156,7 +156,9 @@ def listen_tcp(bind_addresses, port, factory, backlog=50):
             check_bind_error(e, address, bind_addresses)
 
 
-def listen_ssl(bind_addresses, port, factory, context_factory, backlog=50):
+def listen_ssl(
+    bind_addresses, port, factory, context_factory, reactor=reactor, backlog=50
+):
     """
     Create an SSL socket for a port and several addresses
     """
diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py
index 9a37384fb7..3348a8ec6d 100644
--- a/synapse/app/appservice.py
+++ b/synapse/app/appservice.py
@@ -117,8 +117,9 @@ class ASReplicationHandler(ReplicationClientHandler):
         super(ASReplicationHandler, self).__init__(hs.get_datastore())
         self.appservice_handler = hs.get_application_service_handler()
 
+    @defer.inlineCallbacks
     def on_rdata(self, stream_name, token, rows):
-        super(ASReplicationHandler, self).on_rdata(stream_name, token, rows)
+        yield super(ASReplicationHandler, self).on_rdata(stream_name, token, rows)
 
         if stream_name == "events":
             max_stream_id = self.store.get_room_max_stream_ordering()
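The @defer.inlineCallbacks change above reflects that on_rdata may now return a Deferred, which subclasses must yield rather than fire and forget; a sketch of the resulting pattern (the subclass name is hypothetical):

    from twisted.internet import defer

    from synapse.replication.tcp.client import ReplicationClientHandler

    class MyReplicationHandler(ReplicationClientHandler):
        @defer.inlineCallbacks
        def on_rdata(self, stream_name, token, rows):
            # Yielding propagates errors and ordering from the base class
            # instead of silently dropping the Deferred.
            yield super(MyReplicationHandler, self).on_rdata(
                stream_name, token, rows
            )
            # ... per-worker processing of rows ...
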
diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py
index e2c91123db..ab79a45646 100644
--- a/synapse/app/client_reader.py
+++ b/synapse/app/client_reader.py
@@ -39,7 +39,7 @@ from synapse.replication.slave.storage.events import SlavedEventStore
 from synapse.replication.slave.storage.keys import SlavedKeyStore
 from synapse.replication.slave.storage.registration import SlavedRegistrationStore
 from synapse.replication.slave.storage.room import RoomStore
-from synapse.replication.slave.storage.transactions import TransactionStore
+from synapse.replication.slave.storage.transactions import SlavedTransactionStore
 from synapse.replication.tcp.client import ReplicationClientHandler
 from synapse.rest.client.v1.room import (
     JoinedRoomMemberListRestServlet,
@@ -66,7 +66,7 @@ class ClientReaderSlavedStore(
     DirectoryStore,
     SlavedApplicationServiceStore,
     SlavedRegistrationStore,
-    TransactionStore,
+    SlavedTransactionStore,
     SlavedClientIpStore,
     BaseSlavedStore,
 ):
@@ -168,11 +168,13 @@ def start(config_options):
     database_engine = create_engine(config.database_config)
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     ss = ClientReaderServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py
index 374f115644..a34c89fa99 100644
--- a/synapse/app/event_creator.py
+++ b/synapse/app/event_creator.py
@@ -43,8 +43,13 @@ from synapse.replication.slave.storage.pushers import SlavedPusherStore
 from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
 from synapse.replication.slave.storage.registration import SlavedRegistrationStore
 from synapse.replication.slave.storage.room import RoomStore
-from synapse.replication.slave.storage.transactions import TransactionStore
+from synapse.replication.slave.storage.transactions import SlavedTransactionStore
 from synapse.replication.tcp.client import ReplicationClientHandler
+from synapse.rest.client.v1.profile import (
+    ProfileAvatarURLRestServlet,
+    ProfileDisplaynameRestServlet,
+    ProfileRestServlet,
+)
 from synapse.rest.client.v1.room import (
     JoinRoomAliasServlet,
     RoomMembershipRestServlet,
@@ -53,6 +58,7 @@ from synapse.rest.client.v1.room import (
 )
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
+from synapse.storage.user_directory import UserDirectoryStore
 from synapse.util.httpresourcetree import create_resource_tree
 from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
@@ -62,8 +68,11 @@ logger = logging.getLogger("synapse.app.event_creator")
 
 
 class EventCreatorSlavedStore(
+    # FIXME(#3714): We need to add UserDirectoryStore as we write directly
+    # rather than going via the correct worker.
+    UserDirectoryStore,
     DirectoryStore,
-    TransactionStore,
+    SlavedTransactionStore,
     SlavedProfileStore,
     SlavedAccountDataStore,
     SlavedPusherStore,
@@ -101,6 +110,9 @@ class EventCreatorServer(HomeServer):
                     RoomMembershipRestServlet(self).register(resource)
                     RoomStateEventRestServlet(self).register(resource)
                     JoinRoomAliasServlet(self).register(resource)
+                    ProfileAvatarURLRestServlet(self).register(resource)
+                    ProfileDisplaynameRestServlet(self).register(resource)
+                    ProfileRestServlet(self).register(resource)
                     resources.update({
                         "/_matrix/client/r0": resource,
                         "/_matrix/client/unstable": resource,
@@ -174,11 +186,13 @@ def start(config_options):
     database_engine = create_engine(config.database_config)
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     ss = EventCreatorServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py
index 7af00b8bcf..7d8105778d 100644
--- a/synapse/app/federation_reader.py
+++ b/synapse/app/federation_reader.py
@@ -32,11 +32,17 @@ from synapse.http.site import SynapseSite
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
+from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
+from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
 from synapse.replication.slave.storage.directory import DirectoryStore
 from synapse.replication.slave.storage.events import SlavedEventStore
 from synapse.replication.slave.storage.keys import SlavedKeyStore
+from synapse.replication.slave.storage.profile import SlavedProfileStore
+from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore
+from synapse.replication.slave.storage.pushers import SlavedPusherStore
+from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
 from synapse.replication.slave.storage.room import RoomStore
-from synapse.replication.slave.storage.transactions import TransactionStore
+from synapse.replication.slave.storage.transactions import SlavedTransactionStore
 from synapse.replication.tcp.client import ReplicationClientHandler
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
@@ -49,11 +55,17 @@ logger = logging.getLogger("synapse.app.federation_reader")
 
 
 class FederationReaderSlavedStore(
+    SlavedAccountDataStore,
+    SlavedProfileStore,
+    SlavedApplicationServiceStore,
+    SlavedPusherStore,
+    SlavedPushRuleStore,
+    SlavedReceiptsStore,
     SlavedEventStore,
     SlavedKeyStore,
     RoomStore,
     DirectoryStore,
-    TransactionStore,
+    SlavedTransactionStore,
     BaseSlavedStore,
 ):
     pass
@@ -143,11 +155,13 @@ def start(config_options):
     database_engine = create_engine(config.database_config)
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     ss = FederationReaderServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py
index 18469013fa..d59007099b 100644
--- a/synapse/app/federation_sender.py
+++ b/synapse/app/federation_sender.py
@@ -36,11 +36,11 @@ from synapse.replication.slave.storage.events import SlavedEventStore
 from synapse.replication.slave.storage.presence import SlavedPresenceStore
 from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
 from synapse.replication.slave.storage.registration import SlavedRegistrationStore
-from synapse.replication.slave.storage.transactions import TransactionStore
+from synapse.replication.slave.storage.transactions import SlavedTransactionStore
 from synapse.replication.tcp.client import ReplicationClientHandler
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.httpresourcetree import create_resource_tree
 from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
@@ -50,7 +50,7 @@ logger = logging.getLogger("synapse.app.federation_sender")
 
 
 class FederationSenderSlaveStore(
-    SlavedDeviceInboxStore, TransactionStore, SlavedReceiptsStore, SlavedEventStore,
+    SlavedDeviceInboxStore, SlavedTransactionStore, SlavedReceiptsStore, SlavedEventStore,
     SlavedRegistrationStore, SlavedDeviceStore, SlavedPresenceStore,
 ):
     def __init__(self, db_conn, hs):
@@ -144,8 +144,9 @@ class FederationSenderReplicationHandler(ReplicationClientHandler):
         super(FederationSenderReplicationHandler, self).__init__(hs.get_datastore())
         self.send_handler = FederationSenderHandler(hs, self)
 
+    @defer.inlineCallbacks
     def on_rdata(self, stream_name, token, rows):
-        super(FederationSenderReplicationHandler, self).on_rdata(
+        yield super(FederationSenderReplicationHandler, self).on_rdata(
             stream_name, token, rows
         )
         self.send_handler.process_replication_rows(stream_name, token, rows)
@@ -186,11 +187,13 @@ def start(config_options):
     config.send_federation = True
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     ps = FederationSenderServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py
index b5f78f4640..8d484c1cd4 100644
--- a/synapse/app/frontend_proxy.py
+++ b/synapse/app/frontend_proxy.py
@@ -38,6 +38,7 @@ from synapse.replication.slave.storage.client_ips import SlavedClientIpStore
 from synapse.replication.slave.storage.devices import SlavedDeviceStore
 from synapse.replication.slave.storage.registration import SlavedRegistrationStore
 from synapse.replication.tcp.client import ReplicationClientHandler
+from synapse.rest.client.v1.base import ClientV1RestServlet, client_path_patterns
 from synapse.rest.client.v2_alpha._base import client_v2_patterns
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
@@ -49,6 +50,35 @@ from synapse.util.versionstring import get_version_string
 logger = logging.getLogger("synapse.app.frontend_proxy")
 
 
+class PresenceStatusStubServlet(ClientV1RestServlet):
+    PATTERNS = client_path_patterns("/presence/(?P<user_id>[^/]*)/status")
+
+    def __init__(self, hs):
+        super(PresenceStatusStubServlet, self).__init__(hs)
+        self.http_client = hs.get_simple_http_client()
+        self.auth = hs.get_auth()
+        self.main_uri = hs.config.worker_main_http_uri
+
+    @defer.inlineCallbacks
+    def on_GET(self, request, user_id):
+        # Pass through the auth headers, if any, in case the access token
+        # is there.
+        auth_headers = request.requestHeaders.getRawHeaders("Authorization", [])
+        headers = {
+            "Authorization": auth_headers,
+        }
+        result = yield self.http_client.get_json(
+            self.main_uri + request.uri,
+            headers=headers,
+        )
+        defer.returnValue((200, result))
+
+    @defer.inlineCallbacks
+    def on_PUT(self, request, user_id):
+        yield self.auth.get_user_by_req(request)
+        defer.returnValue((200, {}))
+
+
 class KeyUploadServlet(RestServlet):
     PATTERNS = client_v2_patterns("/keys/upload(/(?P<device_id>[^/]+))?$")
 
@@ -135,6 +165,12 @@ class FrontendProxyServer(HomeServer):
                 elif name == "client":
                     resource = JsonResource(self, canonical_json=False)
                     KeyUploadServlet(self).register(resource)
+
+                    # If presence is disabled, use the stub servlet that does
+                    # not allow sending presence
+                    if not self.config.use_presence:
+                        PresenceStatusStubServlet(self).register(resource)
+
                     resources.update({
                         "/_matrix/client/r0": resource,
                         "/_matrix/client/unstable": resource,
@@ -153,7 +189,8 @@ class FrontendProxyServer(HomeServer):
                 listener_config,
                 root_resource,
                 self.version_string,
-            )
+            ),
+            reactor=self.get_reactor()
         )
 
         logger.info("Synapse client reader now listening on port %d", port)
@@ -208,11 +245,13 @@ def start(config_options):
     database_engine = create_engine(config.database_config)
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     ss = FrontendProxyServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index fba51c26e8..005921dcf7 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -303,8 +303,8 @@ class SynapseHomeServer(HomeServer):
 
 
 # Gauges to expose monthly active user control metrics
-current_mau_gauge = Gauge("synapse_admin_current_mau", "Current MAU")
-max_mau_value_gauge = Gauge("synapse_admin_max_mau_value", "MAU Limit")
+current_mau_gauge = Gauge("synapse_admin_mau:current", "Current MAU")
+max_mau_gauge = Gauge("synapse_admin_mau:max", "MAU Limit")
 
 
 def setup(config_options):
@@ -338,6 +338,7 @@ def setup(config_options):
     events.USE_FROZEN_DICTS = config.use_frozen_dicts
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     database_engine = create_engine(config.database_config)
     config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection
@@ -346,6 +347,7 @@ def setup(config_options):
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
@@ -519,17 +521,27 @@ def run(hs):
     # table will decrease
     clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000)
 
+    # monthly active user limiting functionality
+    clock.looping_call(
+        hs.get_datastore().reap_monthly_active_users, 1000 * 60 * 60
+    )
+    hs.get_datastore().reap_monthly_active_users()
+
     @defer.inlineCallbacks
     def generate_monthly_active_users():
         count = 0
         if hs.config.limit_usage_by_mau:
-            count = yield hs.get_datastore().count_monthly_users()
+            count = yield hs.get_datastore().get_monthly_active_count()
         current_mau_gauge.set(float(count))
-        max_mau_value_gauge.set(float(hs.config.max_mau_value))
+        max_mau_gauge.set(float(hs.config.max_mau_value))
 
+    hs.get_datastore().initialise_reserved_users(
+        hs.config.mau_limits_reserved_threepids
+    )
     generate_monthly_active_users()
     if hs.config.limit_usage_by_mau:
         clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000)
+    # End of monthly active user settings
 
     if hs.config.report_stats:
         logger.info("Scheduling stats reporting for 3 hour intervals")
diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py
index 749bbf37d0..fd1f6cbf7e 100644
--- a/synapse/app/media_repository.py
+++ b/synapse/app/media_repository.py
@@ -34,7 +34,7 @@ from synapse.replication.slave.storage._base import BaseSlavedStore
 from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
 from synapse.replication.slave.storage.client_ips import SlavedClientIpStore
 from synapse.replication.slave.storage.registration import SlavedRegistrationStore
-from synapse.replication.slave.storage.transactions import TransactionStore
+from synapse.replication.slave.storage.transactions import SlavedTransactionStore
 from synapse.replication.tcp.client import ReplicationClientHandler
 from synapse.rest.media.v0.content_repository import ContentRepoResource
 from synapse.server import HomeServer
@@ -52,7 +52,7 @@ class MediaRepositorySlavedStore(
     SlavedApplicationServiceStore,
     SlavedRegistrationStore,
     SlavedClientIpStore,
-    TransactionStore,
+    SlavedTransactionStore,
     BaseSlavedStore,
     MediaRepositoryStore,
 ):
@@ -155,11 +155,13 @@ def start(config_options):
     database_engine = create_engine(config.database_config)
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     ss = MediaRepositoryServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py
index 9295a51d5b..a4fc7e91fa 100644
--- a/synapse/app/pusher.py
+++ b/synapse/app/pusher.py
@@ -148,8 +148,9 @@ class PusherReplicationHandler(ReplicationClientHandler):
 
         self.pusher_pool = hs.get_pusherpool()
 
+    @defer.inlineCallbacks
     def on_rdata(self, stream_name, token, rows):
-        super(PusherReplicationHandler, self).on_rdata(stream_name, token, rows)
+        yield super(PusherReplicationHandler, self).on_rdata(stream_name, token, rows)
         run_in_background(self.poke_pushers, stream_name, token, rows)
 
     @defer.inlineCallbacks
@@ -162,11 +163,11 @@ class PusherReplicationHandler(ReplicationClientHandler):
                     else:
                         yield self.start_pusher(row.user_id, row.app_id, row.pushkey)
             elif stream_name == "events":
-                yield self.pusher_pool.on_new_notifications(
+                self.pusher_pool.on_new_notifications(
                     token, token,
                 )
             elif stream_name == "receipts":
-                yield self.pusher_pool.on_new_receipts(
+                self.pusher_pool.on_new_receipts(
                     token, token, set(row.room_id for row in rows)
                 )
         except Exception:
diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py
index e201f18efd..27e1998660 100644
--- a/synapse/app/synchrotron.py
+++ b/synapse/app/synchrotron.py
@@ -114,7 +114,10 @@ class SynchrotronPresence(object):
         logger.info("Presence process_id is %r", self.process_id)
 
     def send_user_sync(self, user_id, is_syncing, last_sync_ms):
-        self.hs.get_tcp_replication().send_user_sync(user_id, is_syncing, last_sync_ms)
+        if self.hs.config.use_presence:
+            self.hs.get_tcp_replication().send_user_sync(
+                user_id, is_syncing, last_sync_ms
+            )
 
     def mark_as_coming_online(self, user_id):
         """A user has started syncing. Send a UserSync to the master, unless they
@@ -211,10 +214,13 @@ class SynchrotronPresence(object):
         yield self.notify_from_replication(states, stream_id)
 
     def get_currently_syncing_users(self):
-        return [
-            user_id for user_id, count in iteritems(self.user_to_num_current_syncs)
-            if count > 0
-        ]
+        if self.hs.config.use_presence:
+            return [
+                user_id for user_id, count in iteritems(self.user_to_num_current_syncs)
+                if count > 0
+            ]
+        else:
+            return set()
 
 
 class SynchrotronTyping(object):
@@ -332,8 +338,9 @@ class SyncReplicationHandler(ReplicationClientHandler):
         self.presence_handler = hs.get_presence_handler()
         self.notifier = hs.get_notifier()
 
+    @defer.inlineCallbacks
     def on_rdata(self, stream_name, token, rows):
-        super(SyncReplicationHandler, self).on_rdata(stream_name, token, rows)
+        yield super(SyncReplicationHandler, self).on_rdata(stream_name, token, rows)
         run_in_background(self.process_and_notify, stream_name, token, rows)
 
     def get_streams_to_replicate(self):
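
The on_rdata changes in the pusher, synchrotron and user-directory workers all follow the same rule: once the base class's on_rdata returns a Deferred, an override must become a generator and yield it, or it may start its own work before the replication token has been recorded. A sketch of the pattern, assuming only Twisted:

    from twisted.internet import defer

    class BaseReplicationHandler(object):
        def on_rdata(self, stream_name, token, rows):
            # Stand-in for async bookkeeping (e.g. persisting the token).
            return defer.succeed(None)

    class MyWorkerHandler(BaseReplicationHandler):
        @defer.inlineCallbacks
        def on_rdata(self, stream_name, token, rows):
            # Wait for the base class before acting on the rows.
            yield super(MyWorkerHandler, self).on_rdata(
                stream_name, token, rows,
            )
            # ...kick off worker-specific processing here...
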
diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py
index 637a89530a..1388a42b59 100644
--- a/synapse/app/user_dir.py
+++ b/synapse/app/user_dir.py
@@ -169,8 +169,9 @@ class UserDirectoryReplicationHandler(ReplicationClientHandler):
         super(UserDirectoryReplicationHandler, self).__init__(hs.get_datastore())
         self.user_directory = hs.get_user_directory_handler()
 
+    @defer.inlineCallbacks
     def on_rdata(self, stream_name, token, rows):
-        super(UserDirectoryReplicationHandler, self).on_rdata(
+        yield super(UserDirectoryReplicationHandler, self).on_rdata(
             stream_name, token, rows
         )
         if stream_name == "current_state_deltas":
@@ -214,11 +215,13 @@ def start(config_options):
     config.update_user_directory = True
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     ps = UserDirectoryServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index a87b11a1df..3f187adfc8 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -168,7 +168,8 @@ def setup_logging(config, use_worker_options=False):
         if log_file:
             # TODO: Customisable file size / backup count
             handler = logging.handlers.RotatingFileHandler(
-                log_file, maxBytes=(1000 * 1000 * 100), backupCount=3
+                log_file, maxBytes=(1000 * 1000 * 100), backupCount=3,
+                encoding='utf8'
             )
 
             def sighup(signum, stack):
@@ -193,9 +194,8 @@ def setup_logging(config, use_worker_options=False):
 
         def sighup(signum, stack):
             # it might be better to use a file watcher or something for this.
-            logging.info("Reloading log config from %s due to SIGHUP",
-                         log_config)
             load_log_config()
+            logging.info("Reloaded log config from %s due to SIGHUP", log_config)
 
         load_log_config()
 
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 6a471a0a5e..8eecd28e7d 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -49,6 +49,9 @@ class ServerConfig(Config):
         # "disable" federation
         self.send_federation = config.get("send_federation", True)
 
+        # Whether to enable user presence.
+        self.use_presence = config.get("use_presence", True)
+
         # Whether to update the user directory or not. This should be set to
         # false only if we are updating the user directory in a worker
         self.update_user_directory = config.get("update_user_directory", True)
@@ -69,12 +72,29 @@ class ServerConfig(Config):
 
         # Options to control access by tracking MAU
         self.limit_usage_by_mau = config.get("limit_usage_by_mau", False)
+        self.max_mau_value = 0
         if self.limit_usage_by_mau:
             self.max_mau_value = config.get(
                 "max_mau_value", 0,
             )
-        else:
-            self.max_mau_value = 0
+
+        self.mau_limits_reserved_threepids = config.get(
+            "mau_limit_reserved_threepids", []
+        )
+
+        self.mau_trial_days = config.get(
+            "mau_trial_days", 0,
+        )
+
+        # Options to disable HS
+        self.hs_disabled = config.get("hs_disabled", False)
+        self.hs_disabled_message = config.get("hs_disabled_message", "")
+        self.hs_disabled_limit_type = config.get("hs_disabled_limit_type", "")
+
+        # Admin URI to direct users to, should their instance become blocked
+        # due to resource constraints
+        self.admin_uri = config.get("admin_uri", None)
+
         # FIXME: federation_domain_whitelist needs sytests
         self.federation_domain_whitelist = None
         federation_domain_whitelist = config.get(
@@ -238,6 +258,9 @@ class ServerConfig(Config):
         # hard limit.
         soft_file_limit: 0
 
+        # Set to false to disable presence tracking on this homeserver.
+        use_presence: true
+
         # The GC threshold parameters to pass to `gc.set_threshold`, if defined
         # gc_thresholds: [700, 10, 10]
 
@@ -329,6 +352,33 @@ class ServerConfig(Config):
           # - port: 9000
           #   bind_addresses: ['::1', '127.0.0.1']
           #   type: manhole
+
+
+          # Homeserver blocking
+          #
+          # How to reach the server admin, used in ResourceLimitError
+          # admin_uri: 'mailto:admin@server.com'
+          #
+          # Global block config
+          #
+          # hs_disabled: False
+          # hs_disabled_message: 'Human readable reason for why the HS is blocked'
+          # hs_disabled_limit_type: 'error code (str), to help clients decode reason'
+          #
+          # Monthly Active User Blocking
+          #
+          # Enables monthly active user checking
+          # limit_usage_by_mau: False
+          # max_mau_value: 50
+          # mau_trial_days: 2
+          #
+          # Sometimes the server admin will want to ensure certain accounts are
+          # never blocked by mau checking. These accounts are specified here.
+          #
+          # mau_limit_reserved_threepids:
+          # - medium: 'email'
+          #   address: 'reserved_user@example.com'
+
         """ % locals()
 
     def read_arguments(self, args):
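
Taken together, the new options map onto config attributes as sketched below. This is only an illustration of how a parsed YAML dict would be read; the key names come from the sample config above, the helper itself is hypothetical.

    def read_blocking_options(config):
        limit_usage_by_mau = config.get("limit_usage_by_mau", False)
        return {
            "use_presence": config.get("use_presence", True),
            "limit_usage_by_mau": limit_usage_by_mau,
            # max_mau_value is only meaningful when limiting is enabled.
            "max_mau_value": (
                config.get("max_mau_value", 0) if limit_usage_by_mau else 0
            ),
            "mau_trial_days": config.get("mau_trial_days", 0),
            "mau_limits_reserved_threepids": config.get(
                "mau_limit_reserved_threepids", []
            ),
            "hs_disabled": config.get("hs_disabled", False),
            "admin_uri": config.get("admin_uri", None),
        }
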
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py
index a1e1d0d33a..1a391adec1 100644
--- a/synapse/crypto/context_factory.py
+++ b/synapse/crypto/context_factory.py
@@ -11,19 +11,22 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import logging
 
+from zope.interface import implementer
+
 from OpenSSL import SSL, crypto
-from twisted.internet import ssl
 from twisted.internet._sslverify import _defaultCurveName
+from twisted.internet.interfaces import IOpenSSLClientConnectionCreator
+from twisted.internet.ssl import CertificateOptions, ContextFactory
+from twisted.python.failure import Failure
 
 logger = logging.getLogger(__name__)
 
 
-class ServerContextFactory(ssl.ContextFactory):
+class ServerContextFactory(ContextFactory):
     """Factory for PyOpenSSL SSL contexts that are used to handle incoming
-    connections and to make connections to remote servers."""
+    connections."""
 
     def __init__(self, config):
         self._context = SSL.Context(SSL.SSLv23_METHOD)
@@ -48,3 +51,78 @@ class ServerContextFactory(ssl.ContextFactory):
 
     def getContext(self):
         return self._context
+
+
+def _idnaBytes(text):
+    """
+    Convert some text typed by a human into some ASCII bytes. This is a
+    copy of twisted.internet._idna._idnaBytes. For documentation, see the
+    twisted documentation.
+    """
+    try:
+        import idna
+    except ImportError:
+        return text.encode("idna")
+    else:
+        return idna.encode(text)
+
+
+def _tolerateErrors(wrapped):
+    """
+    Wrap up an info_callback for pyOpenSSL so that if something goes wrong
+    the error is immediately logged and the connection is dropped if possible.
+    This is a copy of twisted.internet._sslverify._tolerateErrors. For
+    documentation, see the twisted documentation.
+    """
+
+    def infoCallback(connection, where, ret):
+        try:
+            return wrapped(connection, where, ret)
+        except:  # noqa: E722, taken from the twisted implementation
+            f = Failure()
+            logger.exception("Error during info_callback")
+            connection.get_app_data().failVerification(f)
+
+    return infoCallback
+
+
+@implementer(IOpenSSLClientConnectionCreator)
+class ClientTLSOptions(object):
+    """
+    Client creator for TLS without certificate identity verification. This is a
+    copy of twisted.internet._sslverify.ClientTLSOptions with the identity
+    verification left out. For documentation, see the twisted documentation.
+    """
+
+    def __init__(self, hostname, ctx):
+        self._ctx = ctx
+        self._hostname = hostname
+        self._hostnameBytes = _idnaBytes(hostname)
+        ctx.set_info_callback(
+            _tolerateErrors(self._identityVerifyingInfoCallback)
+        )
+
+    def clientConnectionForTLS(self, tlsProtocol):
+        context = self._ctx
+        connection = SSL.Connection(context, None)
+        connection.set_app_data(tlsProtocol)
+        return connection
+
+    def _identityVerifyingInfoCallback(self, connection, where, ret):
+        if where & SSL.SSL_CB_HANDSHAKE_START:
+            connection.set_tlsext_host_name(self._hostnameBytes)
+
+
+class ClientTLSOptionsFactory(object):
+    """Factory for Twisted ClientTLSOptions that are used to make connections
+    to remote servers for federation."""
+
+    def __init__(self, config):
+        # We don't use config options yet
+        pass
+
+    def get_options(self, host):
+        return ClientTLSOptions(
+            host.decode('utf-8'),
+            CertificateOptions(verify=False).getContext()
+        )
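
To see how the factory is meant to be consumed: get_options takes the hostname as bytes (as it arrives off the wire, hence the decode) and returns an IOpenSSLClientConnectionCreator, which Twisted endpoints accept as a connection creator. A hedged sketch; wrapClientTLS is the standard Twisted entry point, the endpoint details here are illustrative.

    from twisted.internet import endpoints, reactor

    factory = ClientTLSOptionsFactory(config=None)  # config is ignored for now
    creator = factory.get_options(b"matrix.example.com")

    # `creator` sends SNI but skips certificate identity checks, matching the
    # federation model where signatures, not TLS, establish identity.
    tcp = endpoints.HostnameEndpoint(reactor, u"matrix.example.com", 8448)
    tls = endpoints.wrapClientTLS(creator, tcp)
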
diff --git a/synapse/crypto/keyclient.py b/synapse/crypto/keyclient.py
index 668b4f517d..e94400b8e2 100644
--- a/synapse/crypto/keyclient.py
+++ b/synapse/crypto/keyclient.py
@@ -18,7 +18,9 @@ import logging
 from canonicaljson import json
 
 from twisted.internet import defer, reactor
+from twisted.internet.error import ConnectError
 from twisted.internet.protocol import Factory
+from twisted.names.error import DomainError
 from twisted.web.http import HTTPClient
 
 from synapse.http.endpoint import matrix_federation_endpoint
@@ -30,14 +32,14 @@ KEY_API_V1 = b"/_matrix/key/v1/"
 
 
 @defer.inlineCallbacks
-def fetch_server_key(server_name, ssl_context_factory, path=KEY_API_V1):
+def fetch_server_key(server_name, tls_client_options_factory, path=KEY_API_V1):
     """Fetch the keys for a remote server."""
 
     factory = SynapseKeyClientFactory()
     factory.path = path
     factory.host = server_name
     endpoint = matrix_federation_endpoint(
-        reactor, server_name, ssl_context_factory, timeout=30
+        reactor, server_name, tls_client_options_factory, timeout=30
     )
 
     for i in range(5):
@@ -47,12 +49,14 @@ def fetch_server_key(server_name, ssl_context_factory, path=KEY_API_V1):
                 server_response, server_certificate = yield protocol.remote_key
                 defer.returnValue((server_response, server_certificate))
         except SynapseKeyClientError as e:
-            logger.exception("Error getting key for %r" % (server_name,))
+            logger.warn("Error getting key for %r: %s", server_name, e)
             if e.status.startswith("4"):
                 # Don't retry for 4xx responses.
                 raise IOError("Cannot get key for %r" % server_name)
+        except (ConnectError, DomainError) as e:
+            logger.warn("Error getting key for %r: %s", server_name, e)
         except Exception as e:
-            logger.exception(e)
+            logger.exception("Error getting key for %r", server_name)
     raise IOError("Cannot get key for %r" % server_name)
 
 
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index e95b9fb43e..30e2742102 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -512,7 +512,7 @@ class Keyring(object):
                 continue
 
             (response, tls_certificate) = yield fetch_server_key(
-                server_name, self.hs.tls_server_context_factory,
+                server_name, self.hs.tls_client_options_factory,
                 path=(b"/_matrix/key/v2/server/%s" % (
                     urllib.quote(requested_key_id),
                 )).encode("ascii"),
@@ -655,7 +655,7 @@ class Keyring(object):
         # Try to fetch the key from the remote server.
 
         (response, tls_certificate) = yield fetch_server_key(
-            server_name, self.hs.tls_server_context_factory
+            server_name, self.hs.tls_client_options_factory
         )
 
         # Check the response.
diff --git a/synapse/event_auth.py b/synapse/event_auth.py
index b32f64e729..6baeccca38 100644
--- a/synapse/event_auth.py
+++ b/synapse/event_auth.py
@@ -20,7 +20,7 @@ from signedjson.key import decode_verify_key_bytes
 from signedjson.sign import SignatureVerifyException, verify_signed_json
 from unpaddedbase64 import decode_base64
 
-from synapse.api.constants import EventTypes, JoinRules, Membership
+from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventTypes, JoinRules, Membership
 from synapse.api.errors import AuthError, EventSizeError, SynapseError
 from synapse.types import UserID, get_domain_from_id
 
@@ -83,6 +83,14 @@ def check(event, auth_events, do_sig_check=True, do_size_check=True):
                 403,
                 "Creation event's room_id domain does not match sender's"
             )
+
+        room_version = event.content.get("room_version", "1")
+        if room_version not in KNOWN_ROOM_VERSIONS:
+            raise AuthError(
+                403,
+                "room appears to have unsupported version %s" % (
+                    room_version,
+                ))
         # FIXME
         logger.debug("Allowing! %s", event)
         return
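
The new check rejects creation events that declare a room version this server does not implement, defaulting to version "1" for older events that predate the field. In isolation the rule is just the following; the constant is a placeholder for synapse.api.constants.KNOWN_ROOM_VERSIONS.

    KNOWN_ROOM_VERSIONS = {"1"}  # placeholder; the real set lives in api.constants

    def check_room_version(create_event_content):
        room_version = create_event_content.get("room_version", "1")
        if room_version not in KNOWN_ROOM_VERSIONS:
            raise ValueError(
                "room appears to have unsupported version %s" % (room_version,)
            )
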
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index 7550e11b6e..c9f3c2d352 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -25,7 +25,7 @@ from prometheus_client import Counter
 
 from twisted.internet import defer
 
-from synapse.api.constants import Membership
+from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventTypes, Membership
 from synapse.api.errors import (
     CodeMessageException,
     FederationDeniedError,
@@ -518,10 +518,10 @@ class FederationClient(FederationBase):
                     description, destination, exc_info=1,
                 )
 
-        raise RuntimeError("Failed to %s via any server", description)
+        raise RuntimeError("Failed to %s via any server" % (description, ))
 
     def make_membership_event(self, destinations, room_id, user_id, membership,
-                              content={},):
+                              content, params):
         """
         Creates an m.room.member event, with context, without participating in the room.
 
@@ -537,8 +537,10 @@ class FederationClient(FederationBase):
             user_id (str): The user whose membership is being evented.
             membership (str): The "membership" property of the event. Must be
                 one of "join" or "leave".
-            content (object): Any additional data to put into the content field
+            content (dict): Any additional data to put into the content field
                 of the event.
+            params (dict[str, str|Iterable[str]]): Query parameters to include in the
+                request.
         Return:
             Deferred: resolves to a tuple of (origin (str), event (object))
             where origin is the remote homeserver which generated the event.
@@ -558,10 +560,12 @@ class FederationClient(FederationBase):
         @defer.inlineCallbacks
         def send_request(destination):
             ret = yield self.transport_layer.make_membership_event(
-                destination, room_id, user_id, membership
+                destination, room_id, user_id, membership, params,
             )
 
-            pdu_dict = ret["event"]
+            pdu_dict = ret.get("event", None)
+            if not isinstance(pdu_dict, dict):
+                raise InvalidResponseError("Bad 'event' field in response")
 
             logger.debug("Got response to make_%s: %s", membership, pdu_dict)
 
@@ -605,6 +609,26 @@ class FederationClient(FederationBase):
             Fails with a ``RuntimeError`` if no servers were reachable.
         """
 
+        def check_authchain_validity(signed_auth_chain):
+            for e in signed_auth_chain:
+                if e.type == EventTypes.Create:
+                    create_event = e
+                    break
+            else:
+                raise InvalidResponseError(
+                    "no %s in auth chain" % (EventTypes.Create,),
+                )
+
+            # the room version should be sane.
+            room_version = create_event.content.get("room_version", "1")
+            if room_version not in KNOWN_ROOM_VERSIONS:
+                # This shouldn't be possible, because the remote server should have
+                # rejected the join attempt during make_join.
+                raise InvalidResponseError(
+                    "room appears to have unsupported version %s" % (
+                        room_version,
+                    ))
+
         @defer.inlineCallbacks
         def send_request(destination):
             time_now = self._clock.time_msec()
@@ -661,7 +685,7 @@ class FederationClient(FederationBase):
             for s in signed_state:
                 s.internal_metadata = copy.deepcopy(s.internal_metadata)
 
-            auth_chain.sort(key=lambda e: e.depth)
+            check_authchain_validity(signed_auth)
 
             defer.returnValue({
                 "state": signed_state,
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index bf89d568af..3e0cd294a1 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -27,14 +27,24 @@ from twisted.internet.abstract import isIPAddress
 from twisted.python import failure
 
 from synapse.api.constants import EventTypes
-from synapse.api.errors import AuthError, FederationError, NotFoundError, SynapseError
+from synapse.api.errors import (
+    AuthError,
+    FederationError,
+    IncompatibleRoomVersionError,
+    NotFoundError,
+    SynapseError,
+)
 from synapse.crypto.event_signing import compute_event_signature
 from synapse.federation.federation_base import FederationBase, event_from_pdu_json
 from synapse.federation.persistence import TransactionActions
 from synapse.federation.units import Edu, Transaction
 from synapse.http.endpoint import parse_server_name
+from synapse.replication.http.federation import (
+    ReplicationFederationSendEduRestServlet,
+    ReplicationGetQueryRestServlet,
+)
 from synapse.types import get_domain_from_id
-from synapse.util import async
+from synapse.util.async_helpers import Linearizer, concurrently_execute
 from synapse.util.caches.response_cache import ResponseCache
 from synapse.util.logutils import log_function
 
@@ -61,8 +71,8 @@ class FederationServer(FederationBase):
         self.auth = hs.get_auth()
         self.handler = hs.get_handlers().federation_handler
 
-        self._server_linearizer = async.Linearizer("fed_server")
-        self._transaction_linearizer = async.Linearizer("fed_txn_handler")
+        self._server_linearizer = Linearizer("fed_server")
+        self._transaction_linearizer = Linearizer("fed_txn_handler")
 
         self.transaction_actions = TransactionActions(self.store)
 
@@ -194,7 +204,7 @@ class FederationServer(FederationBase):
                         event_id, f.getTraceback().rstrip(),
                     )
 
-        yield async.concurrently_execute(
+        yield concurrently_execute(
             process_pdus_for_room, pdus_by_room.keys(),
             TRANSACTION_CONCURRENCY_LIMIT,
         )
@@ -323,12 +333,21 @@ class FederationServer(FederationBase):
         defer.returnValue((200, resp))
 
     @defer.inlineCallbacks
-    def on_make_join_request(self, origin, room_id, user_id):
+    def on_make_join_request(self, origin, room_id, user_id, supported_versions):
         origin_host, _ = parse_server_name(origin)
         yield self.check_server_matches_acl(origin_host, room_id)
+
+        room_version = yield self.store.get_room_version(room_id)
+        if room_version not in supported_versions:
+            logger.warn("Room version %s not in %s", room_version, supported_versions)
+            raise IncompatibleRoomVersionError(room_version=room_version)
+
         pdu = yield self.handler.on_make_join_request(room_id, user_id)
         time_now = self._clock.time_msec()
-        defer.returnValue({"event": pdu.get_pdu_json(time_now)})
+        defer.returnValue({
+            "event": pdu.get_pdu_json(time_now),
+            "room_version": room_version,
+        })
 
     @defer.inlineCallbacks
     def on_invite_request(self, origin, content):
@@ -745,6 +764,8 @@ class FederationHandlerRegistry(object):
         if edu_type in self.edu_handlers:
             raise KeyError("Already have an EDU handler for %s" % (edu_type,))
 
+        logger.info("Registering federation EDU handler for %r", edu_type)
+
         self.edu_handlers[edu_type] = handler
 
     def register_query_handler(self, query_type, handler):
@@ -763,6 +784,8 @@ class FederationHandlerRegistry(object):
                 "Already have a Query handler for %s" % (query_type,)
             )
 
+        logger.info("Registering federation query handler for %r", query_type)
+
         self.query_handlers[query_type] = handler
 
     @defer.inlineCallbacks
@@ -785,3 +808,49 @@ class FederationHandlerRegistry(object):
             raise NotFoundError("No handler for Query type '%s'" % (query_type,))
 
         return handler(args)
+
+
+class ReplicationFederationHandlerRegistry(FederationHandlerRegistry):
+    """A FederationHandlerRegistry for worker processes.
+
+    When receiving EDU or queries it will check if an appropriate handler has
+    been registered on the worker, if there isn't one then it calls off to the
+    master process.
+    """
+
+    def __init__(self, hs):
+        self.config = hs.config
+        self.http_client = hs.get_simple_http_client()
+        self.clock = hs.get_clock()
+
+        self._get_query_client = ReplicationGetQueryRestServlet.make_client(hs)
+        self._send_edu = ReplicationFederationSendEduRestServlet.make_client(hs)
+
+        super(ReplicationFederationHandlerRegistry, self).__init__()
+
+    def on_edu(self, edu_type, origin, content):
+        """Overrides FederationHandlerRegistry
+        """
+        handler = self.edu_handlers.get(edu_type)
+        if handler:
+            return super(ReplicationFederationHandlerRegistry, self).on_edu(
+                edu_type, origin, content,
+            )
+
+        return self._send_edu(
+            edu_type=edu_type,
+            origin=origin,
+            content=content,
+        )
+
+    def on_query(self, query_type, args):
+        """Overrides FederationHandlerRegistry
+        """
+        handler = self.query_handlers.get(query_type)
+        if handler:
+            return handler(args)
+
+        return self._get_query_client(
+            query_type=query_type,
+            args=args,
+        )
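
ReplicationFederationHandlerRegistry's dispatch is a simple local-or-forward split: if the worker registered a handler, use it; otherwise relay the EDU or query to the master over replication HTTP. Stripped of the Synapse plumbing, the shape is:

    class WorkerDispatch(object):
        def __init__(self, forward_query):
            self.query_handlers = {}
            self.forward_query = forward_query  # e.g. a replication HTTP client

        def on_query(self, query_type, args):
            handler = self.query_handlers.get(query_type)
            if handler:
                return handler(args)
            # No local handler registered: delegate to the master process.
            return self.forward_query(query_type=query_type, args=args)
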
diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py
index 78f9d40a3a..94d7423d01 100644
--- a/synapse/federation/transaction_queue.py
+++ b/synapse/federation/transaction_queue.py
@@ -26,6 +26,8 @@ from synapse.api.errors import FederationDeniedError, HttpResponseException
 from synapse.handlers.presence import format_user_presence_state, get_interested_remotes
 from synapse.metrics import (
     LaterGauge,
+    event_processing_loop_counter,
+    event_processing_loop_room_count,
     events_processed_counter,
     sent_edus_counter,
     sent_transactions_counter,
@@ -56,6 +58,7 @@ class TransactionQueue(object):
     """
 
     def __init__(self, hs):
+        self.hs = hs
         self.server_name = hs.hostname
 
         self.store = hs.get_datastore()
@@ -253,7 +256,13 @@ class TransactionQueue(object):
                     synapse.metrics.event_processing_last_ts.labels(
                         "federation_sender").set(ts)
 
-                events_processed_counter.inc(len(events))
+                    events_processed_counter.inc(len(events))
+
+                    event_processing_loop_room_count.labels(
+                        "federation_sender"
+                    ).inc(len(events_by_room))
+
+                event_processing_loop_counter.labels("federation_sender").inc()
 
                 synapse.metrics.event_processing_positions.labels(
                     "federation_sender").set(next_token)
@@ -300,6 +309,9 @@ class TransactionQueue(object):
         Args:
             states (list(UserPresenceState))
         """
+        if not self.hs.config.use_presence:
+            # No-op if presence is disabled.
+            return
 
         # First we queue up the new presence by user ID, so multiple presence
         # updates in quick succession are correctly handled
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index 4529d454af..1054441ca5 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -106,7 +106,7 @@ class TransportLayerClient(object):
             dest (str)
             room_id (str)
             event_tuples (list)
-            limt (int)
+            limit (int)
 
         Returns:
             Deferred: Results in a dict received from the remote homeserver.
@@ -195,7 +195,7 @@ class TransportLayerClient(object):
 
     @defer.inlineCallbacks
     @log_function
-    def make_membership_event(self, destination, room_id, user_id, membership):
+    def make_membership_event(self, destination, room_id, user_id, membership, params):
         """Asks a remote server to build and sign us a membership event
 
         Note that this does not append any events to any graphs.
@@ -205,6 +205,8 @@ class TransportLayerClient(object):
             room_id (str): room to join/leave
             user_id (str): user to be joined/left
             membership (str): one of join/leave
+            params (dict[str, str|Iterable[str]]): Query parameters to include in the
+                request.
 
         Returns:
             Deferred: Succeeds when we get a 2xx HTTP response. The result
@@ -241,6 +243,7 @@ class TransportLayerClient(object):
         content = yield self.client.get_json(
             destination=destination,
             path=path,
+            args=params,
             retry_on_dns_fail=retry_on_dns_fail,
             timeout=20000,
             ignore_backoff=ignore_backoff,
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index eae5f2b427..7a993fd1cf 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -190,6 +190,41 @@ def _parse_auth_header(header_bytes):
 
 
 class BaseFederationServlet(object):
+    """Abstract base class for federation servlet classes.
+
+    The servlet object should have a PATH attribute which takes the form of a regexp to
+    match against the request path (excluding the /federation/v1 prefix).
+
+    The servlet should also implement one or more of on_GET, on_POST, on_PUT, to match
+    the appropriate HTTP method. These methods have the signature:
+
+        on_<METHOD>(self, origin, content, query, **kwargs)
+
+        With arguments:
+
+            origin (unicode|None): The authenticated server_name of the calling server,
+                unless REQUIRE_AUTH is set to False and authentication failed.
+
+            content (unicode|None): decoded json body of the request. None if the
+                request was a GET.
+
+            query (dict[bytes, list[bytes]]): Query params from the request. url-decoded
+                (ie, '+' and '%xx' are decoded) but note that it is *not* utf8-decoded
+                yet.
+
+            **kwargs (dict[unicode, unicode]): the dict mapping keys to path
+                components as specified in the path match regexp.
+
+        Returns:
+            Deferred[(int, object)|None]: either (response code, response object) to
+                 return a JSON response, or None if the request has already been handled.
+
+        Raises:
+            SynapseError: to return an error code
+
+            Exception: other exceptions will be caught, logged, and a 500 will be
+                returned.
+    """
     REQUIRE_AUTH = True
 
     def __init__(self, handler, authenticator, ratelimiter, server_name):
@@ -204,6 +239,18 @@ class BaseFederationServlet(object):
         @defer.inlineCallbacks
         @functools.wraps(func)
         def new_func(request, *args, **kwargs):
+            """ A callback which can be passed to HttpServer.RegisterPaths
+
+            Args:
+                request (twisted.web.http.Request):
+                *args: unused?
+                **kwargs (dict[unicode, unicode]): the dict mapping keys to path
+                    components as specified in the path match regexp.
+
+            Returns:
+                Deferred[(int, object)|None]: (response code, response object) as returned
+                    by the callback method. None if the request has already been handled.
+            """
             content = None
             if request.method in ["PUT", "POST"]:
                 # TODO: Handle other method types? other content types?
@@ -214,10 +261,10 @@ class BaseFederationServlet(object):
             except NoAuthenticationError:
                 origin = None
                 if self.REQUIRE_AUTH:
-                    logger.exception("authenticate_request failed")
+                    logger.warn("authenticate_request failed: missing authentication")
                     raise
-            except Exception:
-                logger.exception("authenticate_request failed")
+            except Exception as e:
+                logger.warn("authenticate_request failed: %s", e)
                 raise
 
             if origin:
@@ -384,9 +431,31 @@ class FederationMakeJoinServlet(BaseFederationServlet):
     PATH = "/make_join/(?P<context>[^/]*)/(?P<user_id>[^/]*)"
 
     @defer.inlineCallbacks
-    def on_GET(self, origin, content, query, context, user_id):
+    def on_GET(self, origin, _content, query, context, user_id):
+        """
+        Args:
+            origin (unicode): The authenticated server_name of the calling server
+
+            _content (None): (GETs don't have bodies)
+
+            query (dict[bytes, list[bytes]]): Query params from the request.
+
+            **kwargs (dict[unicode, unicode]): the dict mapping keys to path
+                components as specified in the path match regexp.
+
+        Returns:
+            Deferred[(int, object)|None]: either (response code, response object) to
+                 return a JSON response, or None if the request has already been handled.
+        """
+        versions = query.get(b'ver')
+        if versions is not None:
+            supported_versions = [v.decode("utf-8") for v in versions]
+        else:
+            supported_versions = ["1"]
+
         content = yield self.handler.on_make_join_request(
             origin, context, user_id,
+            supported_versions=supported_versions,
         )
         defer.returnValue((200, content))
 
diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py
index ee41aed69e..f0f89af7dc 100644
--- a/synapse/handlers/appservice.py
+++ b/synapse/handlers/appservice.py
@@ -23,6 +23,10 @@ from twisted.internet import defer
 
 import synapse
 from synapse.api.constants import EventTypes
+from synapse.metrics import (
+    event_processing_loop_counter,
+    event_processing_loop_room_count,
+)
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.metrics import Measure
@@ -136,6 +140,12 @@ class ApplicationServicesHandler(object):
 
                     events_processed_counter.inc(len(events))
 
+                    event_processing_loop_room_count.labels(
+                        "appservice_sender"
+                    ).inc(len(events_by_room))
+
+                    event_processing_loop_counter.labels("appservice_sender").inc()
+
                     synapse.metrics.event_processing_lag.labels(
                         "appservice_sender").set(now - ts)
                     synapse.metrics.event_processing_last_ts.labels(
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 184eef09d0..4a81bd2ba9 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -520,7 +520,7 @@ class AuthHandler(BaseHandler):
         """
         logger.info("Logging in user %s on device %s", user_id, device_id)
         access_token = yield self.issue_access_token(user_id, device_id)
-        yield self._check_mau_limits()
+        yield self.auth.check_auth_blocking(user_id)
 
         # the device *should* have been registered before we got here; however,
         # it's possible we raced against a DELETE operation. The thing we
@@ -734,7 +734,6 @@ class AuthHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def validate_short_term_login_token_and_get_user_id(self, login_token):
-        yield self._check_mau_limits()
         auth_api = self.hs.get_auth()
         user_id = None
         try:
@@ -743,6 +742,7 @@ class AuthHandler(BaseHandler):
             auth_api.validate_macaroon(macaroon, "login", True, user_id)
         except Exception:
             raise AuthError(403, "Invalid token", errcode=Codes.FORBIDDEN)
+        yield self.auth.check_auth_blocking(user_id)
         defer.returnValue(user_id)
 
     @defer.inlineCallbacks
@@ -828,12 +828,26 @@ class AuthHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def delete_threepid(self, user_id, medium, address):
+        """Attempts to unbind the 3pid on the identity servers and deletes it
+        from the local database.
+
+        Args:
+            user_id (str)
+            medium (str)
+            address (str)
+
+        Returns:
+            Deferred[bool]: Returns True if successfully unbound the 3pid on
+            the identity server, False if identity server doesn't support the
+            unbind API.
+        """
+
         # 'Canonicalise' email addresses as per above
         if medium == 'email':
             address = address.lower()
 
         identity_handler = self.hs.get_handlers().identity_handler
-        yield identity_handler.unbind_threepid(
+        result = yield identity_handler.try_unbind_threepid(
             user_id,
             {
                 'medium': medium,
@@ -841,10 +855,10 @@ class AuthHandler(BaseHandler):
             },
         )
 
-        ret = yield self.store.user_delete_threepid(
+        yield self.store.user_delete_threepid(
             user_id, medium, address,
         )
-        defer.returnValue(ret)
+        defer.returnValue(result)
 
     def _save_session(self, session):
         # TODO: Persistent storage
@@ -907,19 +921,6 @@ class AuthHandler(BaseHandler):
         else:
             return defer.succeed(False)
 
-    @defer.inlineCallbacks
-    def _check_mau_limits(self):
-        """
-        Ensure that if mau blocking is enabled that invalid users cannot
-        log in.
-        """
-        if self.hs.config.limit_usage_by_mau is True:
-            current_mau = yield self.store.count_monthly_users()
-            if current_mau >= self.hs.config.max_mau_value:
-                raise AuthError(
-                    403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED
-                )
-
 
 @attr.s
 class MacaroonGenerator(object):
diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py
index b3c5a9ee64..b078df4a76 100644
--- a/synapse/handlers/deactivate_account.py
+++ b/synapse/handlers/deactivate_account.py
@@ -51,7 +51,8 @@ class DeactivateAccountHandler(BaseHandler):
             erase_data (bool): whether to GDPR-erase the user's data
 
         Returns:
-            Deferred
+            Deferred[bool]: True if identity server supports removing
+            threepids, otherwise False.
         """
         # FIXME: Theoretically there is a race here wherein a user resets
         # their password using a threepid.
@@ -60,16 +61,22 @@ class DeactivateAccountHandler(BaseHandler):
         # leave the user still active so they can try again.
         # Ideally we would prevent password resets and then do this in the
         # background thread.
+
+        # This will be set to false if the identity server doesn't support
+        # unbinding
+        identity_server_supports_unbinding = True
+
         threepids = yield self.store.user_get_threepids(user_id)
         for threepid in threepids:
             try:
-                yield self._identity_handler.unbind_threepid(
+                result = yield self._identity_handler.try_unbind_threepid(
                     user_id,
                     {
                         'medium': threepid['medium'],
                         'address': threepid['address'],
                     },
                 )
+                identity_server_supports_unbinding &= result
             except Exception:
                 # Do we want this to be a fatal error or should we carry on?
                 logger.exception("Failed to remove threepid from ID server")
@@ -103,6 +110,8 @@ class DeactivateAccountHandler(BaseHandler):
         # parts users from rooms (if it isn't already running)
         self._start_user_parting()
 
+        defer.returnValue(identity_server_supports_unbinding)
+
     def _start_user_parting(self):
         """
         Start the process that goes through the table of users
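
The `&=` accumulator above makes the result conservative: deactivation reports True only if every identity server successfully unbound its threepid, and a single "unsupported" response flips the whole answer to False. In isolation:

    def unbind_all(threepids, try_unbind):
        # try_unbind returns True on success, False if the server lacks the API.
        identity_server_supports_unbinding = True
        for threepid in threepids:
            identity_server_supports_unbinding &= try_unbind(threepid)
        return identity_server_supports_unbinding
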
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index 2d44f15da3..9e017116a9 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -23,7 +23,7 @@ from synapse.api.constants import EventTypes
 from synapse.api.errors import FederationDeniedError
 from synapse.types import RoomStreamToken, get_domain_from_id
 from synapse.util import stringutils
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.metrics import measure_func
 from synapse.util.retryutils import NotRetryingDestination
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 533b82c783..0ebf0fd188 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -30,7 +30,12 @@ from unpaddedbase64 import decode_base64
 
 from twisted.internet import defer
 
-from synapse.api.constants import EventTypes, Membership, RejectedReason
+from synapse.api.constants import (
+    KNOWN_ROOM_VERSIONS,
+    EventTypes,
+    Membership,
+    RejectedReason,
+)
 from synapse.api.errors import (
     AuthError,
     CodeMessageException,
@@ -44,10 +49,15 @@ from synapse.crypto.event_signing import (
     compute_event_signature,
 )
 from synapse.events.validator import EventValidator
+from synapse.replication.http.federation import (
+    ReplicationCleanRoomRestServlet,
+    ReplicationFederationSendEventsRestServlet,
+)
+from synapse.replication.http.membership import ReplicationUserJoinedLeftRoomRestServlet
 from synapse.state import resolve_events_with_factory
 from synapse.types import UserID, get_domain_from_id
 from synapse.util import logcontext, unwrapFirstError
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_joined_room
 from synapse.util.frozenutils import unfreeze
 from synapse.util.logutils import log_function
@@ -86,6 +96,18 @@ class FederationHandler(BaseHandler):
         self.spam_checker = hs.get_spam_checker()
         self.event_creation_handler = hs.get_event_creation_handler()
         self._server_notices_mxid = hs.config.server_notices_mxid
+        self.config = hs.config
+        self.http_client = hs.get_simple_http_client()
+
+        self._send_events_to_master = (
+            ReplicationFederationSendEventsRestServlet.make_client(hs)
+        )
+        self._notify_user_membership_change = (
+            ReplicationUserJoinedLeftRoomRestServlet.make_client(hs)
+        )
+        self._clean_room_for_join_client = (
+            ReplicationCleanRoomRestServlet.make_client(hs)
+        )
 
         # When joining a room we need to queue any events for that room up
         self.room_queues = {}
@@ -269,8 +291,9 @@ class FederationHandler(BaseHandler):
                             ev_ids, get_prev_content=False, check_redacted=False
                         )
 
+                    room_version = yield self.store.get_room_version(pdu.room_id)
                     state_map = yield resolve_events_with_factory(
-                        state_groups, {pdu.event_id: pdu}, fetch
+                        room_version, state_groups, {pdu.event_id: pdu}, fetch
                     )
 
                     state = (yield self.store.get_events(state_map.values())).values()
@@ -922,6 +945,9 @@ class FederationHandler(BaseHandler):
             joinee,
             "join",
             content,
+            params={
+                "ver": KNOWN_ROOM_VERSIONS,
+            },
         )
 
         # This shouldn't happen, because the RoomMemberHandler has a
@@ -1150,7 +1176,7 @@ class FederationHandler(BaseHandler):
         )
 
         context = yield self.state_handler.compute_event_context(event)
-        yield self._persist_events([(event, context)])
+        yield self.persist_events_and_notify([(event, context)])
 
         defer.returnValue(event)
 
@@ -1181,19 +1207,20 @@ class FederationHandler(BaseHandler):
         )
 
         context = yield self.state_handler.compute_event_context(event)
-        yield self._persist_events([(event, context)])
+        yield self.persist_events_and_notify([(event, context)])
 
         defer.returnValue(event)
 
     @defer.inlineCallbacks
     def _make_and_verify_event(self, target_hosts, room_id, user_id, membership,
-                               content={},):
+                               content={}, params=None):
         origin, pdu = yield self.federation_client.make_membership_event(
             target_hosts,
             room_id,
             user_id,
             membership,
             content,
+            params=params,
         )
 
         logger.debug("Got response to make_%s: %s", membership, pdu)
@@ -1423,7 +1450,7 @@ class FederationHandler(BaseHandler):
                     event, context
                 )
 
-            yield self._persist_events(
+            yield self.persist_events_and_notify(
                 [(event, context)],
                 backfilled=backfilled,
             )
@@ -1461,7 +1488,7 @@ class FederationHandler(BaseHandler):
             ], consumeErrors=True,
         ))
 
-        yield self._persist_events(
+        yield self.persist_events_and_notify(
             [
                 (ev_info["event"], context)
                 for ev_info, context in zip(event_infos, contexts)
@@ -1549,7 +1576,7 @@ class FederationHandler(BaseHandler):
                     raise
                 events_to_context[e.event_id].rejected = RejectedReason.AUTH_ERROR
 
-        yield self._persist_events(
+        yield self.persist_events_and_notify(
             [
                 (e, events_to_context[e.event_id])
                 for e in itertools.chain(auth_events, state)
@@ -1560,7 +1587,7 @@ class FederationHandler(BaseHandler):
             event, old_state=state
         )
 
-        yield self._persist_events(
+        yield self.persist_events_and_notify(
             [(event, new_event_context)],
         )
 
@@ -1802,7 +1829,10 @@ class FederationHandler(BaseHandler):
                     (d.type, d.state_key): d for d in different_events if d
                 })
 
+                room_version = yield self.store.get_room_version(event.room_id)
+
                 new_state = self.state_handler.resolve_events(
+                    room_version,
                     [list(local_view.values()), list(remote_view.values())],
                     event
                 )
@@ -2288,7 +2318,7 @@ class FederationHandler(BaseHandler):
                 for revocation.
         """
         try:
-            response = yield self.hs.get_simple_http_client().get_json(
+            response = yield self.http_client.get_json(
                 url,
                 {"public_key": public_key}
             )
@@ -2301,7 +2331,7 @@ class FederationHandler(BaseHandler):
             raise AuthError(403, "Third party certificate was invalid")
 
     @defer.inlineCallbacks
-    def _persist_events(self, event_and_contexts, backfilled=False):
+    def persist_events_and_notify(self, event_and_contexts, backfilled=False):
         """Persists events and tells the notifier/pushers about them, if
         necessary.
 
@@ -2313,14 +2343,21 @@ class FederationHandler(BaseHandler):
         Returns:
             Deferred
         """
-        max_stream_id = yield self.store.persist_events(
-            event_and_contexts,
-            backfilled=backfilled,
-        )
+        if self.config.worker_app:
+            yield self._send_events_to_master(
+                store=self.store,
+                event_and_contexts=event_and_contexts,
+                backfilled=backfilled
+            )
+        else:
+            max_stream_id = yield self.store.persist_events(
+                event_and_contexts,
+                backfilled=backfilled,
+            )
 
-        if not backfilled:  # Never notify for backfilled events
-            for event, _ in event_and_contexts:
-                self._notify_persisted_event(event, max_stream_id)
+            if not backfilled:  # Never notify for backfilled events
+                for event, _ in event_and_contexts:
+                    self._notify_persisted_event(event, max_stream_id)
 
     def _notify_persisted_event(self, event, max_stream_id):
         """Checks to see if notifier/pushers should be notified about the
@@ -2353,15 +2390,30 @@ class FederationHandler(BaseHandler):
             extra_users=extra_users
         )
 
-        logcontext.run_in_background(
-            self.pusher_pool.on_new_notifications,
+        self.pusher_pool.on_new_notifications(
             event_stream_id, max_stream_id,
         )
 
     def _clean_room_for_join(self, room_id):
-        return self.store.clean_room_for_join(room_id)
+        """Called to clean up any data in DB for a given room, ready for the
+        server to join the room.
+
+        Args:
+            room_id (str)
+        """
+        if self.config.worker_app:
+            return self._clean_room_for_join_client(room_id)
+        else:
+            return self.store.clean_room_for_join(room_id)
 
     def user_joined_room(self, user, room_id):
         """Called when a new user has joined the room
         """
-        return user_joined_room(self.distributor, user, room_id)
+        if self.config.worker_app:
+            return self._notify_user_membership_change(
+                room_id=room_id,
+                user_id=user.to_string(),
+                change="joined",
+            )
+        else:
+            return user_joined_room(self.distributor, user, room_id)
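
persist_events_and_notify is the template for all the worker-aware methods in this file: on a worker, hand the work to the master over replication HTTP; on the master, do it locally and notify, skipping notifications for backfill. A stubbed-out sketch of that control flow, with the storage and replication calls passed in as plain callables:

    def persist_events_and_notify(worker_app, send_to_master, persist, notify,
                                  event_and_contexts, backfilled=False):
        if worker_app:
            # Workers may not write events directly; relay to the master.
            return send_to_master(event_and_contexts, backfilled)
        max_stream_id = persist(event_and_contexts, backfilled)
        if not backfilled:  # never notify for backfilled events
            for event, _ in event_and_contexts:
                notify(event, max_stream_id)
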
diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py
index 1d36d967c3..5feb3f22a6 100644
--- a/synapse/handlers/identity.py
+++ b/synapse/handlers/identity.py
@@ -137,15 +137,19 @@ class IdentityHandler(BaseHandler):
         defer.returnValue(data)
 
     @defer.inlineCallbacks
-    def unbind_threepid(self, mxid, threepid):
-        """
-        Removes a binding from an identity server
+    def try_unbind_threepid(self, mxid, threepid):
+        """Removes a binding from an identity server
+
         Args:
             mxid (str): Matrix user ID of binding to be removed
             threepid (dict): Dict with medium & address of binding to be removed
 
+        Raises:
+            SynapseError: If we failed to contact the identity server
+
         Returns:
-            Deferred[bool]: True on success, otherwise False
+            Deferred[bool]: True on success, otherwise False if the identity
+            server doesn't support unbinding
         """
         logger.debug("unbinding threepid %r from %s", threepid, mxid)
         if not self.trusted_id_servers:
@@ -175,11 +179,21 @@ class IdentityHandler(BaseHandler):
             content=content,
             destination_is=id_server,
         )
-        yield self.http_client.post_json_get_json(
-            url,
-            content,
-            headers,
-        )
+        try:
+            yield self.http_client.post_json_get_json(
+                url,
+                content,
+                headers,
+            )
+        except HttpResponseException as e:
+            if e.code in (400, 404, 501,):
+                # The remote server probably doesn't support unbinding (yet)
+                logger.warn("Received %d response while unbinding threepid", e.code)
+                defer.returnValue(False)
+            else:
+                logger.error("Failed to unbind threepid on identity server: %s", e)
+                raise SynapseError(502, "Failed to contact identity server")
+
         defer.returnValue(True)
 
     @defer.inlineCallbacks
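
For illustration, the error-handling contract of try_unbind_threepid reduces to three outcomes. A minimal standalone sketch (classify_unbind_response is a hypothetical helper, not Synapse code):

    # 2xx -> unbound; 400/404/501 -> the identity server doesn't support
    # unbinding, so return False; anything else -> surfaced as a 502.
    UNSUPPORTED_UNBIND_CODES = (400, 404, 501)

    def classify_unbind_response(status_code):
        if 200 <= status_code < 300:
            return True
        if status_code in UNSUPPORTED_UNBIND_CODES:
            return False
        raise RuntimeError("Failed to contact identity server")  # 502 in Synapse

    assert classify_unbind_response(200) is True
    assert classify_unbind_response(501) is False
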
diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py
index 40e7580a61..e009395207 100644
--- a/synapse/handlers/initial_sync.py
+++ b/synapse/handlers/initial_sync.py
@@ -25,7 +25,7 @@ from synapse.handlers.presence import format_user_presence_state
 from synapse.streams.config import PaginationConfig
 from synapse.types import StreamToken, UserID
 from synapse.util import unwrapFirstError
-from synapse.util.async import concurrently_execute
+from synapse.util.async_helpers import concurrently_execute
 from synapse.util.caches.snapshot_cache import SnapshotCache
 from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.visibility import filter_events_for_client
@@ -372,6 +372,10 @@ class InitialSyncHandler(BaseHandler):
 
         @defer.inlineCallbacks
         def get_presence():
+            # If presence is disabled, return an empty list
+            if not self.hs.config.use_presence:
+                defer.returnValue([])
+
             states = yield presence_handler.get_states(
                 [m.user_id for m in room_members],
                 as_event=True,
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 39d7724778..e484061cc0 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -25,17 +25,24 @@ from twisted.internet import defer
 from twisted.internet.defer import succeed
 
 from synapse.api.constants import MAX_DEPTH, EventTypes, Membership
-from synapse.api.errors import AuthError, Codes, ConsentNotGivenError, SynapseError
+from synapse.api.errors import (
+    AuthError,
+    Codes,
+    ConsentNotGivenError,
+    NotFoundError,
+    SynapseError,
+)
 from synapse.api.urls import ConsentURIBuilder
 from synapse.crypto.event_signing import add_hashes_and_signatures
 from synapse.events.utils import serialize_event
 from synapse.events.validator import EventValidator
-from synapse.replication.http.send_event import send_event_to_master
+from synapse.replication.http.send_event import ReplicationSendEventRestServlet
 from synapse.types import RoomAlias, UserID
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.frozenutils import frozendict_json_encoder
 from synapse.util.logcontext import run_in_background
 from synapse.util.metrics import measure_func
+from synapse.visibility import filter_events_for_client
 
 from ._base import BaseHandler
 
@@ -82,28 +89,85 @@ class MessageHandler(object):
         defer.returnValue(data)
 
     @defer.inlineCallbacks
-    def get_state_events(self, user_id, room_id, is_guest=False):
+    def get_state_events(
+        self, user_id, room_id, types=None, filtered_types=None,
+        at_token=None, is_guest=False,
+    ):
         """Retrieve all state events for a given room. If the user is
         joined to the room then return the current state. If the user has
-        left the room return the state events from when they left.
+        left the room return the state events from when they left. If an explicit
+        'at' parameter is passed, return the state events as of that event, if
+        visible.
 
         Args:
             user_id(str): The user requesting state events.
             room_id(str): The room ID to get all state events from.
+            types(list[(str, str|None)]|None): List of (type, state_key) tuples
+                which are used to filter the state fetched. If `state_key` is None,
+                all events of the given type are returned. `types` itself may be
+                None, in which case no filtering is applied.
+            filtered_types(list[str]|None): Only apply filtering via `types` to this
+                list of event types.  Other types of events are returned unfiltered.
+                If None, `types` filtering is applied to all events.
+            at_token(StreamToken|None): the stream token of the point at which we are
+                requesting the state. If the user is not allowed to view the state as of that
+                stream token, we raise a 403 SynapseError. If None, returns the current
+                state based on the current_state_events table.
+            is_guest(bool): whether this user is a guest
         Returns:
             A list of dicts representing state events. [{}, {}, {}]
+        Raises:
+            NotFoundError (404) if the at token does not yield an event
+
+            AuthError (403) if the user doesn't have permission to view
+            members of this room.
         """
-        membership, membership_event_id = yield self.auth.check_in_room_or_world_readable(
-            room_id, user_id
-        )
+        if at_token:
+            # FIXME this claims to get the state at a stream position, but
+            # get_recent_events_for_room operates by topo ordering. This therefore
+            # does not reliably give you the state at the given stream position.
+            # (https://github.com/matrix-org/synapse/issues/3305)
+            last_events, _ = yield self.store.get_recent_events_for_room(
+                room_id, end_token=at_token.room_key, limit=1,
+            )
 
-        if membership == Membership.JOIN:
-            room_state = yield self.state.get_current_state(room_id)
-        elif membership == Membership.LEAVE:
-            room_state = yield self.store.get_state_for_events(
-                [membership_event_id], None
+            if not last_events:
+                raise NotFoundError("Can't find event for token %s" % (at_token, ))
+
+            visible_events = yield filter_events_for_client(
+                self.store, user_id, last_events,
             )
-            room_state = room_state[membership_event_id]
+
+            event = last_events[0]
+            if visible_events:
+                room_state = yield self.store.get_state_for_events(
+                    [event.event_id], types, filtered_types=filtered_types,
+                )
+                room_state = room_state[event.event_id]
+            else:
+                raise AuthError(
+                    403,
+                    "User %s not allowed to view events in room %s at token %s" % (
+                        user_id, room_id, at_token,
+                    )
+                )
+        else:
+            membership, membership_event_id = (
+                yield self.auth.check_in_room_or_world_readable(
+                    room_id, user_id,
+                )
+            )
+
+            if membership == Membership.JOIN:
+                state_ids = yield self.store.get_filtered_current_state_ids(
+                    room_id, types, filtered_types=filtered_types,
+                )
+                room_state = yield self.store.get_events(state_ids.values())
+            elif membership == Membership.LEAVE:
+                room_state = yield self.store.get_state_for_events(
+                    [membership_event_id], types, filtered_types=filtered_types,
+                )
+                room_state = room_state[membership_event_id]
 
         now = self.clock.time_msec()
         defer.returnValue(
@@ -171,7 +235,7 @@ class EventCreationHandler(object):
         self.notifier = hs.get_notifier()
         self.config = hs.config
 
-        self.http_client = hs.get_simple_http_client()
+        self.send_event_to_master = ReplicationSendEventRestServlet.make_client(hs)
 
         # This is only used to get at ratelimit function, and maybe_kick_guest_users
         self.base_handler = BaseHandler(hs)
@@ -212,10 +276,14 @@ class EventCreationHandler(object):
                 where *hashes* is a map from algorithm to hash.
 
                 If None, they will be requested from the database.
-
+        Raises:
+            ResourceLimitError if the server is blocked due to some resource
+            being exceeded
         Returns:
             Tuple of created event (FrozenEvent), Context
         """
+        yield self.auth.check_auth_blocking(requester.user.to_string())
+
         builder = self.event_builder_factory.new(event_dict)
 
         self.validator.validate_new(builder)
@@ -559,12 +627,9 @@ class EventCreationHandler(object):
         try:
             # If we're a worker we need to hit out to the master.
             if self.config.worker_app:
-                yield send_event_to_master(
-                    clock=self.hs.get_clock(),
+                yield self.send_event_to_master(
+                    event_id=event.event_id,
                     store=self.store,
-                    client=self.http_client,
-                    host=self.config.worker_replication_host,
-                    port=self.config.worker_replication_http_port,
                     requester=requester,
                     event=event,
                     context=context,
@@ -713,11 +778,8 @@ class EventCreationHandler(object):
             event, context=context
         )
 
-        # this intentionally does not yield: we don't care about the result
-        # and don't need to wait for it.
-        run_in_background(
-            self.pusher_pool.on_new_notifications,
-            event_stream_id, max_stream_id
+        self.pusher_pool.on_new_notifications(
+            event_stream_id, max_stream_id,
         )
 
         def _notify():
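
The types/filtered_types parameters added to get_state_events are easiest to pin down as a predicate. A standalone sketch of the documented semantics (matches is a hypothetical helper, not Synapse code):

    def matches(event_type, state_key, types, filtered_types):
        # Event types not listed in filtered_types bypass the filter entirely.
        if filtered_types is not None and event_type not in filtered_types:
            return True
        if types is None:
            return True
        # A None state_key in a (type, state_key) tuple matches any key.
        return any(
            t == event_type and (k is None or k == state_key)
            for t, k in types
        )

    types = [("m.room.member", "@alice:example.com")]
    assert matches("m.room.member", "@alice:example.com", types, ["m.room.member"])
    assert not matches("m.room.member", "@bob:example.com", types, ["m.room.member"])
    assert matches("m.room.topic", "", types, ["m.room.member"])  # unfiltered type
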
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index b2849783ed..5170d093e3 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -18,11 +18,11 @@ import logging
 from twisted.internet import defer
 from twisted.python.failure import Failure
 
-from synapse.api.constants import Membership
+from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import SynapseError
 from synapse.events.utils import serialize_event
 from synapse.types import RoomStreamToken
-from synapse.util.async import ReadWriteLock
+from synapse.util.async_helpers import ReadWriteLock
 from synapse.util.logcontext import run_in_background
 from synapse.util.stringutils import random_string
 from synapse.visibility import filter_events_for_client
@@ -251,6 +251,33 @@ class PaginationHandler(object):
             is_peeking=(member_event_id is None),
         )
 
+        state = None
+        if event_filter and event_filter.lazy_load_members():
+            # TODO: remove redundant members
+
+            types = [
+                (EventTypes.Member, state_key)
+                for state_key in set(
+                    event.sender  # FIXME: we also care about invite targets etc.
+                    for event in events
+                )
+            ]
+
+            state_ids = yield self.store.get_state_ids_for_event(
+                events[0].event_id, types=types,
+            )
+
+            if state_ids:
+                state = yield self.store.get_events(list(state_ids.values()))
+
+            if state:
+                state = yield filter_events_for_client(
+                    self.store,
+                    user_id,
+                    state.values(),
+                    is_peeking=(member_event_id is None),
+                )
+
         time_now = self.clock.time_msec()
 
         chunk = {
@@ -262,4 +289,10 @@ class PaginationHandler(object):
             "end": next_token.to_string(),
         }
 
+        if state:
+            chunk["state"] = [
+                serialize_event(e, time_now, as_client_event)
+                for e in state
+            ]
+
         defer.returnValue(chunk)
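
With a filter that enables lazy-loading of members, the paginated /messages response now carries a state key alongside the usual fields. An illustrative shape (tokens and contents made up):

    chunk = {
        "chunk": [],   # serialized timeline events
        "start": "t100-200_0_0_0",   # made-up pagination tokens
        "end": "t90-190_0_0_0",
        "state": [],   # m.room.member events for the timeline senders
    }
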
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index 3732830194..ba3856674d 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -36,7 +36,7 @@ from synapse.api.errors import SynapseError
 from synapse.metrics import LaterGauge
 from synapse.storage.presence import UserPresenceState
 from synapse.types import UserID, get_domain_from_id
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.caches.descriptors import cachedInlineCallbacks
 from synapse.util.logcontext import run_in_background
 from synapse.util.logutils import log_function
@@ -95,6 +95,7 @@ class PresenceHandler(object):
         Args:
             hs (synapse.server.HomeServer):
         """
+        self.hs = hs
         self.is_mine = hs.is_mine
         self.is_mine_id = hs.is_mine_id
         self.clock = hs.get_clock()
@@ -230,6 +231,10 @@ class PresenceHandler(object):
         earlier than they should when synapse is restarted. The effect of this
         is some spurious presence changes that will self-correct.
         """
+        # If the DB pool has already terminated, don't try updating
+        if not self.hs.get_db_pool().running:
+            return
+
         logger.info(
             "Performing _on_shutdown. Persisting %d unpersisted changes",
             len(self.user_to_current_state)
@@ -390,6 +395,10 @@ class PresenceHandler(object):
         """We've seen the user do something that indicates they're interacting
         with the app.
         """
+        # If presence is disabled, no-op
+        if not self.hs.config.use_presence:
+            return
+
         user_id = user.to_string()
 
         bump_active_time_counter.inc()
@@ -419,6 +428,11 @@ class PresenceHandler(object):
                 Useful for streams that are not associated with an actual
                 client that is being used by a user.
         """
+        # If presence is disabled, override affect_presence so that this sync
+        # does not change the user's presence state.
+        if not self.hs.config.use_presence:
+            affect_presence = False
+
         if affect_presence:
             curr_sync = self.user_to_num_current_syncs.get(user_id, 0)
             self.user_to_num_current_syncs[user_id] = curr_sync + 1
@@ -464,13 +478,16 @@ class PresenceHandler(object):
         Returns:
             set(str): A set of user_id strings.
         """
-        syncing_user_ids = {
-            user_id for user_id, count in self.user_to_num_current_syncs.items()
-            if count
-        }
-        for user_ids in self.external_process_to_current_syncs.values():
-            syncing_user_ids.update(user_ids)
-        return syncing_user_ids
+        if self.hs.config.use_presence:
+            syncing_user_ids = {
+                user_id for user_id, count in self.user_to_num_current_syncs.items()
+                if count
+            }
+            for user_ids in self.external_process_to_current_syncs.values():
+                syncing_user_ids.update(user_ids)
+            return syncing_user_ids
+        else:
+            return set()
 
     @defer.inlineCallbacks
     def update_external_syncs_row(self, process_id, user_id, is_syncing, sync_time_msec):
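
Every presence entry point above now short-circuits when use_presence is disabled. Condensed into a standalone sketch (a hypothetical class, not Synapse's):

    class PresenceGate(object):
        def __init__(self, use_presence):
            self.use_presence = use_presence
            self.user_to_num_current_syncs = {}

        def user_syncing(self, user_id, affect_presence=True):
            # Disabled presence forces affect_presence off, as in the diff.
            if not self.use_presence:
                affect_presence = False
            if affect_presence:
                n = self.user_to_num_current_syncs.get(user_id, 0)
                self.user_to_num_current_syncs[user_id] = n + 1

        def get_currently_syncing_users(self):
            if not self.use_presence:
                return set()   # report nobody as syncing
            return {u for u, n in self.user_to_num_current_syncs.items() if n}
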
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index 9af2e8f869..75b8b7ce6a 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -32,12 +32,16 @@ from ._base import BaseHandler
 logger = logging.getLogger(__name__)
 
 
-class ProfileHandler(BaseHandler):
-    PROFILE_UPDATE_MS = 60 * 1000
-    PROFILE_UPDATE_EVERY_MS = 24 * 60 * 60 * 1000
+class BaseProfileHandler(BaseHandler):
+    """Handles fetching and updating user profile information.
+
+    BaseProfileHandler can be instantiated directly on workers and will
+    delegate to master when necessary. The master process should use the
+    subclass MasterProfileHandler.
+    """
 
     def __init__(self, hs):
-        super(ProfileHandler, self).__init__(hs)
+        super(BaseProfileHandler, self).__init__(hs)
 
         self.federation = hs.get_federation_client()
         hs.get_federation_registry().register_query_handler(
@@ -46,11 +50,6 @@ class ProfileHandler(BaseHandler):
 
         self.user_directory_handler = hs.get_user_directory_handler()
 
-        if hs.config.worker_app is None:
-            self.clock.looping_call(
-                self._start_update_remote_profile_cache, self.PROFILE_UPDATE_MS,
-            )
-
     @defer.inlineCallbacks
     def get_profile(self, user_id):
         target_user = UserID.from_string(user_id)
@@ -282,6 +281,20 @@ class ProfileHandler(BaseHandler):
                     room_id, str(e.message)
                 )
 
+
+class MasterProfileHandler(BaseProfileHandler):
+    PROFILE_UPDATE_MS = 60 * 1000
+    PROFILE_UPDATE_EVERY_MS = 24 * 60 * 60 * 1000
+
+    def __init__(self, hs):
+        super(MasterProfileHandler, self).__init__(hs)
+
+        assert hs.config.worker_app is None
+
+        self.clock.looping_call(
+            self._start_update_remote_profile_cache, self.PROFILE_UPDATE_MS,
+        )
+
     def _start_update_remote_profile_cache(self):
         return run_as_background_process(
             "Update remote profile", self._update_remote_profile_cache,
diff --git a/synapse/handlers/read_marker.py b/synapse/handlers/read_marker.py
index 995460f82a..32108568c6 100644
--- a/synapse/handlers/read_marker.py
+++ b/synapse/handlers/read_marker.py
@@ -17,7 +17,7 @@ import logging
 
 from twisted.internet import defer
 
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 
 from ._base import BaseHandler
 
diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py
index cb905a3903..a6f3181f09 100644
--- a/synapse/handlers/receipts.py
+++ b/synapse/handlers/receipts.py
@@ -18,7 +18,6 @@ from twisted.internet import defer
 
 from synapse.types import get_domain_from_id
 from synapse.util import logcontext
-from synapse.util.logcontext import PreserveLoggingContext
 
 from ._base import BaseHandler
 
@@ -116,16 +115,15 @@ class ReceiptsHandler(BaseHandler):
 
         affected_room_ids = list(set([r["room_id"] for r in receipts]))
 
-        with PreserveLoggingContext():
-            self.notifier.on_new_event(
-                "receipt_key", max_batch_id, rooms=affected_room_ids
-            )
-            # Note that the min here shouldn't be relied upon to be accurate.
-            self.hs.get_pusherpool().on_new_receipts(
-                min_batch_id, max_batch_id, affected_room_ids
-            )
+        self.notifier.on_new_event(
+            "receipt_key", max_batch_id, rooms=affected_room_ids
+        )
+        # Note that the min here shouldn't be relied upon to be accurate.
+        self.hs.get_pusherpool().on_new_receipts(
+            min_batch_id, max_batch_id, affected_room_ids,
+        )
 
-            defer.returnValue(True)
+        defer.returnValue(True)
 
     @logcontext.preserve_fn   # caller should not yield on this
     @defer.inlineCallbacks
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 289704b241..f03ee1476b 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -28,7 +28,7 @@ from synapse.api.errors import (
 )
 from synapse.http.client import CaptchaServerHttpClient
 from synapse.types import RoomAlias, RoomID, UserID, create_requester
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.threepids import check_3pid_allowed
 
 from ._base import BaseHandler
@@ -144,7 +144,8 @@ class RegistrationHandler(BaseHandler):
         Raises:
             RegistrationError if there was a problem registering.
         """
-        yield self._check_mau_limits()
+
+        yield self.auth.check_auth_blocking()
         password_hash = None
         if password:
             password_hash = yield self.auth_handler().hash(password)
@@ -289,7 +290,7 @@ class RegistrationHandler(BaseHandler):
                 400,
                 "User ID can only contain characters a-z, 0-9, or '=_-./'",
             )
-        yield self._check_mau_limits()
+        yield self.auth.check_auth_blocking()
         user = UserID(localpart, self.hs.hostname)
         user_id = user.to_string()
 
@@ -439,7 +440,7 @@ class RegistrationHandler(BaseHandler):
         """
         if localpart is None:
             raise SynapseError(400, "Request must include user id")
-        yield self._check_mau_limits()
+        yield self.auth.check_auth_blocking()
         need_register = True
 
         try:
@@ -533,16 +534,3 @@ class RegistrationHandler(BaseHandler):
             remote_room_hosts=remote_room_hosts,
             action="join",
         )
-
-    @defer.inlineCallbacks
-    def _check_mau_limits(self):
-        """
-        Do not accept registrations if monthly active user limits exceeded
-         and limiting is enabled
-        """
-        if self.hs.config.limit_usage_by_mau is True:
-            current_mau = yield self.store.count_monthly_users()
-            if current_mau >= self.hs.config.max_mau_value:
-                raise RegistrationError(
-                    403, "MAU Limit Exceeded", Codes.MAU_LIMIT_EXCEEDED
-                )
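
The deleted _check_mau_limits is subsumed by auth.check_auth_blocking, which centralises the limit checks (raising ResourceLimitError, per the other hunks in this changeset). A simplified, standalone sketch of the MAU portion of such a check:

    class ResourceLimitError(Exception):   # stand-in for synapse.api.errors
        pass

    def check_auth_blocking(limit_usage_by_mau, max_mau_value, current_mau):
        # Refuse service once the monthly-active-user cap is reached.
        if limit_usage_by_mau and current_mau >= max_mau_value:
            raise ResourceLimitError("Monthly Active User Limit Exceeded")

    check_auth_blocking(True, max_mau_value=100, current_mau=42)   # passes
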
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 7b7804d9b2..c3f820b975 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -21,9 +21,17 @@ import math
 import string
 from collections import OrderedDict
 
+from six import string_types
+
 from twisted.internet import defer
 
-from synapse.api.constants import EventTypes, JoinRules, RoomCreationPreset
+from synapse.api.constants import (
+    DEFAULT_ROOM_VERSION,
+    KNOWN_ROOM_VERSIONS,
+    EventTypes,
+    JoinRules,
+    RoomCreationPreset,
+)
 from synapse.api.errors import AuthError, Codes, StoreError, SynapseError
 from synapse.types import RoomAlias, RoomID, RoomStreamToken, StreamToken, UserID
 from synapse.util import stringutils
@@ -90,15 +98,34 @@ class RoomCreationHandler(BaseHandler):
         Raises:
             SynapseError if the room ID couldn't be stored, or something went
             horribly wrong.
+            ResourceLimitError if the server is blocked due to some resource
+            being exceeded
         """
         user_id = requester.user.to_string()
 
+        self.auth.check_auth_blocking(user_id)
+
         if not self.spam_checker.user_may_create_room(user_id):
             raise SynapseError(403, "You are not permitted to create rooms")
 
         if ratelimit:
             yield self.ratelimit(requester)
 
+        room_version = config.get("room_version", DEFAULT_ROOM_VERSION)
+        if not isinstance(room_version, string_types):
+            raise SynapseError(
+                400,
+                "room_version must be a string",
+                Codes.BAD_JSON,
+            )
+
+        if room_version not in KNOWN_ROOM_VERSIONS:
+            raise SynapseError(
+                400,
+                "Your homeserver does not support this room version",
+                Codes.UNSUPPORTED_ROOM_VERSION,
+            )
+
         if "room_alias_name" in config:
             for wchar in string.whitespace:
                 if wchar in config["room_alias_name"]:
@@ -184,6 +211,9 @@ class RoomCreationHandler(BaseHandler):
 
         creation_content = config.get("creation_content", {})
 
+        # override any attempt to set room versions via the creation_content
+        creation_content["room_version"] = room_version
+
         room_member_handler = self.hs.get_room_member_handler()
 
         yield self._send_events_for_new_room(
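
The room-version validation above can be exercised standalone. A sketch under illustrative constants (the real KNOWN_ROOM_VERSIONS/DEFAULT_ROOM_VERSION live in synapse.api.constants):

    KNOWN_ROOM_VERSIONS = {"1"}   # illustrative
    DEFAULT_ROOM_VERSION = "1"

    def validate_room_version(config):
        room_version = config.get("room_version", DEFAULT_ROOM_VERSION)
        if not isinstance(room_version, str):
            raise ValueError("room_version must be a string")   # M_BAD_JSON
        if room_version not in KNOWN_ROOM_VERSIONS:
            # M_UNSUPPORTED_ROOM_VERSION in Synapse
            raise ValueError("Your homeserver does not support this room version")
        return room_version

    assert validate_room_version({}) == "1"
    assert validate_room_version({"room_version": "1"}) == "1"
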
diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py
index 828229f5c3..37e41afd61 100644
--- a/synapse/handlers/room_list.py
+++ b/synapse/handlers/room_list.py
@@ -26,7 +26,7 @@ from twisted.internet import defer
 
 from synapse.api.constants import EventTypes, JoinRules
 from synapse.types import ThirdPartyInstanceID
-from synapse.util.async import concurrently_execute
+from synapse.util.async_helpers import concurrently_execute
 from synapse.util.caches.descriptors import cachedInlineCallbacks
 from synapse.util.caches.response_cache import ResponseCache
 
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 0d4a3f4677..f643619047 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -30,7 +30,7 @@ import synapse.types
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError, Codes, SynapseError
 from synapse.types import RoomID, UserID
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_joined_room, user_left_room
 
 logger = logging.getLogger(__name__)
@@ -344,6 +344,7 @@ class RoomMemberHandler(object):
         latest_event_ids = (
             event_id for (event_id, _, _) in prev_events_and_hashes
         )
+
         current_state_ids = yield self.state_handler.get_current_state_ids(
             room_id, latest_event_ids=latest_event_ids,
         )
diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py
index 22d8b4b0d3..acc6eb8099 100644
--- a/synapse/handlers/room_member_worker.py
+++ b/synapse/handlers/room_member_worker.py
@@ -20,16 +20,24 @@ from twisted.internet import defer
 from synapse.api.errors import SynapseError
 from synapse.handlers.room_member import RoomMemberHandler
 from synapse.replication.http.membership import (
-    get_or_register_3pid_guest,
-    notify_user_membership_change,
-    remote_join,
-    remote_reject_invite,
+    ReplicationRegister3PIDGuestRestServlet as Repl3PID,
+    ReplicationRemoteJoinRestServlet as ReplRemoteJoin,
+    ReplicationRemoteRejectInviteRestServlet as ReplRejectInvite,
+    ReplicationUserJoinedLeftRoomRestServlet as ReplJoinedLeft,
 )
 
 logger = logging.getLogger(__name__)
 
 
 class RoomMemberWorkerHandler(RoomMemberHandler):
+    def __init__(self, hs):
+        super(RoomMemberWorkerHandler, self).__init__(hs)
+
+        self._get_register_3pid_client = Repl3PID.make_client(hs)
+        self._remote_join_client = ReplRemoteJoin.make_client(hs)
+        self._remote_reject_client = ReplRejectInvite.make_client(hs)
+        self._notify_change_client = ReplJoinedLeft.make_client(hs)
+
     @defer.inlineCallbacks
     def _remote_join(self, requester, remote_room_hosts, room_id, user, content):
         """Implements RoomMemberHandler._remote_join
@@ -37,10 +45,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
         if len(remote_room_hosts) == 0:
             raise SynapseError(404, "No known servers")
 
-        ret = yield remote_join(
-            self.simple_http_client,
-            host=self.config.worker_replication_host,
-            port=self.config.worker_replication_http_port,
+        ret = yield self._remote_join_client(
             requester=requester,
             remote_room_hosts=remote_room_hosts,
             room_id=room_id,
@@ -55,10 +60,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
     def _remote_reject_invite(self, requester, remote_room_hosts, room_id, target):
         """Implements RoomMemberHandler._remote_reject_invite
         """
-        return remote_reject_invite(
-            self.simple_http_client,
-            host=self.config.worker_replication_host,
-            port=self.config.worker_replication_http_port,
+        return self._remote_reject_client(
             requester=requester,
             remote_room_hosts=remote_room_hosts,
             room_id=room_id,
@@ -68,10 +70,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
     def _user_joined_room(self, target, room_id):
         """Implements RoomMemberHandler._user_joined_room
         """
-        return notify_user_membership_change(
-            self.simple_http_client,
-            host=self.config.worker_replication_host,
-            port=self.config.worker_replication_http_port,
+        return self._notify_change_client(
             user_id=target.to_string(),
             room_id=room_id,
             change="joined",
@@ -80,10 +79,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
     def _user_left_room(self, target, room_id):
         """Implements RoomMemberHandler._user_left_room
         """
-        return notify_user_membership_change(
-            self.simple_http_client,
-            host=self.config.worker_replication_host,
-            port=self.config.worker_replication_http_port,
+        return self._notify_change_client(
             user_id=target.to_string(),
             room_id=room_id,
             change="left",
@@ -92,10 +88,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
     def get_or_register_3pid_guest(self, requester, medium, address, inviter_user_id):
         """Implements RoomMemberHandler.get_or_register_3pid_guest
         """
-        return get_or_register_3pid_guest(
-            self.simple_http_client,
-            host=self.config.worker_replication_host,
-            port=self.config.worker_replication_http_port,
+        return self._get_register_3pid_client(
             requester=requester,
             medium=medium,
             address=address,
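
The mechanical change in this file: free functions that took an HTTP client plus replication host/port are replaced by stubs built once from each servlet's make_client(hs) factory. The calling convention, sketched against the aliases imported above:

    class ExampleWorkerHandler(object):   # hypothetical handler
        def __init__(self, hs):
            # Build the stub once; it targets the master's replication endpoint.
            self._remote_join_client = ReplRemoteJoin.make_client(hs)

        def remote_join(self, requester, remote_room_hosts, room_id, user_id, content):
            # Keyword arguments only; no host/port/http_client threading.
            return self._remote_join_client(
                requester=requester,
                remote_room_hosts=remote_room_hosts,
                room_id=room_id,
                user_id=user_id,
                content=content,
            )
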
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index dff1f67dcb..648debc8aa 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -25,7 +25,7 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.push.clientformat import format_push_rules_for_user
 from synapse.types import RoomStreamToken
-from synapse.util.async import concurrently_execute
+from synapse.util.async_helpers import concurrently_execute
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.caches.lrucache import LruCache
 from synapse.util.caches.response_cache import ResponseCache
@@ -75,6 +75,7 @@ class JoinedSyncResult(collections.namedtuple("JoinedSyncResult", [
     "ephemeral",
     "account_data",
     "unread_notifications",
+    "summary",
 ])):
     __slots__ = []
 
@@ -184,6 +185,7 @@ class SyncResult(collections.namedtuple("SyncResult", [
 class SyncHandler(object):
 
     def __init__(self, hs):
+        self.hs_config = hs.config
         self.store = hs.get_datastore()
         self.notifier = hs.get_notifier()
         self.presence_handler = hs.get_presence_handler()
@@ -191,6 +193,7 @@ class SyncHandler(object):
         self.clock = hs.get_clock()
         self.response_cache = ResponseCache(hs, "sync")
         self.state = hs.get_state_handler()
+        self.auth = hs.get_auth()
 
         # ExpiringCache((User, Device)) -> LruCache(state_key => event_id)
         self.lazy_loaded_members_cache = ExpiringCache(
@@ -198,19 +201,27 @@ class SyncHandler(object):
             max_len=0, expiry_ms=LAZY_LOADED_MEMBERS_CACHE_MAX_AGE,
         )
 
+    @defer.inlineCallbacks
     def wait_for_sync_for_user(self, sync_config, since_token=None, timeout=0,
                                full_state=False):
         """Get the sync for a client if we have new data for it now. Otherwise
         wait for new data to arrive on the server. If the timeout expires, then
         return an empty sync result.
         Returns:
-            A Deferred SyncResult.
+            Deferred[SyncResult]
         """
-        return self.response_cache.wrap(
+        # If the user is not part of the mau group, then check that limits have
+        # not been exceeded (if not part of the group by this point, it is
+        # almost certain that auth_blocking will occur)
+        user_id = sync_config.user.to_string()
+        yield self.auth.check_auth_blocking(user_id)
+
+        res = yield self.response_cache.wrap(
             sync_config.request_key,
             self._wait_for_sync_for_user,
             sync_config, since_token, timeout, full_state,
         )
+        defer.returnValue(res)
 
     @defer.inlineCallbacks
     def _wait_for_sync_for_user(self, sync_config, since_token, timeout,
@@ -495,9 +506,141 @@ class SyncHandler(object):
         defer.returnValue(state)
 
     @defer.inlineCallbacks
+    def compute_summary(self, room_id, sync_config, batch, state, now_token):
+        """ Works out a room summary block for this room, summarising the number
+        of joined members in the room, and providing the 'hero' members if the
+        room has no name so clients can consistently name rooms.  Also adds
+        state events to 'state' if needed to describe the heroes.
+
+        Args:
+            room_id(str):
+            sync_config(synapse.handlers.sync.SyncConfig):
+            batch(synapse.handlers.sync.TimelineBatch): The timeline batch for
+                the room that will be sent to the user.
+            state(dict): dict of (type, state_key) -> Event as returned by
+                compute_state_delta
+            now_token(str): Token of the end of the current batch.
+
+        Returns:
+             A deferred dict describing the room summary
+        """
+
+        # FIXME: this perpetuates https://github.com/matrix-org/synapse/issues/3305
+        last_events, _ = yield self.store.get_recent_event_ids_for_room(
+            room_id, end_token=now_token.room_key, limit=1,
+        )
+
+        if not last_events:
+            defer.returnValue(None)
+            return
+
+        last_event = last_events[-1]
+        state_ids = yield self.store.get_state_ids_for_event(
+            last_event.event_id, [
+                (EventTypes.Member, None),
+                (EventTypes.Name, ''),
+                (EventTypes.CanonicalAlias, ''),
+            ]
+        )
+
+        member_ids = {
+            state_key: event_id
+            for (t, state_key), event_id in state_ids.iteritems()
+            if t == EventTypes.Member
+        }
+        name_id = state_ids.get((EventTypes.Name, ''))
+        canonical_alias_id = state_ids.get((EventTypes.CanonicalAlias, ''))
+
+        summary = {}
+
+        # FIXME: it feels very heavy to load up every single membership event
+        # just to calculate the counts.
+        member_events = yield self.store.get_events(member_ids.values())
+
+        joined_user_ids = []
+        invited_user_ids = []
+
+        for ev in member_events.values():
+            if ev.content.get("membership") == Membership.JOIN:
+                joined_user_ids.append(ev.state_key)
+            elif ev.content.get("membership") == Membership.INVITE:
+                invited_user_ids.append(ev.state_key)
+
+        # TODO: only send these when they change.
+        summary["m.joined_member_count"] = len(joined_user_ids)
+        summary["m.invited_member_count"] = len(invited_user_ids)
+
+        if name_id or canonical_alias_id:
+            defer.returnValue(summary)
+
+        # FIXME: order by stream ordering, not alphabetic
+
+        me = sync_config.user.to_string()
+        if (joined_user_ids or invited_user_ids):
+            summary['m.heroes'] = sorted(
+                [
+                    user_id
+                    for user_id in (joined_user_ids + invited_user_ids)
+                    if user_id != me
+                ]
+            )[0:5]
+        else:
+            summary['m.heroes'] = sorted(
+                [user_id for user_id in member_ids.keys() if user_id != me]
+            )[0:5]
+
+        if not sync_config.filter_collection.lazy_load_members():
+            defer.returnValue(summary)
+
+        # ensure we send membership events for heroes if needed
+        cache_key = (sync_config.user.to_string(), sync_config.device_id)
+        cache = self.get_lazy_loaded_members_cache(cache_key)
+
+        # track which members the client should already know about via LL:
+        # Ones which are already in state...
+        existing_members = set(
+            user_id for (typ, user_id) in state.keys()
+            if typ == EventTypes.Member
+        )
+
+        # ...or ones which are in the timeline...
+        for ev in batch.events:
+            if ev.type == EventTypes.Member:
+                existing_members.add(ev.state_key)
+
+        # ...and then ensure any missing ones get included in state.
+        missing_hero_event_ids = [
+            member_ids[hero_id]
+            for hero_id in summary['m.heroes']
+            if (
+                cache.get(hero_id) != member_ids[hero_id] and
+                hero_id not in existing_members
+            )
+        ]
+
+        missing_hero_state = yield self.store.get_events(missing_hero_event_ids)
+        missing_hero_state = missing_hero_state.values()
+
+        for s in missing_hero_state:
+            cache.set(s.state_key, s.event_id)
+            state[(EventTypes.Member, s.state_key)] = s
+
+        defer.returnValue(summary)
+
+    def get_lazy_loaded_members_cache(self, cache_key):
+        cache = self.lazy_loaded_members_cache.get(cache_key)
+        if cache is None:
+            logger.debug("creating LruCache for %r", cache_key)
+            cache = LruCache(LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE)
+            self.lazy_loaded_members_cache[cache_key] = cache
+        else:
+            logger.debug("found LruCache for %r", cache_key)
+        return cache
+
+    @defer.inlineCallbacks
     def compute_state_delta(self, room_id, batch, sync_config, since_token, now_token,
                             full_state):
-        """ Works out the differnce in state between the start of the timeline
+        """ Works out the difference in state between the start of the timeline
         and the previous sync.
 
         Args:
@@ -511,7 +654,7 @@ class SyncHandler(object):
             full_state(bool): Whether to force returning the full state.
 
         Returns:
-             A deferred new event dictionary
+             A deferred dict of (type, state_key) -> Event
         """
         # TODO(mjark) Check if the state events were received by the server
         # after the previous sync, since we need to include those state
@@ -609,13 +752,7 @@ class SyncHandler(object):
 
             if lazy_load_members and not include_redundant_members:
                 cache_key = (sync_config.user.to_string(), sync_config.device_id)
-                cache = self.lazy_loaded_members_cache.get(cache_key)
-                if cache is None:
-                    logger.debug("creating LruCache for %r", cache_key)
-                    cache = LruCache(LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE)
-                    self.lazy_loaded_members_cache[cache_key] = cache
-                else:
-                    logger.debug("found LruCache for %r", cache_key)
+                cache = self.get_lazy_loaded_members_cache(cache_key)
 
                 # if it's a new sync sequence, then assume the client has had
                 # amnesia and doesn't want any recent lazy-loaded members
@@ -724,7 +861,7 @@ class SyncHandler(object):
             since_token is None and
             sync_config.filter_collection.blocks_all_presence()
         )
-        if not block_all_presence_data:
+        if self.hs_config.use_presence and not block_all_presence_data:
             yield self._generate_sync_entry_for_presence(
                 sync_result_builder, newly_joined_rooms, newly_joined_users
             )
@@ -1416,7 +1553,6 @@ class SyncHandler(object):
             if events == [] and tags is None:
                 return
 
-        since_token = sync_result_builder.since_token
         now_token = sync_result_builder.now_token
         sync_config = sync_result_builder.sync_config
 
@@ -1459,6 +1595,18 @@ class SyncHandler(object):
             full_state=full_state
         )
 
+        summary = {}
+        if (
+            sync_config.filter_collection.lazy_load_members() and
+            (
+                any(ev.type == EventTypes.Member for ev in batch.events) or
+                since_token is None
+            )
+        ):
+            summary = yield self.compute_summary(
+                room_id, sync_config, batch, state, now_token
+            )
+
         if room_builder.rtype == "joined":
             unread_notifications = {}
             room_sync = JoinedSyncResult(
@@ -1468,6 +1616,7 @@ class SyncHandler(object):
                 ephemeral=ephemeral,
                 account_data=account_data_events,
                 unread_notifications=unread_notifications,
+                summary=summary,
             )
 
             if room_sync or always_include:
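
Clients syncing with a lazy-loading filter now receive a summary block for each joined room, built by compute_summary above. An illustrative shape (user IDs and counts made up):

    summary = {
        "m.heroes": ["@alice:example.com", "@bob:example.com"],   # at most 5, sorted, excludes self
        "m.joined_member_count": 2,
        "m.invited_member_count": 0,
    }
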
diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py
index 37dda64587..d8413d6aa7 100644
--- a/synapse/handlers/user_directory.py
+++ b/synapse/handlers/user_directory.py
@@ -119,6 +119,8 @@ class UserDirectoryHandler(object):
         """Called to update index of our local user profiles when they change
         irrespective of any rooms the user may be in.
         """
+        # FIXME(#3714): We should probably do this in the same worker as all
+        # the other changes.
         yield self.store.update_profile_in_user_dir(
             user_id, profile.display_name, profile.avatar_url, None,
         )
@@ -127,6 +129,8 @@ class UserDirectoryHandler(object):
     def handle_user_deactivated(self, user_id):
         """Called when a user ID is deactivated
         """
+        # FIXME(#3714): We should probably do this in the same worker as all
+        # the other changes.
         yield self.store.remove_from_user_dir(user_id)
         yield self.store.remove_from_user_in_public_room(user_id)
 
diff --git a/synapse/http/client.py b/synapse/http/client.py
index 3771e0b3f6..ab4fbf59b2 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -42,7 +42,7 @@ from twisted.web.http_headers import Headers
 from synapse.api.errors import Codes, HttpResponseException, SynapseError
 from synapse.http import cancelled_to_request_timed_out_error, redact_uri
 from synapse.http.endpoint import SpiderEndpoint
-from synapse.util.async import add_timeout_to_deferred
+from synapse.util.async_helpers import add_timeout_to_deferred
 from synapse.util.caches import CACHE_SIZE_FACTOR
 from synapse.util.logcontext import make_deferred_yieldable
 
diff --git a/synapse/http/endpoint.py b/synapse/http/endpoint.py
index d65daa72bb..b0c9369519 100644
--- a/synapse/http/endpoint.py
+++ b/synapse/http/endpoint.py
@@ -26,7 +26,6 @@ from twisted.names.error import DNSNameError, DomainError
 
 logger = logging.getLogger(__name__)
 
-
 SERVER_CACHE = {}
 
 # our record of an individual server which can be tried to reach a destination.
@@ -103,15 +102,16 @@ def parse_and_validate_server_name(server_name):
     return host, port
 
 
-def matrix_federation_endpoint(reactor, destination, ssl_context_factory=None,
+def matrix_federation_endpoint(reactor, destination, tls_client_options_factory=None,
                                timeout=None):
     """Construct an endpoint for the given matrix destination.
 
     Args:
         reactor: Twisted reactor.
         destination (bytes): The name of the server to connect to.
-        ssl_context_factory (twisted.internet.ssl.ContextFactory): Factory
-            which generates SSL contexts to use for TLS.
+        tls_client_options_factory
+            (synapse.crypto.context_factory.ClientTLSOptionsFactory):
+            Factory which generates TLS options for client connections.
         timeout (int): connection timeout in seconds
     """
 
@@ -122,13 +122,13 @@ def matrix_federation_endpoint(reactor, destination, ssl_context_factory=None,
     if timeout is not None:
         endpoint_kw_args.update(timeout=timeout)
 
-    if ssl_context_factory is None:
+    if tls_client_options_factory is None:
         transport_endpoint = HostnameEndpoint
         default_port = 8008
     else:
         def transport_endpoint(reactor, host, port, timeout):
             return wrapClientTLS(
-                ssl_context_factory,
+                tls_client_options_factory.get_options(host),
                 HostnameEndpoint(reactor, host, port, timeout=timeout))
         default_port = 8448
 
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index bf1aa29502..b34bb8e31a 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -43,7 +43,7 @@ from synapse.api.errors import (
 from synapse.http import cancelled_to_request_timed_out_error
 from synapse.http.endpoint import matrix_federation_endpoint
 from synapse.util import logcontext
-from synapse.util.async import add_timeout_to_deferred
+from synapse.util.async_helpers import add_timeout_to_deferred
 from synapse.util.logcontext import make_deferred_yieldable
 
 logger = logging.getLogger(__name__)
@@ -61,14 +61,14 @@ MAX_SHORT_RETRIES = 3
 
 class MatrixFederationEndpointFactory(object):
     def __init__(self, hs):
-        self.tls_server_context_factory = hs.tls_server_context_factory
+        self.tls_client_options_factory = hs.tls_client_options_factory
 
     def endpointForURI(self, uri):
         destination = uri.netloc
 
         return matrix_federation_endpoint(
             reactor, destination, timeout=10,
-            ssl_context_factory=self.tls_server_context_factory
+            tls_client_options_factory=self.tls_client_options_factory
         )
 
 
@@ -133,7 +133,7 @@ class MatrixFederationHttpClient(object):
                 failures, connection failures, SSL failures.)
         """
         if (
-            self.hs.config.federation_domain_whitelist and
+            self.hs.config.federation_domain_whitelist is not None and
             destination not in self.hs.config.federation_domain_whitelist
         ):
             raise FederationDeniedError(destination)
@@ -439,7 +439,7 @@ class MatrixFederationHttpClient(object):
         defer.returnValue(json.loads(body))
 
     @defer.inlineCallbacks
-    def get_json(self, destination, path, args={}, retry_on_dns_fail=True,
+    def get_json(self, destination, path, args=None, retry_on_dns_fail=True,
                  timeout=None, ignore_backoff=False):
         """ GETs some json from the given host homeserver and path
 
@@ -447,7 +447,7 @@ class MatrixFederationHttpClient(object):
             destination (str): The remote server to send the HTTP request
                 to.
             path (str): The HTTP path.
-            args (dict): A dictionary used to create query strings, defaults to
+            args (dict|None): A dictionary used to create query strings, defaults to
                 None.
             timeout (int): How long to try (in ms) the destination for before
                 giving up. None indicates no timeout and that the request will
@@ -702,6 +702,9 @@ def check_content_type_is_json(headers):
 
 
 def encode_query_args(args):
+    if args is None:
+        return b""
+
     encoded_args = {}
     for k, vs in args.items():
         if isinstance(vs, string_types):
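
The move from args={} to args=None avoids Python's shared-mutable-default pitfall, and encode_query_args now treats None as "no query string". The pitfall in isolation:

    def bad(args={}):              # the default dict is created once and shared
        args["hits"] = args.get("hits", 0) + 1
        return args["hits"]

    assert bad() == 1
    assert bad() == 2              # state leaked between calls

    def good(args=None):
        args = {} if args is None else args
        args["hits"] = args.get("hits", 0) + 1
        return args["hits"]

    assert good() == 1
    assert good() == 1
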
diff --git a/synapse/http/request_metrics.py b/synapse/http/request_metrics.py
index 588e280571..72c2654678 100644
--- a/synapse/http/request_metrics.py
+++ b/synapse/http/request_metrics.py
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 import logging
+import threading
 
 from prometheus_client.core import Counter, Histogram
 
@@ -111,6 +112,9 @@ in_flight_requests_db_sched_duration = Counter(
 # The set of all in flight requests, set[RequestMetrics]
 _in_flight_requests = set()
 
+# Protects the _in_flight_requests set from concurrent access
+_in_flight_requests_lock = threading.Lock()
+
 
 def _get_in_flight_counts():
     """Returns a count of all in flight requests by (method, server_name)
@@ -120,7 +124,8 @@ def _get_in_flight_counts():
     """
     # Cast to a list to prevent it changing while the Prometheus
     # thread is collecting metrics
-    reqs = list(_in_flight_requests)
+    with _in_flight_requests_lock:
+        reqs = list(_in_flight_requests)
 
     for rm in reqs:
         rm.update_metrics()
@@ -154,10 +159,12 @@ class RequestMetrics(object):
         # to the "in flight" metrics.
         self._request_stats = self.start_context.get_resource_usage()
 
-        _in_flight_requests.add(self)
+        with _in_flight_requests_lock:
+            _in_flight_requests.add(self)
 
     def stop(self, time_sec, request):
-        _in_flight_requests.discard(self)
+        with _in_flight_requests_lock:
+            _in_flight_requests.discard(self)
 
         context = LoggingContext.current_context()
 
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 6dacb31037..2d5c23e673 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -25,8 +25,9 @@ from canonicaljson import encode_canonical_json, encode_pretty_printed_json, jso
 
 from twisted.internet import defer
 from twisted.python import failure
-from twisted.web import resource, server
+from twisted.web import resource
 from twisted.web.server import NOT_DONE_YET
+from twisted.web.static import NoRangeStaticProducer
 from twisted.web.util import redirectTo
 
 import synapse.events
@@ -37,10 +38,13 @@ from synapse.api.errors import (
     SynapseError,
     UnrecognizedRequestError,
 )
-from synapse.http.request_metrics import requests_counter
 from synapse.util.caches import intern_dict
-from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
-from synapse.util.metrics import Measure
+from synapse.util.logcontext import preserve_fn
+
+if PY3:
+    from io import BytesIO
+else:
+    from cStringIO import StringIO as BytesIO
 
 logger = logging.getLogger(__name__)
 
@@ -60,11 +64,10 @@ HTML_ERROR_TEMPLATE = """<!DOCTYPE html>
 def wrap_json_request_handler(h):
     """Wraps a request handler method with exception handling.
 
-    Also adds logging as per wrap_request_handler_with_logging.
+    Also does the wrapping with request.processing as per wrap_async_request_handler.
 
     The handler method must have a signature of "handle_foo(self, request)",
-    where "self" must have a "clock" attribute (and "request" must be a
-    SynapseRequest).
+    where "request" must be a SynapseRequest.
 
     The handler must return a deferred. If the deferred succeeds we assume that
     a response has been sent. If the deferred fails with a SynapseError we use
@@ -108,24 +111,23 @@ def wrap_json_request_handler(h):
                 pretty_print=_request_user_agent_is_curl(request),
             )
 
-    return wrap_request_handler_with_logging(wrapped_request_handler)
+    return wrap_async_request_handler(wrapped_request_handler)
 
 
 def wrap_html_request_handler(h):
     """Wraps a request handler method with exception handling.
 
-    Also adds logging as per wrap_request_handler_with_logging.
+    Also does the wrapping with request.processing as per wrap_async_request_handler.
 
     The handler method must have a signature of "handle_foo(self, request)",
-    where "self" must have a "clock" attribute (and "request" must be a
-    SynapseRequest).
+    where "request" must be a SynapseRequest.
     """
     def wrapped_request_handler(self, request):
         d = defer.maybeDeferred(h, self, request)
         d.addErrback(_return_html_error, request)
         return d
 
-    return wrap_request_handler_with_logging(wrapped_request_handler)
+    return wrap_async_request_handler(wrapped_request_handler)
 
 
 def _return_html_error(f, request):
@@ -170,46 +172,26 @@ def _return_html_error(f, request):
     finish_request(request)
 
 
-def wrap_request_handler_with_logging(h):
-    """Wraps a request handler to provide logging and metrics
+def wrap_async_request_handler(h):
+    """Wraps an async request handler so that it calls request.processing.
+
+    This helps ensure that work done by the request handler after the request has
+    completed is correctly recorded against the request metrics/logs.
 
     The handler method must have a signature of "handle_foo(self, request)",
-    where "self" must have a "clock" attribute (and "request" must be a
-    SynapseRequest).
+    where "request" must be a SynapseRequest.
 
-    As well as calling `request.processing` (which will log the response and
-    duration for this request), the wrapped request handler will insert the
-    request id into the logging context.
+    The handler may return a deferred, in which case the completion of the request isn't
+    logged until the deferred completes.
     """
     @defer.inlineCallbacks
-    def wrapped_request_handler(self, request):
-        """
-        Args:
-            self:
-            request (synapse.http.site.SynapseRequest):
-        """
+    def wrapped_async_request_handler(self, request):
+        with request.processing():
+            yield h(self, request)
 
-        request_id = request.get_request_id()
-        with LoggingContext(request_id) as request_context:
-            request_context.request = request_id
-            with Measure(self.clock, "wrapped_request_handler"):
-                # we start the request metrics timer here with an initial stab
-                # at the servlet name. For most requests that name will be
-                # JsonResource (or a subclass), and JsonResource._async_render
-                # will update it once it picks a servlet.
-                servlet_name = self.__class__.__name__
-                with request.processing(servlet_name):
-                    with PreserveLoggingContext(request_context):
-                        d = defer.maybeDeferred(h, self, request)
-
-                        # record the arrival of the request *after*
-                        # dispatching to the handler, so that the handler
-                        # can update the servlet name in the request
-                        # metrics
-                        requests_counter.labels(request.method,
-                                                request.request_metrics.name).inc()
-                        yield d
-    return wrapped_request_handler
+    # we need to preserve_fn here, because the synchronous render method won't yield for
+    # us (obviously)
+    return preserve_fn(wrapped_async_request_handler)
 
 
 class HttpServer(object):
@@ -272,7 +254,7 @@ class JsonResource(HttpServer, resource.Resource):
         """ This gets called by twisted every time someone sends us a request.
         """
         self._async_render(request)
-        return server.NOT_DONE_YET
+        return NOT_DONE_YET
 
     @wrap_json_request_handler
     @defer.inlineCallbacks
@@ -413,8 +395,7 @@ def respond_with_json(request, code, json_object, send_cors=False,
         return
 
     if pretty_print:
-        json_bytes = (encode_pretty_printed_json(json_object) + "\n"
-                      ).encode("utf-8")
+        json_bytes = encode_pretty_printed_json(json_object) + b"\n"
     else:
         if canonical_json or synapse.events.USE_FROZEN_DICTS:
             # canonicaljson already encodes to bytes
@@ -450,8 +431,12 @@ def respond_with_json_bytes(request, code, json_bytes, send_cors=False,
     if send_cors:
         set_cors_headers(request)
 
-    request.write(json_bytes)
-    finish_request(request)
+    # todo: we can almost certainly avoid this copy and encode the json straight into
+    # the BytesIO, but it would involve faffing around with string->bytes wrappers.
+    bytes_io = BytesIO(json_bytes)
+
+    producer = NoRangeStaticProducer(request, bytes_io)
+    producer.start()
     return NOT_DONE_YET
 
 
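After this refactor, a resource's render method is wrapped so that request.processing() spans the entire asynchronous handler, including work that happens after the response is written. A hypothetical usage sketch (with a trivial stand-in decorator so the snippet is self-contained; the real decorator is wrap_json_request_handler above):

    from twisted.internet import defer

    def wrap_json_request_handler(h):   # stand-in for the real decorator
        return h

    class ExampleResource(object):      # hypothetical servlet
        @wrap_json_request_handler
        @defer.inlineCallbacks
        def _async_render_GET(self, request):
            data = yield self.do_work(request)   # still inside request.processing()
            respond_with_json(request, 200, data)
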
diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py
index 69f7085291..a1e4b88e6d 100644
--- a/synapse/http/servlet.py
+++ b/synapse/http/servlet.py
@@ -29,7 +29,7 @@ def parse_integer(request, name, default=None, required=False):
 
     Args:
         request: the twisted HTTP request.
-        name (str): the name of the query parameter.
+        name (bytes/unicode): the name of the query parameter.
         default (int|None): value to use if the parameter is absent, defaults
             to None.
         required (bool): whether to raise a 400 SynapseError if the
@@ -46,6 +46,10 @@ def parse_integer(request, name, default=None, required=False):
 
 
 def parse_integer_from_args(args, name, default=None, required=False):
+
+    if not isinstance(name, bytes):
+        name = name.encode('ascii')
+
     if name in args:
         try:
             return int(args[name][0])
@@ -65,7 +69,7 @@ def parse_boolean(request, name, default=None, required=False):
 
     Args:
         request: the twisted HTTP request.
-        name (str): the name of the query parameter.
+        name (bytes/unicode): the name of the query parameter.
         default (bool|None): value to use if the parameter is absent, defaults
             to None.
         required (bool): whether to raise a 400 SynapseError if the
@@ -83,11 +87,15 @@ def parse_boolean(request, name, default=None, required=False):
 
 
 def parse_boolean_from_args(args, name, default=None, required=False):
+
+    if not isinstance(name, bytes):
+        name = name.encode('ascii')
+
     if name in args:
         try:
             return {
-                "true": True,
-                "false": False,
+                b"true": True,
+                b"false": False,
             }[args[name][0]]
         except Exception:
             message = (
@@ -104,21 +112,29 @@ def parse_boolean_from_args(args, name, default=None, required=False):
 
 
 def parse_string(request, name, default=None, required=False,
-                 allowed_values=None, param_type="string"):
-    """Parse a string parameter from the request query string.
+                 allowed_values=None, param_type="string", encoding='ascii'):
+    """
+    Parse a string parameter from the request query string.
+
+    If encoding is not None, the content of the query param will be
+    decoded to Unicode using that encoding; otherwise it is returned as bytes.
 
     Args:
         request: the twisted HTTP request.
-        name (str): the name of the query parameter.
-        default (str|None): value to use if the parameter is absent, defaults
-            to None.
+        name (bytes/unicode): the name of the query parameter.
+        default (bytes/unicode|None): value to use if the parameter is absent,
+            defaults to None. Must be bytes if encoding is None.
         required (bool): whether to raise a 400 SynapseError if the
             parameter is absent, defaults to False.
-        allowed_values (list[str]): List of allowed values for the string,
-            or None if any value is allowed, defaults to None
+        allowed_values (list[bytes/unicode]): List of allowed values for the
+            string, or None if any value is allowed, defaults to None. Must be
+            the same type as name, if given.
+        encoding (str|None): The encoding to decode the string content
+            with, or None to return the raw bytes.
 
     Returns:
-        str|None: A string value or the default.
+        bytes/unicode|None: A string value or the default. Unicode if encoding
+        was given, bytes otherwise.
 
     Raises:
         SynapseError if the parameter is absent and required, or if the
@@ -126,14 +142,22 @@ def parse_string(request, name, default=None, required=False,
             is not one of those allowed values.
     """
     return parse_string_from_args(
-        request.args, name, default, required, allowed_values, param_type,
+        request.args, name, default, required, allowed_values, param_type, encoding
     )
 
 
 def parse_string_from_args(args, name, default=None, required=False,
-                           allowed_values=None, param_type="string"):
+                           allowed_values=None, param_type="string", encoding='ascii'):
+
+    if not isinstance(name, bytes):
+        name = name.encode('ascii')
+
     if name in args:
         value = args[name][0]
+
+        if encoding:
+            value = value.decode(encoding)
+
         if allowed_values is not None and value not in allowed_values:
             message = "Query parameter %r must be one of [%s]" % (
                 name, ", ".join(repr(v) for v in allowed_values)
@@ -146,6 +170,10 @@ def parse_string_from_args(args, name, default=None, required=False,
             message = "Missing %s query parameter %r" % (param_type, name)
             raise SynapseError(400, message, errcode=Codes.MISSING_PARAM)
         else:
+
+            if encoding and isinstance(default, bytes):
+                return default.decode(encoding)
+
             return default
 
 
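A hedged sketch of the helpers after this change: parameter names may now be
given as bytes or unicode, and values are decoded to unicode unless
encoding=None is passed. The request object here stands for any twisted
request:

    limit = parse_integer(request, "limit", default=10)

    # decoded with the given encoding
    sender = parse_string(request, "sender", encoding="utf-8")

    # encoding=None returns the raw bytes (or None if absent)
    raw_filter = parse_string(request, "filter", encoding=None)
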
diff --git a/synapse/http/site.py b/synapse/http/site.py
index 5fd30a4c2c..88ed3714f9 100644
--- a/synapse/http/site.py
+++ b/synapse/http/site.py
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import contextlib
 import logging
 import time
@@ -19,8 +18,8 @@ import time
 from twisted.web.server import Request, Site
 
 from synapse.http import redact_uri
-from synapse.http.request_metrics import RequestMetrics
-from synapse.util.logcontext import ContextResourceUsage, LoggingContext
+from synapse.http.request_metrics import RequestMetrics, requests_counter
+from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
 
 logger = logging.getLogger(__name__)
 
@@ -34,25 +33,43 @@ class SynapseRequest(Request):
 
     It extends twisted's twisted.web.server.Request, and adds:
      * Unique request ID
+     * A log context associated with the request
      * Redaction of access_token query-params in __repr__
      * Logging at start and end
      * Metrics to record CPU, wallclock and DB time by endpoint.
 
-    It provides a method `processing` which should be called by the Resource
-    which is handling the request, and returns a context manager.
+    It also provides a method `processing`, which returns a context manager. If this
+    method is called, the request won't be logged until the context manager is closed;
+    this is useful for asynchronous request handlers which may go on processing the
+    request even after the client has disconnected.
 
+    Attributes:
+        logcontext (LoggingContext): the log context for this request
     """
     def __init__(self, site, channel, *args, **kw):
         Request.__init__(self, channel, *args, **kw)
         self.site = site
-        self._channel = channel
+        self._channel = channel     # this is used by the tests
         self.authenticated_entity = None
         self.start_time = 0
 
+        # we can't yet create the logcontext, as we don't know the method.
+        self.logcontext = None
+
         global _next_request_seq
         self.request_seq = _next_request_seq
         _next_request_seq += 1
 
+        # whether an asynchronous request handler has called processing()
+        self._is_processing = False
+
+        # the time when the asynchronous request handler completed its processing
+        self._processing_finished_time = None
+
+        # what time we finished sending the response to the client (or the connection
+        # dropped)
+        self.finish_time = None
+
     def __repr__(self):
         # We overwrite this so that we don't log ``access_token``
         return '<%s at 0x%x method=%r uri=%r clientproto=%r site=%r>' % (
@@ -74,11 +91,116 @@ class SynapseRequest(Request):
         return self.requestHeaders.getRawHeaders(b"User-Agent", [None])[-1]
 
     def render(self, resrc):
+        # this is called once a Resource has been found to serve the request; in our
+        # case the Resource in question will normally be a JsonResource.
+
+        # create a LogContext for this request
+        request_id = self.get_request_id()
+        logcontext = self.logcontext = LoggingContext(request_id)
+        logcontext.request = request_id
+
         # override the Server header which is set by twisted
         self.setHeader("Server", self.site.server_version_string)
-        return Request.render(self, resrc)
+
+        with PreserveLoggingContext(self.logcontext):
+            # we start the request metrics timer here with an initial stab
+            # at the servlet name. For most requests that name will be
+            # JsonResource (or a subclass), and JsonResource._async_render
+            # will update it once it picks a servlet.
+            servlet_name = resrc.__class__.__name__
+            self._started_processing(servlet_name)
+
+            Request.render(self, resrc)
+
+            # record the arrival of the request *after*
+            # dispatching to the handler, so that the handler
+            # can update the servlet name in the request
+            # metrics
+            requests_counter.labels(self.method,
+                                    self.request_metrics.name).inc()
+
+    @contextlib.contextmanager
+    def processing(self):
+        """Record the fact that we are processing this request.
+
+        Returns a context manager; the correct way to use this is:
+
+        @defer.inlineCallbacks
+        def handle_request(request):
+            with request.processing("FooServlet"):
+                yield really_handle_the_request()
+
+        Once the context manager is closed, the completion of the request will be logged,
+        and the various metrics will be updated.
+        """
+        if self._is_processing:
+            raise RuntimeError("Request is already processing")
+        self._is_processing = True
+
+        try:
+            yield
+        except Exception:
+            # this should already have been caught, and sent back to the client as a 500.
+            logger.exception("Asynchronous messge handler raised an uncaught exception")
+        finally:
+            # the request handler has finished its work and either sent the whole response
+            # back, or handed over responsibility to a Producer.
+
+            self._processing_finished_time = time.time()
+            self._is_processing = False
+
+            # if we've already sent the response, log it now; otherwise, we wait for the
+            # response to be sent.
+            if self.finish_time is not None:
+                self._finished_processing()
+
+    def finish(self):
+        """Called when all response data has been written to this Request.
+
+        Overrides twisted.web.server.Request.finish to record the finish time and do
+        logging.
+        """
+        self.finish_time = time.time()
+        Request.finish(self)
+        if not self._is_processing:
+            with PreserveLoggingContext(self.logcontext):
+                self._finished_processing()
+
+    def connectionLost(self, reason):
+        """Called when the client connection is closed before the response is written.
+
+        Overrides twisted.web.server.Request.connectionLost to record the finish time and
+        do logging.
+        """
+        self.finish_time = time.time()
+        Request.connectionLost(self, reason)
+
+        # we only get here if the connection to the client drops before we send
+        # the response.
+        #
+        # It's useful to log it here so that we can get an idea of when
+        # the client disconnects.
+        with PreserveLoggingContext(self.logcontext):
+            logger.warn(
+                "Error processing request %r: %s %s", self, reason.type, reason.value,
+            )
+
+            if not self._is_processing:
+                self._finished_processing()
 
     def _started_processing(self, servlet_name):
+        """Record the fact that we are processing this request.
+
+        This will log the request's arrival. Once the request completes,
+        be sure to call _finished_processing.
+
+        Args:
+            servlet_name (str): the name of the servlet which will be
+                processing this request. This is used in the metrics.
+
+                It is possible to update this afterwards by updating
+                self.request_metrics.name.
+        """
         self.start_time = time.time()
         self.request_metrics = RequestMetrics()
         self.request_metrics.start(
@@ -94,18 +216,32 @@ class SynapseRequest(Request):
         )
 
     def _finished_processing(self):
-        try:
-            context = LoggingContext.current_context()
-            usage = context.get_resource_usage()
-        except Exception:
-            usage = ContextResourceUsage()
+        """Log the completion of this request and update the metrics
+        """
+
+        if self.logcontext is None:
+            # this can happen if the connection closed before we read the
+            # headers (so render was never called). In that case we'll already
+            # have logged a warning, so just bail out.
+            return
+
+        usage = self.logcontext.get_resource_usage()
+
+        if self._processing_finished_time is None:
+            # we completed the request without anything calling processing()
+            self._processing_finished_time = time.time()
 
-        end_time = time.time()
+        # the time between receiving the request and the request handler finishing
+        processing_time = self._processing_finished_time - self.start_time
+
+        # the time between the request handler finishing and the response being sent
+        # to the client (nb may be negative)
+        response_send_time = self.finish_time - self._processing_finished_time
 
         # need to decode as it could be raw utf-8 bytes
         # from a IDN servname in an auth header
         authenticated_entity = self.authenticated_entity
-        if authenticated_entity is not None:
+        if authenticated_entity is not None and isinstance(authenticated_entity, bytes):
             authenticated_entity = authenticated_entity.decode("utf-8", "replace")
 
         # ...or could be raw utf-8 bytes in the User-Agent header.
@@ -116,22 +252,31 @@ class SynapseRequest(Request):
         user_agent = self.get_user_agent()
         if user_agent is not None:
             user_agent = user_agent.decode("utf-8", "replace")
+        else:
+            user_agent = "-"
+
+        code = str(self.code)
+        if not self.finished:
+            # we didn't send the full response before we gave up (presumably because
+            # the connection dropped)
+            code += "!"
 
         self.site.access_logger.info(
             "%s - %s - {%s}"
-            " Processed request: %.3fsec (%.3fsec, %.3fsec) (%.3fsec/%.3fsec/%d)"
+            " Processed request: %.3fsec/%.3fsec (%.3fsec, %.3fsec) (%.3fsec/%.3fsec/%d)"
             " %sB %s \"%s %s %s\" \"%s\" [%d dbevts]",
             self.getClientIP(),
             self.site.site_tag,
             authenticated_entity,
-            end_time - self.start_time,
+            processing_time,
+            response_send_time,
             usage.ru_utime,
             usage.ru_stime,
             usage.db_sched_duration_sec,
             usage.db_txn_duration_sec,
             int(usage.db_txn_count),
             self.sentLength,
-            self.code,
+            code,
             self.method,
             self.get_redacted_uri(),
             self.clientproto,
@@ -140,38 +285,10 @@ class SynapseRequest(Request):
         )
 
         try:
-            self.request_metrics.stop(end_time, self)
+            self.request_metrics.stop(self.finish_time, self)
         except Exception as e:
             logger.warn("Failed to stop metrics: %r", e)
 
-    @contextlib.contextmanager
-    def processing(self, servlet_name):
-        """Record the fact that we are processing this request.
-
-        Returns a context manager; the correct way to use this is:
-
-        @defer.inlineCallbacks
-        def handle_request(request):
-            with request.processing("FooServlet"):
-                yield really_handle_the_request()
-
-        This will log the request's arrival. Once the context manager is
-        closed, the completion of the request will be logged, and the various
-        metrics will be updated.
-
-        Args:
-            servlet_name (str): the name of the servlet which will be
-                processing this request. This is used in the metrics.
-
-                It is possible to update this afterwards by updating
-                self.request_metrics.servlet_name.
-        """
-        # TODO: we should probably just move this into render() and finish(),
-        # to save having to call a separate method.
-        self._started_processing(servlet_name)
-        yield
-        self._finished_processing()
-
 
 class XForwardedForRequest(SynapseRequest):
     def __init__(self, *args, **kw):
@@ -217,7 +334,7 @@ class SynapseSite(Site):
         proxied = config.get("x_forwarded", False)
         self.requestFactory = SynapseRequestFactory(self, proxied)
         self.access_logger = logging.getLogger(logger_name)
-        self.server_version_string = server_version_string
+        self.server_version_string = server_version_string.encode('ascii')
 
     def log(self, request):
         pass
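Putting the lifecycle together, a sketch of an asynchronous handler under the
new scheme (really_handle_the_request is a stand-in):

    @defer.inlineCallbacks
    def handle_request(request):
        # render() has already created request.logcontext and recorded
        # start_time via _started_processing()
        with request.processing():
            yield really_handle_the_request()
        # leaving the context manager records _processing_finished_time;
        # once finish() or connectionLost() sets finish_time, the access
        # log reports processing_time and response_send_time separately
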
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index a9158fc066..550f8443f7 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -174,6 +174,19 @@ sent_transactions_counter = Counter("synapse_federation_client_sent_transactions
 
 events_processed_counter = Counter("synapse_federation_client_events_processed", "")
 
+event_processing_loop_counter = Counter(
+    "synapse_event_processing_loop_count",
+    "Event processing loop iterations",
+    ["name"],
+)
+
+event_processing_loop_room_count = Counter(
+    "synapse_event_processing_loop_room_count",
+    "Rooms seen per event processing loop iteration",
+    ["name"],
+)
+
+
 # Used to track where various components have processed in the event stream,
 # e.g. federation sending, appservice sending, etc.
 event_processing_positions = Gauge("synapse_event_processing_positions", "", ["name"])
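A sketch of how a processing loop might feed the two new counters; the
"appservice_sender" label and the events_by_room mapping are illustrative:

    event_processing_loop_counter.labels("appservice_sender").inc()
    event_processing_loop_room_count.labels("appservice_sender").inc(
        len(events_by_room),
    )
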
diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py
index ce678d5f75..167167be0a 100644
--- a/synapse/metrics/background_process_metrics.py
+++ b/synapse/metrics/background_process_metrics.py
@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import threading
+
 import six
 
 from prometheus_client.core import REGISTRY, Counter, GaugeMetricFamily
@@ -78,6 +80,9 @@ _background_process_counts = dict()  # type: dict[str, int]
 # of process descriptions that no longer have any active processes.
 _background_processes = dict()  # type: dict[str, set[_BackgroundProcess]]
 
+# A lock that covers the above dicts
+_bg_metrics_lock = threading.Lock()
+
 
 class _Collector(object):
     """A custom metrics collector for the background process metrics.
@@ -92,7 +97,11 @@ class _Collector(object):
             labels=["name"],
         )
 
-        for desc, processes in six.iteritems(_background_processes):
+        # We copy the dict so that it doesn't change from underneath us
+        with _bg_metrics_lock:
+            _background_processes_copy = dict(_background_processes)
+
+        for desc, processes in six.iteritems(_background_processes_copy):
             background_process_in_flight_count.add_metric(
                 (desc,), len(processes),
             )
@@ -167,19 +176,26 @@ def run_as_background_process(desc, func, *args, **kwargs):
     """
     @defer.inlineCallbacks
     def run():
-        count = _background_process_counts.get(desc, 0)
-        _background_process_counts[desc] = count + 1
+        with _bg_metrics_lock:
+            count = _background_process_counts.get(desc, 0)
+            _background_process_counts[desc] = count + 1
+
         _background_process_start_count.labels(desc).inc()
 
         with LoggingContext(desc) as context:
             context.request = "%s-%i" % (desc, count)
             proc = _BackgroundProcess(desc, context)
-            _background_processes.setdefault(desc, set()).add(proc)
+
+            with _bg_metrics_lock:
+                _background_processes.setdefault(desc, set()).add(proc)
+
             try:
                 yield func(*args, **kwargs)
             finally:
                 proc.update_metrics()
-                _background_processes[desc].remove(proc)
+
+                with _bg_metrics_lock:
+                    _background_processes[desc].remove(proc)
 
     with PreserveLoggingContext():
         return run()
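For reference, a minimal usage sketch of run_as_background_process; the
description string and the wrapped method are illustrative:

    def on_something_happened(self):
        # fire-and-forget: the work runs under its own LoggingContext, and
        # the module-level dicts above are only touched while holding
        # _bg_metrics_lock
        run_as_background_process(
            "on_something_happened", self._handle_something_happened,
        )
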
diff --git a/synapse/notifier.py b/synapse/notifier.py
index e650c3e494..82f391481c 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -25,7 +25,7 @@ from synapse.api.errors import AuthError
 from synapse.handlers.presence import format_user_presence_state
 from synapse.metrics import LaterGauge
 from synapse.types import StreamToken
-from synapse.util.async import (
+from synapse.util.async_helpers import (
     DeferredTimeoutError,
     ObservableDeferred,
     add_timeout_to_deferred,
diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py
index 1d14d3639c..8f9a76147f 100644
--- a/synapse/push/bulk_push_rule_evaluator.py
+++ b/synapse/push/bulk_push_rule_evaluator.py
@@ -26,7 +26,7 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.event_auth import get_user_power_level
 from synapse.state import POWER_KEY
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.caches import register_cache
 from synapse.util.caches.descriptors import cached
 
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index 9d601208fd..bfa6df7b68 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -35,7 +35,7 @@ from synapse.push.presentable_names import (
     name_from_member_event,
 )
 from synapse.types import UserID
-from synapse.util.async import concurrently_execute
+from synapse.util.async_helpers import concurrently_execute
 from synapse.visibility import filter_events_for_client
 
 logger = logging.getLogger(__name__)
diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py
index 36bb5bbc65..9f7d5ef217 100644
--- a/synapse/push/pusherpool.py
+++ b/synapse/push/pusherpool.py
@@ -18,6 +18,7 @@ import logging
 
 from twisted.internet import defer
 
+from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.push.pusher import PusherFactory
 from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
@@ -122,8 +123,14 @@ class PusherPool:
                     p['app_id'], p['pushkey'], p['user_name'],
                 )
 
-    @defer.inlineCallbacks
     def on_new_notifications(self, min_stream_id, max_stream_id):
+        run_as_background_process(
+            "on_new_notifications",
+            self._on_new_notifications, min_stream_id, max_stream_id,
+        )
+
+    @defer.inlineCallbacks
+    def _on_new_notifications(self, min_stream_id, max_stream_id):
         try:
             users_affected = yield self.store.get_push_action_users_in_range(
                 min_stream_id, max_stream_id
@@ -147,8 +154,14 @@ class PusherPool:
         except Exception:
             logger.exception("Exception in pusher on_new_notifications")
 
-    @defer.inlineCallbacks
     def on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
+        run_as_background_process(
+            "on_new_receipts",
+            self._on_new_receipts, min_stream_id, max_stream_id, affected_room_ids,
+        )
+
+    @defer.inlineCallbacks
+    def _on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
         try:
             # Need to subtract 1 from the minimum because the lower bound here
             # is not inclusive
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index 987eec3ef2..9c55e79ef5 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -39,7 +39,7 @@ REQUIREMENTS = {
     "signedjson>=1.0.0": ["signedjson>=1.0.0"],
     "pynacl>=1.2.1": ["nacl>=1.2.1", "nacl.bindings"],
     "service_identity>=1.0.0": ["service_identity>=1.0.0"],
-    "Twisted>=16.0.0": ["twisted>=16.0.0"],
+    "Twisted>=17.1.0": ["twisted>=17.1.0"],
 
     # We use crypto.get_elliptic_curve which is only supported in >=0.15
     "pyopenssl>=0.15": ["OpenSSL>=0.15"],
diff --git a/synapse/replication/http/__init__.py b/synapse/replication/http/__init__.py
index 589ee94c66..19f214281e 100644
--- a/synapse/replication/http/__init__.py
+++ b/synapse/replication/http/__init__.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 
 from synapse.http.server import JsonResource
-from synapse.replication.http import membership, send_event
+from synapse.replication.http import federation, membership, send_event
 
 REPLICATION_PREFIX = "/_synapse/replication"
 
@@ -27,3 +27,4 @@ class ReplicationRestResource(JsonResource):
     def register_servlets(self, hs):
         send_event.register_servlets(hs, self)
         membership.register_servlets(hs, self)
+        federation.register_servlets(hs, self)
diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py
new file mode 100644
index 0000000000..5e5376cf58
--- /dev/null
+++ b/synapse/replication/http/_base.py
@@ -0,0 +1,215 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import abc
+import logging
+import re
+
+from six.moves import urllib
+
+from twisted.internet import defer
+
+from synapse.api.errors import CodeMessageException, HttpResponseException
+from synapse.util.caches.response_cache import ResponseCache
+from synapse.util.stringutils import random_string
+
+logger = logging.getLogger(__name__)
+
+
+class ReplicationEndpoint(object):
+    """Helper base class for defining new replication HTTP endpoints.
+
+    This creates an endpoint under `/_synapse/replication/:NAME/:PATH_ARGS..`
+    (with a `/:txn_id` suffix for cached requests), where NAME is a name and
+    PATH_ARGS is a tuple of parameters to be encoded in the URL.
+
+    For example, if `NAME` is "send_event" and `PATH_ARGS` is `("event_id",)`,
+    and `CACHE` is set to true, this generates the endpoint:
+
+        /_synapse/replication/send_event/:event_id/:txn_id
+
+    For POST/PUT requests the payload is serialized to json and sent as the
+    body, while for GET requests the payload is added as query parameters. See
+    `_serialize_payload` for details.
+
+    Incoming requests are handled by overriding `_handle_request`. Servers
+    must call `register` to register the path with the HTTP server.
+
+    Requests can be sent by calling the client returned by `make_client`.
+
+    Attributes:
+        NAME (str): A name for the endpoint, added to the path as well as used
+            in logging and metrics.
+        PATH_ARGS (tuple[str]): A list of parameters to be added to the path.
+            Adding parameters to the path (rather than payload) can make it
+            easier to follow along in the log files.
+        METHOD (str): The method of the HTTP request, defaults to POST. Can be
+            one of POST, PUT or GET. If GET then the payload is sent as query
+            parameters rather than a JSON body.
+        CACHE (bool): Whether the server should cache the result of the
+            request. If true then a txn_id is transparently added to all
+            requests, and `_handle_request` must return a Deferred.
+        RETRY_ON_TIMEOUT (bool): Whether or not to retry the request when a 504
+            is received.
+    """
+
+    __metaclass__ = abc.ABCMeta
+
+    NAME = abc.abstractproperty()
+    PATH_ARGS = abc.abstractproperty()
+
+    METHOD = "POST"
+    CACHE = True
+    RETRY_ON_TIMEOUT = True
+
+    def __init__(self, hs):
+        if self.CACHE:
+            self.response_cache = ResponseCache(
+                hs, "repl." + self.NAME,
+                timeout_ms=30 * 60 * 1000,
+            )
+
+        assert self.METHOD in ("PUT", "POST", "GET")
+
+    @abc.abstractmethod
+    def _serialize_payload(**kwargs):
+        """Static method that is called when creating a request.
+
+        Concrete implementations should have explicit parameters (rather than
+        kwargs) so that an appropriate exception is raised if the client is
+        called with unexpected parameters. All PATH_ARGS must appear in
+        the argument list.
+
+        Returns:
+            Deferred[dict]|dict: If POST/PUT request then dictionary must be
+            JSON serialisable, otherwise must be appropriate for adding as
+            query args.
+        """
+        return {}
+
+    @abc.abstractmethod
+    def _handle_request(self, request, **kwargs):
+        """Handle incoming request.
+
+        This is called with the request object and PATH_ARGS.
+
+        Returns:
+            Deferred[dict]: A JSON serialisable dict to be used as response
+            body of request.
+        """
+        pass
+
+    @classmethod
+    def make_client(cls, hs):
+        """Create a client that makes requests.
+
+        Returns a callable that accepts the same parameters as `_serialize_payload`.
+        """
+        clock = hs.get_clock()
+        host = hs.config.worker_replication_host
+        port = hs.config.worker_replication_http_port
+
+        client = hs.get_simple_http_client()
+
+        @defer.inlineCallbacks
+        def send_request(**kwargs):
+            data = yield cls._serialize_payload(**kwargs)
+
+            url_args = [urllib.parse.quote(kwargs[name]) for name in cls.PATH_ARGS]
+
+            if cls.CACHE:
+                txn_id = random_string(10)
+                url_args.append(txn_id)
+
+            if cls.METHOD == "POST":
+                request_func = client.post_json_get_json
+            elif cls.METHOD == "PUT":
+                request_func = client.put_json
+            elif cls.METHOD == "GET":
+                request_func = client.get_json
+            else:
+                # We have already asserted in the constructor that a
+                # compatible method was picked, but let's be paranoid.
+                raise Exception(
+                    "Unknown METHOD on %s replication endpoint" % (cls.NAME,)
+                )
+
+            uri = "http://%s:%s/_synapse/replication/%s/%s" % (
+                host, port, cls.NAME, "/".join(url_args)
+            )
+
+            try:
+                # We keep retrying the same request for timeouts. This is so that we
+                # have a good idea that the request has either succeeded or failed on
+                # the master, and so whether we should clean up or not.
+                while True:
+                    try:
+                        result = yield request_func(uri, data)
+                        break
+                    except CodeMessageException as e:
+                        if e.code != 504 or not cls.RETRY_ON_TIMEOUT:
+                            raise
+
+                    logger.warn("%s request timed out", cls.NAME)
+
+                    # If we timed out we probably don't need to worry about backing
+                    # off too much, but let's just wait a little anyway.
+                    yield clock.sleep(1)
+            except HttpResponseException as e:
+                # We convert to SynapseError as we know that it was a SynapseError
+                # on the master process that we should send to the client. (And
+                # importantly, not stack traces everywhere)
+                raise e.to_synapse_error()
+
+            defer.returnValue(result)
+
+        return send_request
+
+    def register(self, http_server):
+        """Called by the server to register this as a handler to the
+        appropriate path.
+        """
+
+        url_args = list(self.PATH_ARGS)
+        handler = self._handle_request
+        method = self.METHOD
+
+        if self.CACHE:
+            handler = self._cached_handler
+            url_args.append("txn_id")
+
+        args = "/".join("(?P<%s>[^/]+)" % (arg,) for arg in url_args)
+        pattern = re.compile("^/_synapse/replication/%s/%s$" % (
+            self.NAME,
+            args
+        ))
+
+        http_server.register_paths(method, [pattern], handler)
+
+    def _cached_handler(self, request, txn_id, **kwargs):
+        """Called on new incoming requests when caching is enabled. Checks
+        if there is a cached response for the request and returns that,
+        otherwise calls `_handle_request` and caches its response.
+        """
+        # We just use the txn_id here, but we probably also want to use the
+        # other PATH_ARGS as well.
+
+        assert self.CACHE
+
+        return self.response_cache.wrap(
+            txn_id,
+            self._handle_request,
+            request, **kwargs
+        )
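
A minimal sketch of a concrete subclass, to make the contract above concrete.
The endpoint name, the "reason" field and the store method are invented for
illustration:

    class ReplicationExampleRestServlet(ReplicationEndpoint):
        NAME = "example"
        PATH_ARGS = ("room_id",)

        def __init__(self, hs):
            super(ReplicationExampleRestServlet, self).__init__(hs)
            self.store = hs.get_datastore()

        @staticmethod
        def _serialize_payload(room_id, reason):
            # room_id travels in the URL path; everything else goes in the
            # JSON body
            return {"reason": reason}

        @defer.inlineCallbacks
        def _handle_request(self, request, room_id):
            content = parse_json_object_from_request(request)
            # do_something_with is a hypothetical store method
            yield self.store.do_something_with(room_id, content["reason"])
            defer.returnValue((200, {}))

Workers then call it via the generated client, with kwargs matching
_serialize_payload:

    do_example = ReplicationExampleRestServlet.make_client(hs)
    yield do_example(room_id=room_id, reason="because")
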
diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py
new file mode 100644
index 0000000000..64a79da162
--- /dev/null
+++ b/synapse/replication/http/federation.py
@@ -0,0 +1,259 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from twisted.internet import defer
+
+from synapse.events import FrozenEvent
+from synapse.events.snapshot import EventContext
+from synapse.http.servlet import parse_json_object_from_request
+from synapse.replication.http._base import ReplicationEndpoint
+from synapse.util.metrics import Measure
+
+logger = logging.getLogger(__name__)
+
+
+class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
+    """Handles events newly received from federation, including persisting and
+    notifying.
+
+    The API looks like:
+
+        POST /_synapse/replication/fed_send_events/:txn_id
+
+        {
+            "events": [{
+                "event": { .. serialized event .. },
+                "internal_metadata": { .. serialized internal_metadata .. },
+                "rejected_reason": ..,   // The event.rejected_reason field
+                "context": { .. serialized event context .. },
+            }],
+            "backfilled": false
+    """
+
+    NAME = "fed_send_events"
+    PATH_ARGS = ()
+
+    def __init__(self, hs):
+        super(ReplicationFederationSendEventsRestServlet, self).__init__(hs)
+
+        self.store = hs.get_datastore()
+        self.clock = hs.get_clock()
+        self.federation_handler = hs.get_handlers().federation_handler
+
+    @staticmethod
+    @defer.inlineCallbacks
+    def _serialize_payload(store, event_and_contexts, backfilled):
+        """
+        Args:
+            store
+            event_and_contexts (list[tuple[FrozenEvent, EventContext]])
+            backfilled (bool): Whether or not the events are the result of
+                backfilling
+        """
+        event_payloads = []
+        for event, context in event_and_contexts:
+            serialized_context = yield context.serialize(event, store)
+
+            event_payloads.append({
+                "event": event.get_pdu_json(),
+                "internal_metadata": event.internal_metadata.get_dict(),
+                "rejected_reason": event.rejected_reason,
+                "context": serialized_context,
+            })
+
+        payload = {
+            "events": event_payloads,
+            "backfilled": backfilled,
+        }
+
+        defer.returnValue(payload)
+
+    @defer.inlineCallbacks
+    def _handle_request(self, request):
+        with Measure(self.clock, "repl_fed_send_events_parse"):
+            content = parse_json_object_from_request(request)
+
+            backfilled = content["backfilled"]
+
+            event_payloads = content["events"]
+
+            event_and_contexts = []
+            for event_payload in event_payloads:
+                event_dict = event_payload["event"]
+                internal_metadata = event_payload["internal_metadata"]
+                rejected_reason = event_payload["rejected_reason"]
+                event = FrozenEvent(event_dict, internal_metadata, rejected_reason)
+
+                context = yield EventContext.deserialize(
+                    self.store, event_payload["context"],
+                )
+
+                event_and_contexts.append((event, context))
+
+        logger.info(
+            "Got %d events from federation",
+            len(event_and_contexts),
+        )
+
+        yield self.federation_handler.persist_events_and_notify(
+            event_and_contexts, backfilled,
+        )
+
+        defer.returnValue((200, {}))
+
+
+class ReplicationFederationSendEduRestServlet(ReplicationEndpoint):
+    """Handles EDUs newly received from federation, including persisting and
+    notifying.
+
+    Request format:
+
+        POST /_synapse/replication/fed_send_edu/:edu_type/:txn_id
+
+        {
+            "origin": ...,
+            "content: { ... }
+        }
+    """
+
+    NAME = "fed_send_edu"
+    PATH_ARGS = ("edu_type",)
+
+    def __init__(self, hs):
+        super(ReplicationFederationSendEduRestServlet, self).__init__(hs)
+
+        self.store = hs.get_datastore()
+        self.clock = hs.get_clock()
+        self.registry = hs.get_federation_registry()
+
+    @staticmethod
+    def _serialize_payload(edu_type, origin, content):
+        return {
+            "origin": origin,
+            "content": content,
+        }
+
+    @defer.inlineCallbacks
+    def _handle_request(self, request, edu_type):
+        with Measure(self.clock, "repl_fed_send_edu_parse"):
+            content = parse_json_object_from_request(request)
+
+            origin = content["origin"]
+            edu_content = content["content"]
+
+        logger.info(
+            "Got %r edu from %s",
+            edu_type, origin,
+        )
+
+        result = yield self.registry.on_edu(edu_type, origin, edu_content)
+
+        defer.returnValue((200, result))
+
+
+class ReplicationGetQueryRestServlet(ReplicationEndpoint):
+    """Handle responding to queries from federation.
+
+    Request format:
+
+        POST /_synapse/replication/fed_query/:query_type
+
+        {
+            "args": { ... }
+        }
+    """
+
+    NAME = "fed_query"
+    PATH_ARGS = ("query_type",)
+
+    # This is a query, so let's not bother caching
+    CACHE = False
+
+    def __init__(self, hs):
+        super(ReplicationGetQueryRestServlet, self).__init__(hs)
+
+        self.store = hs.get_datastore()
+        self.clock = hs.get_clock()
+        self.registry = hs.get_federation_registry()
+
+    @staticmethod
+    def _serialize_payload(query_type, args):
+        """
+        Args:
+            query_type (str)
+            args (dict): The arguments received for the given query type
+        """
+        return {
+            "args": args,
+        }
+
+    @defer.inlineCallbacks
+    def _handle_request(self, request, query_type):
+        with Measure(self.clock, "repl_fed_query_parse"):
+            content = parse_json_object_from_request(request)
+
+            args = content["args"]
+
+        logger.info(
+            "Got %r query",
+            query_type,
+        )
+
+        result = yield self.registry.on_query(query_type, args)
+
+        defer.returnValue((200, result))
+
+
+class ReplicationCleanRoomRestServlet(ReplicationEndpoint):
+    """Called to clean up any data in DB for a given room, ready for the
+    server to join the room.
+
+    Request format:
+
+        POST /_synapse/replication/fed_cleanup_room/:room_id/:txn_id
+
+        {}
+    """
+
+    NAME = "fed_cleanup_room"
+    PATH_ARGS = ("room_id",)
+
+    def __init__(self, hs):
+        super(ReplicationCleanRoomRestServlet, self).__init__(hs)
+
+        self.store = hs.get_datastore()
+
+    @staticmethod
+    def _serialize_payload(room_id, args):
+        """
+        Args:
+            room_id (str)
+            args: currently unused
+        """
+        return {}
+
+    @defer.inlineCallbacks
+    def _handle_request(self, request, room_id):
+        yield self.store.clean_room_for_join(room_id)
+
+        defer.returnValue((200, {}))
+
+
+def register_servlets(hs, http_server):
+    ReplicationFederationSendEventsRestServlet(hs).register(http_server)
+    ReplicationFederationSendEduRestServlet(hs).register(http_server)
+    ReplicationGetQueryRestServlet(hs).register(http_server)
+    ReplicationCleanRoomRestServlet(hs).register(http_server)
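
On the worker side these are invoked via make_client; a sketch for the EDU
endpoint (kwargs must match _serialize_payload, and edu_type, being a
PATH_ARG, ends up in the URL rather than the body):

    send_edu_to_master = ReplicationFederationSendEduRestServlet.make_client(hs)

    yield send_edu_to_master(
        edu_type="m.typing", origin=origin, content=edu_content,
    )
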
diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py
index 7a3cfb159c..e58bebf12a 100644
--- a/synapse/replication/http/membership.py
+++ b/synapse/replication/http/membership.py
@@ -14,182 +14,63 @@
 # limitations under the License.
 
 import logging
-import re
 
 from twisted.internet import defer
 
-from synapse.api.errors import HttpResponseException
-from synapse.http.servlet import RestServlet, parse_json_object_from_request
+from synapse.http.servlet import parse_json_object_from_request
+from synapse.replication.http._base import ReplicationEndpoint
 from synapse.types import Requester, UserID
 from synapse.util.distributor import user_joined_room, user_left_room
 
 logger = logging.getLogger(__name__)
 
 
-@defer.inlineCallbacks
-def remote_join(client, host, port, requester, remote_room_hosts,
-                room_id, user_id, content):
-    """Ask the master to do a remote join for the given user to the given room
+class ReplicationRemoteJoinRestServlet(ReplicationEndpoint):
+    """Does a remote join for the given user to the given room
 
-    Args:
-        client (SimpleHttpClient)
-        host (str): host of master
-        port (int): port on master listening for HTTP replication
-        requester (Requester)
-        remote_room_hosts (list[str]): Servers to try and join via
-        room_id (str)
-        user_id (str)
-        content (dict): The event content to use for the join event
+    Request format:
 
-    Returns:
-        Deferred
-    """
-    uri = "http://%s:%s/_synapse/replication/remote_join" % (host, port)
-
-    payload = {
-        "requester": requester.serialize(),
-        "remote_room_hosts": remote_room_hosts,
-        "room_id": room_id,
-        "user_id": user_id,
-        "content": content,
-    }
-
-    try:
-        result = yield client.post_json_get_json(uri, payload)
-    except HttpResponseException as e:
-        # We convert to SynapseError as we know that it was a SynapseError
-        # on the master process that we should send to the client. (And
-        # importantly, not stack traces everywhere)
-        raise e.to_synapse_error()
-    defer.returnValue(result)
-
-
-@defer.inlineCallbacks
-def remote_reject_invite(client, host, port, requester, remote_room_hosts,
-                         room_id, user_id):
-    """Ask master to reject the invite for the user and room.
-
-    Args:
-        client (SimpleHttpClient)
-        host (str): host of master
-        port (int): port on master listening for HTTP replication
-        requester (Requester)
-        remote_room_hosts (list[str]): Servers to try and reject via
-        room_id (str)
-        user_id (str)
-
-    Returns:
-        Deferred
-    """
-    uri = "http://%s:%s/_synapse/replication/remote_reject_invite" % (host, port)
-
-    payload = {
-        "requester": requester.serialize(),
-        "remote_room_hosts": remote_room_hosts,
-        "room_id": room_id,
-        "user_id": user_id,
-    }
-
-    try:
-        result = yield client.post_json_get_json(uri, payload)
-    except HttpResponseException as e:
-        # We convert to SynapseError as we know that it was a SynapseError
-        # on the master process that we should send to the client. (And
-        # importantly, not stack traces everywhere)
-        raise e.to_synapse_error()
-    defer.returnValue(result)
-
-
-@defer.inlineCallbacks
-def get_or_register_3pid_guest(client, host, port, requester,
-                               medium, address, inviter_user_id):
-    """Ask the master to get/create a guest account for given 3PID.
-
-    Args:
-        client (SimpleHttpClient)
-        host (str): host of master
-        port (int): port on master listening for HTTP replication
-        requester (Requester)
-        medium (str)
-        address (str)
-        inviter_user_id (str): The user ID who is trying to invite the
-            3PID
-
-    Returns:
-        Deferred[(str, str)]: A 2-tuple of `(user_id, access_token)` of the
-        3PID guest account.
-    """
+        POST /_synapse/replication/remote_join/:room_id/:user_id
 
-    uri = "http://%s:%s/_synapse/replication/get_or_register_3pid_guest" % (host, port)
-
-    payload = {
-        "requester": requester.serialize(),
-        "medium": medium,
-        "address": address,
-        "inviter_user_id": inviter_user_id,
-    }
-
-    try:
-        result = yield client.post_json_get_json(uri, payload)
-    except HttpResponseException as e:
-        # We convert to SynapseError as we know that it was a SynapseError
-        # on the master process that we should send to the client. (And
-        # importantly, not stack traces everywhere)
-        raise e.to_synapse_error()
-    defer.returnValue(result)
-
-
-@defer.inlineCallbacks
-def notify_user_membership_change(client, host, port, user_id, room_id, change):
-    """Notify master that a user has joined or left the room
-
-    Args:
-        client (SimpleHttpClient)
-        host (str): host of master
-        port (int): port on master listening for HTTP replication.
-        user_id (str)
-        room_id (str)
-        change (str): Either "join" or "left"
-
-    Returns:
-        Deferred
+        {
+            "requester": ...,
+            "remote_room_hosts": [...],
+            "content": { ... }
+        }
     """
-    assert change in ("joined", "left")
-
-    uri = "http://%s:%s/_synapse/replication/user_%s_room" % (host, port, change)
-
-    payload = {
-        "user_id": user_id,
-        "room_id": room_id,
-    }
-
-    try:
-        result = yield client.post_json_get_json(uri, payload)
-    except HttpResponseException as e:
-        # We convert to SynapseError as we know that it was a SynapseError
-        # on the master process that we should send to the client. (And
-        # importantly, not stack traces everywhere)
-        raise e.to_synapse_error()
-    defer.returnValue(result)
-
 
-class ReplicationRemoteJoinRestServlet(RestServlet):
-    PATTERNS = [re.compile("^/_synapse/replication/remote_join$")]
+    NAME = "remote_join"
+    PATH_ARGS = ("room_id", "user_id",)
 
     def __init__(self, hs):
-        super(ReplicationRemoteJoinRestServlet, self).__init__()
+        super(ReplicationRemoteJoinRestServlet, self).__init__(hs)
 
         self.federation_handler = hs.get_handlers().federation_handler
         self.store = hs.get_datastore()
         self.clock = hs.get_clock()
 
+    @staticmethod
+    def _serialize_payload(requester, room_id, user_id, remote_room_hosts,
+                           content):
+        """
+        Args:
+            requester (Requester)
+            room_id (str)
+            user_id (str)
+            remote_room_hosts (list[str]): Servers to try and join via
+            content (dict): The event content to use for the join event
+        """
+        return {
+            "requester": requester.serialize(),
+            "remote_room_hosts": remote_room_hosts,
+            "content": content,
+        }
+
     @defer.inlineCallbacks
-    def on_POST(self, request):
+    def _handle_request(self, request, room_id, user_id):
         content = parse_json_object_from_request(request)
 
         remote_room_hosts = content["remote_room_hosts"]
-        room_id = content["room_id"]
-        user_id = content["user_id"]
         event_content = content["content"]
 
         requester = Requester.deserialize(self.store, content["requester"])
@@ -212,23 +93,48 @@ class ReplicationRemoteJoinRestServlet(RestServlet):
         defer.returnValue((200, {}))
 
 
-class ReplicationRemoteRejectInviteRestServlet(RestServlet):
-    PATTERNS = [re.compile("^/_synapse/replication/remote_reject_invite$")]
+class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint):
+    """Rejects the invite for the user and room.
+
+    Request format:
+
+        POST /_synapse/replication/remote_reject_invite/:room_id/:user_id
+
+        {
+            "requester": ...,
+            "remote_room_hosts": [...],
+        }
+    """
+
+    NAME = "remote_reject_invite"
+    PATH_ARGS = ("room_id", "user_id",)
 
     def __init__(self, hs):
-        super(ReplicationRemoteRejectInviteRestServlet, self).__init__()
+        super(ReplicationRemoteRejectInviteRestServlet, self).__init__(hs)
 
         self.federation_handler = hs.get_handlers().federation_handler
         self.store = hs.get_datastore()
         self.clock = hs.get_clock()
 
+    @staticmethod
+    def _serialize_payload(requester, room_id, user_id, remote_room_hosts):
+        """
+        Args:
+            requester (Requester)
+            room_id (str)
+            user_id (str)
+            remote_room_hosts (list[str]): Servers to try and reject via
+        """
+        return {
+            "requester": requester.serialize(),
+            "remote_room_hosts": remote_room_hosts,
+        }
+
     @defer.inlineCallbacks
-    def on_POST(self, request):
+    def _handle_request(self, request, room_id, user_id):
         content = parse_json_object_from_request(request)
 
         remote_room_hosts = content["remote_room_hosts"]
-        room_id = content["room_id"]
-        user_id = content["user_id"]
 
         requester = Requester.deserialize(self.store, content["requester"])
 
@@ -264,18 +170,50 @@ class ReplicationRemoteRejectInviteRestServlet(RestServlet):
         defer.returnValue((200, ret))
 
 
-class ReplicationRegister3PIDGuestRestServlet(RestServlet):
-    PATTERNS = [re.compile("^/_synapse/replication/get_or_register_3pid_guest$")]
+class ReplicationRegister3PIDGuestRestServlet(ReplicationEndpoint):
+    """Gets/creates a guest account for given 3PID.
+
+    Request format:
+
+        POST /_synapse/replication/get_or_register_3pid_guest/
+
+        {
+            "requester": ...,
+            "medium": ...,
+            "address": ...,
+            "inviter_user_id": ...
+        }
+    """
+
+    NAME = "get_or_register_3pid_guest"
+    PATH_ARGS = ()
 
     def __init__(self, hs):
-        super(ReplicationRegister3PIDGuestRestServlet, self).__init__()
+        super(ReplicationRegister3PIDGuestRestServlet, self).__init__(hs)
 
         self.registeration_handler = hs.get_handlers().registration_handler
         self.store = hs.get_datastore()
         self.clock = hs.get_clock()
 
+    @staticmethod
+    def _serialize_payload(requester, medium, address, inviter_user_id):
+        """
+        Args:
+            requester (Requester)
+            medium (str)
+            address (str)
+            inviter_user_id (str): The user ID who is trying to invite the
+                3PID
+        """
+        return {
+            "requester": requester.serialize(),
+            "medium": medium,
+            "address": address,
+            "inviter_user_id": inviter_user_id,
+        }
+
     @defer.inlineCallbacks
-    def on_POST(self, request):
+    def _handle_request(self, request):
         content = parse_json_object_from_request(request)
 
         medium = content["medium"]
@@ -296,23 +234,41 @@ class ReplicationRegister3PIDGuestRestServlet(RestServlet):
         defer.returnValue((200, ret))
 
 
-class ReplicationUserJoinedLeftRoomRestServlet(RestServlet):
-    PATTERNS = [re.compile("^/_synapse/replication/user_(?P<change>joined|left)_room$")]
+class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint):
+    """Notifies that a user has joined or left the room
+
+    Request format:
+
+        POST /_synapse/replication/membership_change/:room_id/:user_id/:change
+
+        {}
+    """
+
+    NAME = "membership_change"
+    PATH_ARGS = ("room_id", "user_id", "change")
+    CACHE = False  # No point caching, as it should return instantly.
 
     def __init__(self, hs):
-        super(ReplicationUserJoinedLeftRoomRestServlet, self).__init__()
+        super(ReplicationUserJoinedLeftRoomRestServlet, self).__init__(hs)
 
         self.registeration_handler = hs.get_handlers().registration_handler
         self.store = hs.get_datastore()
         self.clock = hs.get_clock()
         self.distributor = hs.get_distributor()
 
-    def on_POST(self, request, change):
-        content = parse_json_object_from_request(request)
+    @staticmethod
+    def _serialize_payload(room_id, user_id, change):
+        """
+        Args:
+            room_id (str)
+            user_id (str)
+            change (str): Either "joined" or "left"
+        """
+        assert change in ("joined", "left",)
 
-        user_id = content["user_id"]
-        room_id = content["room_id"]
+        return {}
 
+    def _handle_request(self, request, room_id, user_id, change):
         logger.info("user membership change: %s in %s", user_id, room_id)
 
         user = UserID.from_string(user_id)
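
Callers swap the old module-level helpers for make_client; a sketch of the
remote_join replacement (mirroring the kwargs of _serialize_payload):

    remote_join = ReplicationRemoteJoinRestServlet.make_client(hs)

    yield remote_join(
        requester=requester,
        remote_room_hosts=remote_room_hosts,
        room_id=room_id,
        user_id=user_id,
        content=content,
    )
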
diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py
index d3509dc288..5b52c91650 100644
--- a/synapse/replication/http/send_event.py
+++ b/synapse/replication/http/send_event.py
@@ -14,86 +14,26 @@
 # limitations under the License.
 
 import logging
-import re
 
 from twisted.internet import defer
 
-from synapse.api.errors import CodeMessageException, HttpResponseException
 from synapse.events import FrozenEvent
 from synapse.events.snapshot import EventContext
-from synapse.http.servlet import RestServlet, parse_json_object_from_request
+from synapse.http.servlet import parse_json_object_from_request
+from synapse.replication.http._base import ReplicationEndpoint
 from synapse.types import Requester, UserID
-from synapse.util.caches.response_cache import ResponseCache
 from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)
 
 
-@defer.inlineCallbacks
-def send_event_to_master(clock, store, client, host, port, requester, event, context,
-                         ratelimit, extra_users):
-    """Send event to be handled on the master
-
-    Args:
-        clock (synapse.util.Clock)
-        store (DataStore)
-        client (SimpleHttpClient)
-        host (str): host of master
-        port (int): port on master listening for HTTP replication
-        requester (Requester)
-        event (FrozenEvent)
-        context (EventContext)
-        ratelimit (bool)
-        extra_users (list(UserID)): Any extra users to notify about event
-    """
-    uri = "http://%s:%s/_synapse/replication/send_event/%s" % (
-        host, port, event.event_id,
-    )
-
-    serialized_context = yield context.serialize(event, store)
-
-    payload = {
-        "event": event.get_pdu_json(),
-        "internal_metadata": event.internal_metadata.get_dict(),
-        "rejected_reason": event.rejected_reason,
-        "context": serialized_context,
-        "requester": requester.serialize(),
-        "ratelimit": ratelimit,
-        "extra_users": [u.to_string() for u in extra_users],
-    }
-
-    try:
-        # We keep retrying the same request for timeouts. This is so that we
-        # have a good idea that the request has either succeeded or failed on
-        # the master, and so whether we should clean up or not.
-        while True:
-            try:
-                result = yield client.put_json(uri, payload)
-                break
-            except CodeMessageException as e:
-                if e.code != 504:
-                    raise
-
-            logger.warn("send_event request timed out")
-
-            # If we timed out we probably don't need to worry about backing
-            # off too much, but lets just wait a little anyway.
-            yield clock.sleep(1)
-    except HttpResponseException as e:
-        # We convert to SynapseError as we know that it was a SynapseError
-        # on the master process that we should send to the client. (And
-        # importantly, not stack traces everywhere)
-        raise e.to_synapse_error()
-    defer.returnValue(result)
-
-
-class ReplicationSendEventRestServlet(RestServlet):
+class ReplicationSendEventRestServlet(ReplicationEndpoint):
     """Handles events newly created on workers, including persisting and
     notifying.
 
     The API looks like:
 
-        POST /_synapse/replication/send_event/:event_id
+        POST /_synapse/replication/send_event/:event_id/:txn_id
 
         {
             "event": { .. serialized event .. },
@@ -105,27 +45,47 @@ class ReplicationSendEventRestServlet(RestServlet):
             "extra_users": [],
         }
     """
-    PATTERNS = [re.compile("^/_synapse/replication/send_event/(?P<event_id>[^/]+)$")]
+    NAME = "send_event"
+    PATH_ARGS = ("event_id",)
 
     def __init__(self, hs):
-        super(ReplicationSendEventRestServlet, self).__init__()
+        super(ReplicationSendEventRestServlet, self).__init__(hs)
 
         self.event_creation_handler = hs.get_event_creation_handler()
         self.store = hs.get_datastore()
         self.clock = hs.get_clock()
 
-        # The responses are tiny, so we may as well cache them for a while
-        self.response_cache = ResponseCache(hs, "send_event", timeout_ms=30 * 60 * 1000)
+    @staticmethod
+    @defer.inlineCallbacks
+    def _serialize_payload(event_id, store, event, context, requester,
+                           ratelimit, extra_users):
+        """
+        Args:
+            event_id (str)
+            store (DataStore)
+            requester (Requester)
+            event (FrozenEvent)
+            context (EventContext)
+            ratelimit (bool)
+            extra_users (list(UserID)): Any extra users to notify about event
+        """
+
+        serialized_context = yield context.serialize(event, store)
+
+        payload = {
+            "event": event.get_pdu_json(),
+            "internal_metadata": event.internal_metadata.get_dict(),
+            "rejected_reason": event.rejected_reason,
+            "context": serialized_context,
+            "requester": requester.serialize(),
+            "ratelimit": ratelimit,
+            "extra_users": [u.to_string() for u in extra_users],
+        }
 
-    def on_PUT(self, request, event_id):
-        return self.response_cache.wrap(
-            event_id,
-            self._handle_request,
-            request
-        )
+        defer.returnValue(payload)
 
     @defer.inlineCallbacks
-    def _handle_request(self, request):
+    def _handle_request(self, request, event_id):
         with Measure(self.clock, "repl_send_event_parse"):
             content = parse_json_object_from_request(request)
 
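
The txn_id appended to the path above is what makes retrying safe: the
shared ReplicationEndpoint base can key its response cache on the
txn_id, so a request that timed out can be replayed without
re-persisting the event. A minimal sketch of that idempotent-retry
pattern follows; FakeMaster, MasterTimeout and the simulated failure
rate are stand-ins, not Synapse code.

    import random

    class MasterTimeout(Exception):
        pass

    class FakeMaster(object):
        """Stands in for the master: responses are cached per URI, so a
        retried request with the same txn_id gets the original answer."""
        def __init__(self):
            self.responses = {}

        def put(self, uri):
            if random.random() < 0.5:  # simulate an intermittent 504
                raise MasterTimeout()
            return self.responses.setdefault(uri, {"persisted": True})

    def send_with_retries(master, event_id, txn_id):
        uri = "/_synapse/replication/send_event/%s/%s" % (event_id, txn_id)
        while True:
            try:
                return master.put(uri)
            except MasterTimeout:
                # safe to retry: same txn_id, so the master can dedupe
                continue

    print(send_with_retries(FakeMaster(), "$ev1:example.org", "txn1"))
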
diff --git a/synapse/replication/slave/storage/events.py b/synapse/replication/slave/storage/events.py
index bdb5eee4af..4830c68f35 100644
--- a/synapse/replication/slave/storage/events.py
+++ b/synapse/replication/slave/storage/events.py
@@ -44,8 +44,8 @@ class SlavedEventStore(EventFederationWorkerStore,
                        RoomMemberWorkerStore,
                        EventPushActionsWorkerStore,
                        StreamWorkerStore,
-                       EventsWorkerStore,
                        StateGroupWorkerStore,
+                       EventsWorkerStore,
                        SignatureWorkerStore,
                        UserErasureWorkerStore,
                        BaseSlavedStore):
diff --git a/synapse/replication/slave/storage/transactions.py b/synapse/replication/slave/storage/transactions.py
index 9c9a5eadd9..3527beb3c9 100644
--- a/synapse/replication/slave/storage/transactions.py
+++ b/synapse/replication/slave/storage/transactions.py
@@ -13,19 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from synapse.storage import DataStore
 from synapse.storage.transactions import TransactionStore
 
 from ._base import BaseSlavedStore
 
 
-class TransactionStore(BaseSlavedStore):
-    get_destination_retry_timings = TransactionStore.__dict__[
-        "get_destination_retry_timings"
-    ]
-    _get_destination_retry_timings = DataStore._get_destination_retry_timings.__func__
-    set_destination_retry_timings = DataStore.set_destination_retry_timings.__func__
-    _set_destination_retry_timings = DataStore._set_destination_retry_timings.__func__
-
-    prep_send_transaction = DataStore.prep_send_transaction.__func__
-    delivered_txn = DataStore.delivered_txn.__func__
+class SlavedTransactionStore(TransactionStore, BaseSlavedStore):
+    pass
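
The rewrite above drops the hand-copied __func__ plumbing in favour of
plain cooperative inheritance. A toy illustration of the pattern, with
stand-in classes rather than the real stores:

    class BaseSlavedStoreStub(object):
        """Stand-in for BaseSlavedStore."""
        def __init__(self, db):
            self.db = db

    class TransactionStoreMixin(object):
        """Stand-in for a worker-safe TransactionStore."""
        def get_destination_retry_timings(self, destination):
            return self.db.get(("retry", destination))

    class SlavedTransactionStore(TransactionStoreMixin, BaseSlavedStoreStub):
        pass  # the MRO supplies the methods; no manual copying needed

    store = SlavedTransactionStore({("retry", "example.org"): 5000})
    print(store.get_destination_retry_timings("example.org"))  # 5000
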
diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py
index 970e94313e..cbe9645817 100644
--- a/synapse/replication/tcp/client.py
+++ b/synapse/replication/tcp/client.py
@@ -107,7 +107,7 @@ class ReplicationClientHandler(object):
         Can be overridden in subclasses to handle more.
         """
         logger.info("Received rdata %s -> %s", stream_name, token)
-        self.store.process_replication_rows(stream_name, token, rows)
+        return self.store.process_replication_rows(stream_name, token, rows)
 
     def on_position(self, stream_name, token):
         """Called when we get new position data. By default this just pokes
@@ -115,7 +115,7 @@ class ReplicationClientHandler(object):
 
         Can be overridden in subclasses to handle more.
         """
-        self.store.process_replication_rows(stream_name, token, [])
+        return self.store.process_replication_rows(stream_name, token, [])
 
     def on_sync(self, data):
         """When we received a SYNC we wake up any deferreds that were waiting
diff --git a/synapse/replication/tcp/commands.py b/synapse/replication/tcp/commands.py
index f3908df642..327556f6a1 100644
--- a/synapse/replication/tcp/commands.py
+++ b/synapse/replication/tcp/commands.py
@@ -59,6 +59,12 @@ class Command(object):
         """
         return self.data
 
+    def get_logcontext_id(self):
+        """Get a suitable string for the logcontext when processing this command"""
+
+        # by default, we just use the command name.
+        return self.NAME
+
 
 class ServerCommand(Command):
     """Sent by the server on new connection and includes the server_name.
@@ -116,6 +122,9 @@ class RdataCommand(Command):
             _json_encoder.encode(self.row),
         ))
 
+    def get_logcontext_id(self):
+        return "RDATA-" + self.stream_name
+
 
 class PositionCommand(Command):
     """Sent by the client to tell the client the stream postition without
@@ -190,6 +199,9 @@ class ReplicateCommand(Command):
     def to_line(self):
         return " ".join((self.stream_name, str(self.token),))
 
+    def get_logcontext_id(self):
+        return "REPLICATE-" + self.stream_name
+
 
 class UserSyncCommand(Command):
     """Sent by the client to inform the server that a user has started or
diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py
index dec5ac0913..74e892c104 100644
--- a/synapse/replication/tcp/protocol.py
+++ b/synapse/replication/tcp/protocol.py
@@ -63,6 +63,8 @@ from twisted.protocols.basic import LineOnlyReceiver
 from twisted.python.failure import Failure
 
 from synapse.metrics import LaterGauge
+from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.stringutils import random_string
 
 from .commands import (
@@ -222,7 +224,11 @@ class BaseReplicationStreamProtocol(LineOnlyReceiver):
 
         # Now lets try and call on_<CMD_NAME> function
         try:
-            getattr(self, "on_%s" % (cmd_name,))(cmd)
+            run_as_background_process(
+                "replication-" + cmd.get_logcontext_id(),
+                getattr(self, "on_%s" % (cmd_name,)),
+                cmd,
+            )
         except Exception:
             logger.exception("[%s] Failed to handle line: %r", self.id(), line)
 
@@ -387,7 +393,7 @@ class ServerReplicationStreamProtocol(BaseReplicationStreamProtocol):
         self.name = cmd.data
 
     def on_USER_SYNC(self, cmd):
-        self.streamer.on_user_sync(
+        return self.streamer.on_user_sync(
             self.conn_id, cmd.user_id, cmd.is_syncing, cmd.last_sync_ms,
         )
 
@@ -397,22 +403,33 @@ class ServerReplicationStreamProtocol(BaseReplicationStreamProtocol):
 
         if stream_name == "ALL":
             # Subscribe to all streams we're publishing to.
-            for stream in iterkeys(self.streamer.streams_by_name):
-                self.subscribe_to_stream(stream, token)
+            deferreds = [
+                run_in_background(
+                    self.subscribe_to_stream,
+                    stream, token,
+                )
+                for stream in iterkeys(self.streamer.streams_by_name)
+            ]
+
+            return make_deferred_yieldable(
+                defer.gatherResults(deferreds, consumeErrors=True)
+            )
         else:
-            self.subscribe_to_stream(stream_name, token)
+            return self.subscribe_to_stream(stream_name, token)
 
     def on_FEDERATION_ACK(self, cmd):
-        self.streamer.federation_ack(cmd.token)
+        return self.streamer.federation_ack(cmd.token)
 
     def on_REMOVE_PUSHER(self, cmd):
-        self.streamer.on_remove_pusher(cmd.app_id, cmd.push_key, cmd.user_id)
+        return self.streamer.on_remove_pusher(
+            cmd.app_id, cmd.push_key, cmd.user_id,
+        )
 
     def on_INVALIDATE_CACHE(self, cmd):
-        self.streamer.on_invalidate_cache(cmd.cache_func, cmd.keys)
+        return self.streamer.on_invalidate_cache(cmd.cache_func, cmd.keys)
 
     def on_USER_IP(self, cmd):
-        self.streamer.on_user_ip(
+        return self.streamer.on_user_ip(
             cmd.user_id, cmd.access_token, cmd.ip, cmd.user_agent, cmd.device_id,
             cmd.last_seen,
         )
@@ -542,14 +559,13 @@ class ClientReplicationStreamProtocol(BaseReplicationStreamProtocol):
             # Check if this is the last of a batch of updates
             rows = self.pending_batches.pop(stream_name, [])
             rows.append(row)
-
-            self.handler.on_rdata(stream_name, cmd.token, rows)
+            return self.handler.on_rdata(stream_name, cmd.token, rows)
 
     def on_POSITION(self, cmd):
-        self.handler.on_position(cmd.stream_name, cmd.token)
+        return self.handler.on_position(cmd.stream_name, cmd.token)
 
     def on_SYNC(self, cmd):
-        self.handler.on_sync(cmd.data)
+        return self.handler.on_sync(cmd.data)
 
     def replicate(self, stream_name, token):
         """Send the subscription request to the server
diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py
index 00b1b3066e..48c17f1b6d 100644
--- a/synapse/rest/client/transactions.py
+++ b/synapse/rest/client/transactions.py
@@ -17,7 +17,7 @@
 to ensure idempotency when performing PUTs using the REST API."""
 import logging
 
-from synapse.util.async import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
 logger = logging.getLogger(__name__)
@@ -53,7 +53,7 @@ class HttpTransactionCache(object):
             str: A transaction key
         """
         token = self.auth.get_access_token_from_request(request)
-        return request.path + "/" + token
+        return request.path.decode('utf8') + "/" + token
 
     def fetch_or_execute_request(self, request, fn, *args, **kwargs):
         """A helper function for fetch_or_execute which extracts
diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py
index 80d625eecc..ad536ab570 100644
--- a/synapse/rest/client/v1/admin.py
+++ b/synapse/rest/client/v1/admin.py
@@ -391,10 +391,17 @@ class DeactivateAccountRestServlet(ClientV1RestServlet):
         if not is_admin:
             raise AuthError(403, "You are not a server admin")
 
-        yield self._deactivate_account_handler.deactivate_account(
+        result = yield self._deactivate_account_handler.deactivate_account(
             target_user_id, erase,
         )
-        defer.returnValue((200, {}))
+        if result:
+            id_server_unbind_result = "success"
+        else:
+            id_server_unbind_result = "no-support"
+
+        defer.returnValue((200, {
+            "id_server_unbind_result": id_server_unbind_result,
+        }))
 
 
 class ShutdownRoomRestServlet(ClientV1RestServlet):
diff --git a/synapse/rest/client/v1/presence.py b/synapse/rest/client/v1/presence.py
index a14f0c807e..b5a6d6aebf 100644
--- a/synapse/rest/client/v1/presence.py
+++ b/synapse/rest/client/v1/presence.py
@@ -84,7 +84,8 @@ class PresenceStatusRestServlet(ClientV1RestServlet):
         except Exception:
             raise SynapseError(400, "Unable to parse state")
 
-        yield self.presence_handler.set_state(user, state)
+        if self.hs.config.use_presence:
+            yield self.presence_handler.set_state(user, state)
 
         defer.returnValue((200, {}))
 
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index fa5989e74e..976d98387d 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -34,7 +34,7 @@ from synapse.http.servlet import (
     parse_string,
 )
 from synapse.streams.config import PaginationConfig
-from synapse.types import RoomAlias, RoomID, ThirdPartyInstanceID, UserID
+from synapse.types import RoomAlias, RoomID, StreamToken, ThirdPartyInstanceID, UserID
 
 from .base import ClientV1RestServlet, client_path_patterns
 
@@ -384,15 +384,39 @@ class RoomMemberListRestServlet(ClientV1RestServlet):
     def on_GET(self, request, room_id):
         # TODO support Pagination stream API (limit/tokens)
         requester = yield self.auth.get_user_by_req(request)
-        events = yield self.message_handler.get_state_events(
+        handler = self.message_handler
+
+        # request the state as of a given event, as identified by a stream token,
+        # for consistency with /messages etc.
+        # useful for getting the membership in retrospect as of a given /sync
+        # response.
+        at_token_string = parse_string(request, "at")
+        if at_token_string is None:
+            at_token = None
+        else:
+            at_token = StreamToken.from_string(at_token_string)
+
+        # let you filter down on particular memberships.
+        # XXX: this may not be the best shape for this API - we could pass in a filter
+        # instead, except filters aren't currently aware of memberships.
+        # See https://github.com/matrix-org/matrix-doc/issues/1337 for more details.
+        membership = parse_string(request, "membership")
+        not_membership = parse_string(request, "not_membership")
+
+        events = yield handler.get_state_events(
             room_id=room_id,
             user_id=requester.user.to_string(),
+            at_token=at_token,
+            types=[(EventTypes.Member, None)],
         )
 
         chunk = []
 
         for event in events:
-            if event["type"] != EventTypes.Member:
+            if (
+                (membership and event['content'].get("membership") != membership) or
+                (not_membership and event['content'].get("membership") == not_membership)
+            ):
                 continue
             chunk.append(event)
 
@@ -401,6 +425,8 @@ class RoomMemberListRestServlet(ClientV1RestServlet):
         }))
 
 
+# deprecated in favour of /members?membership=join?
+# except it does custom AS logic and has a simpler return format
 class JoinedRoomMemberListRestServlet(ClientV1RestServlet):
     PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/joined_members$")
 
@@ -505,7 +531,7 @@ class RoomEventServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request, room_id, event_id):
-        requester = yield self.auth.get_user_by_req(request)
+        requester = yield self.auth.get_user_by_req(request, allow_guest=True)
         event = yield self.event_handler.get_event(requester.user, room_id, event_id)
 
         time_now = self.clock.time_msec()
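
A hedged usage sketch of the extended members endpoint above; the
homeserver URL, access token and at-token are placeholders, and the
requests library is assumed:

    import requests

    def get_joined_members(hs_url, room_id, access_token, at_token=None):
        params = {"membership": "join"}
        if at_token is not None:
            # state as of a particular /sync token, per the new "at" param
            params["at"] = at_token
        resp = requests.get(
            "%s/_matrix/client/r0/rooms/%s/members" % (hs_url, room_id),
            params=params,
            headers={"Authorization": "Bearer %s" % (access_token,)},
        )
        resp.raise_for_status()
        return resp.json()["chunk"]
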
diff --git a/synapse/rest/client/v1_only/register.py b/synapse/rest/client/v1_only/register.py
index 3439c3c6d4..5e99cffbcb 100644
--- a/synapse/rest/client/v1_only/register.py
+++ b/synapse/rest/client/v1_only/register.py
@@ -129,12 +129,9 @@ class RegisterRestServlet(ClientV1RestServlet):
             login_type = register_json["type"]
 
             is_application_server = login_type == LoginType.APPLICATION_SERVICE
-            is_using_shared_secret = login_type == LoginType.SHARED_SECRET
-
             can_register = (
                 self.enable_registration
                 or is_application_server
-                or is_using_shared_secret
             )
             if not can_register:
                 raise SynapseError(403, "Registration has been disabled")
@@ -144,7 +141,6 @@ class RegisterRestServlet(ClientV1RestServlet):
                 LoginType.PASSWORD: self._do_password,
                 LoginType.EMAIL_IDENTITY: self._do_email_identity,
                 LoginType.APPLICATION_SERVICE: self._do_app_service,
-                LoginType.SHARED_SECRET: self._do_shared_secret,
             }
 
             session_info = self._get_session_info(request, session)
@@ -325,56 +321,6 @@ class RegisterRestServlet(ClientV1RestServlet):
             "home_server": self.hs.hostname,
         })
 
-    @defer.inlineCallbacks
-    def _do_shared_secret(self, request, register_json, session):
-        assert_params_in_dict(register_json, ["mac", "user", "password"])
-
-        if not self.hs.config.registration_shared_secret:
-            raise SynapseError(400, "Shared secret registration is not enabled")
-
-        user = register_json["user"].encode("utf-8")
-        password = register_json["password"].encode("utf-8")
-        admin = register_json.get("admin", None)
-
-        # Its important to check as we use null bytes as HMAC field separators
-        if b"\x00" in user:
-            raise SynapseError(400, "Invalid user")
-        if b"\x00" in password:
-            raise SynapseError(400, "Invalid password")
-
-        # str() because otherwise hmac complains that 'unicode' does not
-        # have the buffer interface
-        got_mac = str(register_json["mac"])
-
-        want_mac = hmac.new(
-            key=self.hs.config.registration_shared_secret.encode(),
-            digestmod=sha1,
-        )
-        want_mac.update(user)
-        want_mac.update(b"\x00")
-        want_mac.update(password)
-        want_mac.update(b"\x00")
-        want_mac.update(b"admin" if admin else b"notadmin")
-        want_mac = want_mac.hexdigest()
-
-        if compare_digest(want_mac, got_mac):
-            handler = self.handlers.registration_handler
-            user_id, token = yield handler.register(
-                localpart=user.lower(),
-                password=password,
-                admin=bool(admin),
-            )
-            self._remove_session(session)
-            defer.returnValue({
-                "user_id": user_id,
-                "access_token": token,
-                "home_server": self.hs.hostname,
-            })
-        else:
-            raise SynapseError(
-                403, "HMAC incorrect",
-            )
-
 
 class CreateUserRestServlet(ClientV1RestServlet):
     """Handles user creation via a server-to-server interface
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index eeae466d82..372648cafd 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -209,10 +209,17 @@ class DeactivateAccountRestServlet(RestServlet):
         yield self.auth_handler.validate_user_via_ui_auth(
             requester, body, self.hs.get_ip_from_request(request),
         )
-        yield self._deactivate_account_handler.deactivate_account(
+        result = yield self._deactivate_account_handler.deactivate_account(
             requester.user.to_string(), erase,
         )
-        defer.returnValue((200, {}))
+        if result:
+            id_server_unbind_result = "success"
+        else:
+            id_server_unbind_result = "no-support"
+
+        defer.returnValue((200, {
+            "id_server_unbind_result": id_server_unbind_result,
+        }))
 
 
 class EmailThreepidRequestTokenRestServlet(RestServlet):
@@ -364,7 +371,7 @@ class ThreepidDeleteRestServlet(RestServlet):
         user_id = requester.user.to_string()
 
         try:
-            yield self.auth_handler.delete_threepid(
+            ret = yield self.auth_handler.delete_threepid(
                 user_id, body['medium'], body['address']
             )
         except Exception:
@@ -374,7 +381,14 @@ class ThreepidDeleteRestServlet(RestServlet):
             logger.exception("Failed to remove threepid")
             raise SynapseError(500, "Failed to remove threepid")
 
-        defer.returnValue((200, {}))
+        if ret:
+            id_server_unbind_result = "success"
+        else:
+            id_server_unbind_result = "no-support"
+
+        defer.returnValue((200, {
+            "id_server_unbind_result": id_server_unbind_result,
+        }))
 
 
 class WhoamiRestServlet(RestServlet):
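
A small client-side sketch of interpreting the new
id_server_unbind_result field returned by the deactivation and
threepid-deletion endpoints above; the two values come straight from
the diff:

    def describe_unbind(response_json):
        result = response_json.get("id_server_unbind_result")
        if result == "success":
            return "identity server removed the bindings"
        if result == "no-support":
            return "identity server does not support unbinding"
        return "unexpected result: %r" % (result,)

    print(describe_unbind({"id_server_unbind_result": "no-support"}))
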
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 8aa06faf23..1275baa1ba 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -370,6 +370,7 @@ class SyncRestServlet(RestServlet):
             ephemeral_events = room.ephemeral
             result["ephemeral"] = {"events": ephemeral_events}
             result["unread_notifications"] = room.unread_notifications
+            result["summary"] = room.summary
 
         return result
 
diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py
index 6ac2987b98..29e62bfcdd 100644
--- a/synapse/rest/client/versions.py
+++ b/synapse/rest/client/versions.py
@@ -27,11 +27,22 @@ class VersionsRestServlet(RestServlet):
     def on_GET(self, request):
         return (200, {
             "versions": [
+                # XXX: at some point we need to decide whether we need to include
+                # the previous version numbers, given we've defined r0.3.0 to be
+                # backwards compatible with r0.2.0.  But need to check how
+                # conscientious we've been in compatibility, and decide whether the
+                # middle number is the major revision when at 0.X.Y (as opposed to
+                # X.Y.Z).  And we need to decide whether it's fair to make clients
+                # parse the version string to figure out what's going on.
                 "r0.0.1",
                 "r0.1.0",
                 "r0.2.0",
                 "r0.3.0",
-            ]
+            ],
+            # as per MSC1497:
+            "unstable_features": {
+                "m.lazy_load_members": True,
+            }
         })
 
 
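
A sketch of how a client might gate member lazy-loading on the
advertised flag; the parsing below is illustrative, not normative:

    def supports_lazy_loading(versions_response):
        features = versions_response.get("unstable_features", {})
        return bool(features.get("m.lazy_load_members", False))

    print(supports_lazy_loading({
        "versions": ["r0.3.0"],
        "unstable_features": {"m.lazy_load_members": True},
    }))  # True
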
diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py
index 147ff7d79b..7362e1858d 100644
--- a/synapse/rest/consent/consent_resource.py
+++ b/synapse/rest/consent/consent_resource.py
@@ -140,7 +140,7 @@ class ConsentResource(Resource):
         version = parse_string(request, "v",
                                default=self._default_consent_version)
         username = parse_string(request, "u", required=True)
-        userhmac = parse_string(request, "h", required=True)
+        userhmac = parse_string(request, "h", required=True, encoding=None)
 
         self._check_hash(username, userhmac)
 
@@ -175,7 +175,7 @@ class ConsentResource(Resource):
         """
         version = parse_string(request, "v", required=True)
         username = parse_string(request, "u", required=True)
-        userhmac = parse_string(request, "h", required=True)
+        userhmac = parse_string(request, "h", required=True, encoding=None)
 
         self._check_hash(username, userhmac)
 
@@ -210,9 +210,18 @@ class ConsentResource(Resource):
         finish_request(request)
 
     def _check_hash(self, userid, userhmac):
+        """
+        Args:
+            userid (unicode):
+            userhmac (bytes):
+
+        Raises:
+              SynapseError if the hash doesn't match
+
+        """
         want_mac = hmac.new(
             key=self._hmac_secret,
-            msg=userid,
+            msg=userid.encode('utf-8'),
             digestmod=sha256,
         ).hexdigest()
 
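
A sketch of producing the "h" parameter that _check_hash verifies above,
assuming the HMAC key is the homeserver's form_secret; the secret and
username below are examples:

    import hmac
    from hashlib import sha256

    def consent_userhmac(form_secret, username):
        # must match the server side: HMAC-SHA256 over the UTF-8 username
        return hmac.new(
            key=form_secret.encode('utf-8'),
            msg=username.encode('utf-8'),
            digestmod=sha256,
        ).hexdigest()

    print(consent_userhmac("a_form_secret", "@alice:example.org"))
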
diff --git a/synapse/rest/media/v1/config_resource.py b/synapse/rest/media/v1/config_resource.py
new file mode 100644
index 0000000000..d6605b6027
--- /dev/null
+++ b/synapse/rest/media/v1/config_resource.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 Will Hunt <will@half-shot.uk>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from twisted.internet import defer
+from twisted.web.resource import Resource
+from twisted.web.server import NOT_DONE_YET
+
+from synapse.http.server import respond_with_json, wrap_json_request_handler
+
+
+class MediaConfigResource(Resource):
+    isLeaf = True
+
+    def __init__(self, hs):
+        Resource.__init__(self)
+        config = hs.get_config()
+        self.clock = hs.get_clock()
+        self.auth = hs.get_auth()
+        self.limits_dict = {
+            "m.upload.size": config.max_upload_size,
+        }
+
+    def render_GET(self, request):
+        self._async_render_GET(request)
+        return NOT_DONE_YET
+
+    @wrap_json_request_handler
+    @defer.inlineCallbacks
+    def _async_render_GET(self, request):
+        yield self.auth.get_user_by_req(request)
+        respond_with_json(request, 200, self.limits_dict)
+
+    def render_OPTIONS(self, request):
+        respond_with_json(request, 200, {}, send_cors=True)
+        return NOT_DONE_YET
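
A usage sketch for the new media config resource; assuming it is mounted
under the usual media prefix, and with URL and token as placeholders,
the endpoint returns the limits dict built in __init__ (currently just
m.upload.size):

    import requests

    resp = requests.get(
        "https://matrix.example.org/_matrix/media/r0/config",
        headers={"Authorization": "Bearer <access_token>"},
    )
    resp.raise_for_status()
    print(resp.json().get("m.upload.size"))  # e.g. 52428800 for 50 MiB
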
diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py
index 8fb413d825..241c972070 100644
--- a/synapse/rest/media/v1/media_repository.py
+++ b/synapse/rest/media/v1/media_repository.py
@@ -36,12 +36,13 @@ from synapse.api.errors import (
 )
 from synapse.http.matrixfederationclient import MatrixFederationHttpClient
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.logcontext import make_deferred_yieldable
 from synapse.util.retryutils import NotRetryingDestination
 from synapse.util.stringutils import is_ascii, random_string
 
 from ._base import FileInfo, respond_404, respond_with_responder
+from .config_resource import MediaConfigResource
 from .download_resource import DownloadResource
 from .filepath import MediaFilePaths
 from .identicon_resource import IdenticonResource
@@ -754,7 +755,6 @@ class MediaRepositoryResource(Resource):
         Resource.__init__(self)
 
         media_repo = hs.get_media_repository()
-
         self.putChild("upload", UploadResource(hs, media_repo))
         self.putChild("download", DownloadResource(hs, media_repo))
         self.putChild("thumbnail", ThumbnailResource(
@@ -765,3 +765,4 @@ class MediaRepositoryResource(Resource):
             self.putChild("preview_url", PreviewUrlResource(
                 hs, media_repo, media_repo.media_storage,
             ))
+        self.putChild("config", MediaConfigResource(hs))
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index 27aa0def2f..778ef97337 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -42,7 +42,7 @@ from synapse.http.server import (
 )
 from synapse.http.servlet import parse_integer, parse_string
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util.async import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.stringutils import is_ascii, random_string
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index 9b22d204a6..c1240e1963 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -55,7 +55,7 @@ class UploadResource(Resource):
         requester = yield self.auth.get_user_by_req(request)
         # TODO: The checks here are a bit late. The content will have
         # already been uploaded to a tmp file at this point
-        content_length = request.getHeader("Content-Length")
+        content_length = request.getHeader(b"Content-Length").decode('ascii')
         if content_length is None:
             raise SynapseError(
                 msg="Request must specify a Content-Length", code=400
@@ -66,10 +66,10 @@ class UploadResource(Resource):
                 code=413,
             )
 
-        upload_name = parse_string(request, "filename")
+        upload_name = parse_string(request, b"filename", encoding=None)
         if upload_name:
             try:
-                upload_name = upload_name.decode('UTF-8')
+                upload_name = upload_name.decode('utf8')
             except UnicodeDecodeError:
                 raise SynapseError(
                     msg="Invalid UTF-8 filename parameter: %r" % (upload_name),
@@ -78,8 +78,8 @@ class UploadResource(Resource):
 
         headers = request.requestHeaders
 
-        if headers.hasHeader("Content-Type"):
-            media_type = headers.getRawHeaders(b"Content-Type")[0]
+        if headers.hasHeader(b"Content-Type"):
+            media_type = headers.getRawHeaders(b"Content-Type")[0].decode('ascii')
         else:
             raise SynapseError(
                 msg="Upload request missing 'Content-Type'",
diff --git a/synapse/secrets.py b/synapse/secrets.py
index f05e9ea535..f6280f951c 100644
--- a/synapse/secrets.py
+++ b/synapse/secrets.py
@@ -38,4 +38,4 @@ else:
             return os.urandom(nbytes)
 
         def token_hex(self, nbytes=32):
-            return binascii.hexlify(self.token_bytes(nbytes))
+            return binascii.hexlify(self.token_bytes(nbytes)).decode('ascii')
diff --git a/synapse/server.py b/synapse/server.py
index 706cb1361f..a795643add 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -36,6 +36,7 @@ from synapse.federation.federation_client import FederationClient
 from synapse.federation.federation_server import (
     FederationHandlerRegistry,
     FederationServer,
+    ReplicationFederationHandlerRegistry,
 )
 from synapse.federation.send_queue import FederationRemoteSendQueue
 from synapse.federation.transaction_queue import TransactionQueue
@@ -56,7 +57,7 @@ from synapse.handlers.initial_sync import InitialSyncHandler
 from synapse.handlers.message import EventCreationHandler, MessageHandler
 from synapse.handlers.pagination import PaginationHandler
 from synapse.handlers.presence import PresenceHandler
-from synapse.handlers.profile import ProfileHandler
+from synapse.handlers.profile import BaseProfileHandler, MasterProfileHandler
 from synapse.handlers.read_marker import ReadMarkerHandler
 from synapse.handlers.receipts import ReceiptsHandler
 from synapse.handlers.room import RoomContextHandler, RoomCreationHandler
@@ -312,7 +313,10 @@ class HomeServer(object):
         return InitialSyncHandler(self)
 
     def build_profile_handler(self):
-        return ProfileHandler(self)
+        if self.config.worker_app:
+            return BaseProfileHandler(self)
+        else:
+            return MasterProfileHandler(self)
 
     def build_event_creation_handler(self):
         return EventCreationHandler(self)
@@ -428,7 +432,10 @@ class HomeServer(object):
         return RoomMemberMasterHandler(self)
 
     def build_federation_registry(self):
-        return FederationHandlerRegistry()
+        if self.config.worker_app:
+            return ReplicationFederationHandlerRegistry(self)
+        else:
+            return FederationHandlerRegistry()
 
     def build_server_notices_manager(self):
         if self.config.worker_app:
diff --git a/synapse/server_notices/resource_limits_server_notices.py b/synapse/server_notices/resource_limits_server_notices.py
new file mode 100644
index 0000000000..893b9001cd
--- /dev/null
+++ b/synapse/server_notices/resource_limits_server_notices.py
@@ -0,0 +1,204 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+from six import iteritems
+
+from twisted.internet import defer
+
+from synapse.api.constants import (
+    EventTypes,
+    ServerNoticeLimitReached,
+    ServerNoticeMsgType,
+)
+from synapse.api.errors import AuthError, ResourceLimitError, SynapseError
+from synapse.server_notices.server_notices_manager import SERVER_NOTICE_ROOM_TAG
+
+logger = logging.getLogger(__name__)
+
+
+class ResourceLimitsServerNotices(object):
+    """ Keeps track of whether the server has reached it's resource limit and
+    ensures that the client is kept up to date.
+    """
+    def __init__(self, hs):
+        """
+        Args:
+            hs (synapse.server.HomeServer):
+        """
+        self._server_notices_manager = hs.get_server_notices_manager()
+        self._store = hs.get_datastore()
+        self._auth = hs.get_auth()
+        self._config = hs.config
+        self._resource_limited = False
+        self._message_handler = hs.get_message_handler()
+        self._state = hs.get_state_handler()
+
+        self._notifier = hs.get_notifier()
+
+    @defer.inlineCallbacks
+    def maybe_send_server_notice_to_user(self, user_id):
+        """Check if we need to send a notice to this user, this will be true in
+        two cases.
+        1. The server has reached its limit does not reflect this
+        2. The room state indicates that the server has reached its limit when
+        actually the server is fine
+
+        Args:
+            user_id (str): user to check
+
+        Returns:
+            Deferred
+        """
+        if self._config.hs_disabled is True:
+            return
+
+        if self._config.limit_usage_by_mau is False:
+            return
+
+        if not self._server_notices_manager.is_enabled():
+            # Don't try to send server notices unless they've been enabled
+            return
+
+        timestamp = yield self._store.user_last_seen_monthly_active(user_id)
+        if timestamp is None:
+            # This user will be blocked from receiving the notice anyway.
+            # In practice, not sure we can ever get here
+            return
+
+        # Determine current state of room
+
+        room_id = yield self._server_notices_manager.get_notice_room_for_user(user_id)
+
+        if not room_id:
+            logger.warn("Failed to get server notices room")
+            return
+
+        yield self._check_and_set_tags(user_id, room_id)
+        currently_blocked, ref_events = yield self._is_room_currently_blocked(room_id)
+
+        try:
+            # Normally we should always pass in user_id if we have it, but in
+            # this case we are checking what would happen to other users if
+            # they were to arrive.
+            try:
+                yield self._auth.check_auth_blocking()
+                is_auth_blocking = False
+            except ResourceLimitError as e:
+                is_auth_blocking = True
+                event_content = e.msg
+                event_limit_type = e.limit_type
+
+            if currently_blocked and not is_auth_blocking:
+                # Room is notifying of a block, when it ought not to be.
+                # Remove block notification
+                content = {
+                    "pinned": ref_events
+                }
+                yield self._server_notices_manager.send_notice(
+                    user_id, content, EventTypes.Pinned, '',
+                )
+
+            elif not currently_blocked and is_auth_blocking:
+                # Room is not notifying of a block, when it ought to be.
+                # Add block notification
+                content = {
+                    'body': event_content,
+                    'msgtype': ServerNoticeMsgType,
+                    'server_notice_type': ServerNoticeLimitReached,
+                    'admin_uri': self._config.admin_uri,
+                    'limit_type': event_limit_type
+                }
+                event = yield self._server_notices_manager.send_notice(
+                    user_id, content, EventTypes.Message,
+                )
+
+                content = {
+                    "pinned": [
+                        event.event_id,
+                    ]
+                }
+                yield self._server_notices_manager.send_notice(
+                    user_id, content, EventTypes.Pinned, '',
+                )
+
+        except SynapseError as e:
+            logger.error("Error sending resource limits server notice: %s", e)
+
+    @defer.inlineCallbacks
+    def _check_and_set_tags(self, user_id, room_id):
+        """
+        Since server notices rooms were originally created without tags,
+        it is important to check that tags have been set correctly.
+        Args:
+            user_id(str): the user in question
+            room_id(str): the server notices room for that user
+        """
+        tags = yield self._store.get_tags_for_user(user_id)
+        server_notices_tags = tags.get(room_id)
+        need_to_set_tag = True
+        if server_notices_tags:
+            if server_notices_tags.get(SERVER_NOTICE_ROOM_TAG):
+                # tag already present, nothing to do here
+                need_to_set_tag = False
+        if need_to_set_tag:
+            max_id = yield self._store.add_tag_to_room(
+                user_id, room_id, SERVER_NOTICE_ROOM_TAG, {}
+            )
+            self._notifier.on_new_event(
+                "account_data_key", max_id, users=[user_id]
+            )
+
+    @defer.inlineCallbacks
+    def _is_room_currently_blocked(self, room_id):
+        """
+        Determines if the room is currently blocked
+
+        Args:
+            room_id(str): The room id of the server notices room
+
+        Returns:
+            Deferred[(bool, list)]: Whether the room is currently blocked, and
+            the list of pinned event ids that are unrelated to limit blocking.
+            This list can be used as a convenience when the block is lifted
+            and the remaining pinned event references need to be preserved.
+        """
+        currently_blocked = False
+        pinned_state_event = None
+        try:
+            pinned_state_event = yield self._state.get_current_state(
+                room_id, event_type=EventTypes.Pinned
+            )
+        except AuthError:
+            # The user has yet to join the server notices room
+            pass
+
+        referenced_events = []
+        if pinned_state_event is not None:
+            referenced_events = list(pinned_state_event.content.get('pinned', []))
+
+        events = yield self._store.get_events(referenced_events)
+        for event_id, event in iteritems(events):
+            if event.type != EventTypes.Message:
+                continue
+            if event.content.get("msgtype") == ServerNoticeMsgType:
+                currently_blocked = True
+                # remove event in case we need to disable blocking later on.
+                if event_id in referenced_events:
+                    referenced_events.remove(event.event_id)
+
+        defer.returnValue((currently_blocked, referenced_events))
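
A toy distillation of _is_room_currently_blocked above: the room counts
as blocked if any pinned event is a server notice, and unrelated pins
are collected so they can be preserved when the block is lifted. The
msgtype value below is assumed to mirror
synapse.api.constants.ServerNoticeMsgType:

    SERVER_NOTICE_MSGTYPE = "m.server_notice"  # assumed constant value

    def is_room_blocked(pinned_ids, events_by_id):
        blocked = False
        unrelated_pins = []
        for event_id in pinned_ids:
            ev = events_by_id.get(event_id, {})
            if ev.get("msgtype") == SERVER_NOTICE_MSGTYPE:
                blocked = True
            else:
                unrelated_pins.append(event_id)
        return blocked, unrelated_pins

    print(is_room_blocked(
        ["$notice:hs", "$other:hs"],
        {"$notice:hs": {"msgtype": SERVER_NOTICE_MSGTYPE}, "$other:hs": {}},
    ))  # (True, ['$other:hs'])
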
diff --git a/synapse/server_notices/server_notices_manager.py b/synapse/server_notices/server_notices_manager.py
index a26deace53..c5cc6d728e 100644
--- a/synapse/server_notices/server_notices_manager.py
+++ b/synapse/server_notices/server_notices_manager.py
@@ -22,6 +22,8 @@ from synapse.util.caches.descriptors import cachedInlineCallbacks
 
 logger = logging.getLogger(__name__)
 
+SERVER_NOTICE_ROOM_TAG = "m.server_notice"
+
 
 class ServerNoticesManager(object):
     def __init__(self, hs):
@@ -37,6 +39,8 @@ class ServerNoticesManager(object):
         self._event_creation_handler = hs.get_event_creation_handler()
         self._is_mine_id = hs.is_mine_id
 
+        self._notifier = hs.get_notifier()
+
     def is_enabled(self):
         """Checks if server notices are enabled on this server.
 
@@ -46,7 +50,10 @@ class ServerNoticesManager(object):
         return self._config.server_notices_mxid is not None
 
     @defer.inlineCallbacks
-    def send_notice(self, user_id, event_content):
+    def send_notice(
+        self, user_id, event_content,
+        type=EventTypes.Message, state_key=None
+    ):
         """Send a notice to the given user
 
         Creates the server notices room, if none exists.
@@ -54,9 +61,11 @@ class ServerNoticesManager(object):
         Args:
             user_id (str): mxid of user to send event to.
             event_content (dict): content of event to send
+            type(str): the type of the event, defaults to m.room.message
+            state_key(str|None): the state key of the event; if set, the
+                notice is sent as a state event
 
         Returns:
-            Deferred[None]
+            Deferred[FrozenEvent]
         """
         room_id = yield self.get_notice_room_for_user(user_id)
 
@@ -65,15 +74,20 @@ class ServerNoticesManager(object):
 
         logger.info("Sending server notice to %s", user_id)
 
-        yield self._event_creation_handler.create_and_send_nonmember_event(
-            requester, {
-                "type": EventTypes.Message,
-                "room_id": room_id,
-                "sender": system_mxid,
-                "content": event_content,
-            },
-            ratelimit=False,
+        event_dict = {
+            "type": type,
+            "room_id": room_id,
+            "sender": system_mxid,
+            "content": event_content,
+        }
+
+        if state_key is not None:
+            event_dict['state_key'] = state_key
+
+        res = yield self._event_creation_handler.create_and_send_nonmember_event(
+            requester, event_dict, ratelimit=False,
         )
+        defer.returnValue(res)
 
     @cachedInlineCallbacks()
     def get_notice_room_for_user(self, user_id):
@@ -142,5 +156,12 @@ class ServerNoticesManager(object):
         )
         room_id = info['room_id']
 
+        max_id = yield self._store.add_tag_to_room(
+            user_id, room_id, SERVER_NOTICE_ROOM_TAG, {},
+        )
+        self._notifier.on_new_event(
+            "account_data_key", max_id, users=[user_id]
+        )
+
         logger.info("Created server notices room %s for %s", room_id, user_id)
         defer.returnValue(room_id)
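
A hedged sketch of how a caller, such as the resource-limits notices
above, might use the extended send_notice signature to pin a previously
sent notice; manager and notice_event are placeholders for the real
wiring:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def pin_notice(manager, user_id, notice_event):
        # an m.room.pinned_events state event (EventTypes.Pinned);
        # passing state_key="" makes send_notice emit a state event
        yield manager.send_notice(
            user_id,
            {"pinned": [notice_event.event_id]},
            type="m.room.pinned_events",
            state_key="",
        )
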
diff --git a/synapse/server_notices/server_notices_sender.py b/synapse/server_notices/server_notices_sender.py
index 5d23965f34..6121b2f267 100644
--- a/synapse/server_notices/server_notices_sender.py
+++ b/synapse/server_notices/server_notices_sender.py
@@ -12,7 +12,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from twisted.internet import defer
+
 from synapse.server_notices.consent_server_notices import ConsentServerNotices
+from synapse.server_notices.resource_limits_server_notices import (
+    ResourceLimitsServerNotices,
+)
 
 
 class ServerNoticesSender(object):
@@ -25,34 +30,34 @@ class ServerNoticesSender(object):
         Args:
             hs (synapse.server.HomeServer):
         """
-        # todo: it would be nice to make this more dynamic
-        self._consent_server_notices = ConsentServerNotices(hs)
+        self._server_notices = (
+            ConsentServerNotices(hs),
+            ResourceLimitsServerNotices(hs)
+        )
 
+    @defer.inlineCallbacks
     def on_user_syncing(self, user_id):
         """Called when the user performs a sync operation.
 
         Args:
             user_id (str): mxid of user who synced
-
-        Returns:
-            Deferred
         """
-        return self._consent_server_notices.maybe_send_server_notice_to_user(
-            user_id,
-        )
+        for sn in self._server_notices:
+            yield sn.maybe_send_server_notice_to_user(
+                user_id,
+            )
 
+    @defer.inlineCallbacks
     def on_user_ip(self, user_id):
         """Called on the master when a worker process saw a client request.
 
         Args:
             user_id (str): mxid
-
-        Returns:
-            Deferred
         """
         # The synchrotrons use a stubbed version of ServerNoticesSender, so
         # we check for notices to send to the user in on_user_ip as well as
         # in on_user_syncing
-        return self._consent_server_notices.maybe_send_server_notice_to_user(
-            user_id,
-        )
+        for sn in self._server_notices:
+            yield sn.maybe_send_server_notice_to_user(
+                user_id,
+            )
diff --git a/synapse/state.py b/synapse/state/__init__.py
index e1092b97a9..b34970e4d1 100644
--- a/synapse/state.py
+++ b/synapse/state/__init__.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2018 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,23 +14,20 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-import hashlib
 import logging
 from collections import namedtuple
 
-from six import iteritems, iterkeys, itervalues
+from six import iteritems, itervalues
 
 from frozendict import frozendict
 
 from twisted.internet import defer
 
-from synapse import event_auth
-from synapse.api.constants import EventTypes
-from synapse.api.errors import AuthError
+from synapse.api.constants import EventTypes, RoomVersions
 from synapse.events.snapshot import EventContext
-from synapse.util.async import Linearizer
-from synapse.util.caches import CACHE_SIZE_FACTOR
+from synapse.state import v1
+from synapse.util.async_helpers import Linearizer
+from synapse.util.caches import get_cache_factor_for
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
@@ -40,7 +38,7 @@ logger = logging.getLogger(__name__)
 KeyStateTuple = namedtuple("KeyStateTuple", ("context", "type", "state_key"))
 
 
-SIZE_OF_CACHE = int(100000 * CACHE_SIZE_FACTOR)
+SIZE_OF_CACHE = 100000 * get_cache_factor_for("state_cache")
 EVICTION_TIMEOUT_SECONDS = 60 * 60
 
 
@@ -264,6 +262,7 @@ class StateHandler(object):
             defer.returnValue(context)
 
         logger.debug("calling resolve_state_groups from compute_event_context")
+
         entry = yield self.resolve_state_groups_for_events(
             event.room_id, [e for e, _ in event.prev_events],
         )
@@ -338,8 +337,11 @@ class StateHandler(object):
         event, resolves conflicts between them and returns them.
 
         Args:
-            room_id (str):
-            event_ids (list[str]):
+            room_id (str)
+            event_ids (list[str])
+            explicit_room_version (str|None): If set, uses the given room
+                version to choose the resolution algorithm. If None, then
+                checks the database for the room version.
 
         Returns:
             Deferred[_StateCacheEntry]: resolved state
@@ -353,7 +355,12 @@ class StateHandler(object):
             room_id, event_ids
         )
 
-        if len(state_groups_ids) == 1:
+        if len(state_groups_ids) == 0:
+            defer.returnValue(_StateCacheEntry(
+                state={},
+                state_group=None,
+            ))
+        elif len(state_groups_ids) == 1:
             name, state_list = list(state_groups_ids.items()).pop()
 
             prev_group, delta_ids = yield self.store.get_state_group_delta(name)
@@ -365,8 +372,11 @@ class StateHandler(object):
                 delta_ids=delta_ids,
             ))
 
+        room_version = yield self.store.get_room_version(room_id)
+
         result = yield self._state_resolution_handler.resolve_state_groups(
-            room_id, state_groups_ids, None, self._state_map_factory,
+            room_id, room_version, state_groups_ids, None,
+            self._state_map_factory,
         )
         defer.returnValue(result)
 
@@ -375,7 +385,7 @@ class StateHandler(object):
             ev_ids, get_prev_content=False, check_redacted=False,
         )
 
-    def resolve_events(self, state_sets, event):
+    def resolve_events(self, room_version, state_sets, event):
         logger.info(
             "Resolving state for %s with %d groups", event.room_id, len(state_sets)
         )
@@ -391,7 +401,9 @@ class StateHandler(object):
         }
 
         with Measure(self.clock, "state._resolve_events"):
-            new_state = resolve_events_with_state_map(state_set_ids, state_map)
+            new_state = resolve_events_with_state_map(
+                room_version, state_set_ids, state_map,
+            )
 
         new_state = {
             key: state_map[ev_id] for key, ev_id in iteritems(new_state)
@@ -430,7 +442,7 @@ class StateResolutionHandler(object):
     @defer.inlineCallbacks
     @log_function
     def resolve_state_groups(
-        self, room_id, state_groups_ids, event_map, state_map_factory,
+        self, room_id, room_version, state_groups_ids, event_map, state_map_factory,
     ):
         """Resolves conflicts between a set of state groups
 
@@ -439,6 +451,7 @@ class StateResolutionHandler(object):
 
         Args:
             room_id (str): room we are resolving for (used for logging)
+            room_version (str): version of the room
             state_groups_ids (dict[int, dict[(str, str), str]]):
                  map from state group id to the state in that state group
                 (where 'state' is a map from state key to event id)
@@ -492,6 +505,7 @@ class StateResolutionHandler(object):
                 logger.info("Resolving conflicted state for %r", room_id)
                 with Measure(self.clock, "state._resolve_events"):
                     new_state = yield resolve_events_with_factory(
+                        room_version,
                         list(itervalues(state_groups_ids)),
                         event_map=event_map,
                         state_map_factory=state_map_factory,
@@ -575,16 +589,10 @@ def _make_state_cache_entry(
     )
 
 
-def _ordered_events(events):
-    def key_func(e):
-        return -int(e.depth), hashlib.sha1(e.event_id.encode('ascii')).hexdigest()
-
-    return sorted(events, key=key_func)
-
-
-def resolve_events_with_state_map(state_sets, state_map):
+def resolve_events_with_state_map(room_version, state_sets, state_map):
     """
     Args:
+        room_version(str): Version of the room
         state_sets(list): List of dicts of (type, state_key) -> event_id,
             which are the different state groups to resolve.
         state_map(dict): a dict from event_id to event, for all events in
@@ -594,75 +602,23 @@ def resolve_events_with_state_map(state_sets, state_map):
         dict[(str, str), str]:
             a map from (type, state_key) to event_id.
     """
-    if len(state_sets) == 1:
-        return state_sets[0]
-
-    unconflicted_state, conflicted_state = _seperate(
-        state_sets,
-    )
-
-    auth_events = _create_auth_events_from_maps(
-        unconflicted_state, conflicted_state, state_map
-    )
-
-    return _resolve_with_state(
-        unconflicted_state, conflicted_state, auth_events, state_map
-    )
-
-
-def _seperate(state_sets):
-    """Takes the state_sets and figures out which keys are conflicted and
-    which aren't. i.e., which have multiple different event_ids associated
-    with them in different state sets.
-
-    Args:
-        state_sets(iterable[dict[(str, str), str]]):
-            List of dicts of (type, state_key) -> event_id, which are the
-            different state groups to resolve.
-
-    Returns:
-        (dict[(str, str), str], dict[(str, str), set[str]]):
-            A tuple of (unconflicted_state, conflicted_state), where:
-
-            unconflicted_state is a dict mapping (type, state_key)->event_id
-            for unconflicted state keys.
-
-            conflicted_state is a dict mapping (type, state_key) to a set of
-            event ids for conflicted state keys.
-    """
-    state_set_iterator = iter(state_sets)
-    unconflicted_state = dict(next(state_set_iterator))
-    conflicted_state = {}
-
-    for state_set in state_set_iterator:
-        for key, value in iteritems(state_set):
-            # Check if there is an unconflicted entry for the state key.
-            unconflicted_value = unconflicted_state.get(key)
-            if unconflicted_value is None:
-                # There isn't an unconflicted entry so check if there is a
-                # conflicted entry.
-                ls = conflicted_state.get(key)
-                if ls is None:
-                    # There wasn't a conflicted entry so haven't seen this key before.
-                    # Therefore it isn't conflicted yet.
-                    unconflicted_state[key] = value
-                else:
-                    # This key is already conflicted, add our value to the conflict set.
-                    ls.add(value)
-            elif unconflicted_value != value:
-                # If the unconflicted value is not the same as our value then we
-                # have a new conflict. So move the key from the unconflicted_state
-                # to the conflicted state.
-                conflicted_state[key] = {value, unconflicted_value}
-                unconflicted_state.pop(key, None)
-
-    return unconflicted_state, conflicted_state
+    if room_version in (RoomVersions.V1, RoomVersions.VDH_TEST,):
+        return v1.resolve_events_with_state_map(
+            state_sets, state_map,
+        )
+    else:
+        # This should only happen if we added a version but forgot to add it to
+        # the list above.
+        raise Exception(
+            "No state resolution algorithm defined for version %r" % (room_version,)
+        )
 
 
-@defer.inlineCallbacks
-def resolve_events_with_factory(state_sets, event_map, state_map_factory):
+def resolve_events_with_factory(room_version, state_sets, event_map, state_map_factory):
     """
     Args:
+        room_version(str): Version of the room
+
         state_sets(list): List of dicts of (type, state_key) -> event_id,
             which are the different state groups to resolve.
 
@@ -682,185 +638,13 @@ def resolve_events_with_factory(state_sets, event_map, state_map_factory):
         Deferred[dict[(str, str), str]]:
             a map from (type, state_key) to event_id.
     """
-    if len(state_sets) == 1:
-        defer.returnValue(state_sets[0])
-
-    unconflicted_state, conflicted_state = _seperate(
-        state_sets,
-    )
-
-    needed_events = set(
-        event_id
-        for event_ids in itervalues(conflicted_state)
-        for event_id in event_ids
-    )
-    if event_map is not None:
-        needed_events -= set(iterkeys(event_map))
-
-    logger.info("Asking for %d conflicted events", len(needed_events))
-
-    # dict[str, FrozenEvent]: a map from state event id to event. Only includes
-    # the state events which are in conflict (and those in event_map)
-    state_map = yield state_map_factory(needed_events)
-    if event_map is not None:
-        state_map.update(event_map)
-
-    # get the ids of the auth events which allow us to authenticate the
-    # conflicted state, picking only from the unconflicting state.
-    #
-    # dict[(str, str), str]: a map from state key to event id
-    auth_events = _create_auth_events_from_maps(
-        unconflicted_state, conflicted_state, state_map
-    )
-
-    new_needed_events = set(itervalues(auth_events))
-    new_needed_events -= needed_events
-    if event_map is not None:
-        new_needed_events -= set(iterkeys(event_map))
-
-    logger.info("Asking for %d auth events", len(new_needed_events))
-
-    state_map_new = yield state_map_factory(new_needed_events)
-    state_map.update(state_map_new)
-
-    defer.returnValue(_resolve_with_state(
-        unconflicted_state, conflicted_state, auth_events, state_map
-    ))
-
-
-def _create_auth_events_from_maps(unconflicted_state, conflicted_state, state_map):
-    auth_events = {}
-    for event_ids in itervalues(conflicted_state):
-        for event_id in event_ids:
-            if event_id in state_map:
-                keys = event_auth.auth_types_for_event(state_map[event_id])
-                for key in keys:
-                    if key not in auth_events:
-                        event_id = unconflicted_state.get(key, None)
-                        if event_id:
-                            auth_events[key] = event_id
-    return auth_events
-
-
-def _resolve_with_state(unconflicted_state_ids, conflicted_state_ids, auth_event_ids,
-                        state_map):
-    conflicted_state = {}
-    for key, event_ids in iteritems(conflicted_state_ids):
-        events = [state_map[ev_id] for ev_id in event_ids if ev_id in state_map]
-        if len(events) > 1:
-            conflicted_state[key] = events
-        elif len(events) == 1:
-            unconflicted_state_ids[key] = events[0].event_id
-
-    auth_events = {
-        key: state_map[ev_id]
-        for key, ev_id in iteritems(auth_event_ids)
-        if ev_id in state_map
-    }
-
-    try:
-        resolved_state = _resolve_state_events(
-            conflicted_state, auth_events
+    if room_version in (RoomVersions.V1, RoomVersions.VDH_TEST,):
+        return v1.resolve_events_with_factory(
+            state_sets, event_map, state_map_factory,
+        )
+    else:
+        # This should only happen if we added a version but forgot to add it to
+        # the list above.
+        raise Exception(
+            "No state resolution algorithm defined for version %r" % (room_version,)
         )
-    except Exception:
-        logger.exception("Failed to resolve state")
-        raise
-
-    new_state = unconflicted_state_ids
-    for key, event in iteritems(resolved_state):
-        new_state[key] = event.event_id
-
-    return new_state
-
-
-def _resolve_state_events(conflicted_state, auth_events):
-    """ This is where we actually decide which of the conflicted state to
-    use.
-
-    We resolve conflicts in the following order:
-        1. power levels
-        2. join rules
-        3. memberships
-        4. other events.
-    """
-    resolved_state = {}
-    if POWER_KEY in conflicted_state:
-        events = conflicted_state[POWER_KEY]
-        logger.debug("Resolving conflicted power levels %r", events)
-        resolved_state[POWER_KEY] = _resolve_auth_events(
-            events, auth_events)
-
-    auth_events.update(resolved_state)
-
-    for key, events in iteritems(conflicted_state):
-        if key[0] == EventTypes.JoinRules:
-            logger.debug("Resolving conflicted join rules %r", events)
-            resolved_state[key] = _resolve_auth_events(
-                events,
-                auth_events
-            )
-
-    auth_events.update(resolved_state)
-
-    for key, events in iteritems(conflicted_state):
-        if key[0] == EventTypes.Member:
-            logger.debug("Resolving conflicted member lists %r", events)
-            resolved_state[key] = _resolve_auth_events(
-                events,
-                auth_events
-            )
-
-    auth_events.update(resolved_state)
-
-    for key, events in iteritems(conflicted_state):
-        if key not in resolved_state:
-            logger.debug("Resolving conflicted state %r:%r", key, events)
-            resolved_state[key] = _resolve_normal_events(
-                events, auth_events
-            )
-
-    return resolved_state
-
-
-def _resolve_auth_events(events, auth_events):
-    reverse = [i for i in reversed(_ordered_events(events))]
-
-    auth_keys = set(
-        key
-        for event in events
-        for key in event_auth.auth_types_for_event(event)
-    )
-
-    new_auth_events = {}
-    for key in auth_keys:
-        auth_event = auth_events.get(key, None)
-        if auth_event:
-            new_auth_events[key] = auth_event
-
-    auth_events = new_auth_events
-
-    prev_event = reverse[0]
-    for event in reverse[1:]:
-        auth_events[(prev_event.type, prev_event.state_key)] = prev_event
-        try:
-            # The signatures have already been checked at this point
-            event_auth.check(event, auth_events, do_sig_check=False, do_size_check=False)
-            prev_event = event
-        except AuthError:
-            return prev_event
-
-    return event
-
-
-def _resolve_normal_events(events, auth_events):
-    for event in _ordered_events(events):
-        try:
-            # The signatures have already been checked at this point
-            event_auth.check(event, auth_events, do_sig_check=False, do_size_check=False)
-            return event
-        except AuthError:
-            pass
-
-    # Use the last event (the one with the least depth) if they all fail
-    # the auth check.
-    return event
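The hunks above replace the inline v1 algorithm with a per-room-version dispatch into synapse.state.v1. As a minimal, self-contained sketch of that dispatch pattern (V1, VDH_TEST and resolve_v1 below are illustrative stand-ins, not the real RoomVersions constants or the synapse.state.v1 module):

def resolve_v1(state_sets, state_map):
    # Stand-in for synapse.state.v1.resolve_events_with_state_map.
    return state_sets[0]

V1 = "1"                       # stand-in for RoomVersions.V1
VDH_TEST = "vdh-test-version"  # stand-in for RoomVersions.VDH_TEST

def resolve_events_with_state_map(room_version, state_sets, state_map):
    # Route each known room version to its resolution algorithm; reaching
    # the raise means a version was added without wiring up an algorithm.
    if room_version in (V1, VDH_TEST):
        return resolve_v1(state_sets, state_map)
    raise Exception(
        "No state resolution algorithm defined for version %r" % (room_version,)
    )

Failing loudly for unknown versions keeps a forgotten mapping from silently falling back to the wrong algorithm.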
diff --git a/synapse/state/v1.py b/synapse/state/v1.py
new file mode 100644
index 0000000000..3a1f7054a1
--- /dev/null
+++ b/synapse/state/v1.py
@@ -0,0 +1,321 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import hashlib
+import logging
+
+from six import iteritems, iterkeys, itervalues
+
+from twisted.internet import defer
+
+from synapse import event_auth
+from synapse.api.constants import EventTypes
+from synapse.api.errors import AuthError
+
+logger = logging.getLogger(__name__)
+
+
+POWER_KEY = (EventTypes.PowerLevels, "")
+
+
+def resolve_events_with_state_map(state_sets, state_map):
+    """
+    Args:
+        state_sets(list): List of dicts of (type, state_key) -> event_id,
+            which are the different state groups to resolve.
+        state_map(dict): a dict from event_id to event, for all events in
+            state_sets.
+
+    Returns:
+        dict[(str, str), str]:
+            a map from (type, state_key) to event_id.
+    """
+    if len(state_sets) == 1:
+        return state_sets[0]
+
+    unconflicted_state, conflicted_state = _seperate(
+        state_sets,
+    )
+
+    auth_events = _create_auth_events_from_maps(
+        unconflicted_state, conflicted_state, state_map
+    )
+
+    return _resolve_with_state(
+        unconflicted_state, conflicted_state, auth_events, state_map
+    )
+
+
+@defer.inlineCallbacks
+def resolve_events_with_factory(state_sets, event_map, state_map_factory):
+    """
+    Args:
+        state_sets(list): List of dicts of (type, state_key) -> event_id,
+            which are the different state groups to resolve.
+
+        event_map(dict[str,FrozenEvent]|None):
+            a dict from event_id to event, for any events that we happen to
+            have in flight (eg, those currently being persisted). This will be
+            used as a starting point for finding the state we need; any missing
+            events will be requested via state_map_factory.
+
+            If None, all events will be fetched via state_map_factory.
+
+        state_map_factory(func): will be called
+            with a list of event_ids that are needed, and should return a
+            Deferred of dict of event_id to event.
+
+    Returns:
+        Deferred[dict[(str, str), str]]:
+            a map from (type, state_key) to event_id.
+    """
+    if len(state_sets) == 1:
+        defer.returnValue(state_sets[0])
+
+    unconflicted_state, conflicted_state = _seperate(
+        state_sets,
+    )
+
+    needed_events = set(
+        event_id
+        for event_ids in itervalues(conflicted_state)
+        for event_id in event_ids
+    )
+    if event_map is not None:
+        needed_events -= set(iterkeys(event_map))
+
+    logger.info("Asking for %d conflicted events", len(needed_events))
+
+    # dict[str, FrozenEvent]: a map from state event id to event. Only includes
+    # the state events which are in conflict (and those in event_map)
+    state_map = yield state_map_factory(needed_events)
+    if event_map is not None:
+        state_map.update(event_map)
+
+    # get the ids of the auth events which allow us to authenticate the
+    # conflicted state, picking only from the unconflicted state.
+    #
+    # dict[(str, str), str]: a map from state key to event id
+    auth_events = _create_auth_events_from_maps(
+        unconflicted_state, conflicted_state, state_map
+    )
+
+    new_needed_events = set(itervalues(auth_events))
+    new_needed_events -= needed_events
+    if event_map is not None:
+        new_needed_events -= set(iterkeys(event_map))
+
+    logger.info("Asking for %d auth events", len(new_needed_events))
+
+    state_map_new = yield state_map_factory(new_needed_events)
+    state_map.update(state_map_new)
+
+    defer.returnValue(_resolve_with_state(
+        unconflicted_state, conflicted_state, auth_events, state_map
+    ))
+
+
+def _seperate(state_sets):
+    """Takes the state_sets and figures out which keys are conflicted and
+    which aren't, i.e. which have multiple different event_ids associated
+    with them in different state sets.
+
+    Args:
+        state_sets(iterable[dict[(str, str), str]]):
+            List of dicts of (type, state_key) -> event_id, which are the
+            different state groups to resolve.
+
+    Returns:
+        (dict[(str, str), str], dict[(str, str), set[str]]):
+            A tuple of (unconflicted_state, conflicted_state), where:
+
+            unconflicted_state is a dict mapping (type, state_key)->event_id
+            for unconflicted state keys.
+
+            conflicted_state is a dict mapping (type, state_key) to a set of
+            event ids for conflicted state keys.
+    """
+    state_set_iterator = iter(state_sets)
+    unconflicted_state = dict(next(state_set_iterator))
+    conflicted_state = {}
+
+    for state_set in state_set_iterator:
+        for key, value in iteritems(state_set):
+            # Check if there is an unconflicted entry for the state key.
+            unconflicted_value = unconflicted_state.get(key)
+            if unconflicted_value is None:
+                # There isn't an unconflicted entry so check if there is a
+                # conflicted entry.
+                ls = conflicted_state.get(key)
+                if ls is None:
+                    # There wasn't a conflicted entry, so we haven't seen this
+                    # key before; therefore it isn't conflicted yet.
+                    unconflicted_state[key] = value
+                else:
+                    # This key is already conflicted, add our value to the conflict set.
+                    ls.add(value)
+            elif unconflicted_value != value:
+                # If the unconflicted value is not the same as our value then we
+                # have a new conflict. So move the key from the unconflicted_state
+                # to the conflicted state.
+                conflicted_state[key] = {value, unconflicted_value}
+                unconflicted_state.pop(key, None)
+
+    return unconflicted_state, conflicted_state
+
+
+def _create_auth_events_from_maps(unconflicted_state, conflicted_state, state_map):
+    auth_events = {}
+    for event_ids in itervalues(conflicted_state):
+        for event_id in event_ids:
+            if event_id in state_map:
+                keys = event_auth.auth_types_for_event(state_map[event_id])
+                for key in keys:
+                    if key not in auth_events:
+                        event_id = unconflicted_state.get(key, None)
+                        if event_id:
+                            auth_events[key] = event_id
+    return auth_events
+
+
+def _resolve_with_state(unconflicted_state_ids, conflicted_state_ids, auth_event_ids,
+                        state_map):
+    conflicted_state = {}
+    for key, event_ids in iteritems(conflicted_state_ids):
+        events = [state_map[ev_id] for ev_id in event_ids if ev_id in state_map]
+        if len(events) > 1:
+            conflicted_state[key] = events
+        elif len(events) == 1:
+            unconflicted_state_ids[key] = events[0].event_id
+
+    auth_events = {
+        key: state_map[ev_id]
+        for key, ev_id in iteritems(auth_event_ids)
+        if ev_id in state_map
+    }
+
+    try:
+        resolved_state = _resolve_state_events(
+            conflicted_state, auth_events
+        )
+    except Exception:
+        logger.exception("Failed to resolve state")
+        raise
+
+    new_state = unconflicted_state_ids
+    for key, event in iteritems(resolved_state):
+        new_state[key] = event.event_id
+
+    return new_state
+
+
+def _resolve_state_events(conflicted_state, auth_events):
+    """ This is where we actually decide which of the conflicted state to
+    use.
+
+    We resolve conflicts in the following order:
+        1. power levels
+        2. join rules
+        3. memberships
+        4. other events.
+    """
+    resolved_state = {}
+    if POWER_KEY in conflicted_state:
+        events = conflicted_state[POWER_KEY]
+        logger.debug("Resolving conflicted power levels %r", events)
+        resolved_state[POWER_KEY] = _resolve_auth_events(
+            events, auth_events)
+
+    auth_events.update(resolved_state)
+
+    for key, events in iteritems(conflicted_state):
+        if key[0] == EventTypes.JoinRules:
+            logger.debug("Resolving conflicted join rules %r", events)
+            resolved_state[key] = _resolve_auth_events(
+                events,
+                auth_events
+            )
+
+    auth_events.update(resolved_state)
+
+    for key, events in iteritems(conflicted_state):
+        if key[0] == EventTypes.Member:
+            logger.debug("Resolving conflicted member lists %r", events)
+            resolved_state[key] = _resolve_auth_events(
+                events,
+                auth_events
+            )
+
+    auth_events.update(resolved_state)
+
+    for key, events in iteritems(conflicted_state):
+        if key not in resolved_state:
+            logger.debug("Resolving conflicted state %r:%r", key, events)
+            resolved_state[key] = _resolve_normal_events(
+                events, auth_events
+            )
+
+    return resolved_state
+
+
+def _resolve_auth_events(events, auth_events):
+    reverse = [i for i in reversed(_ordered_events(events))]
+
+    auth_keys = set(
+        key
+        for event in events
+        for key in event_auth.auth_types_for_event(event)
+    )
+
+    new_auth_events = {}
+    for key in auth_keys:
+        auth_event = auth_events.get(key, None)
+        if auth_event:
+            new_auth_events[key] = auth_event
+
+    auth_events = new_auth_events
+
+    prev_event = reverse[0]
+    for event in reverse[1:]:
+        auth_events[(prev_event.type, prev_event.state_key)] = prev_event
+        try:
+            # The signatures have already been checked at this point
+            event_auth.check(event, auth_events, do_sig_check=False, do_size_check=False)
+            prev_event = event
+        except AuthError:
+            return prev_event
+
+    return event
+
+
+def _resolve_normal_events(events, auth_events):
+    for event in _ordered_events(events):
+        try:
+            # The signatures have already been checked at this point
+            event_auth.check(event, auth_events, do_sig_check=False, do_size_check=False)
+            return event
+        except AuthError:
+            pass
+
+    # Use the last event (the one with the least depth) if they all fail
+    # the auth check.
+    return event
+
+
+def _ordered_events(events):
+    def key_func(e):
+        return -int(e.depth), hashlib.sha1(e.event_id.encode('ascii')).hexdigest()
+
+    return sorted(events, key=key_func)
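The new module resolves conflicts in a fixed order (power levels, then join rules, then memberships, then everything else), driven by the two helpers _seperate and _ordered_events. A self-contained sketch of those helpers, using a toy Event type and made-up event IDs rather than real FrozenEvents:

import hashlib
from collections import namedtuple

Event = namedtuple("Event", ["event_id", "depth"])  # toy stand-in for FrozenEvent

def seperate(state_sets):
    # Mirrors _seperate above (keeping the upstream spelling): the first
    # state set seeds the unconflicted map; later sets either agree, or move
    # the key into the conflicted map as a set of candidate event IDs.
    unconflicted = dict(state_sets[0])
    conflicted = {}
    for state_set in state_sets[1:]:
        for key, value in state_set.items():
            existing = unconflicted.get(key)
            if existing is None:
                ls = conflicted.get(key)
                if ls is None:
                    unconflicted[key] = value
                else:
                    ls.add(value)
            elif existing != value:
                conflicted[key] = {value, existing}
                unconflicted.pop(key, None)
    return unconflicted, conflicted

def ordered_events(events):
    # Mirrors _ordered_events: deepest event sorts first, with ties broken
    # by the SHA-1 of the event ID so the order is deterministic everywhere.
    def key_func(e):
        return -int(e.depth), hashlib.sha1(e.event_id.encode("ascii")).hexdigest()
    return sorted(events, key=key_func)

a = {("m.room.topic", ""): "$old", ("m.room.name", ""): "$name"}
b = {("m.room.topic", ""): "$new", ("m.room.name", ""): "$name"}
unconflicted, conflicted = seperate([a, b])
# unconflicted == {("m.room.name", ""): "$name"}
# conflicted   == {("m.room.topic", ""): {"$old", "$new"}}

print(ordered_events([Event("$a", 1), Event("$b", 2)])[0].event_id)  # "$b"

In resolve_events_with_factory above, the same unconflicted/conflicted split also drives two batched fetches through state_map_factory: first the conflicted event IDs themselves, then whichever of their auth events the unconflicted state can supply.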
diff --git a/synapse/static/client/register/index.html b/synapse/static/client/register/index.html
index 600b3ee41e..886f2edd1f 100644
--- a/synapse/static/client/register/index.html
+++ b/synapse/static/client/register/index.html
@@ -4,7 +4,7 @@
 <meta name='viewport' content='width=device-width, initial-scale=1, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0'> 
 <link rel="stylesheet" href="style.css">
 <script src="js/jquery-2.1.3.min.js"></script>
-<script src="js/recaptcha_ajax.js"></script>
+<script src="https://www.google.com/recaptcha/api/js/recaptcha_ajax.js"></script>
 <script src="register_config.js"></script>
 <script src="js/register.js"></script>
 </head>
diff --git a/synapse/static/client/register/js/recaptcha_ajax.js b/synapse/static/client/register/js/recaptcha_ajax.js
deleted file mode 100644
index d0e71e5b88..0000000000
--- a/synapse/static/client/register/js/recaptcha_ajax.js
+++ /dev/null
@@ -1,195 +0,0 @@
-(function(){var h,k=this,l=function(a){return void 0!==a},ba=function(){},n=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&
-!a.propertyIsEnumerable("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a.call)return"object";return b},p=function(a){return"array"==n(a)},ca=function(a){var b=n(a);return"array"==b||"object"==b&&"number"==typeof a.length},q=function(a){return"string"==typeof a},r=function(a){return"function"==n(a)},da=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},ea=function(a,b,c){return a.call.apply(a.bind,arguments)},fa=function(a,b,c){if(!a)throw Error();
-if(2<arguments.length){var d=Array.prototype.slice.call(arguments,2);return function(){var c=Array.prototype.slice.call(arguments);Array.prototype.unshift.apply(c,d);return a.apply(b,c)}}return function(){return a.apply(b,arguments)}},s=function(a,b,c){s=Function.prototype.bind&&-1!=Function.prototype.bind.toString().indexOf("native code")?ea:fa;return s.apply(null,arguments)},ga=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=c.slice();b.push.apply(b,arguments);
-return a.apply(this,b)}},ha=Date.now||function(){return+new Date},ia=null,t=function(a,b){var c=a.split("."),d=k;c[0]in d||!d.execScript||d.execScript("var "+c[0]);for(var e;c.length&&(e=c.shift());)!c.length&&l(b)?d[e]=b:d=d[e]?d[e]:d[e]={}},u=function(a,b){function c(){}c.prototype=b.prototype;a.superClass_=b.prototype;a.prototype=new c;a.base=function(a,c,g){return b.prototype[c].apply(a,Array.prototype.slice.call(arguments,2))}};
-Function.prototype.bind=Function.prototype.bind||function(a,b){if(1<arguments.length){var c=Array.prototype.slice.call(arguments,1);c.unshift(this,a);return s.apply(null,c)}return s(this,a)};var v={};t("RecaptchaTemplates",v);v.VertHtml='<table id="recaptcha_table" class="recaptchatable" > <tr> <td colspan="6" class=\'recaptcha_r1_c1\'></td> </tr> <tr> <td class=\'recaptcha_r2_c1\'></td> <td colspan="4" class=\'recaptcha_image_cell\'><center><div id="recaptcha_image"></div></center></td> <td class=\'recaptcha_r2_c2\'></td> </tr> <tr> <td rowspan="6" class=\'recaptcha_r3_c1\'></td> <td colspan="4" class=\'recaptcha_r3_c2\'></td> <td rowspan="6" class=\'recaptcha_r3_c3\'></td> </tr> <tr> <td rowspan="3" class=\'recaptcha_r4_c1\' height="49"> <div class="recaptcha_input_area"> <input name="recaptcha_response_field" id="recaptcha_response_field" type="text" autocorrect="off" autocapitalize="off" placeholder="" /> <span id="recaptcha_privacy" class="recaptcha_only_if_privacy"></span> </div> </td> <td rowspan="4" class=\'recaptcha_r4_c2\'></td> <td><a id=\'recaptcha_reload_btn\'><img id=\'recaptcha_reload\' width="25" height="17" /></a></td> <td rowspan="4" class=\'recaptcha_r4_c4\'></td> </tr> <tr> <td><a id=\'recaptcha_switch_audio_btn\' class="recaptcha_only_if_image"><img id=\'recaptcha_switch_audio\' width="25" height="16" alt="" /></a><a id=\'recaptcha_switch_img_btn\' class="recaptcha_only_if_audio"><img id=\'recaptcha_switch_img\' width="25" height="16" alt=""/></a></td> </tr> <tr> <td><a id=\'recaptcha_whatsthis_btn\'><img id=\'recaptcha_whatsthis\' width="25" height="16" /></a></td> </tr> <tr> <td class=\'recaptcha_r7_c1\'></td> <td class=\'recaptcha_r8_c1\'></td> </tr> </table> ';v.CleanCss=".recaptchatable td img{display:block}.recaptchatable .recaptcha_image_cell center img{height:57px}.recaptchatable .recaptcha_image_cell center{height:57px}.recaptchatable .recaptcha_image_cell{background-color:white;height:57px;padding:7px!important}.recaptchatable,#recaptcha_area tr,#recaptcha_area td,#recaptcha_area th{margin:0!important;border:0!important;border-collapse:collapse!important;vertical-align:middle!important}.recaptchatable *{margin:0;padding:0;border:0;color:black;position:static;top:auto;left:auto;right:auto;bottom:auto}.recaptchatable #recaptcha_image{position:relative;margin:auto;border:1px solid #dfdfdf!important}.recaptchatable #recaptcha_image #recaptcha_challenge_image{display:block}.recaptchatable #recaptcha_image #recaptcha_ad_image{display:block;position:absolute;top:0}.recaptchatable a img{border:0}.recaptchatable a,.recaptchatable a:hover{cursor:pointer;outline:none;border:0!important;padding:0!important;text-decoration:none;color:blue;background:none!important;font-weight:normal}.recaptcha_input_area{position:relative!important;background:none!important}.recaptchatable label.recaptcha_input_area_text{border:1px solid #dfdfdf!important;margin:0!important;padding:0!important;position:static!important;top:auto!important;left:auto!important;right:auto!important;bottom:auto!important}.recaptcha_theme_red label.recaptcha_input_area_text,.recaptcha_theme_white label.recaptcha_input_area_text{color:black!important}.recaptcha_theme_blackglass label.recaptcha_input_area_text{color:white!important}.recaptchatable #recaptcha_response_field{font-size:11pt}.recaptcha_theme_blackglass #recaptcha_response_field,.recaptcha_theme_white #recaptcha_response_field{border:1px solid gray}.recaptcha_theme_red #recaptcha_response_field{border:1px solid 
#cca940}.recaptcha_audio_cant_hear_link{font-size:7pt;color:black}.recaptchatable{line-height:1em;border:1px solid #dfdfdf!important}.recaptcha_error_text{color:red}.recaptcha_only_if_privacy{float:right;text-align:right;margin-right:7px}#recaptcha-ad-choices{position:absolute;height:15px;top:0;right:0}#recaptcha-ad-choices img{height:15px}.recaptcha-ad-choices-collapsed{width:30px;height:15px;display:block}.recaptcha-ad-choices-expanded{width:75px;height:15px;display:none}#recaptcha-ad-choices:hover .recaptcha-ad-choices-collapsed{display:none}#recaptcha-ad-choices:hover .recaptcha-ad-choices-expanded{display:block}";v.CleanHtml='<table id="recaptcha_table" class="recaptchatable"> <tr height="73"> <td class=\'recaptcha_image_cell\' width="302"><center><div id="recaptcha_image"></div></center></td> <td style="padding: 10px 7px 7px 7px;"> <a id=\'recaptcha_reload_btn\'><img id=\'recaptcha_reload\' width="25" height="18" alt="" /></a> <a id=\'recaptcha_switch_audio_btn\' class="recaptcha_only_if_image"><img id=\'recaptcha_switch_audio\' width="25" height="15" alt="" /></a><a id=\'recaptcha_switch_img_btn\' class="recaptcha_only_if_audio"><img id=\'recaptcha_switch_img\' width="25" height="15" alt=""/></a> <a id=\'recaptcha_whatsthis_btn\'><img id=\'recaptcha_whatsthis\' width="25" height="16" /></a> </td> <td style="padding: 18px 7px 18px 7px;"> <img id=\'recaptcha_logo\' alt="" width="71" height="36" /> </td> </tr> <tr> <td style="padding-left: 7px;"> <div class="recaptcha_input_area" style="padding-top: 2px; padding-bottom: 7px;"> <input style="border: 1px solid #3c3c3c; width: 302px;" name="recaptcha_response_field" id="recaptcha_response_field" type="text" /> </div> </td> <td colspan=2><span id="recaptcha_privacy" class="recaptcha_only_if_privacy"></span></td> </tr> </table> ';v.VertCss=".recaptchatable td img{display:block}.recaptchatable .recaptcha_r1_c1{background:url('IMGROOT/sprite.png') 0 -63px no-repeat;width:318px;height:9px}.recaptchatable .recaptcha_r2_c1{background:url('IMGROOT/sprite.png') -18px 0 no-repeat;width:9px;height:57px}.recaptchatable .recaptcha_r2_c2{background:url('IMGROOT/sprite.png') -27px 0 no-repeat;width:9px;height:57px}.recaptchatable .recaptcha_r3_c1{background:url('IMGROOT/sprite.png') 0 0 no-repeat;width:9px;height:63px}.recaptchatable .recaptcha_r3_c2{background:url('IMGROOT/sprite.png') -18px -57px no-repeat;width:300px;height:6px}.recaptchatable .recaptcha_r3_c3{background:url('IMGROOT/sprite.png') -9px 0 no-repeat;width:9px;height:63px}.recaptchatable .recaptcha_r4_c1{background:url('IMGROOT/sprite.png') -43px 0 no-repeat;width:171px;height:49px}.recaptchatable .recaptcha_r4_c2{background:url('IMGROOT/sprite.png') -36px 0 no-repeat;width:7px;height:57px}.recaptchatable .recaptcha_r4_c4{background:url('IMGROOT/sprite.png') -214px 0 no-repeat;width:97px;height:57px}.recaptchatable .recaptcha_r7_c1{background:url('IMGROOT/sprite.png') -43px -49px no-repeat;width:171px;height:8px}.recaptchatable .recaptcha_r8_c1{background:url('IMGROOT/sprite.png') -43px -49px no-repeat;width:25px;height:8px}.recaptchatable .recaptcha_image_cell center img{height:57px}.recaptchatable .recaptcha_image_cell center{height:57px}.recaptchatable .recaptcha_image_cell{background-color:white;height:57px}#recaptcha_area,#recaptcha_table{width:318px!important}.recaptchatable,#recaptcha_area tr,#recaptcha_area td,#recaptcha_area th{margin:0!important;border:0!important;padding:0!important;border-collapse:collapse!important;vertical-align:middle!important}.recaptchatable 
*{margin:0;padding:0;border:0;font-family:helvetica,sans-serif;font-size:8pt;color:black;position:static;top:auto;left:auto;right:auto;bottom:auto}.recaptchatable #recaptcha_image{position:relative;margin:auto}.recaptchatable #recaptcha_image #recaptcha_challenge_image{display:block}.recaptchatable #recaptcha_image #recaptcha_ad_image{display:block;position:absolute;top:0}.recaptchatable img{border:0!important;margin:0!important;padding:0!important}.recaptchatable a,.recaptchatable a:hover{cursor:pointer;outline:none;border:0!important;padding:0!important;text-decoration:none;color:blue;background:none!important;font-weight:normal}.recaptcha_input_area{position:relative!important;width:153px!important;height:45px!important;margin-left:7px!important;margin-right:7px!important;background:none!important}.recaptchatable label.recaptcha_input_area_text{margin:0!important;padding:0!important;position:static!important;top:auto!important;left:auto!important;right:auto!important;bottom:auto!important;background:none!important;height:auto!important;width:auto!important}.recaptcha_theme_red label.recaptcha_input_area_text,.recaptcha_theme_white label.recaptcha_input_area_text{color:black!important}.recaptcha_theme_blackglass label.recaptcha_input_area_text{color:white!important}.recaptchatable #recaptcha_response_field{width:153px!important;position:relative!important;bottom:7px!important;padding:0!important;margin:15px 0 0 0!important;font-size:10pt}.recaptcha_theme_blackglass #recaptcha_response_field,.recaptcha_theme_white #recaptcha_response_field{border:1px solid gray}.recaptcha_theme_red #recaptcha_response_field{border:1px solid #cca940}.recaptcha_audio_cant_hear_link{font-size:7pt;color:black}.recaptchatable{line-height:1!important}#recaptcha_instructions_error{color:red!important}.recaptcha_only_if_privacy{float:right;text-align:right}#recaptcha-ad-choices{position:absolute;height:15px;top:0;right:0}#recaptcha-ad-choices img{height:15px}.recaptcha-ad-choices-collapsed{width:30px;height:15px;display:block}.recaptcha-ad-choices-expanded{width:75px;height:15px;display:none}#recaptcha-ad-choices:hover .recaptcha-ad-choices-collapsed{display:none}#recaptcha-ad-choices:hover .recaptcha-ad-choices-expanded{display:block}";var w={visual_challenge:"Get a visual challenge",audio_challenge:"Get an audio challenge",refresh_btn:"Get a new challenge",instructions_visual:"Type the text:",instructions_audio:"Type what you hear:",help_btn:"Help",play_again:"Play sound again",cant_hear_this:"Download sound as MP3",incorrect_try_again:"Incorrect. Try again.",image_alt_text:"reCAPTCHA challenge image",privacy_and_terms:"Privacy & Terms"},ja={visual_challenge:"\u0627\u0644\u062d\u0635\u0648\u0644 \u0639\u0644\u0649 \u062a\u062d\u062f\u064d \u0645\u0631\u0626\u064a",
-audio_challenge:"\u0627\u0644\u062d\u0635\u0648\u0644 \u0639\u0644\u0649 \u062a\u062d\u062f\u064d \u0635\u0648\u062a\u064a",refresh_btn:"\u0627\u0644\u062d\u0635\u0648\u0644 \u0639\u0644\u0649 \u062a\u062d\u062f\u064d \u062c\u062f\u064a\u062f",instructions_visual:"\u064a\u0631\u062c\u0649 \u0643\u062a\u0627\u0628\u0629 \u0627\u0644\u0646\u0635:",instructions_audio:"\u0627\u0643\u062a\u0628 \u0645\u0627 \u062a\u0633\u0645\u0639\u0647:",help_btn:"\u0645\u0633\u0627\u0639\u062f\u0629",play_again:"\u062a\u0634\u063a\u064a\u0644 \u0627\u0644\u0635\u0648\u062a \u0645\u0631\u0629 \u0623\u062e\u0631\u0649",
-cant_hear_this:"\u062a\u0646\u0632\u064a\u0644 \u0627\u0644\u0635\u0648\u062a \u0628\u062a\u0646\u0633\u064a\u0642 MP3",incorrect_try_again:"\u063a\u064a\u0631 \u0635\u062d\u064a\u062d. \u0623\u0639\u062f \u0627\u0644\u0645\u062d\u0627\u0648\u0644\u0629.",image_alt_text:"\u0635\u0648\u0631\u0629 \u0627\u0644\u062a\u062d\u062f\u064a \u0645\u0646 reCAPTCHA",privacy_and_terms:"\u0627\u0644\u062e\u0635\u0648\u0635\u064a\u0629 \u0648\u0627\u0644\u0628\u0646\u0648\u062f"},ka={visual_challenge:"Obtener una pista visual",
-audio_challenge:"Obtener una pista sonora",refresh_btn:"Obtener una pista nueva",instructions_visual:"Introduzca el texto:",instructions_audio:"Escribe lo que oigas:",help_btn:"Ayuda",play_again:"Volver a reproducir el sonido",cant_hear_this:"Descargar el sonido en MP3",incorrect_try_again:"Incorrecto. Vu\u00e9lvelo a intentar.",image_alt_text:"Pista de imagen reCAPTCHA",privacy_and_terms:"Privacidad y condiciones"},la={visual_challenge:"Kumuha ng pagsubok na visual",audio_challenge:"Kumuha ng pagsubok na audio",
-refresh_btn:"Kumuha ng bagong pagsubok",instructions_visual:"I-type ang teksto:",instructions_audio:"I-type ang iyong narinig",help_btn:"Tulong",play_again:"I-play muli ang tunog",cant_hear_this:"I-download ang tunog bilang MP3",incorrect_try_again:"Hindi wasto. Muling subukan.",image_alt_text:"larawang panghamon ng reCAPTCHA",privacy_and_terms:"Privacy at Mga Tuntunin"},ma={visual_challenge:"Test visuel",audio_challenge:"Test audio",refresh_btn:"Nouveau test",instructions_visual:"Saisissez le texte\u00a0:",
-instructions_audio:"Qu'entendez-vous ?",help_btn:"Aide",play_again:"R\u00e9\u00e9couter",cant_hear_this:"T\u00e9l\u00e9charger l'audio au format MP3",incorrect_try_again:"Incorrect. Veuillez r\u00e9essayer.",image_alt_text:"Image reCAPTCHA",privacy_and_terms:"Confidentialit\u00e9 et conditions d'utilisation"},na={visual_challenge:"Dapatkan kata pengujian berbentuk visual",audio_challenge:"Dapatkan kata pengujian berbentuk audio",refresh_btn:"Dapatkan kata pengujian baru",instructions_visual:"Ketik teks:",
-instructions_audio:"Ketik yang Anda dengar:",help_btn:"Bantuan",play_again:"Putar suara sekali lagi",cant_hear_this:"Unduh suara sebagai MP3",incorrect_try_again:"Salah. Coba lagi.",image_alt_text:"Gambar tantangan reCAPTCHA",privacy_and_terms:"Privasi & Persyaratan"},oa={visual_challenge:"\u05e7\u05d1\u05dc \u05d0\u05ea\u05d2\u05e8 \u05d7\u05d6\u05d5\u05ea\u05d9",audio_challenge:"\u05e7\u05d1\u05dc \u05d0\u05ea\u05d2\u05e8 \u05e9\u05de\u05e2",refresh_btn:"\u05e7\u05d1\u05dc \u05d0\u05ea\u05d2\u05e8 \u05d7\u05d3\u05e9",
-instructions_visual:"\u05d4\u05e7\u05dc\u05d3 \u05d0\u05ea \u05d4\u05d8\u05e7\u05e1\u05d8:",instructions_audio:"\u05d4\u05e7\u05dc\u05d3 \u05d0\u05ea \u05de\u05d4 \u05e9\u05d0\u05ea\u05d4 \u05e9\u05d5\u05de\u05e2:",help_btn:"\u05e2\u05d6\u05e8\u05d4",play_again:"\u05d4\u05e4\u05e2\u05dc \u05e9\u05d5\u05d1 \u05d0\u05ea \u05d4\u05e9\u05de\u05e2",cant_hear_this:"\u05d4\u05d5\u05e8\u05d3 \u05e9\u05de\u05e2 \u05db-3MP",incorrect_try_again:"\u05e9\u05d2\u05d5\u05d9. \u05e0\u05e1\u05d4 \u05e9\u05d5\u05d1.",
-image_alt_text:"\u05ea\u05de\u05d5\u05e0\u05ea \u05d0\u05ea\u05d2\u05e8 \u05e9\u05dc reCAPTCHA",privacy_and_terms:"\u05e4\u05e8\u05d8\u05d9\u05d5\u05ea \u05d5\u05ea\u05e0\u05d0\u05d9\u05dd"},pa={visual_challenge:"Obter um desafio visual",audio_challenge:"Obter um desafio de \u00e1udio",refresh_btn:"Obter um novo desafio",instructions_visual:"Digite o texto:",instructions_audio:"Digite o que voc\u00ea ouve:",help_btn:"Ajuda",play_again:"Reproduzir som novamente",cant_hear_this:"Fazer download do som no formato MP3",
-incorrect_try_again:"Incorreto. Tente novamente.",image_alt_text:"Imagem de desafio reCAPTCHA",privacy_and_terms:"Privacidade e Termos"},qa={visual_challenge:"Ob\u0163ine\u0163i un cod captcha vizual",audio_challenge:"Ob\u0163ine\u0163i un cod captcha audio",refresh_btn:"Ob\u0163ine\u0163i un nou cod captcha",instructions_visual:"Introduce\u021bi textul:",instructions_audio:"Introduce\u0163i ceea ce auzi\u0163i:",help_btn:"Ajutor",play_again:"Reda\u0163i sunetul din nou",cant_hear_this:"Desc\u0103rca\u0163i fi\u015fierul audio ca MP3",
-incorrect_try_again:"Incorect. \u00cencerca\u0163i din nou.",image_alt_text:"Imagine de verificare reCAPTCHA",privacy_and_terms:"Confiden\u0163ialitate \u015fi termeni"},ra={visual_challenge:"\u6536\u5230\u4e00\u4e2a\u89c6\u9891\u9080\u8bf7",audio_challenge:"\u6362\u4e00\u7ec4\u97f3\u9891\u9a8c\u8bc1\u7801",refresh_btn:"\u6362\u4e00\u7ec4\u9a8c\u8bc1\u7801",instructions_visual:"\u8f93\u5165\u6587\u5b57\uff1a",instructions_audio:"\u8bf7\u952e\u5165\u60a8\u542c\u5230\u7684\u5185\u5bb9\uff1a",help_btn:"\u5e2e\u52a9",
-play_again:"\u91cd\u65b0\u64ad\u653e",cant_hear_this:"\u4ee5 MP3 \u683c\u5f0f\u4e0b\u8f7d\u58f0\u97f3",incorrect_try_again:"\u4e0d\u6b63\u786e\uff0c\u8bf7\u91cd\u8bd5\u3002",image_alt_text:"reCAPTCHA \u9a8c\u8bc1\u56fe\u7247",privacy_and_terms:"\u9690\u79c1\u6743\u548c\u4f7f\u7528\u6761\u6b3e"},sa={en:w,af:{visual_challenge:"Kry 'n visuele verifi\u00ebring",audio_challenge:"Kry 'n klankverifi\u00ebring",refresh_btn:"Kry 'n nuwe verifi\u00ebring",instructions_visual:"",instructions_audio:"Tik wat jy hoor:",
-help_btn:"Hulp",play_again:"Speel geluid weer",cant_hear_this:"Laai die klank af as MP3",incorrect_try_again:"Verkeerd. Probeer weer.",image_alt_text:"reCAPTCHA-uitdagingprent",privacy_and_terms:"Privaatheid en bepalings"},am:{visual_challenge:"\u12e8\u12a5\u12ed\u1273 \u1270\u130b\u1323\u121a \u12a0\u130d\u129d",audio_challenge:"\u120c\u120b \u12a0\u12f2\u1235 \u12e8\u12f5\u121d\u133d \u1325\u12eb\u1244 \u12ed\u1245\u1228\u1265",refresh_btn:"\u120c\u120b \u12a0\u12f2\u1235 \u1325\u12eb\u1244 \u12ed\u1245\u1228\u1265",
-instructions_visual:"",instructions_audio:"\u12e8\u121d\u1275\u1230\u121b\u12cd\u1295 \u1270\u12ed\u1265\u1361-",help_btn:"\u12a5\u1308\u12db",play_again:"\u12f5\u121d\u1339\u1295 \u12a5\u1295\u12f0\u1308\u1293 \u12a0\u132b\u12cd\u1275",cant_hear_this:"\u12f5\u121d\u1339\u1295 \u1260MP3 \u1245\u122d\u133d \u12a0\u12cd\u122d\u12f5",incorrect_try_again:"\u1275\u12ad\u12ad\u120d \u12a0\u12ed\u12f0\u1208\u121d\u1362 \u12a5\u1295\u12f0\u1308\u1293 \u121e\u12ad\u122d\u1362",image_alt_text:"reCAPTCHA \u121d\u1235\u120d \u130d\u1320\u121d",
-privacy_and_terms:"\u130d\u120b\u12ca\u1290\u1275 \u12a5\u1293 \u12cd\u120d"},ar:ja,"ar-EG":ja,bg:{visual_challenge:"\u041f\u043e\u043b\u0443\u0447\u0430\u0432\u0430\u043d\u0435 \u043d\u0430 \u0432\u0438\u0437\u0443\u0430\u043b\u043d\u0430 \u043f\u0440\u043e\u0432\u0435\u0440\u043a\u0430",audio_challenge:"\u0417\u0430\u0440\u0435\u0436\u0434\u0430\u043d\u0435 \u043d\u0430 \u0430\u0443\u0434\u0438\u043e\u0442\u0435\u0441\u0442",refresh_btn:"\u0417\u0430\u0440\u0435\u0436\u0434\u0430\u043d\u0435 \u043d\u0430 \u043d\u043e\u0432 \u0442\u0435\u0441\u0442",
-instructions_visual:"\u0412\u044a\u0432\u0435\u0434\u0435\u0442\u0435 \u0442\u0435\u043a\u0441\u0442\u0430:",instructions_audio:"\u0412\u044a\u0432\u0435\u0434\u0435\u0442\u0435 \u0447\u0443\u0442\u043e\u0442\u043e:",help_btn:"\u041f\u043e\u043c\u043e\u0449",play_again:"\u041f\u043e\u0432\u0442\u043e\u0440\u043d\u043e \u043f\u0443\u0441\u043a\u0430\u043d\u0435 \u043d\u0430 \u0437\u0432\u0443\u043a\u0430",cant_hear_this:"\u0418\u0437\u0442\u0435\u0433\u043b\u044f\u043d\u0435 \u043d\u0430 \u0437\u0432\u0443\u043a\u0430 \u0432\u044a\u0432 \u0444\u043e\u0440\u043c\u0430\u0442 MP3",
-incorrect_try_again:"\u041d\u0435\u043f\u0440\u0430\u0432\u0438\u043b\u043d\u043e. \u041e\u043f\u0438\u0442\u0430\u0439\u0442\u0435 \u043e\u0442\u043d\u043e\u0432\u043e.",image_alt_text:"\u0418\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435 \u043d\u0430 \u043f\u0440\u043e\u0432\u0435\u0440\u043a\u0430\u0442\u0430 \u0441 reCAPTCHA",privacy_and_terms:"\u041f\u043e\u0432\u0435\u0440\u0438\u0442\u0435\u043b\u043d\u043e\u0441\u0442 \u0438 \u041e\u0431\u0449\u0438 \u0443\u0441\u043b\u043e\u0432\u0438\u044f"},
-bn:{visual_challenge:"\u098f\u0995\u099f\u09bf \u09a6\u09c3\u09b6\u09cd\u09af\u09ae\u09be\u09a8 \u09aa\u09cd\u09b0\u09a4\u09bf\u09a6\u09cd\u09ac\u09a8\u09cd\u09a6\u09cd\u09ac\u09bf\u09a4\u09be \u09aa\u09be\u09a8",audio_challenge:"\u098f\u0995\u099f\u09bf \u0985\u09a1\u09bf\u0993 \u09aa\u09cd\u09b0\u09a4\u09bf\u09a6\u09cd\u09ac\u09a8\u09cd\u09a6\u09cd\u09ac\u09bf\u09a4\u09be  \u09aa\u09be\u09a8",refresh_btn:"\u098f\u0995\u099f\u09bf \u09a8\u09a4\u09c1\u09a8 \u09aa\u09cd\u09b0\u09a4\u09bf\u09a6\u09cd\u09ac\u09a8\u09cd\u09a6\u09cd\u09ac\u09bf\u09a4\u09be  \u09aa\u09be\u09a8",
-instructions_visual:"",instructions_audio:"\u0986\u09aa\u09a8\u09bf \u09af\u09be \u09b6\u09c1\u09a8\u099b\u09c7\u09a8 \u09a4\u09be \u09b2\u09bf\u0996\u09c1\u09a8:",help_btn:"\u09b8\u09b9\u09be\u09df\u09a4\u09be",play_again:"\u0986\u09ac\u09be\u09b0 \u09b8\u09be\u0989\u09a8\u09cd\u09a1 \u09aa\u09cd\u09b2\u09c7 \u0995\u09b0\u09c1\u09a8",cant_hear_this:"MP3 \u09b0\u09c2\u09aa\u09c7 \u09b6\u09ac\u09cd\u09a6 \u09a1\u09be\u0989\u09a8\u09b2\u09cb\u09a1 \u0995\u09b0\u09c1\u09a8",incorrect_try_again:"\u09ac\u09c7\u09a0\u09bf\u0995\u09f7 \u0986\u09ac\u09be\u09b0 \u099a\u09c7\u09b7\u09cd\u099f\u09be \u0995\u09b0\u09c1\u09a8\u09f7",
-image_alt_text:"reCAPTCHA \u099a\u09cd\u09af\u09be\u09b2\u09c7\u099e\u09cd\u099c \u099a\u09bf\u09a4\u09cd\u09b0",privacy_and_terms:"\u0997\u09cb\u09aa\u09a8\u09c0\u09af\u09bc\u09a4\u09be \u0993 \u09b6\u09b0\u09cd\u09a4\u09be\u09ac\u09b2\u09c0"},ca:{visual_challenge:"Obt\u00e9n un repte visual",audio_challenge:"Obteniu una pista sonora",refresh_btn:"Obteniu una pista nova",instructions_visual:"Escriviu el text:",instructions_audio:"Escriviu el que escolteu:",help_btn:"Ajuda",play_again:"Torna a reproduir el so",
-cant_hear_this:"Baixa el so com a MP3",incorrect_try_again:"No \u00e9s correcte. Torna-ho a provar.",image_alt_text:"Imatge del repte de reCAPTCHA",privacy_and_terms:"Privadesa i condicions"},cs:{visual_challenge:"Zobrazit vizu\u00e1ln\u00ed podobu v\u00fdrazu",audio_challenge:"P\u0159ehr\u00e1t zvukovou podobu v\u00fdrazu",refresh_btn:"Zobrazit nov\u00fd v\u00fdraz",instructions_visual:"Zadejte text:",instructions_audio:"Napi\u0161te, co jste sly\u0161eli:",help_btn:"N\u00e1pov\u011bda",play_again:"Znovu p\u0159ehr\u00e1t zvuk",
-cant_hear_this:"St\u00e1hnout zvuk ve form\u00e1tu MP3",incorrect_try_again:"\u0160patn\u011b. Zkuste to znovu.",image_alt_text:"Obr\u00e1zek reCAPTCHA",privacy_and_terms:"Ochrana soukrom\u00ed a smluvn\u00ed podm\u00ednky"},da:{visual_challenge:"Hent en visuel udfordring",audio_challenge:"Hent en lydudfordring",refresh_btn:"Hent en ny udfordring",instructions_visual:"Indtast teksten:",instructions_audio:"Indtast det, du h\u00f8rer:",help_btn:"Hj\u00e6lp",play_again:"Afspil lyden igen",cant_hear_this:"Download lyd som MP3",
-incorrect_try_again:"Forkert. Pr\u00f8v igen.",image_alt_text:"reCAPTCHA-udfordringsbillede",privacy_and_terms:"Privatliv og vilk\u00e5r"},de:{visual_challenge:"Captcha abrufen",audio_challenge:"Audio-Captcha abrufen",refresh_btn:"Neues Captcha abrufen",instructions_visual:"Geben Sie den angezeigten Text ein:",instructions_audio:"Geben Sie das Geh\u00f6rte ein:",help_btn:"Hilfe",play_again:"Wort erneut abspielen",cant_hear_this:"Wort als MP3 herunterladen",incorrect_try_again:"Falsch. Bitte versuchen Sie es erneut.",
-image_alt_text:"reCAPTCHA-Bild",privacy_and_terms:"Datenschutzerkl\u00e4rung & Nutzungsbedingungen"},el:{visual_challenge:"\u039f\u03c0\u03c4\u03b9\u03ba\u03ae \u03c0\u03c1\u03cc\u03ba\u03bb\u03b7\u03c3\u03b7",audio_challenge:"\u0397\u03c7\u03b7\u03c4\u03b9\u03ba\u03ae \u03c0\u03c1\u03cc\u03ba\u03bb\u03b7\u03c3\u03b7",refresh_btn:"\u039d\u03ad\u03b1 \u03c0\u03c1\u03cc\u03ba\u03bb\u03b7\u03c3\u03b7",instructions_visual:"\u03a0\u03bb\u03b7\u03ba\u03c4\u03c1\u03bf\u03bb\u03bf\u03b3\u03ae\u03c3\u03c4\u03b5 \u03c4\u03bf \u03ba\u03b5\u03af\u03bc\u03b5\u03bd\u03bf:",
-instructions_audio:"\u03a0\u03bb\u03b7\u03ba\u03c4\u03c1\u03bf\u03bb\u03bf\u03b3\u03ae\u03c3\u03c4\u03b5 \u03cc\u03c4\u03b9 \u03b1\u03ba\u03bf\u03cd\u03c4\u03b5:",help_btn:"\u0392\u03bf\u03ae\u03b8\u03b5\u03b9\u03b1",play_again:"\u0391\u03bd\u03b1\u03c0\u03b1\u03c1\u03b1\u03b3\u03c9\u03b3\u03ae \u03ae\u03c7\u03bf\u03c5 \u03be\u03b1\u03bd\u03ac",cant_hear_this:"\u039b\u03ae\u03c8\u03b7 \u03ae\u03c7\u03bf\u03c5 \u03c9\u03c2 \u039c\u03a13",incorrect_try_again:"\u039b\u03ac\u03b8\u03bf\u03c2. \u0394\u03bf\u03ba\u03b9\u03bc\u03ac\u03c3\u03c4\u03b5 \u03be\u03b1\u03bd\u03ac.",
-image_alt_text:"\u0395\u03b9\u03ba\u03cc\u03bd\u03b1 \u03c0\u03c1\u03cc\u03ba\u03bb\u03b7\u03c3\u03b7\u03c2 reCAPTCHA",privacy_and_terms:"\u0391\u03c0\u03cc\u03c1\u03c1\u03b7\u03c4\u03bf \u03ba\u03b1\u03b9 \u03cc\u03c1\u03bf\u03b9"},"en-GB":w,"en-US":w,es:ka,"es-419":{visual_challenge:"Enfrentar un desaf\u00edo visual",audio_challenge:"Enfrentar un desaf\u00edo de audio",refresh_btn:"Enfrentar un nuevo desaf\u00edo",instructions_visual:"Escriba el texto:",instructions_audio:"Escribe lo que escuchas:",
-help_btn:"Ayuda",play_again:"Reproducir sonido de nuevo",cant_hear_this:"Descargar sonido en formato MP3",incorrect_try_again:"Incorrecto. Vuelve a intentarlo.",image_alt_text:"Imagen del desaf\u00edo de la reCAPTCHA",privacy_and_terms:"Privacidad y condiciones"},"es-ES":ka,et:{visual_challenge:"Kuva kuvap\u00f5hine robotil\u00f5ks",audio_challenge:"Kuva helip\u00f5hine robotil\u00f5ks",refresh_btn:"Kuva uus robotil\u00f5ks",instructions_visual:"Tippige tekst:",instructions_audio:"Tippige, mida kuulete.",
-help_btn:"Abi",play_again:"Esita heli uuesti",cant_hear_this:"Laadi heli alla MP3-vormingus",incorrect_try_again:"Vale. Proovige uuesti.",image_alt_text:"reCAPTCHA robotil\u00f5ksu kujutis",privacy_and_terms:"Privaatsus ja tingimused"},eu:{visual_challenge:"Eskuratu ikusizko erronka",audio_challenge:"Eskuratu audio-erronka",refresh_btn:"Eskuratu erronka berria",instructions_visual:"",instructions_audio:"Idatzi entzuten duzuna:",help_btn:"Laguntza",play_again:"Erreproduzitu soinua berriro",cant_hear_this:"Deskargatu soinua MP3 gisa",
-incorrect_try_again:"Ez da zuzena. Saiatu berriro.",image_alt_text:"reCAPTCHA erronkaren irudia",privacy_and_terms:"Pribatutasuna eta baldintzak"},fa:{visual_challenge:"\u062f\u0631\u06cc\u0627\u0641\u062a \u06cc\u06a9 \u0645\u0639\u0645\u0627\u06cc \u062f\u06cc\u062f\u0627\u0631\u06cc",audio_challenge:"\u062f\u0631\u06cc\u0627\u0641\u062a \u06cc\u06a9 \u0645\u0639\u0645\u0627\u06cc \u0635\u0648\u062a\u06cc",refresh_btn:"\u062f\u0631\u06cc\u0627\u0641\u062a \u06cc\u06a9 \u0645\u0639\u0645\u0627\u06cc \u062c\u062f\u06cc\u062f",
-instructions_visual:"",instructions_audio:"\u0622\u0646\u0686\u0647 \u0631\u0627 \u06a9\u0647 \u0645\u06cc\u200c\u0634\u0646\u0648\u06cc\u062f \u062a\u0627\u06cc\u067e \u06a9\u0646\u06cc\u062f:",help_btn:"\u0631\u0627\u0647\u0646\u0645\u0627\u06cc\u06cc",play_again:"\u067e\u062e\u0634 \u0645\u062c\u062f\u062f \u0635\u062f\u0627",cant_hear_this:"\u062f\u0627\u0646\u0644\u0648\u062f \u0635\u062f\u0627 \u0628\u0647 \u0635\u0648\u0631\u062a MP3",incorrect_try_again:"\u0646\u0627\u062f\u0631\u0633\u062a. \u062f\u0648\u0628\u0627\u0631\u0647 \u0627\u0645\u062a\u062d\u0627\u0646 \u06a9\u0646\u06cc\u062f.",
-image_alt_text:"\u062a\u0635\u0648\u06cc\u0631 \u0686\u0627\u0644\u0634\u06cc reCAPTCHA",privacy_and_terms:"\u062d\u0631\u06cc\u0645 \u062e\u0635\u0648\u0635\u06cc \u0648 \u0634\u0631\u0627\u06cc\u0637"},fi:{visual_challenge:"Kuvavahvistus",audio_challenge:"\u00c4\u00e4nivahvistus",refresh_btn:"Uusi kuva",instructions_visual:"Kirjoita teksti:",instructions_audio:"Kirjoita kuulemasi:",help_btn:"Ohje",play_again:"Toista \u00e4\u00e4ni uudelleen",cant_hear_this:"Lataa \u00e4\u00e4ni MP3-tiedostona",
-incorrect_try_again:"V\u00e4\u00e4rin. Yrit\u00e4 uudelleen.",image_alt_text:"reCAPTCHA-kuva",privacy_and_terms:"Tietosuoja ja k\u00e4ytt\u00f6ehdot"},fil:la,fr:ma,"fr-CA":{visual_challenge:"Obtenir un test visuel",audio_challenge:"Obtenir un test audio",refresh_btn:"Obtenir un nouveau test",instructions_visual:"Saisissez le texte\u00a0:",instructions_audio:"Tapez ce que vous entendez\u00a0:",help_btn:"Aide",play_again:"Jouer le son de nouveau",cant_hear_this:"T\u00e9l\u00e9charger le son en format MP3",
-incorrect_try_again:"Erreur, essayez \u00e0 nouveau",image_alt_text:"Image reCAPTCHA",privacy_and_terms:"Confidentialit\u00e9 et conditions d'utilisation"},"fr-FR":ma,gl:{visual_challenge:"Obter unha proba visual",audio_challenge:"Obter unha proba de audio",refresh_btn:"Obter unha proba nova",instructions_visual:"",instructions_audio:"Escribe o que escoitas:",help_btn:"Axuda",play_again:"Reproducir o son de novo",cant_hear_this:"Descargar son como MP3",incorrect_try_again:"Incorrecto. T\u00e9ntao de novo.",
-image_alt_text:"Imaxe de proba de reCAPTCHA",privacy_and_terms:"Privacidade e condici\u00f3ns"},gu:{visual_challenge:"\u0a8f\u0a95 \u0aa6\u0ac3\u0ab6\u0acd\u0aaf\u0abe\u0aa4\u0acd\u0aae\u0a95 \u0aaa\u0aa1\u0a95\u0abe\u0ab0 \u0aae\u0ac7\u0ab3\u0ab5\u0acb",audio_challenge:"\u0a8f\u0a95 \u0a91\u0aa1\u0abf\u0a93 \u0aaa\u0aa1\u0a95\u0abe\u0ab0 \u0aae\u0ac7\u0ab3\u0ab5\u0acb",refresh_btn:"\u0a8f\u0a95 \u0aa8\u0ab5\u0acb \u0aaa\u0aa1\u0a95\u0abe\u0ab0 \u0aae\u0ac7\u0ab3\u0ab5\u0acb",instructions_visual:"",
-instructions_audio:"\u0aa4\u0aae\u0ac7 \u0a9c\u0ac7 \u0ab8\u0abe\u0a82\u0aad\u0ab3\u0acb \u0a9b\u0acb \u0aa4\u0ac7 \u0ab2\u0a96\u0acb:",help_btn:"\u0ab8\u0ab9\u0abe\u0aaf",play_again:"\u0aa7\u0acd\u0ab5\u0aa8\u0abf \u0aab\u0ab0\u0ac0\u0aa5\u0ac0 \u0a9a\u0ab2\u0abe\u0ab5\u0acb",cant_hear_this:"MP3 \u0aa4\u0ab0\u0ac0\u0a95\u0ac7 \u0aa7\u0acd\u0ab5\u0aa8\u0abf\u0aa8\u0ac7 \u0aa1\u0abe\u0a89\u0aa8\u0ab2\u0acb\u0aa1 \u0a95\u0ab0\u0acb",incorrect_try_again:"\u0a96\u0acb\u0a9f\u0ac1\u0a82. \u0aab\u0ab0\u0ac0 \u0aaa\u0acd\u0ab0\u0aaf\u0abe\u0ab8 \u0a95\u0ab0\u0acb.",
-image_alt_text:"reCAPTCHA \u0aaa\u0aa1\u0a95\u0abe\u0ab0 \u0a9b\u0aac\u0ac0",privacy_and_terms:"\u0a97\u0acb\u0aaa\u0aa8\u0ac0\u0aaf\u0aa4\u0abe \u0a85\u0aa8\u0ac7 \u0ab6\u0ab0\u0aa4\u0acb"},hi:{visual_challenge:"\u0915\u094b\u0908 \u0935\u093f\u091c\u0941\u0905\u0932 \u091a\u0941\u0928\u094c\u0924\u0940 \u0932\u0947\u0902",audio_challenge:"\u0915\u094b\u0908 \u0911\u0921\u093f\u092f\u094b \u091a\u0941\u0928\u094c\u0924\u0940 \u0932\u0947\u0902",refresh_btn:"\u0915\u094b\u0908 \u0928\u0908 \u091a\u0941\u0928\u094c\u0924\u0940 \u0932\u0947\u0902",
-instructions_visual:"\u091f\u0947\u0915\u094d\u0938\u094d\u091f \u091f\u093e\u0907\u092a \u0915\u0930\u0947\u0902:",instructions_audio:"\u091c\u094b \u0906\u092a \u0938\u0941\u0928 \u0930\u0939\u0947 \u0939\u0948\u0902 \u0909\u0938\u0947 \u0932\u093f\u0916\u0947\u0902:",help_btn:"\u0938\u0939\u093e\u092f\u0924\u093e",play_again:"\u0927\u094d\u200d\u0935\u0928\u093f \u092a\u0941\u0928: \u091a\u0932\u093e\u090f\u0902",cant_hear_this:"\u0927\u094d\u200d\u0935\u0928\u093f \u0915\u094b MP3 \u0915\u0947 \u0930\u0942\u092a \u092e\u0947\u0902 \u0921\u093e\u0909\u0928\u0932\u094b\u0921 \u0915\u0930\u0947\u0902",
-incorrect_try_again:"\u0917\u0932\u0924. \u092a\u0941\u0928: \u092a\u094d\u0930\u092f\u093e\u0938 \u0915\u0930\u0947\u0902.",image_alt_text:"reCAPTCHA \u091a\u0941\u0928\u094c\u0924\u0940 \u091a\u093f\u0924\u094d\u0930",privacy_and_terms:"\u0917\u094b\u092a\u0928\u0940\u092f\u0924\u093e \u0914\u0930 \u0936\u0930\u094d\u0924\u0947\u0902"},hr:{visual_challenge:"Dohvati vizualni upit",audio_challenge:"Dohvati zvu\u010dni upit",refresh_btn:"Dohvati novi upit",instructions_visual:"Unesite tekst:",instructions_audio:"Upi\u0161ite \u0161to \u010dujete:",
-help_btn:"Pomo\u0107",play_again:"Ponovi zvuk",cant_hear_this:"Preuzmi zvuk u MP3 formatu",incorrect_try_again:"Nije to\u010dno. Poku\u0161ajte ponovno.",image_alt_text:"Slikovni izazov reCAPTCHA",privacy_and_terms:"Privatnost i odredbe"},hu:{visual_challenge:"Vizu\u00e1lis kih\u00edv\u00e1s k\u00e9r\u00e9se",audio_challenge:"Hangkih\u00edv\u00e1s k\u00e9r\u00e9se",refresh_btn:"\u00daj kih\u00edv\u00e1s k\u00e9r\u00e9se",instructions_visual:"\u00cdrja be a sz\u00f6veget:",instructions_audio:"\u00cdrja le, amit hall:",
-help_btn:"S\u00fag\u00f3",play_again:"Hang ism\u00e9telt lej\u00e1tsz\u00e1sa",cant_hear_this:"Hang let\u00f6lt\u00e9se MP3 form\u00e1tumban",incorrect_try_again:"Hib\u00e1s. Pr\u00f3b\u00e1lkozzon \u00fajra.",image_alt_text:"reCAPTCHA ellen\u0151rz\u0151 k\u00e9p",privacy_and_terms:"Adatv\u00e9delem \u00e9s Szerz\u0151d\u00e9si Felt\u00e9telek"},hy:{visual_challenge:"\u054d\u057f\u0561\u0576\u0561\u056c \u057f\u0565\u057d\u0578\u0572\u0561\u056f\u0561\u0576 \u056d\u0576\u0564\u056b\u0580",audio_challenge:"\u054d\u057f\u0561\u0576\u0561\u056c \u0571\u0561\u0575\u0576\u0561\u0575\u056b\u0576 \u056d\u0576\u0564\u056b\u0580",
-refresh_btn:"\u054d\u057f\u0561\u0576\u0561\u056c \u0576\u0578\u0580 \u056d\u0576\u0564\u056b\u0580",instructions_visual:"\u0544\u0578\u0582\u057f\u0584\u0561\u0563\u0580\u0565\u0584 \u057f\u0565\u0584\u057d\u057f\u0568\u055d",instructions_audio:"\u0544\u0578\u0582\u057f\u0584\u0561\u0563\u0580\u0565\u0584 \u0561\u0575\u0576, \u056b\u0576\u0579 \u056c\u057d\u0578\u0582\u0574 \u0565\u0584\u055d",help_btn:"\u0555\u0563\u0576\u0578\u0582\u0569\u0575\u0578\u0582\u0576",play_again:"\u0546\u057e\u0561\u0563\u0561\u0580\u056f\u0565\u056c \u0571\u0561\u0575\u0576\u0568 \u056f\u0580\u056f\u056b\u0576",
-cant_hear_this:"\u0532\u0565\u057c\u0576\u0565\u056c \u0571\u0561\u0575\u0576\u0568 \u0578\u0580\u057a\u0565\u057d MP3",incorrect_try_again:"\u054d\u056d\u0561\u056c \u0567: \u0553\u0578\u0580\u0571\u0565\u0584 \u056f\u0580\u056f\u056b\u0576:",image_alt_text:"reCAPTCHA \u057a\u0561\u057f\u056f\u0565\u0580\u0578\u057e \u056d\u0576\u0564\u056b\u0580",privacy_and_terms:"\u0533\u0561\u0572\u057f\u0576\u056b\u0578\u0582\u0569\u0575\u0561\u0576 & \u057a\u0561\u0575\u0574\u0561\u0576\u0576\u0565\u0580"},
-id:na,is:{visual_challenge:"F\u00e1 a\u00f0gangspr\u00f3f sem mynd",audio_challenge:"F\u00e1 a\u00f0gangspr\u00f3f sem hlj\u00f3\u00f0skr\u00e1",refresh_btn:"F\u00e1 n\u00fdtt a\u00f0gangspr\u00f3f",instructions_visual:"",instructions_audio:"Sl\u00e1\u00f0u inn \u00fea\u00f0 sem \u00fe\u00fa heyrir:",help_btn:"Hj\u00e1lp",play_again:"Spila hlj\u00f3\u00f0 aftur",cant_hear_this:"S\u00e6kja hlj\u00f3\u00f0 sem MP3",incorrect_try_again:"Rangt. Reyndu aftur.",image_alt_text:"mynd reCAPTCHA a\u00f0gangspr\u00f3fs",
-privacy_and_terms:"Pers\u00f3nuvernd og skilm\u00e1lar"},it:{visual_challenge:"Verifica visiva",audio_challenge:"Verifica audio",refresh_btn:"Nuova verifica",instructions_visual:"Digita il testo:",instructions_audio:"Digita ci\u00f2 che senti:",help_btn:"Guida",play_again:"Riproduci di nuovo audio",cant_hear_this:"Scarica audio in MP3",incorrect_try_again:"Sbagliato. Riprova.",image_alt_text:"Immagine di verifica reCAPTCHA",privacy_and_terms:"Privacy e Termini"},iw:oa,ja:{visual_challenge:"\u753b\u50cf\u3067\u78ba\u8a8d\u3057\u307e\u3059",
-audio_challenge:"\u97f3\u58f0\u3067\u78ba\u8a8d\u3057\u307e\u3059",refresh_btn:"\u5225\u306e\u5358\u8a9e\u3067\u3084\u308a\u76f4\u3057\u307e\u3059",instructions_visual:"\u30c6\u30ad\u30b9\u30c8\u3092\u5165\u529b:",instructions_audio:"\u805e\u3053\u3048\u305f\u5358\u8a9e\u3092\u5165\u529b\u3057\u307e\u3059:",help_btn:"\u30d8\u30eb\u30d7",play_again:"\u3082\u3046\u4e00\u5ea6\u805e\u304f",cant_hear_this:"MP3 \u3067\u97f3\u58f0\u3092\u30c0\u30a6\u30f3\u30ed\u30fc\u30c9",incorrect_try_again:"\u6b63\u3057\u304f\u3042\u308a\u307e\u305b\u3093\u3002\u3082\u3046\u4e00\u5ea6\u3084\u308a\u76f4\u3057\u3066\u304f\u3060\u3055\u3044\u3002",
-image_alt_text:"reCAPTCHA \u78ba\u8a8d\u7528\u753b\u50cf",privacy_and_terms:"\u30d7\u30e9\u30a4\u30d0\u30b7\u30fc\u3068\u5229\u7528\u898f\u7d04"},kn:{visual_challenge:"\u0ca6\u0cc3\u0cb6\u0ccd\u0caf \u0cb8\u0cb5\u0cbe\u0cb2\u0cca\u0c82\u0ca6\u0ca8\u0ccd\u0ca8\u0cc1 \u0cb8\u0ccd\u0cb5\u0cc0\u0c95\u0cb0\u0cbf\u0cb8\u0cbf",audio_challenge:"\u0c86\u0ca1\u0cbf\u0caf\u0ccb \u0cb8\u0cb5\u0cbe\u0cb2\u0cca\u0c82\u0ca6\u0ca8\u0ccd\u0ca8\u0cc1 \u0cb8\u0ccd\u0cb5\u0cc0\u0c95\u0cb0\u0cbf\u0cb8\u0cbf",refresh_btn:"\u0cb9\u0cca\u0cb8 \u0cb8\u0cb5\u0cbe\u0cb2\u0cca\u0c82\u0ca6\u0ca8\u0ccd\u0ca8\u0cc1 \u0caa\u0ca1\u0cc6\u0caf\u0cbf\u0cb0\u0cbf",
-instructions_visual:"",instructions_audio:"\u0ca8\u0cbf\u0cae\u0c97\u0cc6 \u0c95\u0cc7\u0cb3\u0cbf\u0cb8\u0cc1\u0cb5\u0cc1\u0ca6\u0ca8\u0ccd\u0ca8\u0cc1 \u0c9f\u0cc8\u0caa\u0ccd\u200c \u0cae\u0cbe\u0ca1\u0cbf:",help_btn:"\u0cb8\u0cb9\u0cbe\u0caf",play_again:"\u0ca7\u0ccd\u0cb5\u0ca8\u0cbf\u0caf\u0ca8\u0ccd\u0ca8\u0cc1 \u0cae\u0ca4\u0ccd\u0ca4\u0cc6 \u0caa\u0ccd\u0cb2\u0cc7 \u0cae\u0cbe\u0ca1\u0cbf",cant_hear_this:"\u0ca7\u0ccd\u0cb5\u0ca8\u0cbf\u0caf\u0ca8\u0ccd\u0ca8\u0cc1 MP3 \u0cb0\u0cc2\u0caa\u0ca6\u0cb2\u0ccd\u0cb2\u0cbf \u0ca1\u0ccc\u0ca8\u0ccd\u200c\u0cb2\u0ccb\u0ca1\u0ccd \u0cae\u0cbe\u0ca1\u0cbf",
-incorrect_try_again:"\u0ca4\u0caa\u0ccd\u0caa\u0cbe\u0c97\u0cbf\u0ca6\u0cc6. \u0cae\u0ca4\u0ccd\u0ca4\u0cca\u0cae\u0ccd\u0cae\u0cc6 \u0caa\u0ccd\u0cb0\u0caf\u0ca4\u0ccd\u0ca8\u0cbf\u0cb8\u0cbf.",image_alt_text:"reCAPTCHA \u0cb8\u0cb5\u0cbe\u0cb2\u0cc1 \u0c9a\u0cbf\u0ca4\u0ccd\u0cb0",privacy_and_terms:"\u0c97\u0ccc\u0caa\u0ccd\u0caf\u0ca4\u0cc6 \u0cae\u0ca4\u0ccd\u0ca4\u0cc1 \u0ca8\u0cbf\u0caf\u0cae\u0c97\u0cb3\u0cc1"},ko:{visual_challenge:"\uadf8\ub9bc\uc73c\ub85c \ubcf4\uc548\ubb38\uc790 \ubc1b\uae30",
-audio_challenge:"\uc74c\uc131\uc73c\ub85c \ubcf4\uc548\ubb38\uc790 \ubc1b\uae30",refresh_btn:"\ubcf4\uc548\ubb38\uc790 \uc0c8\ub85c \ubc1b\uae30",instructions_visual:"\ud14d\uc2a4\ud2b8 \uc785\ub825:",instructions_audio:"\uc74c\uc131 \ubcf4\uc548\ubb38\uc790 \uc785\ub825:",help_btn:"\ub3c4\uc6c0\ub9d0",play_again:"\uc74c\uc131 \ub2e4\uc2dc \ub4e3\uae30",cant_hear_this:"\uc74c\uc131\uc744 MP3\ub85c \ub2e4\uc6b4\ub85c\ub4dc",incorrect_try_again:"\ud2c0\ub838\uc2b5\ub2c8\ub2e4. \ub2e4\uc2dc \uc2dc\ub3c4\ud574 \uc8fc\uc138\uc694.",
-image_alt_text:"reCAPTCHA \ubcf4\uc548\ubb38\uc790 \uc774\ubbf8\uc9c0",privacy_and_terms:"\uac1c\uc778\uc815\ubcf4 \ubcf4\ud638 \ubc0f \uc57d\uad00"},ln:ma,lt:{visual_challenge:"Gauti vaizdin\u012f atpa\u017einimo test\u0105",audio_challenge:"Gauti garso atpa\u017einimo test\u0105",refresh_btn:"Gauti nauj\u0105 atpa\u017einimo test\u0105",instructions_visual:"\u012eveskite tekst\u0105:",instructions_audio:"\u012eveskite tai, k\u0105 girdite:",help_btn:"Pagalba",play_again:"Dar kart\u0105 paleisti gars\u0105",
-cant_hear_this:"Atsisi\u0173sti gars\u0105 kaip MP3",incorrect_try_again:"Neteisingai. Bandykite dar kart\u0105.",image_alt_text:"Testo \u201ereCAPTCHA\u201c vaizdas",privacy_and_terms:"Privatumas ir s\u0105lygos"},lv:{visual_challenge:"Sa\u0146emt vizu\u0101lu izaicin\u0101jumu",audio_challenge:"Sa\u0146emt audio izaicin\u0101jumu",refresh_btn:"Sa\u0146emt jaunu izaicin\u0101jumu",instructions_visual:"Ievadiet tekstu:",instructions_audio:"Ierakstiet dzirdamo:",help_btn:"Pal\u012bdz\u012bba",play_again:"V\u0113lreiz atska\u0146ot ska\u0146u",
-cant_hear_this:"Lejupiel\u0101d\u0113t ska\u0146u MP3\u00a0form\u0101t\u0101",incorrect_try_again:"Nepareizi. M\u0113\u0123iniet v\u0113lreiz.",image_alt_text:"reCAPTCHA izaicin\u0101juma att\u0113ls",privacy_and_terms:"Konfidencialit\u0101te un noteikumi"},ml:{visual_challenge:"\u0d12\u0d30\u0d41 \u0d26\u0d43\u0d36\u0d4d\u0d2f \u0d1a\u0d32\u0d1e\u0d4d\u0d1a\u0d4d \u0d28\u0d47\u0d1f\u0d41\u0d15",audio_challenge:"\u0d12\u0d30\u0d41 \u0d13\u0d21\u0d3f\u0d2f\u0d4b \u0d1a\u0d32\u0d1e\u0d4d\u0d1a\u0d4d \u0d28\u0d47\u0d1f\u0d41\u0d15",
-refresh_btn:"\u0d12\u0d30\u0d41 \u0d2a\u0d41\u0d24\u0d3f\u0d2f \u0d1a\u0d32\u0d1e\u0d4d\u0d1a\u0d4d \u0d28\u0d47\u0d1f\u0d41\u0d15",instructions_visual:"",instructions_audio:"\u0d15\u0d47\u0d7e\u0d15\u0d4d\u0d15\u0d41\u0d28\u0d4d\u0d28\u0d24\u0d4d \u0d1f\u0d48\u0d2a\u0d4d\u0d2a\u0d4d \u0d1a\u0d46\u0d2f\u0d4d\u0d2f\u0d42:",help_btn:"\u0d38\u0d39\u0d3e\u0d2f\u0d02",play_again:"\u0d36\u0d2c\u0d4d\u200c\u0d26\u0d02 \u0d35\u0d40\u0d23\u0d4d\u0d1f\u0d41\u0d02 \u0d2a\u0d4d\u0d32\u0d47 \u0d1a\u0d46\u0d2f\u0d4d\u0d2f\u0d41\u0d15",
-cant_hear_this:"\u0d36\u0d2c\u0d4d\u200c\u0d26\u0d02 MP3 \u0d06\u0d2f\u0d3f \u0d21\u0d57\u0d7a\u0d32\u0d4b\u0d21\u0d4d \u0d1a\u0d46\u0d2f\u0d4d\u0d2f\u0d41\u0d15",incorrect_try_again:"\u0d24\u0d46\u0d31\u0d4d\u0d31\u0d3e\u0d23\u0d4d. \u0d35\u0d40\u0d23\u0d4d\u0d1f\u0d41\u0d02 \u0d36\u0d4d\u0d30\u0d2e\u0d3f\u0d15\u0d4d\u0d15\u0d41\u0d15.",image_alt_text:"reCAPTCHA \u0d1a\u0d32\u0d1e\u0d4d\u0d1a\u0d4d \u0d07\u0d2e\u0d47\u0d1c\u0d4d",privacy_and_terms:"\u0d38\u0d4d\u0d35\u0d15\u0d3e\u0d30\u0d4d\u0d2f\u0d24\u0d2f\u0d41\u0d02 \u0d28\u0d3f\u0d2c\u0d28\u0d4d\u0d27\u0d28\u0d15\u0d33\u0d41\u0d02"},
-mr:{visual_challenge:"\u0926\u0943\u0936\u094d\u200d\u092f\u092e\u093e\u0928 \u0906\u0935\u094d\u0939\u093e\u0928 \u092a\u094d\u0930\u093e\u092a\u094d\u0924 \u0915\u0930\u093e",audio_challenge:"\u0911\u0921\u0940\u0913 \u0906\u0935\u094d\u0939\u093e\u0928 \u092a\u094d\u0930\u093e\u092a\u094d\u0924 \u0915\u0930\u093e",refresh_btn:"\u090f\u0915 \u0928\u0935\u0940\u0928 \u0906\u0935\u094d\u0939\u093e\u0928 \u092a\u094d\u0930\u093e\u092a\u094d\u0924 \u0915\u0930\u093e",instructions_visual:"",instructions_audio:"\u0906\u092a\u0932\u094d\u092f\u093e\u0932\u093e \u091c\u0947 \u0910\u0915\u0942 \u092f\u0947\u0908\u0932 \u0924\u0947 \u091f\u093e\u0907\u092a \u0915\u0930\u093e:",
-help_btn:"\u092e\u0926\u0924",play_again:"\u0927\u094d\u200d\u0935\u0928\u0940 \u092a\u0941\u0928\u094d\u0939\u093e \u092a\u094d\u200d\u0932\u0947 \u0915\u0930\u093e",cant_hear_this:"MP3 \u0930\u0941\u092a\u093e\u0924 \u0927\u094d\u200d\u0935\u0928\u0940 \u0921\u093e\u0909\u0928\u0932\u094b\u0921 \u0915\u0930\u093e",incorrect_try_again:"\u0905\u092f\u094b\u0917\u094d\u200d\u092f. \u092a\u0941\u0928\u094d\u200d\u0939\u093e \u092a\u094d\u0930\u092f\u0924\u094d\u200d\u0928 \u0915\u0930\u093e.",image_alt_text:"reCAPTCHA \u0906\u0935\u094d\u200d\u0939\u093e\u0928 \u092a\u094d\u0930\u0924\u093f\u092e\u093e",
-privacy_and_terms:"\u0917\u094b\u092a\u0928\u0940\u092f\u0924\u093e \u0906\u0923\u093f \u0905\u091f\u0940"},ms:{visual_challenge:"Dapatkan cabaran visual",audio_challenge:"Dapatkan cabaran audio",refresh_btn:"Dapatkan cabaran baru",instructions_visual:"Taipkan teksnya:",instructions_audio:"Taip apa yang didengari:",help_btn:"Bantuan",play_again:"Mainkan bunyi sekali lagi",cant_hear_this:"Muat turun bunyi sebagai MP3",incorrect_try_again:"Tidak betul. Cuba lagi.",image_alt_text:"Imej cabaran reCAPTCHA",
-privacy_and_terms:"Privasi & Syarat"},nl:{visual_challenge:"Een visuele uitdaging proberen",audio_challenge:"Een audio-uitdaging proberen",refresh_btn:"Een nieuwe uitdaging proberen",instructions_visual:"Typ de tekst:",instructions_audio:"Typ wat u hoort:",help_btn:"Help",play_again:"Geluid opnieuw afspelen",cant_hear_this:"Geluid downloaden als MP3",incorrect_try_again:"Onjuist. Probeer het opnieuw.",image_alt_text:"reCAPTCHA-uitdagingsafbeelding",privacy_and_terms:"Privacy en voorwaarden"},no:{visual_challenge:"F\u00e5 en bildeutfordring",
-audio_challenge:"F\u00e5 en lydutfordring",refresh_btn:"F\u00e5 en ny utfordring",instructions_visual:"Skriv inn teksten:",instructions_audio:"Skriv inn det du h\u00f8rer:",help_btn:"Hjelp",play_again:"Spill av lyd p\u00e5 nytt",cant_hear_this:"Last ned lyd som MP3",incorrect_try_again:"Feil. Pr\u00f8v p\u00e5 nytt.",image_alt_text:"reCAPTCHA-utfordringsbilde",privacy_and_terms:"Personvern og vilk\u00e5r"},pl:{visual_challenge:"Poka\u017c podpowied\u017a wizualn\u0105",audio_challenge:"Odtw\u00f3rz podpowied\u017a d\u017awi\u0119kow\u0105",
-refresh_btn:"Nowa podpowied\u017a",instructions_visual:"Przepisz tekst:",instructions_audio:"Wpisz us\u0142yszane s\u0142owa:",help_btn:"Pomoc",play_again:"Odtw\u00f3rz d\u017awi\u0119k ponownie",cant_hear_this:"Pobierz d\u017awi\u0119k jako plik MP3",incorrect_try_again:"Nieprawid\u0142owo. Spr\u00f3buj ponownie.",image_alt_text:"Zadanie obrazkowe reCAPTCHA",privacy_and_terms:"Prywatno\u015b\u0107 i warunki"},pt:pa,"pt-BR":pa,"pt-PT":{visual_challenge:"Obter um desafio visual",audio_challenge:"Obter um desafio de \u00e1udio",
-refresh_btn:"Obter um novo desafio",instructions_visual:"Introduza o texto:",instructions_audio:"Escreva o que ouvir:",help_btn:"Ajuda",play_again:"Reproduzir som novamente",cant_hear_this:"Transferir som como MP3",incorrect_try_again:"Incorreto. Tente novamente.",image_alt_text:"Imagem de teste reCAPTCHA",privacy_and_terms:"Privacidade e Termos de Utiliza\u00e7\u00e3o"},ro:qa,ru:{visual_challenge:"\u0412\u0438\u0437\u0443\u0430\u043b\u044c\u043d\u0430\u044f \u043f\u0440\u043e\u0432\u0435\u0440\u043a\u0430",
-audio_challenge:"\u0417\u0432\u0443\u043a\u043e\u0432\u0430\u044f \u043f\u0440\u043e\u0432\u0435\u0440\u043a\u0430",refresh_btn:"\u041e\u0431\u043d\u043e\u0432\u0438\u0442\u044c",instructions_visual:"\u0412\u0432\u0435\u0434\u0438\u0442\u0435 \u0442\u0435\u043a\u0441\u0442:",instructions_audio:"\u0412\u0432\u0435\u0434\u0438\u0442\u0435 \u0442\u043e, \u0447\u0442\u043e \u0441\u043b\u044b\u0448\u0438\u0442\u0435:",help_btn:"\u0421\u043f\u0440\u0430\u0432\u043a\u0430",play_again:"\u041f\u0440\u043e\u0441\u043b\u0443\u0448\u0430\u0442\u044c \u0435\u0449\u0435 \u0440\u0430\u0437",
-cant_hear_this:"\u0417\u0430\u0433\u0440\u0443\u0437\u0438\u0442\u044c MP3-\u0444\u0430\u0439\u043b",incorrect_try_again:"\u041d\u0435\u043f\u0440\u0430\u0432\u0438\u043b\u044c\u043d\u043e. \u041f\u043e\u0432\u0442\u043e\u0440\u0438\u0442\u0435 \u043f\u043e\u043f\u044b\u0442\u043a\u0443.",image_alt_text:"\u041f\u0440\u043e\u0432\u0435\u0440\u043a\u0430 \u043f\u043e \u0441\u043b\u043e\u0432\u0443 reCAPTCHA",privacy_and_terms:"\u041f\u0440\u0430\u0432\u0438\u043b\u0430 \u0438 \u043f\u0440\u0438\u043d\u0446\u0438\u043f\u044b"},
-sk:{visual_challenge:"Zobrazi\u0165 vizu\u00e1lnu podobu",audio_challenge:"Prehra\u0165 zvukov\u00fa podobu",refresh_btn:"Zobrazi\u0165 nov\u00fd v\u00fdraz",instructions_visual:"Zadajte text:",instructions_audio:"Zadajte, \u010do po\u010dujete:",help_btn:"Pomocn\u00edk",play_again:"Znova prehra\u0165 zvuk",cant_hear_this:"Prevzia\u0165 zvuk v podobe s\u00faboru MP3",incorrect_try_again:"Nespr\u00e1vne. Sk\u00faste to znova.",image_alt_text:"Obr\u00e1zok zadania reCAPTCHA",privacy_and_terms:"Ochrana osobn\u00fdch \u00fadajov a Zmluvn\u00e9 podmienky"},
-sl:{visual_challenge:"Vizualni preskus",audio_challenge:"Zvo\u010dni preskus",refresh_btn:"Nov preskus",instructions_visual:"Vnesite besedilo:",instructions_audio:"Natipkajte, kaj sli\u0161ite:",help_btn:"Pomo\u010d",play_again:"Znova predvajaj zvok",cant_hear_this:"Prenesi zvok kot MP3",incorrect_try_again:"Napa\u010dno. Poskusite znova.",image_alt_text:"Slika izziva reCAPTCHA",privacy_and_terms:"Zasebnost in pogoji"},sr:{visual_challenge:"\u041f\u0440\u0438\u043c\u0438\u0442\u0435 \u0432\u0438\u0437\u0443\u0435\u043b\u043d\u0438 \u0443\u043f\u0438\u0442",
-audio_challenge:"\u041f\u0440\u0438\u043c\u0438\u0442\u0435 \u0430\u0443\u0434\u0438\u043e \u0443\u043f\u0438\u0442",refresh_btn:"\u041f\u0440\u0438\u043c\u0438\u0442\u0435 \u043d\u043e\u0432\u0438 \u0443\u043f\u0438\u0442",instructions_visual:"\u0423\u043d\u0435\u0441\u0438\u0442\u0435 \u0442\u0435\u043a\u0441\u0442:",instructions_audio:"\u041e\u0442\u043a\u0443\u0446\u0430\u0458\u0442\u0435 \u043e\u043d\u043e \u0448\u0442\u043e \u0447\u0443\u0458\u0435\u0442\u0435:",help_btn:"\u041f\u043e\u043c\u043e\u045b",
-play_again:"\u041f\u043e\u043d\u043e\u0432\u043e \u043f\u0443\u0441\u0442\u0438 \u0437\u0432\u0443\u043a",cant_hear_this:"\u041f\u0440\u0435\u0443\u0437\u043c\u0438 \u0437\u0432\u0443\u043a \u043a\u0430\u043e MP3 \u0441\u043d\u0438\u043c\u0430\u043a",incorrect_try_again:"\u041d\u0435\u0442\u0430\u0447\u043d\u043e. \u041f\u043e\u043a\u0443\u0448\u0430\u0458\u0442\u0435 \u043f\u043e\u043d\u043e\u0432\u043e.",image_alt_text:"\u0421\u043b\u0438\u043a\u0430 reCAPTCHA \u043f\u0440\u043e\u0432\u0435\u0440\u0435",
-privacy_and_terms:"\u041f\u0440\u0438\u0432\u0430\u0442\u043d\u043e\u0441\u0442 \u0438 \u0443\u0441\u043b\u043e\u0432\u0438"},sv:{visual_challenge:"H\u00e4mta captcha i bildformat",audio_challenge:"H\u00e4mta captcha i ljudformat",refresh_btn:"H\u00e4mta ny captcha",instructions_visual:"Skriv texten:",instructions_audio:"Skriv det du h\u00f6r:",help_btn:"Hj\u00e4lp",play_again:"Spela upp ljudet igen",cant_hear_this:"H\u00e4mta ljud som MP3",incorrect_try_again:"Fel. F\u00f6rs\u00f6k igen.",image_alt_text:"reCAPTCHA-bild",
-privacy_and_terms:"Sekretess och villkor"},sw:{visual_challenge:"Pata herufi za kusoma",audio_challenge:"Pata herufi za kusikiliza",refresh_btn:"Pata herufi mpya",instructions_visual:"",instructions_audio:"Charaza unachosikia:",help_btn:"Usaidizi",play_again:"Cheza sauti tena",cant_hear_this:"Pakua sauti kama MP3",incorrect_try_again:"Sio sahihi. Jaribu tena.",image_alt_text:"picha ya changamoto ya reCAPTCHA",privacy_and_terms:"Faragha & Masharti"},ta:{visual_challenge:"\u0baa\u0bbe\u0bb0\u0bcd\u0bb5\u0bc8 \u0b9a\u0bc7\u0bb2\u0b9e\u0bcd\u0b9a\u0bc8\u0baa\u0bcd \u0baa\u0bc6\u0bb1\u0bc1\u0b95",
-audio_challenge:"\u0b86\u0b9f\u0bbf\u0baf\u0bcb \u0b9a\u0bc7\u0bb2\u0b9e\u0bcd\u0b9a\u0bc8\u0baa\u0bcd \u0baa\u0bc6\u0bb1\u0bc1\u0b95",refresh_btn:"\u0baa\u0bc1\u0ba4\u0bbf\u0baf \u0b9a\u0bc7\u0bb2\u0b9e\u0bcd\u0b9a\u0bc8\u0baa\u0bcd \u0baa\u0bc6\u0bb1\u0bc1\u0b95",instructions_visual:"",instructions_audio:"\u0b95\u0bc7\u0b9f\u0bcd\u0baa\u0ba4\u0bc8 \u0b9f\u0bc8\u0baa\u0bcd \u0b9a\u0bc6\u0baf\u0bcd\u0b95:",help_btn:"\u0b89\u0ba4\u0bb5\u0bbf",play_again:"\u0b92\u0bb2\u0bbf\u0baf\u0bc8 \u0bae\u0bc0\u0ba3\u0bcd\u0b9f\u0bc1\u0bae\u0bcd \u0b87\u0baf\u0b95\u0bcd\u0b95\u0bc1",
-cant_hear_this:"\u0b92\u0bb2\u0bbf\u0baf\u0bc8 MP3 \u0b86\u0b95 \u0baa\u0ba4\u0bbf\u0bb5\u0bbf\u0bb1\u0b95\u0bcd\u0b95\u0bc1\u0b95",incorrect_try_again:"\u0ba4\u0bb5\u0bb1\u0bbe\u0ba9\u0ba4\u0bc1. \u0bae\u0bc0\u0ba3\u0bcd\u0b9f\u0bc1\u0bae\u0bcd \u0bae\u0bc1\u0baf\u0bb2\u0bb5\u0bc1\u0bae\u0bcd.",image_alt_text:"reCAPTCHA \u0b9a\u0bc7\u0bb2\u0b9e\u0bcd\u0b9a\u0bcd \u0baa\u0b9f\u0bae\u0bcd",privacy_and_terms:"\u0ba4\u0ba9\u0bbf\u0baf\u0bc1\u0bb0\u0bbf\u0bae\u0bc8 & \u0bb5\u0bbf\u0ba4\u0bbf\u0bae\u0bc1\u0bb1\u0bc8\u0b95\u0bb3\u0bcd"},
-te:{visual_challenge:"\u0c12\u0c15 \u0c26\u0c43\u0c36\u0c4d\u0c2f\u0c2e\u0c3e\u0c28 \u0c38\u0c35\u0c3e\u0c32\u0c41\u0c28\u0c41 \u0c38\u0c4d\u0c35\u0c40\u0c15\u0c30\u0c3f\u0c02\u0c1a\u0c02\u0c21\u0c3f",audio_challenge:"\u0c12\u0c15 \u0c06\u0c21\u0c3f\u0c2f\u0c4b \u0c38\u0c35\u0c3e\u0c32\u0c41\u0c28\u0c41 \u0c38\u0c4d\u0c35\u0c40\u0c15\u0c30\u0c3f\u0c02\u0c1a\u0c02\u0c21\u0c3f",refresh_btn:"\u0c15\u0c4d\u0c30\u0c4a\u0c24\u0c4d\u0c24 \u0c38\u0c35\u0c3e\u0c32\u0c41\u0c28\u0c41 \u0c38\u0c4d\u0c35\u0c40\u0c15\u0c30\u0c3f\u0c02\u0c1a\u0c02\u0c21\u0c3f",
-instructions_visual:"",instructions_audio:"\u0c2e\u0c40\u0c30\u0c41 \u0c35\u0c3f\u0c28\u0c4d\u0c28\u0c26\u0c3f \u0c1f\u0c48\u0c2a\u0c4d \u0c1a\u0c47\u0c2f\u0c02\u0c21\u0c3f:",help_btn:"\u0c38\u0c39\u0c3e\u0c2f\u0c02",play_again:"\u0c27\u0c4d\u0c35\u0c28\u0c3f\u0c28\u0c3f \u0c2e\u0c33\u0c4d\u0c32\u0c40 \u0c2a\u0c4d\u0c32\u0c47 \u0c1a\u0c47\u0c2f\u0c3f",cant_hear_this:"\u0c27\u0c4d\u0c35\u0c28\u0c3f\u0c28\u0c3f MP3 \u0c35\u0c32\u0c46 \u0c21\u0c4c\u0c28\u0c4d\u200c\u0c32\u0c4b\u0c21\u0c4d \u0c1a\u0c47\u0c2f\u0c3f",
-incorrect_try_again:"\u0c24\u0c2a\u0c4d\u0c2a\u0c41. \u0c2e\u0c33\u0c4d\u0c32\u0c40 \u0c2a\u0c4d\u0c30\u0c2f\u0c24\u0c4d\u0c28\u0c3f\u0c02\u0c1a\u0c02\u0c21\u0c3f.",image_alt_text:"reCAPTCHA \u0c38\u0c35\u0c3e\u0c32\u0c41 \u0c1a\u0c3f\u0c24\u0c4d\u0c30\u0c02",privacy_and_terms:"\u0c17\u0c4b\u0c2a\u0c4d\u0c2f\u0c24 & \u0c28\u0c3f\u0c2c\u0c02\u0c27\u0c28\u0c32\u0c41"},th:{visual_challenge:"\u0e23\u0e31\u0e1a\u0e04\u0e27\u0e32\u0e21\u0e17\u0e49\u0e32\u0e17\u0e32\u0e22\u0e14\u0e49\u0e32\u0e19\u0e20\u0e32\u0e1e",
-audio_challenge:"\u0e23\u0e31\u0e1a\u0e04\u0e27\u0e32\u0e21\u0e17\u0e49\u0e32\u0e17\u0e32\u0e22\u0e14\u0e49\u0e32\u0e19\u0e40\u0e2a\u0e35\u0e22\u0e07",refresh_btn:"\u0e23\u0e31\u0e1a\u0e04\u0e27\u0e32\u0e21\u0e17\u0e49\u0e32\u0e17\u0e32\u0e22\u0e43\u0e2b\u0e21\u0e48",instructions_visual:"\u0e1e\u0e34\u0e21\u0e1e\u0e4c\u0e02\u0e49\u0e2d\u0e04\u0e27\u0e32\u0e21\u0e19\u0e35\u0e49:",instructions_audio:"\u0e1e\u0e34\u0e21\u0e1e\u0e4c\u0e2a\u0e34\u0e48\u0e07\u0e17\u0e35\u0e48\u0e04\u0e38\u0e13\u0e44\u0e14\u0e49\u0e22\u0e34\u0e19:",
-help_btn:"\u0e04\u0e27\u0e32\u0e21\u0e0a\u0e48\u0e27\u0e22\u0e40\u0e2b\u0e25\u0e37\u0e2d",play_again:"\u0e40\u0e25\u0e48\u0e19\u0e40\u0e2a\u0e35\u0e22\u0e07\u0e2d\u0e35\u0e01\u0e04\u0e23\u0e31\u0e49\u0e07",cant_hear_this:"\u0e14\u0e32\u0e27\u0e42\u0e2b\u0e25\u0e14\u0e40\u0e2a\u0e35\u0e22\u0e07\u0e40\u0e1b\u0e47\u0e19 MP3",incorrect_try_again:"\u0e44\u0e21\u0e48\u0e16\u0e39\u0e01\u0e15\u0e49\u0e2d\u0e07 \u0e25\u0e2d\u0e07\u0e2d\u0e35\u0e01\u0e04\u0e23\u0e31\u0e49\u0e07",image_alt_text:"\u0e23\u0e2b\u0e31\u0e2a\u0e20\u0e32\u0e1e reCAPTCHA",
-privacy_and_terms:"\u0e19\u0e42\u0e22\u0e1a\u0e32\u0e22\u0e2a\u0e48\u0e27\u0e19\u0e1a\u0e38\u0e04\u0e04\u0e25\u0e41\u0e25\u0e30\u0e02\u0e49\u0e2d\u0e01\u0e33\u0e2b\u0e19\u0e14"},tr:{visual_challenge:"G\u00f6rsel sorgu al",audio_challenge:"Sesli sorgu al",refresh_btn:"Yeniden y\u00fckle",instructions_visual:"Metni yaz\u0131n:",instructions_audio:"Duydu\u011funuzu yaz\u0131n:",help_btn:"Yard\u0131m",play_again:"Sesi tekrar \u00e7al",cant_hear_this:"Sesi MP3 olarak indir",incorrect_try_again:"Yanl\u0131\u015f. Tekrar deneyin.",
-image_alt_text:"reCAPTCHA sorusu resmi",privacy_and_terms:"Gizlilik ve \u015eartlar"},uk:{visual_challenge:"\u041e\u0442\u0440\u0438\u043c\u0430\u0442\u0438 \u0432\u0456\u0437\u0443\u0430\u043b\u044c\u043d\u0438\u0439 \u0442\u0435\u043a\u0441\u0442",audio_challenge:"\u041e\u0442\u0440\u0438\u043c\u0430\u0442\u0438 \u0430\u0443\u0434\u0456\u043e\u0437\u0430\u043f\u0438\u0441",refresh_btn:"\u041e\u043d\u043e\u0432\u0438\u0442\u0438 \u0442\u0435\u043a\u0441\u0442",instructions_visual:"\u0412\u0432\u0435\u0434\u0456\u0442\u044c \u0442\u0435\u043a\u0441\u0442:",
-instructions_audio:"\u0412\u0432\u0435\u0434\u0456\u0442\u044c \u043f\u043e\u0447\u0443\u0442\u0435:",help_btn:"\u0414\u043e\u0432\u0456\u0434\u043a\u0430",play_again:"\u0412\u0456\u0434\u0442\u0432\u043e\u0440\u0438\u0442\u0438 \u0437\u0430\u043f\u0438\u0441 \u0449\u0435 \u0440\u0430\u0437",cant_hear_this:"\u0417\u0430\u0432\u0430\u043d\u0442\u0430\u0436\u0438\u0442\u0438 \u0437\u0430\u043f\u0438\u0441 \u044f\u043a MP3",incorrect_try_again:"\u041d\u0435\u043f\u0440\u0430\u0432\u0438\u043b\u044c\u043d\u043e. \u0421\u043f\u0440\u043e\u0431\u0443\u0439\u0442\u0435 \u0449\u0435 \u0440\u0430\u0437.",
-image_alt_text:"\u0417\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u043d\u044f \u0437\u0430\u0432\u0434\u0430\u043d\u043d\u044f reCAPTCHA",privacy_and_terms:"\u041a\u043e\u043d\u0444\u0456\u0434\u0435\u043d\u0446\u0456\u0439\u043d\u0456\u0441\u0442\u044c \u0456 \u0443\u043c\u043e\u0432\u0438"},ur:{visual_challenge:"\u0627\u06cc\u06a9 \u0645\u0631\u0626\u06cc \u0686\u06cc\u0644\u0646\u062c \u062d\u0627\u0635\u0644 \u06a9\u0631\u06cc\u06ba",audio_challenge:"\u0627\u06cc\u06a9 \u0622\u0688\u06cc\u0648 \u0686\u06cc\u0644\u0646\u062c \u062d\u0627\u0635\u0644 \u06a9\u0631\u06cc\u06ba",
-refresh_btn:"\u0627\u06cc\u06a9 \u0646\u06cc\u0627 \u0686\u06cc\u0644\u0646\u062c \u062d\u0627\u0635\u0644 \u06a9\u0631\u06cc\u06ba",instructions_visual:"",instructions_audio:"\u062c\u0648 \u0633\u0646\u0627\u0626\u06cc \u062f\u06cc\u062a\u0627 \u06c1\u06d2 \u0648\u06c1 \u0679\u0627\u0626\u067e \u06a9\u0631\u06cc\u06ba:",help_btn:"\u0645\u062f\u062f",play_again:"\u0622\u0648\u0627\u0632 \u062f\u0648\u0628\u0627\u0631\u06c1 \u0686\u0644\u0627\u0626\u06cc\u06ba",cant_hear_this:"\u0622\u0648\u0627\u0632 \u06a9\u0648 MP3 \u06a9\u06d2 \u0628\u0637\u0648\u0631 \u0688\u0627\u0624\u0646 \u0644\u0648\u0688 \u06a9\u0631\u06cc\u06ba",
-incorrect_try_again:"\u063a\u0644\u0637\u06d4 \u062f\u0648\u0628\u0627\u0631\u06c1 \u06a9\u0648\u0634\u0634 \u06a9\u0631\u06cc\u06ba\u06d4",image_alt_text:"reCAPTCHA \u0686\u06cc\u0644\u0646\u062c \u0648\u0627\u0644\u06cc \u0634\u0628\u06cc\u06c1",privacy_and_terms:"\u0631\u0627\u0632\u062f\u0627\u0631\u06cc \u0648 \u0634\u0631\u0627\u0626\u0637"},vi:{visual_challenge:"Nh\u1eadn th\u1eed th\u00e1ch h\u00ecnh \u1ea3nh",audio_challenge:"Nh\u1eadn th\u1eed th\u00e1ch \u00e2m thanh",refresh_btn:"Nh\u1eadn th\u1eed th\u00e1ch m\u1edbi",
-instructions_visual:"Nh\u1eadp v\u0103n b\u1ea3n:",instructions_audio:"Nh\u1eadp n\u1ed9i dung b\u1ea1n nghe th\u1ea5y:",help_btn:"Tr\u1ee3 gi\u00fap",play_again:"Ph\u00e1t l\u1ea1i \u00e2m thanh",cant_hear_this:"T\u1ea3i \u00e2m thanh xu\u1ed1ng d\u01b0\u1edbi d\u1ea1ng MP3",incorrect_try_again:"Kh\u00f4ng ch\u00ednh x\u00e1c. H\u00e3y th\u1eed l\u1ea1i.",image_alt_text:"H\u00ecnh x\u00e1c th\u1ef1c reCAPTCHA",privacy_and_terms:"B\u1ea3o m\u1eadt v\u00e0 \u0111i\u1ec1u kho\u1ea3n"},"zh-CN":ra,"zh-HK":{visual_challenge:"\u56de\u7b54\u5716\u50cf\u9a57\u8b49\u554f\u984c",
-audio_challenge:"\u53d6\u5f97\u8a9e\u97f3\u9a57\u8b49\u554f\u984c",refresh_btn:"\u63db\u4e00\u500b\u9a57\u8b49\u554f\u984c",instructions_visual:"\u8f38\u5165\u6587\u5b57\uff1a",instructions_audio:"\u9375\u5165\u60a8\u6240\u807d\u5230\u7684\uff1a",help_btn:"\u8aaa\u660e",play_again:"\u518d\u6b21\u64ad\u653e\u8072\u97f3",cant_hear_this:"\u5c07\u8072\u97f3\u4e0b\u8f09\u70ba MP3",incorrect_try_again:"\u4e0d\u6b63\u78ba\uff0c\u518d\u8a66\u4e00\u6b21\u3002",image_alt_text:"reCAPTCHA \u9a57\u8b49\u6587\u5b57\u5716\u7247",
-privacy_and_terms:"\u79c1\u96b1\u6b0a\u8207\u689d\u6b3e"},"zh-TW":{visual_challenge:"\u53d6\u5f97\u5716\u7247\u9a57\u8b49\u554f\u984c",audio_challenge:"\u53d6\u5f97\u8a9e\u97f3\u9a57\u8b49\u554f\u984c",refresh_btn:"\u53d6\u5f97\u65b0\u7684\u9a57\u8b49\u554f\u984c",instructions_visual:"\u8acb\u8f38\u5165\u5716\u7247\u4e2d\u7684\u6587\u5b57\uff1a",instructions_audio:"\u8acb\u8f38\u5165\u8a9e\u97f3\u5167\u5bb9\uff1a",help_btn:"\u8aaa\u660e",play_again:"\u518d\u6b21\u64ad\u653e",cant_hear_this:"\u4ee5 MP3 \u683c\u5f0f\u4e0b\u8f09\u8072\u97f3",
-incorrect_try_again:"\u9a57\u8b49\u78bc\u6709\u8aa4\uff0c\u8acb\u518d\u8a66\u4e00\u6b21\u3002",image_alt_text:"reCAPTCHA \u9a57\u8b49\u6587\u5b57\u5716\u7247",privacy_and_terms:"\u96b1\u79c1\u6b0a\u8207\u689d\u6b3e"},zu:{visual_challenge:"Thola inselelo ebonakalayo",audio_challenge:"Thola inselelo yokulalelwayo",refresh_btn:"Thola inselelo entsha",instructions_visual:"",instructions_audio:"Bhala okuzwayo:",help_btn:"Usizo",play_again:"Phinda udlale okulalelwayo futhi",cant_hear_this:"Layisha umsindo njenge-MP3",
-incorrect_try_again:"Akulungile. Zama futhi.",image_alt_text:"umfanekiso oyinselelo we-reCAPTCHA",privacy_and_terms:"Okwangasese kanye nemigomo"},tl:la,he:oa,"in":na,mo:qa,zh:ra};var x=function(a){if(Error.captureStackTrace)Error.captureStackTrace(this,x);else{var b=Error().stack;b&&(this.stack=b)}a&&(this.message=String(a))};u(x,Error);x.prototype.name="CustomError";var ta;var ua=function(a,b){for(var c=a.split("%s"),d="",e=Array.prototype.slice.call(arguments,1);e.length&&1<c.length;)d+=c.shift()+e.shift();return d+c.join("%s")},va=String.prototype.trim?function(a){return a.trim()}:function(a){return a.replace(/^[\s\xa0]+|[\s\xa0]+$/g,"")},Da=function(a){if(!wa.test(a))return a;-1!=a.indexOf("&")&&(a=a.replace(xa,"&amp;"));-1!=a.indexOf("<")&&(a=a.replace(ya,"&lt;"));-1!=a.indexOf(">")&&(a=a.replace(za,"&gt;"));-1!=a.indexOf('"')&&(a=a.replace(Aa,"&quot;"));-1!=a.indexOf("'")&&
-(a=a.replace(Ba,"&#39;"));-1!=a.indexOf("\x00")&&(a=a.replace(Ca,"&#0;"));return a},xa=/&/g,ya=/</g,za=/>/g,Aa=/"/g,Ba=/'/g,Ca=/\x00/g,wa=/[\x00&<>"']/,Ea=function(a,b){return a<b?-1:a>b?1:0},Fa=function(a){return String(a).replace(/\-([a-z])/g,function(a,c){return c.toUpperCase()})},Ga=function(a){var b=q(void 0)?"undefined".replace(/([-()\[\]{}+?*.$\^|,:#<!\\])/g,"\\$1").replace(/\x08/g,"\\x08"):"\\s";return a.replace(new RegExp("(^"+(b?"|["+b+"]+":"")+")([a-z])","g"),function(a,b,e){return b+e.toUpperCase()})};var Ha=function(a,b){b.unshift(a);x.call(this,ua.apply(null,b));b.shift()};u(Ha,x);Ha.prototype.name="AssertionError";
-var Ia=function(a,b,c,d){var e="Assertion failed";if(c)var e=e+(": "+c),g=d;else a&&(e+=": "+a,g=b);throw new Ha(""+e,g||[]);},y=function(a,b,c){a||Ia("",null,b,Array.prototype.slice.call(arguments,2))},Ja=function(a,b){throw new Ha("Failure"+(a?": "+a:""),Array.prototype.slice.call(arguments,1));},Ka=function(a,b,c){q(a)||Ia("Expected string but got %s: %s.",[n(a),a],b,Array.prototype.slice.call(arguments,2));return a},La=function(a,b,c){r(a)||Ia("Expected function but got %s: %s.",[n(a),a],b,Array.prototype.slice.call(arguments,
-2))};var z=Array.prototype,Ma=z.indexOf?function(a,b,c){y(null!=a.length);return z.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(q(a))return q(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},Na=z.forEach?function(a,b,c){y(null!=a.length);z.forEach.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=q(a)?a.split(""):a,g=0;g<d;g++)g in e&&b.call(c,e[g],g,a)},Oa=z.map?function(a,b,c){y(null!=a.length);return z.map.call(a,b,c)}:
-function(a,b,c){for(var d=a.length,e=Array(d),g=q(a)?a.split(""):a,f=0;f<d;f++)f in g&&(e[f]=b.call(c,g[f],f,a));return e},Pa=z.some?function(a,b,c){y(null!=a.length);return z.some.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=q(a)?a.split(""):a,g=0;g<d;g++)if(g in e&&b.call(c,e[g],g,a))return!0;return!1},Qa=function(a,b){var c=Ma(a,b),d;if(d=0<=c)y(null!=a.length),z.splice.call(a,c,1);return d},Ra=function(a){var b=a.length;if(0<b){for(var c=Array(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},
-Sa=function(a,b,c){y(null!=a.length);return 2>=arguments.length?z.slice.call(a,b):z.slice.call(a,b,c)};var Ta=function(a,b){for(var c in a)b.call(void 0,a[c],c,a)},Ua=function(a){var b=[],c=0,d;for(d in a)b[c++]=d;return b},Va=function(a){for(var b in a)return!1;return!0},Xa=function(){var a=Wa()?k.google_ad:null,b={},c;for(c in a)b[c]=a[c];return b},Ya="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Za=function(a,b){for(var c,d,e=1;e<arguments.length;e++){d=arguments[e];for(c in d)a[c]=d[c];for(var g=0;g<Ya.length;g++)c=Ya[g],Object.prototype.hasOwnProperty.call(d,
-c)&&(a[c]=d[c])}},$a=function(a){var b=arguments.length;if(1==b&&p(arguments[0]))return $a.apply(null,arguments[0]);for(var c={},d=0;d<b;d++)c[arguments[d]]=!0;return c};var A;t:{var ab=k.navigator;if(ab){var bb=ab.userAgent;if(bb){A=bb;break t}}A=""}var B=function(a){return-1!=A.indexOf(a)};var cb=B("Opera")||B("OPR"),C=B("Trident")||B("MSIE"),D=B("Gecko")&&-1==A.toLowerCase().indexOf("webkit")&&!(B("Trident")||B("MSIE")),E=-1!=A.toLowerCase().indexOf("webkit"),db=function(){var a=k.document;return a?a.documentMode:void 0},eb=function(){var a="",b;if(cb&&k.opera)return a=k.opera.version,r(a)?a():a;D?b=/rv\:([^\);]+)(\)|;)/:C?b=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:E&&(b=/WebKit\/(\S+)/);b&&(a=(a=b.exec(A))?a[1]:"");return C&&(b=db(),b>parseFloat(a))?String(b):a}(),fb={},F=function(a){var b;
-if(!(b=fb[a])){b=0;for(var c=va(String(eb)).split("."),d=va(String(a)).split("."),e=Math.max(c.length,d.length),g=0;0==b&&g<e;g++){var f=c[g]||"",m=d[g]||"",$=RegExp("(\\d*)(\\D*)","g"),K=RegExp("(\\d*)(\\D*)","g");do{var G=$.exec(f)||["","",""],aa=K.exec(m)||["","",""];if(0==G[0].length&&0==aa[0].length)break;b=Ea(0==G[1].length?0:parseInt(G[1],10),0==aa[1].length?0:parseInt(aa[1],10))||Ea(0==G[2].length,0==aa[2].length)||Ea(G[2],aa[2])}while(0==b)}b=fb[a]=0<=b}return b},gb=k.document,hb=gb&&C?db()||
-("CSS1Compat"==gb.compatMode?parseInt(eb,10):5):void 0;var ib=function(a){if(8192>a.length)return String.fromCharCode.apply(null,a);for(var b="",c=0;c<a.length;c+=8192)var d=Sa(a,c,c+8192),b=b+String.fromCharCode.apply(null,d);return b},jb=function(a){return Oa(a,function(a){a=a.toString(16);return 1<a.length?a:"0"+a}).join("")};var kb=null,lb=null,mb=function(a){if(!kb){kb={};lb={};for(var b=0;65>b;b++)kb[b]="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=".charAt(b),lb[kb[b]]=b,62<=b&&(lb["ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.".charAt(b)]=b)}for(var b=lb,c=[],d=0;d<a.length;){var e=b[a.charAt(d++)],g=d<a.length?b[a.charAt(d)]:0;++d;var f=d<a.length?b[a.charAt(d)]:64;++d;var m=d<a.length?b[a.charAt(d)]:64;++d;if(null==e||null==g||null==f||null==m)throw Error();c.push(e<<2|g>>
-4);64!=f&&(c.push(g<<4&240|f>>2),64!=m&&c.push(f<<6&192|m))}return c};var H=function(){this.disposed_=this.disposed_;this.onDisposeCallbacks_=this.onDisposeCallbacks_};H.prototype.disposed_=!1;H.prototype.dispose=function(){this.disposed_||(this.disposed_=!0,this.disposeInternal())};var nb=function(a,b){a.onDisposeCallbacks_||(a.onDisposeCallbacks_=[]);a.onDisposeCallbacks_.push(l(void 0)?s(b,void 0):b)};H.prototype.disposeInternal=function(){if(this.onDisposeCallbacks_)for(;this.onDisposeCallbacks_.length;)this.onDisposeCallbacks_.shift()()};
-var ob=function(a){a&&"function"==typeof a.dispose&&a.dispose()};var pb=!C||C&&9<=hb;!D&&!C||C&&C&&9<=hb||D&&F("1.9.1");C&&F("9");var sb=function(a){return a?new qb(rb(a)):ta||(ta=new qb)},tb=function(a,b){return q(b)?a.getElementById(b):b},vb=function(a,b){Ta(b,function(b,d){"style"==d?a.style.cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in ub?a.setAttribute(ub[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},ub={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",
-type:"type",usemap:"useMap",valign:"vAlign",width:"width"},xb=function(a,b,c){return wb(document,arguments)},wb=function(a,b){var c=b[0],d=b[1];if(!pb&&d&&(d.name||d.type)){c=["<",c];d.name&&c.push(' name="',Da(d.name),'"');if(d.type){c.push(' type="',Da(d.type),'"');var e={};Za(e,d);delete e.type;d=e}c.push(">");c=c.join("")}c=a.createElement(c);d&&(q(d)?c.className=d:p(d)?c.className=d.join(" "):vb(c,d));2<b.length&&yb(a,c,b);return c},yb=function(a,b,c){function d(c){c&&b.appendChild(q(c)?a.createTextNode(c):
-c)}for(var e=2;e<c.length;e++){var g=c[e];!ca(g)||da(g)&&0<g.nodeType?d(g):Na(zb(g)?Ra(g):g,d)}},Ab=function(a){for(var b;b=a.firstChild;)a.removeChild(b)},Bb=function(a){a&&a.parentNode&&a.parentNode.removeChild(a)},rb=function(a){y(a,"Node cannot be null or undefined.");return 9==a.nodeType?a:a.ownerDocument||a.document},zb=function(a){if(a&&"number"==typeof a.length){if(da(a))return"function"==typeof a.item||"string"==typeof a.item;if(r(a))return"function"==typeof a.item}return!1},qb=function(a){this.document_=
-a||k.document||document};h=qb.prototype;h.getDomHelper=sb;h.getElement=function(a){return tb(this.document_,a)};h.$=qb.prototype.getElement;h.createDom=function(a,b,c){return wb(this.document_,arguments)};h.createElement=function(a){return this.document_.createElement(a)};h.createTextNode=function(a){return this.document_.createTextNode(String(a))};h.appendChild=function(a,b){a.appendChild(b)};var Cb=function(a){k.setTimeout(function(){throw a;},0)},Db,Eb=function(){var a=k.MessageChannel;"undefined"===typeof a&&"undefined"!==typeof window&&window.postMessage&&window.addEventListener&&(a=function(){var a=document.createElement("iframe");a.style.display="none";a.src="";document.documentElement.appendChild(a);var b=a.contentWindow,a=b.document;a.open();a.write("");a.close();var c="callImmediate"+Math.random(),d="file:"==b.location.protocol?"*":b.location.protocol+"//"+b.location.host,a=s(function(a){if(("*"==
-d||a.origin==d)&&a.data==c)this.port1.onmessage()},this);b.addEventListener("message",a,!1);this.port1={};this.port2={postMessage:function(){b.postMessage(c,d)}}});if("undefined"!==typeof a&&!B("Trident")&&!B("MSIE")){var b=new a,c={},d=c;b.port1.onmessage=function(){if(l(c.next)){c=c.next;var a=c.cb;c.cb=null;a()}};return function(a){d.next={cb:a};d=d.next;b.port2.postMessage(0)}}return"undefined"!==typeof document&&"onreadystatechange"in document.createElement("script")?function(a){var b=document.createElement("script");
-b.onreadystatechange=function(){b.onreadystatechange=null;b.parentNode.removeChild(b);b=null;a();a=null};document.documentElement.appendChild(b)}:function(a){k.setTimeout(a,0)}};var Kb=function(a,b){Fb||Gb();Hb||(Fb(),Hb=!0);Ib.push(new Jb(a,b))},Fb,Gb=function(){if(k.Promise&&k.Promise.resolve){var a=k.Promise.resolve();Fb=function(){a.then(Lb)}}else Fb=function(){var a=Lb;!r(k.setImmediate)||k.Window&&k.Window.prototype.setImmediate==k.setImmediate?(Db||(Db=Eb()),Db(a)):k.setImmediate(a)}},Hb=!1,Ib=[],Lb=function(){for(;Ib.length;){var a=Ib;Ib=[];for(var b=0;b<a.length;b++){var c=a[b];try{c.fn.call(c.scope)}catch(d){Cb(d)}}}Hb=!1},Jb=function(a,b){this.fn=a;this.scope=
-b};var Mb=function(a){a.prototype.then=a.prototype.then;a.prototype.$goog_Thenable=!0},Nb=function(a){if(!a)return!1;try{return!!a.$goog_Thenable}catch(b){return!1}};var L=function(a,b){this.state_=0;this.result_=void 0;this.callbackEntries_=this.parent_=null;this.hadUnhandledRejection_=this.executing_=!1;try{var c=this;a.call(b,function(a){I(c,2,a)},function(a){if(!(a instanceof J))try{if(a instanceof Error)throw a;throw Error("Promise rejected.");}catch(b){}I(c,3,a)})}catch(d){I(this,3,d)}};
-L.prototype.then=function(a,b,c){null!=a&&La(a,"opt_onFulfilled should be a function.");null!=b&&La(b,"opt_onRejected should be a function. Did you pass opt_context as the second argument instead of the third?");return Ob(this,r(a)?a:null,r(b)?b:null,c)};Mb(L);L.prototype.cancel=function(a){0==this.state_&&Kb(function(){var b=new J(a);Pb(this,b)},this)};
-var Pb=function(a,b){if(0==a.state_)if(a.parent_){var c=a.parent_;if(c.callbackEntries_){for(var d=0,e=-1,g=0,f;f=c.callbackEntries_[g];g++)if(f=f.child)if(d++,f==a&&(e=g),0<=e&&1<d)break;0<=e&&(0==c.state_&&1==d?Pb(c,b):(d=c.callbackEntries_.splice(e,1)[0],d.child&&Qb(c),d.onRejected(b)))}}else I(a,3,b)},Sb=function(a,b){a.callbackEntries_&&a.callbackEntries_.length||2!=a.state_&&3!=a.state_||Rb(a);a.callbackEntries_||(a.callbackEntries_=[]);a.callbackEntries_.push(b)},Ob=function(a,b,c,d){var e=
-{child:null,onFulfilled:null,onRejected:null};e.child=new L(function(a,f){e.onFulfilled=b?function(c){try{var e=b.call(d,c);a(e)}catch(K){f(K)}}:a;e.onRejected=c?function(b){try{var e=c.call(d,b);!l(e)&&b instanceof J?f(b):a(e)}catch(K){f(K)}}:f});e.child.parent_=a;Sb(a,e);return e.child};L.prototype.unblockAndFulfill_=function(a){y(1==this.state_);this.state_=0;I(this,2,a)};L.prototype.unblockAndReject_=function(a){y(1==this.state_);this.state_=0;I(this,3,a)};
-var I=function(a,b,c){if(0==a.state_){if(a==c)b=3,c=new TypeError("Promise cannot resolve to itself");else{if(Nb(c)){a.state_=1;c.then(a.unblockAndFulfill_,a.unblockAndReject_,a);return}if(da(c))try{var d=c.then;if(r(d)){Tb(a,c,d);return}}catch(e){b=3,c=e}}a.result_=c;a.state_=b;Rb(a);3!=b||c instanceof J||Ub(a,c)}},Tb=function(a,b,c){a.state_=1;var d=!1,e=function(b){d||(d=!0,a.unblockAndFulfill_(b))},g=function(b){d||(d=!0,a.unblockAndReject_(b))};try{c.call(b,e,g)}catch(f){g(f)}},Rb=function(a){a.executing_||
-(a.executing_=!0,Kb(a.executeCallbacks_,a))};L.prototype.executeCallbacks_=function(){for(;this.callbackEntries_&&this.callbackEntries_.length;){var a=this.callbackEntries_;this.callbackEntries_=[];for(var b=0;b<a.length;b++){var c=a[b],d=this.result_;if(2==this.state_)c.onFulfilled(d);else c.child&&Qb(this),c.onRejected(d)}}this.executing_=!1};
-var Qb=function(a){for(;a&&a.hadUnhandledRejection_;a=a.parent_)a.hadUnhandledRejection_=!1},Ub=function(a,b){a.hadUnhandledRejection_=!0;Kb(function(){a.hadUnhandledRejection_&&Vb.call(null,b)})},Vb=Cb,J=function(a){x.call(this,a)};u(J,x);J.prototype.name="cancel";/*
- Portions of this code are from MochiKit, received by
- The Closure Authors under the MIT license. All other code is Copyright
- 2005-2009 The Closure Authors. All Rights Reserved.
-*/
-var M=function(a,b){this.sequence_=[];this.onCancelFunction_=a;this.defaultScope_=b||null;this.hadError_=this.fired_=!1;this.result_=void 0;this.silentlyCanceled_=this.blocking_=this.blocked_=!1;this.unhandledErrorId_=0;this.parent_=null;this.branches_=0};
-M.prototype.cancel=function(a){if(this.fired_)this.result_ instanceof M&&this.result_.cancel();else{if(this.parent_){var b=this.parent_;delete this.parent_;a?b.cancel(a):(b.branches_--,0>=b.branches_&&b.cancel())}this.onCancelFunction_?this.onCancelFunction_.call(this.defaultScope_,this):this.silentlyCanceled_=!0;this.fired_||Wb(this,new Xb)}};M.prototype.continue_=function(a,b){this.blocked_=!1;Yb(this,a,b)};
-var Yb=function(a,b,c){a.fired_=!0;a.result_=c;a.hadError_=!b;Zb(a)},ac=function(a){if(a.fired_){if(!a.silentlyCanceled_)throw new $b;a.silentlyCanceled_=!1}};M.prototype.callback=function(a){ac(this);bc(a);Yb(this,!0,a)};var Wb=function(a,b){ac(a);bc(b);Yb(a,!1,b)},bc=function(a){y(!(a instanceof M),"An execution sequence may not be initiated with a blocking Deferred.")},cc=function(a,b,c,d){y(!a.blocking_,"Blocking Deferreds can not be re-used");a.sequence_.push([b,c,d]);a.fired_&&Zb(a)};
-M.prototype.then=function(a,b,c){var d,e,g=new L(function(a,b){d=a;e=b});cc(this,d,function(a){a instanceof Xb?g.cancel():e(a)});return g.then(a,b,c)};Mb(M);
-var dc=function(a){return Pa(a.sequence_,function(a){return r(a[1])})},Zb=function(a){if(a.unhandledErrorId_&&a.fired_&&dc(a)){var b=a.unhandledErrorId_,c=ec[b];c&&(k.clearTimeout(c.id_),delete ec[b]);a.unhandledErrorId_=0}a.parent_&&(a.parent_.branches_--,delete a.parent_);for(var b=a.result_,d=c=!1;a.sequence_.length&&!a.blocked_;){var e=a.sequence_.shift(),g=e[0],f=e[1],e=e[2];if(g=a.hadError_?f:g)try{var m=g.call(e||a.defaultScope_,b);l(m)&&(a.hadError_=a.hadError_&&(m==b||m instanceof Error),
-a.result_=b=m);Nb(b)&&(d=!0,a.blocked_=!0)}catch($){b=$,a.hadError_=!0,dc(a)||(c=!0)}}a.result_=b;d&&(m=s(a.continue_,a,!0),d=s(a.continue_,a,!1),b instanceof M?(cc(b,m,d),b.blocking_=!0):b.then(m,d));c&&(b=new fc(b),ec[b.id_]=b,a.unhandledErrorId_=b.id_)},$b=function(){x.call(this)};u($b,x);$b.prototype.message="Deferred has already fired";$b.prototype.name="AlreadyCalledError";var Xb=function(){x.call(this)};u(Xb,x);Xb.prototype.message="Deferred was canceled";Xb.prototype.name="CanceledError";
-var fc=function(a){this.id_=k.setTimeout(s(this.throwError,this),0);this.error_=a};fc.prototype.throwError=function(){y(ec[this.id_],"Cannot throw an error that is not scheduled.");delete ec[this.id_];throw this.error_;};var ec={};var kc=function(a){var b={},c=b.document||document,d=document.createElement("SCRIPT"),e={script_:d,timeout_:void 0},g=new M(gc,e),f=null,m=null!=b.timeout?b.timeout:5E3;0<m&&(f=window.setTimeout(function(){hc(d,!0);Wb(g,new ic(1,"Timeout reached for loading script "+a))},m),e.timeout_=f);d.onload=d.onreadystatechange=function(){d.readyState&&"loaded"!=d.readyState&&"complete"!=d.readyState||(hc(d,b.cleanupWhenDone||!1,f),g.callback(null))};d.onerror=function(){hc(d,!0,f);Wb(g,new ic(0,"Error while loading script "+
-a))};vb(d,{type:"text/javascript",charset:"UTF-8",src:a});jc(c).appendChild(d);return g},jc=function(a){var b=a.getElementsByTagName("HEAD");return b&&0!=b.length?b[0]:a.documentElement},gc=function(){if(this&&this.script_){var a=this.script_;a&&"SCRIPT"==a.tagName&&hc(a,!0,this.timeout_)}},hc=function(a,b,c){null!=c&&k.clearTimeout(c);a.onload=ba;a.onerror=ba;a.onreadystatechange=ba;b&&window.setTimeout(function(){Bb(a)},0)},ic=function(a,b){var c="Jsloader error (code #"+a+")";b&&(c+=": "+b);x.call(this,
-c);this.code=a};u(ic,x);var lc=function(a){lc[" "](a);return a};lc[" "]=ba;var mc=!C||C&&9<=hb,nc=C&&!F("9");!E||F("528");D&&F("1.9b")||C&&F("8")||cb&&F("9.5")||E&&F("528");D&&!F("8")||C&&F("9");var N=function(a,b){this.type=a;this.currentTarget=this.target=b;this.defaultPrevented=this.propagationStopped_=!1;this.returnValue_=!0};N.prototype.disposeInternal=function(){};N.prototype.dispose=function(){};N.prototype.preventDefault=function(){this.defaultPrevented=!0;this.returnValue_=!1};var O=function(a,b){N.call(this,a?a.type:"");this.relatedTarget=this.currentTarget=this.target=null;this.charCode=this.keyCode=this.button=this.screenY=this.screenX=this.clientY=this.clientX=this.offsetY=this.offsetX=0;this.metaKey=this.shiftKey=this.altKey=this.ctrlKey=!1;this.event_=this.state=null;if(a){var c=this.type=a.type;this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(D){var e;t:{try{lc(d.nodeName);e=!0;break t}catch(g){}e=!1}e||(d=null)}}else"mouseover"==
-c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=E||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=E||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||("keypress"==c?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=
-a.metaKey;this.state=a.state;this.event_=a;a.defaultPrevented&&this.preventDefault()}};u(O,N);O.prototype.preventDefault=function(){O.superClass_.preventDefault.call(this);var a=this.event_;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,nc)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};O.prototype.disposeInternal=function(){};var oc="closure_listenable_"+(1E6*Math.random()|0),pc=0;var qc=function(a,b,c,d,e){this.listener=a;this.proxy=null;this.src=b;this.type=c;this.capture=!!d;this.handler=e;this.key=++pc;this.removed=this.callOnce=!1},rc=function(a){a.removed=!0;a.listener=null;a.proxy=null;a.src=null;a.handler=null};var P=function(a){this.src=a;this.listeners={};this.typeCount_=0};P.prototype.add=function(a,b,c,d,e){var g=a.toString();a=this.listeners[g];a||(a=this.listeners[g]=[],this.typeCount_++);var f=sc(a,b,d,e);-1<f?(b=a[f],c||(b.callOnce=!1)):(b=new qc(b,this.src,g,!!d,e),b.callOnce=c,a.push(b));return b};
-P.prototype.remove=function(a,b,c,d){a=a.toString();if(!(a in this.listeners))return!1;var e=this.listeners[a];b=sc(e,b,c,d);return-1<b?(rc(e[b]),y(null!=e.length),z.splice.call(e,b,1),0==e.length&&(delete this.listeners[a],this.typeCount_--),!0):!1};var tc=function(a,b){var c=b.type;if(!(c in a.listeners))return!1;var d=Qa(a.listeners[c],b);d&&(rc(b),0==a.listeners[c].length&&(delete a.listeners[c],a.typeCount_--));return d};
-P.prototype.removeAll=function(a){a=a&&a.toString();var b=0,c;for(c in this.listeners)if(!a||c==a){for(var d=this.listeners[c],e=0;e<d.length;e++)++b,rc(d[e]);delete this.listeners[c];this.typeCount_--}return b};P.prototype.getListener=function(a,b,c,d){a=this.listeners[a.toString()];var e=-1;a&&(e=sc(a,b,c,d));return-1<e?a[e]:null};var sc=function(a,b,c,d){for(var e=0;e<a.length;++e){var g=a[e];if(!g.removed&&g.listener==b&&g.capture==!!c&&g.handler==d)return e}return-1};var uc="closure_lm_"+(1E6*Math.random()|0),vc={},wc=0,xc=function(a,b,c,d,e){if(p(b)){for(var g=0;g<b.length;g++)xc(a,b[g],c,d,e);return null}c=yc(c);if(a&&a[oc])a=a.listen(b,c,d,e);else{if(!b)throw Error("Invalid event type");var g=!!d,f=zc(a);f||(a[uc]=f=new P(a));c=f.add(b,c,!1,d,e);c.proxy||(d=Ac(),c.proxy=d,d.src=a,d.listener=c,a.addEventListener?a.addEventListener(b.toString(),d,g):a.attachEvent(Bc(b.toString()),d),wc++);a=c}return a},Ac=function(){var a=Cc,b=mc?function(c){return a.call(b.src,
-b.listener,c)}:function(c){c=a.call(b.src,b.listener,c);if(!c)return c};return b},Dc=function(a,b,c,d,e){if(p(b))for(var g=0;g<b.length;g++)Dc(a,b[g],c,d,e);else c=yc(c),a&&a[oc]?a.unlisten(b,c,d,e):a&&(a=zc(a))&&(b=a.getListener(b,c,!!d,e))&&Ec(b)},Ec=function(a){if("number"==typeof a||!a||a.removed)return!1;var b=a.src;if(b&&b[oc])return tc(b.eventTargetListeners_,a);var c=a.type,d=a.proxy;b.removeEventListener?b.removeEventListener(c,d,a.capture):b.detachEvent&&b.detachEvent(Bc(c),d);wc--;(c=zc(b))?
-(tc(c,a),0==c.typeCount_&&(c.src=null,b[uc]=null)):rc(a);return!0},Bc=function(a){return a in vc?vc[a]:vc[a]="on"+a},Gc=function(a,b,c,d){var e=1;if(a=zc(a))if(b=a.listeners[b.toString()])for(b=b.concat(),a=0;a<b.length;a++){var g=b[a];g&&g.capture==c&&!g.removed&&(e&=!1!==Fc(g,d))}return Boolean(e)},Fc=function(a,b){var c=a.listener,d=a.handler||a.src;a.callOnce&&Ec(a);return c.call(d,b)},Cc=function(a,b){if(a.removed)return!0;if(!mc){var c;if(!(c=b))t:{c=["window","event"];for(var d=k,e;e=c.shift();)if(null!=
-d[e])d=d[e];else{c=null;break t}c=d}e=c;c=new O(e,this);d=!0;if(!(0>e.keyCode||void 0!=e.returnValue)){t:{var g=!1;if(0==e.keyCode)try{e.keyCode=-1;break t}catch(f){g=!0}if(g||void 0==e.returnValue)e.returnValue=!0}e=[];for(g=c.currentTarget;g;g=g.parentNode)e.push(g);for(var g=a.type,m=e.length-1;!c.propagationStopped_&&0<=m;m--)c.currentTarget=e[m],d&=Gc(e[m],g,!0,c);for(m=0;!c.propagationStopped_&&m<e.length;m++)c.currentTarget=e[m],d&=Gc(e[m],g,!1,c)}return d}return Fc(a,new O(b,this))},zc=function(a){a=
-a[uc];return a instanceof P?a:null},Hc="__closure_events_fn_"+(1E9*Math.random()>>>0),yc=function(a){y(a,"Listener can not be null.");if(r(a))return a;y(a.handleEvent,"An object listener must have handleEvent method.");a[Hc]||(a[Hc]=function(b){return a.handleEvent(b)});return a[Hc]};var Q=function(a){H.call(this);this.handler_=a;this.keys_={}};u(Q,H);var Ic=[];h=Q.prototype;h.listen=function(a,b,c,d){p(b)||(b&&(Ic[0]=b.toString()),b=Ic);for(var e=0;e<b.length;e++){var g=xc(a,b[e],c||this.handleEvent,d||!1,this.handler_||this);if(!g)break;this.keys_[g.key]=g}return this};
-h.unlisten=function(a,b,c,d,e){if(p(b))for(var g=0;g<b.length;g++)this.unlisten(a,b[g],c,d,e);else c=c||this.handleEvent,e=e||this.handler_||this,c=yc(c),d=!!d,b=a&&a[oc]?a.getListener(b,c,d,e):a?(a=zc(a))?a.getListener(b,c,d,e):null:null,b&&(Ec(b),delete this.keys_[b.key]);return this};h.removeAll=function(){Ta(this.keys_,Ec);this.keys_={}};h.disposeInternal=function(){Q.superClass_.disposeInternal.call(this);this.removeAll()};
-h.handleEvent=function(){throw Error("EventHandler.handleEvent not implemented");};var R=function(){H.call(this);this.eventTargetListeners_=new P(this);this.actualEventTarget_=this;this.parentEventTarget_=null};u(R,H);R.prototype[oc]=!0;h=R.prototype;h.setParentEventTarget=function(a){this.parentEventTarget_=a};h.addEventListener=function(a,b,c,d){xc(this,a,b,c,d)};h.removeEventListener=function(a,b,c,d){Dc(this,a,b,c,d)};
-h.dispatchEvent=function(a){Jc(this);var b,c=this.parentEventTarget_;if(c){b=[];for(var d=1;c;c=c.parentEventTarget_)b.push(c),y(1E3>++d,"infinite loop")}c=this.actualEventTarget_;d=a.type||a;if(q(a))a=new N(a,c);else if(a instanceof N)a.target=a.target||c;else{var e=a;a=new N(d,c);Za(a,e)}var e=!0,g;if(b)for(var f=b.length-1;!a.propagationStopped_&&0<=f;f--)g=a.currentTarget=b[f],e=Kc(g,d,!0,a)&&e;a.propagationStopped_||(g=a.currentTarget=c,e=Kc(g,d,!0,a)&&e,a.propagationStopped_||(e=Kc(g,d,!1,a)&&
-e));if(b)for(f=0;!a.propagationStopped_&&f<b.length;f++)g=a.currentTarget=b[f],e=Kc(g,d,!1,a)&&e;return e};h.disposeInternal=function(){R.superClass_.disposeInternal.call(this);this.eventTargetListeners_&&this.eventTargetListeners_.removeAll(void 0);this.parentEventTarget_=null};h.listen=function(a,b,c,d){Jc(this);return this.eventTargetListeners_.add(String(a),b,!1,c,d)};h.unlisten=function(a,b,c,d){return this.eventTargetListeners_.remove(String(a),b,c,d)};
-var Kc=function(a,b,c,d){b=a.eventTargetListeners_.listeners[String(b)];if(!b)return!0;b=b.concat();for(var e=!0,g=0;g<b.length;++g){var f=b[g];if(f&&!f.removed&&f.capture==c){var m=f.listener,$=f.handler||f.src;f.callOnce&&tc(a.eventTargetListeners_,f);e=!1!==m.call($,d)&&e}}return e&&0!=d.returnValue_};R.prototype.getListener=function(a,b,c,d){return this.eventTargetListeners_.getListener(String(a),b,c,d)};var Jc=function(a){y(a.eventTargetListeners_,"Event target is not initialized. Did you call the superclass (goog.events.EventTarget) constructor?")};var S=function(a){R.call(this);this.imageIdToRequestMap_={};this.imageIdToImageMap_={};this.handler_=new Q(this);this.parent_=a};u(S,R);var Lc=[C&&!F("11")?"readystatechange":"load","abort","error"],Mc=function(a,b,c){(c=q(c)?c:c.src)&&(a.imageIdToRequestMap_[b]={src:c,corsRequestType:l(void 0)?void 0:null})};
-S.prototype.start=function(){var a=this.imageIdToRequestMap_;Na(Ua(a),function(b){var c=a[b];if(c&&(delete a[b],!this.disposed_)){var d;d=this.parent_?sb(this.parent_).createDom("img"):new Image;c.corsRequestType&&(d.crossOrigin=c.corsRequestType);this.handler_.listen(d,Lc,this.onNetworkEvent_);this.imageIdToImageMap_[b]=d;d.id=b;d.src=c.src}},this)};
-S.prototype.onNetworkEvent_=function(a){var b=a.currentTarget;if(b){if("readystatechange"==a.type)if("complete"==b.readyState)a.type="load";else return;"undefined"==typeof b.naturalWidth&&("load"==a.type?(b.naturalWidth=b.width,b.naturalHeight=b.height):(b.naturalWidth=0,b.naturalHeight=0));this.dispatchEvent({type:a.type,target:b});!this.disposed_&&(a=b.id,delete this.imageIdToRequestMap_[a],b=this.imageIdToImageMap_[a])&&(delete this.imageIdToImageMap_[a],this.handler_.unlisten(b,Lc,this.onNetworkEvent_),
-Va(this.imageIdToImageMap_)&&Va(this.imageIdToRequestMap_)&&this.dispatchEvent("complete"))}};S.prototype.disposeInternal=function(){delete this.imageIdToRequestMap_;delete this.imageIdToImageMap_;ob(this.handler_);S.superClass_.disposeInternal.call(this)};var T=function(){};T.getInstance=function(){return T.instance_?T.instance_:T.instance_=new T};T.prototype.nextId_=0;var U=function(a){R.call(this);this.dom_=a||sb();this.id_=null;this.inDocument_=!1;this.element_=null;this.googUiComponentHandler_=void 0;this.childIndex_=this.children_=this.parent_=null;this.wasDecorated_=!1};u(U,R);h=U.prototype;h.idGenerator_=T.getInstance();h.getElement=function(){return this.element_};h.setParentEventTarget=function(a){if(this.parent_&&this.parent_!=a)throw Error("Method not supported");U.superClass_.setParentEventTarget.call(this,a)};h.getDomHelper=function(){return this.dom_};
-h.createDom=function(){this.element_=this.dom_.createElement("div")};
-var Oc=function(a,b){if(a.inDocument_)throw Error("Component already rendered");a.element_||a.createDom();b?b.insertBefore(a.element_,null):a.dom_.document_.body.appendChild(a.element_);a.parent_&&!a.parent_.inDocument_||Nc(a)},Nc=function(a){a.inDocument_=!0;Pc(a,function(a){!a.inDocument_&&a.getElement()&&Nc(a)})},Qc=function(a){Pc(a,function(a){a.inDocument_&&Qc(a)});a.googUiComponentHandler_&&a.googUiComponentHandler_.removeAll();a.inDocument_=!1};
-U.prototype.disposeInternal=function(){this.inDocument_&&Qc(this);this.googUiComponentHandler_&&(this.googUiComponentHandler_.dispose(),delete this.googUiComponentHandler_);Pc(this,function(a){a.dispose()});!this.wasDecorated_&&this.element_&&Bb(this.element_);this.parent_=this.element_=this.childIndex_=this.children_=null;U.superClass_.disposeInternal.call(this)};var Pc=function(a,b){a.children_&&Na(a.children_,b,void 0)};
-U.prototype.removeChild=function(a,b){if(a){var c=q(a)?a:a.id_||(a.id_=":"+(a.idGenerator_.nextId_++).toString(36)),d;this.childIndex_&&c?(d=this.childIndex_,d=(c in d?d[c]:void 0)||null):d=null;a=d;if(c&&a){d=this.childIndex_;c in d&&delete d[c];Qa(this.children_,a);b&&(Qc(a),a.element_&&Bb(a.element_));c=a;if(null==c)throw Error("Unable to set parent component");c.parent_=null;U.superClass_.setParentEventTarget.call(c,null)}}if(!a)throw Error("Child is not in parent component");return a};var V=function(a,b,c){U.call(this,c);this.captchaImage_=a;this.adImage_=b&&300==b.naturalWidth&&57==b.naturalHeight?b:null};u(V,U);V.prototype.createDom=function(){V.superClass_.createDom.call(this);var a=this.getElement();this.captchaImage_.alt=W.image_alt_text;this.getDomHelper().appendChild(a,this.captchaImage_);this.adImage_&&(this.adImage_.alt=W.image_alt_text,this.getDomHelper().appendChild(a,this.adImage_),this.adImage_&&Rc(this.adImage_)&&(a.innerHTML+='<div id="recaptcha-ad-choices"><div class="recaptcha-ad-choices-collapsed"><img height="15" width="30" alt="AdChoices" border="0" src="//www.gstatic.com/recaptcha/api/img/adicon.png"/></div><div class="recaptcha-ad-choices-expanded"><a href="https://support.google.com/adsense/troubleshooter/1631343" target="_blank"><img height="15" width="75" alt="AdChoices" border="0" src="//www.gstatic.com/recaptcha/api/img/adchoices.png"/></a></div></div>'))};
-var Rc=function(a){var b=Sc(a,"visibility");a=Sc(a,"display");return"hidden"!=b&&"none"!=a},Sc=function(a,b){var c;t:{c=rb(a);if(c.defaultView&&c.defaultView.getComputedStyle&&(c=c.defaultView.getComputedStyle(a,null))){c=c[b]||c.getPropertyValue(b)||"";break t}c=""}if(!(c=c||(a.currentStyle?a.currentStyle[b]:null))&&(c=a.style[Fa(b)],"undefined"===typeof c)){c=a.style;var d;t:if(d=Fa(b),void 0===a.style[d]){var e=(E?"Webkit":D?"Moz":C?"ms":cb?"O":null)+Ga(d);if(void 0!==a.style[e]){d=e;break t}}c=
-c[d]||""}return c};V.prototype.disposeInternal=function(){delete this.captchaImage_;delete this.adImage_;V.superClass_.disposeInternal.call(this)};var Tc=function(a,b,c){H.call(this);this.listener_=a;this.interval_=b||0;this.handler_=c;this.callback_=s(this.doAction_,this)};u(Tc,H);h=Tc.prototype;h.id_=0;h.disposeInternal=function(){Tc.superClass_.disposeInternal.call(this);this.stop();delete this.listener_;delete this.handler_};
-h.start=function(a){this.stop();var b=this.callback_;a=l(a)?a:this.interval_;if(!r(b))if(b&&"function"==typeof b.handleEvent)b=s(b.handleEvent,b);else throw Error("Invalid listener argument");this.id_=2147483647<a?-1:k.setTimeout(b,a||0)};h.stop=function(){this.isActive()&&k.clearTimeout(this.id_);this.id_=0};h.isActive=function(){return 0!=this.id_};h.doAction_=function(){this.id_=0;this.listener_&&this.listener_.call(this.handler_)};var Uc=function(a,b){H.call(this);this.listener_=a;this.handler_=b;this.delay_=new Tc(s(this.onTick_,this),0,this)};u(Uc,H);h=Uc.prototype;h.interval_=0;h.runUntil_=0;h.disposeInternal=function(){this.delay_.dispose();delete this.listener_;delete this.handler_;Uc.superClass_.disposeInternal.call(this)};h.start=function(a,b){this.stop();var c=b||0;this.interval_=Math.max(a||0,0);this.runUntil_=0>c?-1:ha()+c;this.delay_.start(0>c?this.interval_:Math.min(this.interval_,c))};h.stop=function(){this.delay_.stop()};
-h.isActive=function(){return this.delay_.isActive()};h.onSuccess=function(){};h.onFailure=function(){};h.onTick_=function(){if(this.listener_.call(this.handler_))this.onSuccess();else if(0>this.runUntil_)this.delay_.start(this.interval_);else{var a=this.runUntil_-ha();if(0>=a)this.onFailure();else this.delay_.start(Math.min(this.interval_,a))}};$a("area base br col command embed hr img input keygen link meta param source track wbr".split(" "));$a("action","cite","data","formaction","href","manifest","poster","src");$a("link","script","style");var Vc={sanitizedContentKindHtml:!0},Wc={sanitizedContentKindText:!0},Xc=function(){throw Error("Do not instantiate directly");};Xc.prototype.contentDir=null;Xc.prototype.toString=function(){return this.content};var bd=function(a){var b=Yc;y(b,"Soy template may not be null.");var c=sb().createElement("DIV");a=Zc(b(a||$c,void 0,void 0));b=a.match(ad);y(!b,"This template starts with a %s, which cannot be a child of a <div>, as required by soy internals. Consider using goog.soy.renderElement instead.\nTemplate output: %s",b&&b[0],a);c.innerHTML=a;return 1==c.childNodes.length&&(a=c.firstChild,1==a.nodeType)?a:c},Zc=function(a){if(!da(a))return String(a);if(a instanceof Xc){if(a.contentKind===Vc)return Ka(a.content);
-if(a.contentKind===Wc)return Da(a.content)}Ja("Soy template output is unsafe for use as HTML: "+a);return"zSoyz"},ad=/^<(body|caption|col|colgroup|head|html|tr|td|tbody|thead|tfoot)>/i,$c={};C&&F(8);var cd=function(){Xc.call(this)};u(cd,Xc);cd.prototype.contentKind=Vc;var dd=function(a){function b(a){this.content=a}b.prototype=a.prototype;return function(a,d){var e=new b(String(a));void 0!==d&&(e.contentDir=d);return e}}(cd);(function(a){function b(a){this.content=a}b.prototype=a.prototype;return function(a,d){var e=String(a);if(!e)return"";e=new b(e);void 0!==d&&(e.contentDir=d);return e}})(cd);
-var ed={"\x00":"\\x00","\b":"\\x08","\t":"\\t","\n":"\\n","\x0B":"\\x0b","\f":"\\f","\r":"\\r",'"':"\\x22",$:"\\x24","&":"\\x26","'":"\\x27","(":"\\x28",")":"\\x29","*":"\\x2a","+":"\\x2b",",":"\\x2c","-":"\\x2d",".":"\\x2e","/":"\\/",":":"\\x3a","<":"\\x3c","=":"\\x3d",">":"\\x3e","?":"\\x3f","[":"\\x5b","\\":"\\\\","]":"\\x5d","^":"\\x5e","{":"\\x7b","|":"\\x7c","}":"\\x7d","\u0085":"\\x85","\u2028":"\\u2028","\u2029":"\\u2029"},fd=function(a){return ed[a]},gd=/[\x00\x08-\x0d\x22\x26\x27\/\x3c-\x3e\\\x85\u2028\u2029]/g;var Yc=function(a){return dd('<script type="text/javascript">var challenge = \''+String(a.challenge).replace(gd,fd)+"'; var publisherId = '"+String(a.publisherId).replace(gd,fd)+"';"+("ca-mongoogle"==a.publisherId?'google_page_url = "3pcerttesting.com/dab/recaptcha.html";':"")+"\n    google_ad_client = publisherId;\n    google_ad_type = 'html';\n    google_ad_output = 'js';\n    google_image_size = '300x57';\n    google_captcha_token = challenge;\n    google_ad_request_done = function(ad) {\n      window.parent.recaptcha.ads.adutils.googleAdRequestDone(ad);\n    };\n    \x3c/script><script type=\"text/javascript\" src=\"//pagead2.googlesyndication.com/pagead/show_ads.js\">\x3c/script>")};
-Yc.soyTemplateName="recaptcha.soy.ads.iframeAdsLoader.main";var Wa=function(){var a=k.google_ad;return!!(a&&a.token&&a.imageAdUrl&&a.hashedAnswer&&a.salt&&a.delayedImpressionUrl&&a.engagementUrl)},hd=function(){k.google_ad&&(k.google_ad=null)},id=function(a){a=a||document.body;var b=k.google_ad;b&&b.searchUpliftUrl&&(b=xb("iframe",{src:'data:text/html;charset=utf-8,<body><img src="https://'+b.searchUpliftUrl+'"></img></body>',style:"display:none"}),a.appendChild(b))},jd=0,kd=function(a){var b=new S;Mc(b,"recaptcha-url-"+jd++,a);b.start()},ld=function(a,b){var c=
-RecaptchaState.publisher_id;hd();var d=xb("iframe",{id:"recaptcha-loader-"+jd++,style:"display: none"});document.body.appendChild(d);var e=d.contentWindow?d.contentWindow.document:d.contentDocument;e.open("text/html","replace");e.write(bd({challenge:a,publisherId:c}).innerHTML);e.close();c=new Uc(function(){return!!k.google_ad});c.onSuccess=function(){Bb(d);b()};c.onFailure=function(){Bb(d);b()};c.start(50,2E3)};t("recaptcha.ads.adutils.googleAdRequestDone",function(a){k.google_ad=a});var md=function(){this.blockSize=-1};var nd=function(){this.blockSize=-1;this.blockSize=64;this.chain_=Array(4);this.block_=Array(this.blockSize);this.totalLength_=this.blockLength_=0;this.reset()};u(nd,md);nd.prototype.reset=function(){this.chain_[0]=1732584193;this.chain_[1]=4023233417;this.chain_[2]=2562383102;this.chain_[3]=271733878;this.totalLength_=this.blockLength_=0};
-var od=function(a,b,c){c||(c=0);var d=Array(16);if(q(b))for(var e=0;16>e;++e)d[e]=b.charCodeAt(c++)|b.charCodeAt(c++)<<8|b.charCodeAt(c++)<<16|b.charCodeAt(c++)<<24;else for(e=0;16>e;++e)d[e]=b[c++]|b[c++]<<8|b[c++]<<16|b[c++]<<24;b=a.chain_[0];c=a.chain_[1];var e=a.chain_[2],g=a.chain_[3],f=0,f=b+(g^c&(e^g))+d[0]+3614090360&4294967295;b=c+(f<<7&4294967295|f>>>25);f=g+(e^b&(c^e))+d[1]+3905402710&4294967295;g=b+(f<<12&4294967295|f>>>20);f=e+(c^g&(b^c))+d[2]+606105819&4294967295;e=g+(f<<17&4294967295|
-f>>>15);f=c+(b^e&(g^b))+d[3]+3250441966&4294967295;c=e+(f<<22&4294967295|f>>>10);f=b+(g^c&(e^g))+d[4]+4118548399&4294967295;b=c+(f<<7&4294967295|f>>>25);f=g+(e^b&(c^e))+d[5]+1200080426&4294967295;g=b+(f<<12&4294967295|f>>>20);f=e+(c^g&(b^c))+d[6]+2821735955&4294967295;e=g+(f<<17&4294967295|f>>>15);f=c+(b^e&(g^b))+d[7]+4249261313&4294967295;c=e+(f<<22&4294967295|f>>>10);f=b+(g^c&(e^g))+d[8]+1770035416&4294967295;b=c+(f<<7&4294967295|f>>>25);f=g+(e^b&(c^e))+d[9]+2336552879&4294967295;g=b+(f<<12&4294967295|
-f>>>20);f=e+(c^g&(b^c))+d[10]+4294925233&4294967295;e=g+(f<<17&4294967295|f>>>15);f=c+(b^e&(g^b))+d[11]+2304563134&4294967295;c=e+(f<<22&4294967295|f>>>10);f=b+(g^c&(e^g))+d[12]+1804603682&4294967295;b=c+(f<<7&4294967295|f>>>25);f=g+(e^b&(c^e))+d[13]+4254626195&4294967295;g=b+(f<<12&4294967295|f>>>20);f=e+(c^g&(b^c))+d[14]+2792965006&4294967295;e=g+(f<<17&4294967295|f>>>15);f=c+(b^e&(g^b))+d[15]+1236535329&4294967295;c=e+(f<<22&4294967295|f>>>10);f=b+(e^g&(c^e))+d[1]+4129170786&4294967295;b=c+(f<<
-5&4294967295|f>>>27);f=g+(c^e&(b^c))+d[6]+3225465664&4294967295;g=b+(f<<9&4294967295|f>>>23);f=e+(b^c&(g^b))+d[11]+643717713&4294967295;e=g+(f<<14&4294967295|f>>>18);f=c+(g^b&(e^g))+d[0]+3921069994&4294967295;c=e+(f<<20&4294967295|f>>>12);f=b+(e^g&(c^e))+d[5]+3593408605&4294967295;b=c+(f<<5&4294967295|f>>>27);f=g+(c^e&(b^c))+d[10]+38016083&4294967295;g=b+(f<<9&4294967295|f>>>23);f=e+(b^c&(g^b))+d[15]+3634488961&4294967295;e=g+(f<<14&4294967295|f>>>18);f=c+(g^b&(e^g))+d[4]+3889429448&4294967295;c=
-e+(f<<20&4294967295|f>>>12);f=b+(e^g&(c^e))+d[9]+568446438&4294967295;b=c+(f<<5&4294967295|f>>>27);f=g+(c^e&(b^c))+d[14]+3275163606&4294967295;g=b+(f<<9&4294967295|f>>>23);f=e+(b^c&(g^b))+d[3]+4107603335&4294967295;e=g+(f<<14&4294967295|f>>>18);f=c+(g^b&(e^g))+d[8]+1163531501&4294967295;c=e+(f<<20&4294967295|f>>>12);f=b+(e^g&(c^e))+d[13]+2850285829&4294967295;b=c+(f<<5&4294967295|f>>>27);f=g+(c^e&(b^c))+d[2]+4243563512&4294967295;g=b+(f<<9&4294967295|f>>>23);f=e+(b^c&(g^b))+d[7]+1735328473&4294967295;
-e=g+(f<<14&4294967295|f>>>18);f=c+(g^b&(e^g))+d[12]+2368359562&4294967295;c=e+(f<<20&4294967295|f>>>12);f=b+(c^e^g)+d[5]+4294588738&4294967295;b=c+(f<<4&4294967295|f>>>28);f=g+(b^c^e)+d[8]+2272392833&4294967295;g=b+(f<<11&4294967295|f>>>21);f=e+(g^b^c)+d[11]+1839030562&4294967295;e=g+(f<<16&4294967295|f>>>16);f=c+(e^g^b)+d[14]+4259657740&4294967295;c=e+(f<<23&4294967295|f>>>9);f=b+(c^e^g)+d[1]+2763975236&4294967295;b=c+(f<<4&4294967295|f>>>28);f=g+(b^c^e)+d[4]+1272893353&4294967295;g=b+(f<<11&4294967295|
-f>>>21);f=e+(g^b^c)+d[7]+4139469664&4294967295;e=g+(f<<16&4294967295|f>>>16);f=c+(e^g^b)+d[10]+3200236656&4294967295;c=e+(f<<23&4294967295|f>>>9);f=b+(c^e^g)+d[13]+681279174&4294967295;b=c+(f<<4&4294967295|f>>>28);f=g+(b^c^e)+d[0]+3936430074&4294967295;g=b+(f<<11&4294967295|f>>>21);f=e+(g^b^c)+d[3]+3572445317&4294967295;e=g+(f<<16&4294967295|f>>>16);f=c+(e^g^b)+d[6]+76029189&4294967295;c=e+(f<<23&4294967295|f>>>9);f=b+(c^e^g)+d[9]+3654602809&4294967295;b=c+(f<<4&4294967295|f>>>28);f=g+(b^c^e)+d[12]+
-3873151461&4294967295;g=b+(f<<11&4294967295|f>>>21);f=e+(g^b^c)+d[15]+530742520&4294967295;e=g+(f<<16&4294967295|f>>>16);f=c+(e^g^b)+d[2]+3299628645&4294967295;c=e+(f<<23&4294967295|f>>>9);f=b+(e^(c|~g))+d[0]+4096336452&4294967295;b=c+(f<<6&4294967295|f>>>26);f=g+(c^(b|~e))+d[7]+1126891415&4294967295;g=b+(f<<10&4294967295|f>>>22);f=e+(b^(g|~c))+d[14]+2878612391&4294967295;e=g+(f<<15&4294967295|f>>>17);f=c+(g^(e|~b))+d[5]+4237533241&4294967295;c=e+(f<<21&4294967295|f>>>11);f=b+(e^(c|~g))+d[12]+1700485571&
-4294967295;b=c+(f<<6&4294967295|f>>>26);f=g+(c^(b|~e))+d[3]+2399980690&4294967295;g=b+(f<<10&4294967295|f>>>22);f=e+(b^(g|~c))+d[10]+4293915773&4294967295;e=g+(f<<15&4294967295|f>>>17);f=c+(g^(e|~b))+d[1]+2240044497&4294967295;c=e+(f<<21&4294967295|f>>>11);f=b+(e^(c|~g))+d[8]+1873313359&4294967295;b=c+(f<<6&4294967295|f>>>26);f=g+(c^(b|~e))+d[15]+4264355552&4294967295;g=b+(f<<10&4294967295|f>>>22);f=e+(b^(g|~c))+d[6]+2734768916&4294967295;e=g+(f<<15&4294967295|f>>>17);f=c+(g^(e|~b))+d[13]+1309151649&
-4294967295;c=e+(f<<21&4294967295|f>>>11);f=b+(e^(c|~g))+d[4]+4149444226&4294967295;b=c+(f<<6&4294967295|f>>>26);f=g+(c^(b|~e))+d[11]+3174756917&4294967295;g=b+(f<<10&4294967295|f>>>22);f=e+(b^(g|~c))+d[2]+718787259&4294967295;e=g+(f<<15&4294967295|f>>>17);f=c+(g^(e|~b))+d[9]+3951481745&4294967295;a.chain_[0]=a.chain_[0]+b&4294967295;a.chain_[1]=a.chain_[1]+(e+(f<<21&4294967295|f>>>11))&4294967295;a.chain_[2]=a.chain_[2]+e&4294967295;a.chain_[3]=a.chain_[3]+g&4294967295};
-nd.prototype.update=function(a,b){l(b)||(b=a.length);for(var c=b-this.blockSize,d=this.block_,e=this.blockLength_,g=0;g<b;){if(0==e)for(;g<=c;)od(this,a,g),g+=this.blockSize;if(q(a))for(;g<b;){if(d[e++]=a.charCodeAt(g++),e==this.blockSize){od(this,d);e=0;break}}else for(;g<b;)if(d[e++]=a[g++],e==this.blockSize){od(this,d);e=0;break}}this.blockLength_=e;this.totalLength_+=b};var X=function(){Q.call(this);this.callback_=this.element_=null;this.md5_=new nd};u(X,Q);var pd=function(a,b,c,d,e){a.unwatch();a.element_=b;a.callback_=e;a.listen(b,"keyup",s(a.onChanged_,a,c,d))};X.prototype.unwatch=function(){this.element_&&this.callback_&&(this.removeAll(),this.callback_=this.element_=null)};
-X.prototype.onChanged_=function(a,b){var c;c=(c=this.element_.value)?c.replace(/[\s\xa0]+/g,"").toLowerCase():"";this.md5_.reset();this.md5_.update(c+"."+b);c=this.md5_;var d=Array((56>c.blockLength_?c.blockSize:2*c.blockSize)-c.blockLength_);d[0]=128;for(var e=1;e<d.length-8;++e)d[e]=0;for(var g=8*c.totalLength_,e=d.length-8;e<d.length;++e)d[e]=g&255,g/=256;c.update(d);d=Array(16);for(e=g=0;4>e;++e)for(var f=0;32>f;f+=8)d[g++]=c.chain_[e]>>>f&255;jb(d).toLowerCase()==a.toLowerCase()&&this.callback_()};
-X.prototype.disposeInternal=function(){this.element_=null;X.superClass_.disposeInternal.call(this)};var rd=function(a,b,c){this.adObject_=a;this.captchaImageUrl_=b;this.opt_successCallback_=c||null;qd(this)};u(rd,H);var qd=function(a){var b=new S;nb(a,ga(ob,b));Mc(b,"recaptcha_challenge_image",a.captchaImageUrl_);Mc(b,"recaptcha_ad_image",a.adObject_.imageAdUrl);var c={};xc(b,"load",s(function(a,b){a[b.target.id]=b.target},a,c));xc(b,"complete",s(a.handleImagesLoaded_,a,c));b.start()};
-rd.prototype.handleImagesLoaded_=function(a){a=new V(a.recaptcha_challenge_image,a.recaptcha_ad_image);nb(this,ga(ob,a));var b=tb(document,"recaptcha_image");Ab(b);Oc(a,b);a.adImage_&&Rc(a.adImage_)&&(kd(this.adObject_.delayedImpressionUrl),a=new X,nb(this,ga(ob,a)),pd(a,tb(document,"recaptcha_response_field"),this.adObject_.hashedAnswer,this.adObject_.salt,s(function(a,b){a.unwatch();kd(b)},this,a,this.adObject_.engagementUrl)),this.opt_successCallback_&&this.opt_successCallback_("04"+this.adObject_.token))};var W=w;t("RecaptchaStr",W);var Y=k.RecaptchaOptions;t("RecaptchaOptions",Y);var sd={tabindex:0,theme:"red",callback:null,lang:null,custom_theme_widget:null,custom_translations:null};t("RecaptchaDefaultOptions",sd);
-var Z={widget:null,timer_id:-1,style_set:!1,theme:null,type:"image",ajax_verify_cb:null,th1:null,th2:null,th3:null,element:"",ad_captcha_plugin:null,reload_timeout:-1,force_reload:!1,$:function(a){return"string"==typeof a?document.getElementById(a):a},attachEvent:function(a,b,c){a&&a.addEventListener?a.addEventListener(b,c,!1):a&&a.attachEvent&&a.attachEvent("on"+b,c)},create:function(a,b,c){Z.destroy();b&&(Z.widget=Z.$(b),Z.element=b);Z._init_options(c);Z._call_challenge(a)},destroy:function(){var a=
-Z.$("recaptcha_challenge_field");a&&a.parentNode.removeChild(a);-1!=Z.timer_id&&clearInterval(Z.timer_id);Z.timer_id=-1;if(a=Z.$("recaptcha_image"))a.innerHTML="";Z.update_widget();Z.widget&&("custom"!=Z.theme?Z.widget.innerHTML="":Z.widget.style.display="none",Z.widget=null)},focus_response_field:function(){var a=Z.$("recaptcha_response_field");a&&a.focus()},get_challenge:function(){return"undefined"==typeof RecaptchaState?null:RecaptchaState.challenge},get_response:function(){var a=Z.$("recaptcha_response_field");
-return a?a.value:null},ajax_verify:function(a){Z.ajax_verify_cb=a;a=Z.get_challenge()||"";var b=Z.get_response()||"";a=Z._get_api_server()+"/ajaxverify?c="+encodeURIComponent(a)+"&response="+encodeURIComponent(b);Z._add_script(a)},_ajax_verify_callback:function(a){Z.ajax_verify_cb(a)},_get_overridable_url:function(a){var b=window.location.protocol;if("undefined"!=typeof _RecaptchaOverrideApiServer)a=_RecaptchaOverrideApiServer;else if("undefined"!=typeof RecaptchaState&&"string"==typeof RecaptchaState.server&&
-0<RecaptchaState.server.length)return RecaptchaState.server.replace(/\/+$/,"");return b+"//"+a},_get_api_server:function(){return Z._get_overridable_url("www.google.com/recaptcha/api")},_get_static_url_root:function(){return Z._get_overridable_url("www.gstatic.com/recaptcha/api")},_call_challenge:function(a){a=Z._get_api_server()+"/challenge?k="+a+"&ajax=1&cachestop="+Math.random();Z.getLang_()&&(a+="&lang="+Z.getLang_());"undefined"!=typeof Y.extra_challenge_params&&(a+="&"+Y.extra_challenge_params);
-Z._add_script(a)},_add_script:function(a){var b=document.createElement("script");b.type="text/javascript";b.src=a;Z._get_script_area().appendChild(b)},_get_script_area:function(){var a=document.getElementsByTagName("head");return a=!a||1>a.length?document.body:a[0]},_hash_merge:function(a){for(var b={},c=0;c<a.length;c++)for(var d in a[c])b[d]=a[c][d];return b},_init_options:function(a){Y=Z._hash_merge([sd,a||{}])},challenge_callback_internal:function(){Z.update_widget();Z._reset_timer();W=Z._hash_merge([w,
-sa[Z.getLang_()]||{},Y.custom_translations||{}]);window.addEventListener&&window.addEventListener("unload",function(){Z.destroy()},!1);Z._is_ie()&&window.attachEvent&&window.attachEvent("onbeforeunload",function(){});if(0<navigator.userAgent.indexOf("KHTML")){var a=document.createElement("iframe");a.src="about:blank";a.style.height="0px";a.style.width="0px";a.style.visibility="hidden";a.style.border="none";a.appendChild(document.createTextNode("This frame prevents back/forward cache problems in Safari."));
-document.body.appendChild(a)}Z._finish_widget()},_add_css:function(a){if(-1!=navigator.appVersion.indexOf("MSIE 5"))document.write('<style type="text/css">'+a+"</style>");else{var b=document.createElement("style");b.type="text/css";b.styleSheet?b.styleSheet.cssText=a:b.appendChild(document.createTextNode(a));Z._get_script_area().appendChild(b)}},_set_style:function(a){Z.style_set||(Z.style_set=!0,Z._add_css(a+"\n\n.recaptcha_is_showing_audio .recaptcha_only_if_image,.recaptcha_isnot_showing_audio .recaptcha_only_if_audio,.recaptcha_had_incorrect_sol .recaptcha_only_if_no_incorrect_sol,.recaptcha_nothad_incorrect_sol .recaptcha_only_if_incorrect_sol{display:none !important}"))},
-_init_builtin_theme:function(){var a=Z.$,b=Z._get_static_url_root(),c=v.VertCss,d=v.VertHtml,e=b+"/img/"+Z.theme,g="gif",b=Z.theme;"clean"==b&&(c=v.CleanCss,d=v.CleanHtml,g="png");c=c.replace(/IMGROOT/g,e);Z._set_style(c);Z.update_widget();Z.widget.innerHTML='<div id="recaptcha_area">'+d+"</div>";c=Z.getLang_();a("recaptcha_privacy")&&null!=c&&"en"==c.substring(0,2).toLowerCase()&&null!=W.privacy_and_terms&&0<W.privacy_and_terms.length&&(c=document.createElement("a"),c.href="http://www.google.com/intl/en/policies/",
-c.target="_blank",c.innerHTML=W.privacy_and_terms,a("recaptcha_privacy").appendChild(c));c=function(b,c,d,K){var G=a(b);G.src=e+"/"+c+"."+g;c=W[d];G.alt=c;b=a(b+"_btn");b.title=c;Z.attachEvent(b,"click",K)};c("recaptcha_reload","refresh","refresh_btn",function(){Z.reload_internal("r")});c("recaptcha_switch_audio","audio","audio_challenge",function(){Z.switch_type("audio")});c("recaptcha_switch_img","text","visual_challenge",function(){Z.switch_type("image")});c("recaptcha_whatsthis","help","help_btn",
-Z.showhelp);"clean"==b&&(a("recaptcha_logo").src=e+"/logo."+g);a("recaptcha_table").className="recaptchatable recaptcha_theme_"+Z.theme;b=function(b,c){var d=a(b);d&&(RecaptchaState.rtl&&"span"==d.tagName.toLowerCase()&&(d.dir="rtl"),d.appendChild(document.createTextNode(W[c])))};b("recaptcha_instructions_image","instructions_visual");b("recaptcha_instructions_audio","instructions_audio");b("recaptcha_instructions_error","incorrect_try_again");a("recaptcha_instructions_image")||a("recaptcha_instructions_audio")||
-(b="audio"==Z.type?W.instructions_audio:W.instructions_visual,b=b.replace(/:$/,""),a("recaptcha_response_field").setAttribute("placeholder",b))},_finish_widget:function(){var a=Z.$,b=Y,c=b.theme;c in{blackglass:1,clean:1,custom:1,red:1,white:1}||(c="red");Z.theme||(Z.theme=c);"custom"!=Z.theme?Z._init_builtin_theme():Z._set_style("");c=document.createElement("span");c.id="recaptcha_challenge_field_holder";c.style.display="none";a("recaptcha_response_field").parentNode.insertBefore(c,a("recaptcha_response_field"));
-a("recaptcha_response_field").setAttribute("autocomplete","off");a("recaptcha_image").style.width="300px";a("recaptcha_image").style.height="57px";a("recaptcha_challenge_field_holder").innerHTML='<input type="hidden" name="recaptcha_challenge_field" id="recaptcha_challenge_field" value=""/>';Z.th_init();Z.should_focus=!1;Z.th3||Z.force_reload?(Z._set_challenge(RecaptchaState.challenge,"image",!0),setTimeout(function(){Z.reload_internal("i")},100)):Z._set_challenge(RecaptchaState.challenge,"image",
-!1);Z.updateTabIndexes_();Z.update_widget();Z.widget&&(Z.widget.style.display="");b.callback&&b.callback()},updateTabIndexes_:function(){var a=Z.$,b=Y;b.tabindex&&(b=b.tabindex,a("recaptcha_response_field").tabIndex=b++,"audio"==Z.type&&a("recaptcha_audio_play_again")&&(a("recaptcha_audio_play_again").tabIndex=b++,a("recaptcha_audio_download"),a("recaptcha_audio_download").tabIndex=b++),"custom"!=Z.theme&&(a("recaptcha_reload_btn").tabIndex=b++,a("recaptcha_switch_audio_btn").tabIndex=b++,a("recaptcha_switch_img_btn").tabIndex=
-b++,a("recaptcha_whatsthis_btn").tabIndex=b,a("recaptcha_privacy").tabIndex=b++))},switch_type:function(a){if(!((new Date).getTime()<Z.reload_timeout)&&(Z.type=a,Z.reload_internal("audio"==Z.type?"a":"v"),"custom"!=Z.theme)){a=Z.$;var b="audio"==Z.type?W.instructions_audio:W.instructions_visual,b=b.replace(/:$/,"");a("recaptcha_response_field").setAttribute("placeholder",b)}},reload:function(){Z.reload_internal("r")},reload_internal:function(a){var b=Y,c=RecaptchaState,d=(new Date).getTime();d<Z.reload_timeout||
-(Z.reload_timeout=d+1E3,"undefined"==typeof a&&(a="r"),d=Z._get_api_server()+"/reload?c="+c.challenge+"&k="+c.site+"&reason="+a+"&type="+Z.type,Z.getLang_()&&(d+="&lang="+Z.getLang_()),"undefined"!=typeof b.extra_challenge_params&&(d+="&"+b.extra_challenge_params),Z.th_callback_invoke(),Z.th1&&(d+="&th="+Z.th1,Z.th1=""),"audio"==Z.type&&(d=b.audio_beta_12_08?d+"&audio_beta_12_08=1":d+"&new_audio_default=1"),Z.should_focus="t"!=a&&"i"!=a,Z._add_script(d),ob(Z.ad_captcha_plugin),c.publisher_id=null)},
-th_callback_invoke:function(){if(Z.th3)try{var a=Z.th3.exec();a&&1600>a.length&&(Z.th1=a)}catch(b){Z.th1=""}},finish_reload:function(a,b,c,d){RecaptchaState.payload_url=c;RecaptchaState.is_incorrect=!1;RecaptchaState.publisher_id=d;Z._set_challenge(a,b,!1);Z.updateTabIndexes_()},_set_challenge:function(a,b,c){"image"==b&&RecaptchaState.publisher_id?ld(a,function(){Z._set_challenge_internal(a,b,c)}):Z._set_challenge_internal(a,b,c)},_set_challenge_internal:function(a,b,c){var d=Z.$,e=RecaptchaState;
-e.challenge=a;Z.type=b;d("recaptcha_challenge_field").value=e.challenge;c||("audio"==b?(d("recaptcha_image").innerHTML=Z.getAudioCaptchaHtml(),Z._loop_playback()):"image"==b&&(a=e.payload_url,a||(a=Z._get_api_server()+"/image?c="+e.challenge,Z.th_callback_invoke(),Z.th1&&(a+="&th="+Z.th1,Z.th1="")),id(d("recaptcha_widget_div")),Wa()?Z.ad_captcha_plugin=new rd(Xa(),a,function(a){RecaptchaState.challenge=a;d("recaptcha_challenge_field").value=a}):d("recaptcha_image").innerHTML='<img id="recaptcha_challenge_image" alt="'+
-W.image_alt_text+'" height="57" width="300" src="'+a+'" />',hd()));Z._css_toggle("recaptcha_had_incorrect_sol","recaptcha_nothad_incorrect_sol",e.is_incorrect);Z._css_toggle("recaptcha_is_showing_audio","recaptcha_isnot_showing_audio","audio"==b);Z._clear_input();Z.should_focus&&Z.focus_response_field();Z._reset_timer()},_reset_timer:function(){clearInterval(Z.timer_id);var a=Math.max(1E3*(RecaptchaState.timeout-60),6E4);Z.timer_id=setInterval(function(){Z.reload_internal("t")},a);return a},showhelp:function(){window.open(Z._get_help_link(),
-"recaptcha_popup","width=460,height=580,location=no,menubar=no,status=no,toolbar=no,scrollbars=yes,resizable=yes")},_clear_input:function(){Z.$("recaptcha_response_field").value=""},_displayerror:function(a){var b=Z.$;b("recaptcha_image").innerHTML="";b("recaptcha_image").appendChild(document.createTextNode(a))},reloaderror:function(a){Z._displayerror(a)},_is_ie:function(){return 0<navigator.userAgent.indexOf("MSIE")&&!window.opera},_css_toggle:function(a,b,c){Z.update_widget();var d=Z.widget;d||
-(d=document.body);var e=d.className,e=e.replace(new RegExp("(^|\\s+)"+a+"(\\s+|$)")," "),e=e.replace(new RegExp("(^|\\s+)"+b+"(\\s+|$)")," ");d.className=e+(" "+(c?a:b))},_get_help_link:function(){var a="https://support.google.com/recaptcha/";Z.getLang_()&&(a+="?hl="+Z.getLang_());return a},playAgain:function(){Z.$("recaptcha_image").innerHTML=Z.getAudioCaptchaHtml();Z._loop_playback()},_loop_playback:function(){var a=Z.$("recaptcha_audio_play_again");a&&Z.attachEvent(a,"click",function(){Z.playAgain();
-return!1})},getAudioCaptchaHtml:function(){var a=RecaptchaState.payload_url;a||(a=Z._get_api_server()+"/audio.mp3?c="+RecaptchaState.challenge,Z.th_callback_invoke(),Z.th1&&(a+="&th="+Z.th1,Z.th1=""));var b=Z._get_api_server()+"/swf/audiocaptcha.swf?v2",b=Z._is_ie()?'<object classid="clsid:D27CDB6E-AE6D-11cf-96B8-444553540000" id="audiocaptcha" width="0" height="0" codebase="https://fpdownload.macromedia.com/get/flashplayer/current/swflash.cab"><param name="movie" value="'+b+'" /><param name="quality" value="high" /><param name="bgcolor" value="#869ca7" /><param name="allowScriptAccess" value="always" /></object><br/>':
-'<embed src="'+b+'" quality="high" bgcolor="#869ca7" width="0" height="0" name="audiocaptcha" align="middle" play="true" loop="false" quality="high" allowScriptAccess="always" type="application/x-shockwave-flash" pluginspage="http://www.adobe.com/go/getflashplayer" /></embed>',c="";Z.checkFlashVer()&&(c="<br/>"+Z.getSpan_('<a id="recaptcha_audio_play_again" class="recaptcha_audio_cant_hear_link">'+W.play_again+"</a>"));c+="<br/>"+Z.getSpan_('<a id="recaptcha_audio_download" class="recaptcha_audio_cant_hear_link" target="_blank" href="'+
-a+'">'+W.cant_hear_this+"</a>");return b+c},getSpan_:function(a){return"<span"+(RecaptchaState&&RecaptchaState.rtl?' dir="rtl"':"")+">"+a+"</span>"},gethttpwavurl:function(){if("audio"!=Z.type)return"";var a=RecaptchaState.payload_url;a||(a=Z._get_api_server()+"/image?c="+RecaptchaState.challenge,Z.th_callback_invoke(),Z.th1&&(a+="&th="+Z.th1,Z.th1=""));return a},checkFlashVer:function(){var a=-1!=navigator.appVersion.indexOf("MSIE"),b=-1!=navigator.appVersion.toLowerCase().indexOf("win"),c=-1!=navigator.userAgent.indexOf("Opera"),
-d=-1;if(null!=navigator.plugins&&0<navigator.plugins.length){if(navigator.plugins["Shockwave Flash 2.0"]||navigator.plugins["Shockwave Flash"])d=navigator.plugins["Shockwave Flash"+(navigator.plugins["Shockwave Flash 2.0"]?" 2.0":"")].description.split(" ")[2].split(".")[0]}else if(a&&b&&!c)try{d=(new ActiveXObject("ShockwaveFlash.ShockwaveFlash.7")).GetVariable("$version").split(" ")[1].split(",")[0]}catch(e){}return 9<=d},getLang_:function(){return Y.lang?Y.lang:"undefined"!=typeof RecaptchaState&&
-RecaptchaState.lang?RecaptchaState.lang:null},challenge_callback:function(){Z.force_reload=!!RecaptchaState.force_reload;if(RecaptchaState.t3){var a=RecaptchaState.t1?ib(mb(RecaptchaState.t1)):"",b=RecaptchaState.t2?ib(mb(RecaptchaState.t2)):"",c=RecaptchaState.t3?ib(mb(RecaptchaState.t3)):"";Z.th2=c;if(a)b=kc(a),cc(b,Z.challenge_callback_internal,null,void 0),cc(b,null,Z.challenge_callback_internal,void 0);else{if(k.execScript)k.execScript(b,"JavaScript");else if(k.eval)null==ia&&(k.eval("var _et_ = 1;"),
-"undefined"!=typeof k._et_?(delete k._et_,ia=!0):ia=!1),ia?k.eval(b):(a=k.document,c=a.createElement("script"),c.type="text/javascript",c.defer=!1,c.appendChild(a.createTextNode(b)),a.body.appendChild(c),a.body.removeChild(c));else throw Error("goog.globalEval not available");Z.challenge_callback_internal()}}else Z.challenge_callback_internal()},th_init:function(){try{k.thintinel&&k.thintinel.th&&(Z.th3=new k.thintinel.th(Z.th2),Z.th2="")}catch(a){}},update_widget:function(){Z.element&&(Z.widget=
-Z.$(Z.element))}};t("Recaptcha",Z);})()
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index 69cb28268a..53c685c173 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -40,6 +40,7 @@ from .filtering import FilteringStore
 from .group_server import GroupServerStore
 from .keys import KeyStore
 from .media_repository import MediaRepositoryStore
+from .monthly_active_users import MonthlyActiveUsersStore
 from .openid import OpenIdStore
 from .presence import PresenceStore, UserPresenceState
 from .profile import ProfileStore
@@ -89,6 +90,7 @@ class DataStore(RoomMemberStore, RoomStore,
                 UserDirectoryStore,
                 GroupServerStore,
                 UserErasureStore,
+                MonthlyActiveUsersStore,
                 ):
 
     def __init__(self, db_conn, hs):
@@ -96,7 +98,6 @@ class DataStore(RoomMemberStore, RoomStore,
         self._clock = hs.get_clock()
         self.database_engine = hs.database_engine
 
-        self.db_conn = db_conn
         self._stream_id_gen = StreamIdGenerator(
             db_conn, "events", "stream_ordering",
             extra_tables=[("local_invites", "stream_id")]
@@ -269,31 +270,6 @@ class DataStore(RoomMemberStore, RoomStore,
 
         return self.runInteraction("count_users", _count_users)
 
-    def count_monthly_users(self):
-        """Counts the number of users who used this homeserver in the last 30 days
-
-        This method should be refactored with count_daily_users - the only
-        reason not to is waiting on definition of mau
-
-        Returns:
-            Defered[int]
-        """
-        def _count_monthly_users(txn):
-            thirty_days_ago = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30)
-            sql = """
-                SELECT COALESCE(count(*), 0) FROM (
-                    SELECT user_id FROM user_ips
-                    WHERE last_seen > ?
-                    GROUP BY user_id
-                ) u
-            """
-
-            txn.execute(sql, (thirty_days_ago,))
-            count, = txn.fetchone()
-            return count
-
-        return self.runInteraction("count_monthly_users", _count_monthly_users)
-
     def count_r30_users(self):
         """
         Counts the number of 30 day retained users, defined as:-
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 44f37b4c1e..08dffd774f 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -1150,17 +1150,16 @@ class SQLBaseStore(object):
         defer.returnValue(retval)
 
     def get_user_count_txn(self, txn):
-        """Get a total number of registerd users in the users list.
+        """Get a total number of registered users in the users list.
 
         Args:
             txn : Transaction object
         Returns:
-            defer.Deferred: resolves to int
+            int : number of users
         """
         sql_count = "SELECT COUNT(*) FROM users WHERE is_guest = 0;"
         txn.execute(sql_count)
-        count = txn.fetchone()[0]
-        defer.returnValue(count)
+        return txn.fetchone()[0]
 
     def _simple_search_list(self, table, term, col, retcols,
                             desc="_simple_search_list"):
diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py
index b8cefd43d6..8fc678fa67 100644
--- a/synapse/storage/client_ips.py
+++ b/synapse/storage/client_ips.py
@@ -35,6 +35,7 @@ LAST_SEEN_GRANULARITY = 120 * 1000
 
 class ClientIpStore(background_updates.BackgroundUpdateStore):
     def __init__(self, db_conn, hs):
+
         self.client_ip_last_seen = Cache(
             name="client_ip_last_seen",
             keylen=4,
@@ -74,6 +75,7 @@ class ClientIpStore(background_updates.BackgroundUpdateStore):
             "before", "shutdown", self._update_client_ips_batch
         )
 
+    @defer.inlineCallbacks
     def insert_client_ip(self, user_id, access_token, ip, user_agent, device_id,
                          now=None):
         if not now:
@@ -84,7 +86,7 @@ class ClientIpStore(background_updates.BackgroundUpdateStore):
             last_seen = self.client_ip_last_seen.get(key)
         except KeyError:
             last_seen = None
-
+        yield self.populate_monthly_active_users(user_id)
         # Rate-limited inserts
         if last_seen is not None and (now - last_seen) < LAST_SEEN_GRANULARITY:
             return
@@ -94,6 +96,11 @@ class ClientIpStore(background_updates.BackgroundUpdateStore):
         self._batch_row_update[key] = (user_agent, device_id, now)
 
     def _update_client_ips_batch(self):
+
+        # If the DB pool has already terminated, don't try updating
+        if not self.hs.get_db_pool().running:
+            return
+
         def update():
             to_update = self._batch_row_update
             self._batch_row_update = {}
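insert_client_ip is now an inlineCallbacks generator so that it can yield populate_monthly_active_users before the rate-limited batch update. A hedged sketch of how a caller is expected to drive it (the function name here is illustrative):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def on_client_request(store, user_id, access_token, ip, user_agent, device_id):
        # must be yielded, otherwise the MAU side effect may not have
        # completed before the caller proceeds
        yield store.insert_client_ip(
            user_id, access_token, ip, user_agent, device_id
        )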
diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index e8e5a0fe44..f39c8c8461 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -38,7 +38,7 @@ from synapse.storage.background_updates import BackgroundUpdateStore
 from synapse.storage.event_federation import EventFederationStore
 from synapse.storage.events_worker import EventsWorkerStore
 from synapse.types import RoomStreamToken, get_domain_from_id
-from synapse.util.async import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 from synapse.util.frozenutils import frozendict_json_encoder
 from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable
@@ -485,9 +485,14 @@ class EventsStore(EventFederationStore, EventsWorkerStore, BackgroundUpdateStore
                     new_forward_extremeties=new_forward_extremeties,
                 )
                 persist_event_counter.inc(len(chunk))
-                synapse.metrics.event_persisted_position.set(
-                    chunk[-1][0].internal_metadata.stream_ordering,
-                )
+
+                if not backfilled:
+                    # backfilled events have negative stream orderings, so we don't
+                    # want to set the event_persisted_position to that.
+                    synapse.metrics.event_persisted_position.set(
+                        chunk[-1][0].internal_metadata.stream_ordering,
+                    )
+
                 for event, context in chunk:
                     if context.app_service:
                         origin_type = "local"
@@ -700,9 +705,11 @@ class EventsStore(EventFederationStore, EventsWorkerStore, BackgroundUpdateStore
         }
 
         events_map = {ev.event_id: ev for ev, _ in events_context}
+        room_version = yield self.get_room_version(room_id)
+
         logger.debug("calling resolve_state_groups from preserve_events")
         res = yield self._state_resolution_handler.resolve_state_groups(
-            room_id, state_groups, events_map, get_events
+            room_id, room_version, state_groups, events_map, get_events
         )
 
         defer.returnValue((res.state, None))
@@ -1431,88 +1438,6 @@ class EventsStore(EventFederationStore, EventsWorkerStore, BackgroundUpdateStore
         )
 
     @defer.inlineCallbacks
-    def have_events_in_timeline(self, event_ids):
-        """Given a list of event ids, check if we have already processed and
-        stored them as non outliers.
-        """
-        rows = yield self._simple_select_many_batch(
-            table="events",
-            retcols=("event_id",),
-            column="event_id",
-            iterable=list(event_ids),
-            keyvalues={"outlier": False},
-            desc="have_events_in_timeline",
-        )
-
-        defer.returnValue(set(r["event_id"] for r in rows))
-
-    @defer.inlineCallbacks
-    def have_seen_events(self, event_ids):
-        """Given a list of event ids, check if we have already processed them.
-
-        Args:
-            event_ids (iterable[str]):
-
-        Returns:
-            Deferred[set[str]]: The events we have already seen.
-        """
-        results = set()
-
-        def have_seen_events_txn(txn, chunk):
-            sql = (
-                "SELECT event_id FROM events as e WHERE e.event_id IN (%s)"
-                % (",".join("?" * len(chunk)), )
-            )
-            txn.execute(sql, chunk)
-            for (event_id, ) in txn:
-                results.add(event_id)
-
-        # break the input up into chunks of 100
-        input_iterator = iter(event_ids)
-        for chunk in iter(lambda: list(itertools.islice(input_iterator, 100)),
-                          []):
-            yield self.runInteraction(
-                "have_seen_events",
-                have_seen_events_txn,
-                chunk,
-            )
-        defer.returnValue(results)
-
-    def get_seen_events_with_rejections(self, event_ids):
-        """Given a list of event ids, check if we rejected them.
-
-        Args:
-            event_ids (list[str])
-
-        Returns:
-            Deferred[dict[str, str|None):
-                Has an entry for each event id we already have seen. Maps to
-                the rejected reason string if we rejected the event, else maps
-                to None.
-        """
-        if not event_ids:
-            return defer.succeed({})
-
-        def f(txn):
-            sql = (
-                "SELECT e.event_id, reason FROM events as e "
-                "LEFT JOIN rejections as r ON e.event_id = r.event_id "
-                "WHERE e.event_id = ?"
-            )
-
-            res = {}
-            for event_id in event_ids:
-                txn.execute(sql, (event_id,))
-                row = txn.fetchone()
-                if row:
-                    _, rejected = row
-                    res[event_id] = rejected
-
-            return res
-
-        return self.runInteraction("get_rejection_reasons", f)
-
-    @defer.inlineCallbacks
     def count_daily_messages(self):
         """
         Returns an estimate of the number of messages sent in the last day.
@@ -1988,7 +1913,7 @@ class EventsStore(EventFederationStore, EventsWorkerStore, BackgroundUpdateStore
         max_depth = max(row[0] for row in rows)
 
         if max_depth <= token.topological:
-            # We need to ensure we don't delete all the events from the datanase
+            # We need to ensure we don't delete all the events from the database
             # otherwise we wouldn't be able to send any events (due to not
             # having any backwards extremeties)
             raise SynapseError(
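The backfill guard above protects a simple invariant: the persisted-position gauge must only track forward-persisted events, since backfilled events receive negative stream orderings. A standalone sketch of that invariant (illustrative, not synapse code):

    def update_persisted_position(gauge, chunk, backfilled):
        # gauge: any object with a set() method; chunk: list of
        # (event, context) pairs, newest last
        if backfilled:
            # backfilled stream orderings are negative and would drag
            # the gauge backwards
            return
        gauge.set(chunk[-1][0].internal_metadata.stream_ordering)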
diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py
index 9b4cfeb899..59822178ff 100644
--- a/synapse/storage/events_worker.py
+++ b/synapse/storage/events_worker.py
@@ -12,6 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import itertools
 import logging
 from collections import namedtuple
 
@@ -442,3 +443,85 @@ class EventsWorkerStore(SQLBaseStore):
             self._get_event_cache.prefill((original_ev.event_id,), cache_entry)
 
         defer.returnValue(cache_entry)
+
+    @defer.inlineCallbacks
+    def have_events_in_timeline(self, event_ids):
+        """Given a list of event ids, check if we have already processed and
+        stored them as non outliers.
+        """
+        rows = yield self._simple_select_many_batch(
+            table="events",
+            retcols=("event_id",),
+            column="event_id",
+            iterable=list(event_ids),
+            keyvalues={"outlier": False},
+            desc="have_events_in_timeline",
+        )
+
+        defer.returnValue(set(r["event_id"] for r in rows))
+
+    @defer.inlineCallbacks
+    def have_seen_events(self, event_ids):
+        """Given a list of event ids, check if we have already processed them.
+
+        Args:
+            event_ids (iterable[str]):
+
+        Returns:
+            Deferred[set[str]]: The events we have already seen.
+        """
+        results = set()
+
+        def have_seen_events_txn(txn, chunk):
+            sql = (
+                "SELECT event_id FROM events as e WHERE e.event_id IN (%s)"
+                % (",".join("?" * len(chunk)), )
+            )
+            txn.execute(sql, chunk)
+            for (event_id, ) in txn:
+                results.add(event_id)
+
+        # break the input up into chunks of 100
+        input_iterator = iter(event_ids)
+        for chunk in iter(lambda: list(itertools.islice(input_iterator, 100)),
+                          []):
+            yield self.runInteraction(
+                "have_seen_events",
+                have_seen_events_txn,
+                chunk,
+            )
+        defer.returnValue(results)
+
+    def get_seen_events_with_rejections(self, event_ids):
+        """Given a list of event ids, check if we rejected them.
+
+        Args:
+            event_ids (list[str])
+
+        Returns:
+            Deferred[dict[str, str|None]]:
+                Has an entry for each event id we already have seen. Maps to
+                the rejected reason string if we rejected the event, else maps
+                to None.
+        """
+        if not event_ids:
+            return defer.succeed({})
+
+        def f(txn):
+            sql = (
+                "SELECT e.event_id, reason FROM events as e "
+                "LEFT JOIN rejections as r ON e.event_id = r.event_id "
+                "WHERE e.event_id = ?"
+            )
+
+            res = {}
+            for event_id in event_ids:
+                txn.execute(sql, (event_id,))
+                row = txn.fetchone()
+                if row:
+                    _, rejected = row
+                    res[event_id] = rejected
+
+            return res
+
+        return self.runInteraction("get_rejection_reasons", f)
diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py
new file mode 100644
index 0000000000..d178f5c5ba
--- /dev/null
+++ b/synapse/storage/monthly_active_users.py
@@ -0,0 +1,222 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+from twisted.internet import defer
+
+from synapse.util.caches.descriptors import cached
+
+from ._base import SQLBaseStore
+
+logger = logging.getLogger(__name__)
+
+# Number of msec of granularity to store the monthly_active_user timestamp
+# This means it is not necessary to update the table on every request
+LAST_SEEN_GRANULARITY = 60 * 60 * 1000
+
+
+class MonthlyActiveUsersStore(SQLBaseStore):
+    def __init__(self, dbconn, hs):
+        super(MonthlyActiveUsersStore, self).__init__(None, hs)
+        self._clock = hs.get_clock()
+        self.hs = hs
+        self.reserved_users = ()
+
+    @defer.inlineCallbacks
+    def initialise_reserved_users(self, threepids):
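+        """Ensures that reserved threepids are accounted for in the MAU table
+        and caches the matching user ids, should they exist.
+
+        Args:
+            threepids (list[dict]): list of threepid dicts to reserve
+        """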
+        # TODO Why can't I do this in init?
+        store = self.hs.get_datastore()
+        reserved_user_list = []
+
+        # Do not add more reserved users than the total allowable number
+        for tp in threepids[:self.hs.config.max_mau_value]:
+            user_id = yield store.get_user_id_by_threepid(
+                tp["medium"], tp["address"]
+            )
+            if user_id:
+                yield self.upsert_monthly_active_user(user_id)
+                reserved_user_list.append(user_id)
+            else:
+                logger.warning(
+                    "mau limit reserved threepid %s not found in db" % tp
+                )
+        self.reserved_users = tuple(reserved_user_list)
+
+    @defer.inlineCallbacks
+    def reap_monthly_active_users(self):
+        """
+        Cleans out the monthly active user table to ensure that no stale
+        entries exist.
+
+        Returns:
+            Deferred
+        """
+        def _reap_users(txn):
+            # Purge stale users
+
+            thirty_days_ago = (
+                int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30)
+            )
+            query_args = [thirty_days_ago]
+            base_sql = "DELETE FROM monthly_active_users WHERE timestamp < ?"
+
+            # Need if/else since 'AND user_id NOT IN ({})' fails on Postgres
+            # when len(reserved_users) == 0. Works fine on sqlite.
+            if len(self.reserved_users) > 0:
+                # questionmarks is a hack to overcome sqlite not supporting
+                # tuples in 'WHERE IN %s'
+                questionmarks = '?' * len(self.reserved_users)
+
+                query_args.extend(self.reserved_users)
+                sql = base_sql + """ AND user_id NOT IN ({})""".format(
+                    ','.join(questionmarks)
+                )
+            else:
+                sql = base_sql
+
+            txn.execute(sql, query_args)
+
+            # If MAU user count still exceeds the MAU threshold, then delete on
+            # a least recently active basis.
+            # Note it is not possible to write this query using OFFSET due to
+            # incompatibilities in how sqlite and postgres support the feature.
+            # sqlite requires 'LIMIT -1 OFFSET ?' (the LIMIT must be present),
+            # while postgres does not require a LIMIT but does not support
+            # negative LIMIT values. So there is no way to write the query
+            # such that both support it.
+            safe_guard = self.hs.config.max_mau_value - len(self.reserved_users)
+            # Must be greater than zero for postgres
+            safe_guard = safe_guard if safe_guard > 0 else 0
+            query_args = [safe_guard]
+
+            base_sql = """
+                DELETE FROM monthly_active_users
+                WHERE user_id NOT IN (
+                    SELECT user_id FROM monthly_active_users
+                    ORDER BY timestamp DESC
+                    LIMIT ?
+                    )
+                """
+            # Need if/else since 'AND user_id NOT IN ({})' fails on Postgres
+            # when len(reserved_users) == 0. Works fine on sqlite.
+            if len(self.reserved_users) > 0:
+                query_args.extend(self.reserved_users)
+                sql = base_sql + """ AND user_id NOT IN ({})""".format(
+                    ','.join(questionmarks)
+                )
+            else:
+                sql = base_sql
+            txn.execute(sql, query_args)
+
+        yield self.runInteraction("reap_monthly_active_users", _reap_users)
+        # It seems poor to invalidate the whole cache. Postgres supports
+        # 'RETURNING', which would allow us to invalidate only the
+        # specific users, but sqlite has no way to do this, and instead
+        # we would need to SELECT and then DELETE, which without locking
+        # is racy.
+        # We have resolved to invalidate the whole cache for now, and to do
+        # something about it if and when the perf cost becomes significant.
+        self.user_last_seen_monthly_active.invalidate_all()
+        self.get_monthly_active_count.invalidate_all()
+
+    @cached(num_args=0)
+    def get_monthly_active_count(self):
+        """Generates current count of monthly active users
+
+        Returns:
+            Deferred[int]: Number of current monthly active users
+        """
+
+        def _count_users(txn):
+            sql = "SELECT COALESCE(count(*), 0) FROM monthly_active_users"
+
+            txn.execute(sql)
+            count, = txn.fetchone()
+            return count
+        return self.runInteraction("count_users", _count_users)
+
+    @defer.inlineCallbacks
+    def upsert_monthly_active_user(self, user_id):
+        """
+            Updates or inserts a monthly active user entry
+            Arguments:
+                user_id (str): user to add/update
+            Returns:
+                Deferred[bool]: True if a new entry was created, False if an
+                    existing one was updated.
+        """
+        is_insert = yield self._simple_upsert(
+            desc="upsert_monthly_active_user",
+            table="monthly_active_users",
+            keyvalues={
+                "user_id": user_id,
+            },
+            values={
+                "timestamp": int(self._clock.time_msec()),
+            },
+            lock=False,
+        )
+        if is_insert:
+            self.user_last_seen_monthly_active.invalidate((user_id,))
+            self.get_monthly_active_count.invalidate(())
+
+    @cached(num_args=1)
+    def user_last_seen_monthly_active(self, user_id):
+        """
+            Checks if a given user is part of the monthly active user group
+            Arguments:
+                user_id (str): user to check
+            Returns:
+                Deferred[int]: timestamp of the last time the user was seen,
+                    or None if never seen
+
+        """
+
+        return self._simple_select_one_onecol(
+            table="monthly_active_users",
+            keyvalues={
+                "user_id": user_id,
+            },
+            retcol="timestamp",
+            allow_none=True,
+            desc="user_last_seen_monthly_active",
+        )
+
+    @defer.inlineCallbacks
+    def populate_monthly_active_users(self, user_id):
+        """Checks on the state of monthly active user limits and optionally
+        add the user to the monthly active tables
+
+        Args:
+            user_id(str): the user_id to query
+        """
+        if self.hs.config.limit_usage_by_mau:
+            is_trial = yield self.is_trial_user(user_id)
+            if is_trial:
+                # we don't track trial users in the MAU table.
+                return
+
+            last_seen_timestamp = yield self.user_last_seen_monthly_active(user_id)
+            now = self.hs.get_clock().time_msec()
+
+            # We want to reduce the total number of db writes, and are happy
+            # to trade timestamp accuracy in order to lighten the load. This
+            # means we always insert new users (where the MAU threshold has
+            # not been reached), but only update if we have not previously
+            # seen the user for LAST_SEEN_GRANULARITY ms.
+            if last_seen_timestamp is None:
+                count = yield self.get_monthly_active_count()
+                if count < self.hs.config.max_mau_value:
+                    yield self.upsert_monthly_active_user(user_id)
+            elif now - last_seen_timestamp > LAST_SEEN_GRANULARITY:
+                yield self.upsert_monthly_active_user(user_id)
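Taken together, the new store methods compose into a simple flow: populate_monthly_active_users is the cheap per-request entry point, and the cached count backs the limit check. A hedged sketch under the assumption that `store` is the homeserver's datastore:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def track_activity(store, user_id):
        # no-op for trial users and for users already seen within
        # LAST_SEEN_GRANULARITY ms; otherwise upserts the MAU row,
        # subject to max_mau_value
        yield store.populate_monthly_active_users(user_id)
        count = yield store.get_monthly_active_count()
        defer.returnValue(count)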
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index b290f834b3..b364719312 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
 
 # Remember to update this number every time a change is made to database
 # schema files, so the users will be informed on server restarts.
-SCHEMA_VERSION = 50
+SCHEMA_VERSION = 51
 
 dir_path = os.path.abspath(os.path.dirname(__file__))
 
diff --git a/synapse/storage/profile.py b/synapse/storage/profile.py
index 60295da254..88b50f33b5 100644
--- a/synapse/storage/profile.py
+++ b/synapse/storage/profile.py
@@ -71,8 +71,6 @@ class ProfileWorkerStore(SQLBaseStore):
             desc="get_from_remote_profile_cache",
         )
 
-
-class ProfileStore(ProfileWorkerStore):
     def create_profile(self, user_localpart):
         return self._simple_insert(
             table="profiles",
@@ -96,6 +94,8 @@ class ProfileStore(ProfileWorkerStore):
             desc="set_profile_avatar_url",
         )
 
+
+class ProfileStore(ProfileWorkerStore):
     def add_remote_profile_cache(self, user_id, displayname, avatar_url):
         """Ensure we are caching the remote user's profiles.
 
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index 07333f777d..26b429e307 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -26,6 +26,11 @@ from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 
 
 class RegistrationWorkerStore(SQLBaseStore):
+    def __init__(self, db_conn, hs):
+        super(RegistrationWorkerStore, self).__init__(db_conn, hs)
+
+        self.config = hs.config
+
     @cached()
     def get_user_by_id(self, user_id):
         return self._simple_select_one(
@@ -36,12 +41,33 @@ class RegistrationWorkerStore(SQLBaseStore):
             retcols=[
                 "name", "password_hash", "is_guest",
                 "consent_version", "consent_server_notice_sent",
-                "appservice_id",
+                "appservice_id", "creation_ts",
             ],
             allow_none=True,
             desc="get_user_by_id",
         )
 
+    @defer.inlineCallbacks
+    def is_trial_user(self, user_id):
+        """Checks if user is in the "trial" period, i.e. within the first
+        N days of registration defined by `mau_trial_days` config
+
+        Args:
+            user_id (str)
+
+        Returns:
+            Deferred[bool]
+        """
+
+        info = yield self.get_user_by_id(user_id)
+        if not info:
+            defer.returnValue(False)
+
+        now = self.clock.time_msec()
+        trial_duration_ms = self.config.mau_trial_days * 24 * 60 * 60 * 1000
+        is_trial = (now - info["creation_ts"] * 1000) < trial_duration_ms
+        defer.returnValue(is_trial)
+
     @cached()
     def get_user_by_access_token(self, token):
         """Get a user from the given access token.
diff --git a/synapse/storage/room.py b/synapse/storage/room.py
index 3147fb6827..61013b8919 100644
--- a/synapse/storage/room.py
+++ b/synapse/storage/room.py
@@ -41,6 +41,22 @@ RatelimitOverride = collections.namedtuple(
 
 
 class RoomWorkerStore(SQLBaseStore):
+    def get_room(self, room_id):
+        """Retrieve a room.
+
+        Args:
+            room_id (str): The ID of the room to retrieve.
+        Returns:
+            A namedtuple containing the room information, or an empty list.
+        """
+        return self._simple_select_one(
+            table="rooms",
+            keyvalues={"room_id": room_id},
+            retcols=("room_id", "is_public", "creator"),
+            desc="get_room",
+            allow_none=True,
+        )
+
     def get_public_room_ids(self):
         return self._simple_select_onecol(
             table="rooms",
@@ -170,6 +186,35 @@ class RoomWorkerStore(SQLBaseStore):
             desc="is_room_blocked",
         )
 
+    @cachedInlineCallbacks(max_entries=10000)
+    def get_ratelimit_for_user(self, user_id):
+        """Check if there are any overrides for ratelimiting for the given
+        user
+
+        Args:
+            user_id (str)
+
+        Returns:
+            RatelimitOverride if there is an override, else None. If the contents
+            of RatelimitOverride are None or 0 then ratelimiting has been
+            disabled for that user entirely.
+        """
+        row = yield self._simple_select_one(
+            table="ratelimit_override",
+            keyvalues={"user_id": user_id},
+            retcols=("messages_per_second", "burst_count"),
+            allow_none=True,
+            desc="get_ratelimit_for_user",
+        )
+
+        if row:
+            defer.returnValue(RatelimitOverride(
+                messages_per_second=row["messages_per_second"],
+                burst_count=row["burst_count"],
+            ))
+        else:
+            defer.returnValue(None)
+
 
 class RoomStore(RoomWorkerStore, SearchStore):
 
@@ -215,22 +260,6 @@ class RoomStore(RoomWorkerStore, SearchStore):
             logger.error("store_room with room_id=%s failed: %s", room_id, e)
             raise StoreError(500, "Problem creating room.")
 
-    def get_room(self, room_id):
-        """Retrieve a room.
-
-        Args:
-            room_id (str): The ID of the room to retrieve.
-        Returns:
-            A namedtuple containing the room information, or an empty list.
-        """
-        return self._simple_select_one(
-            table="rooms",
-            keyvalues={"room_id": room_id},
-            retcols=("room_id", "is_public", "creator"),
-            desc="get_room",
-            allow_none=True,
-        )
-
     @defer.inlineCallbacks
     def set_room_is_public(self, room_id, is_public):
         def set_room_is_public_txn(txn, next_id):
@@ -469,35 +498,6 @@ class RoomStore(RoomWorkerStore, SearchStore):
             "get_all_new_public_rooms", get_all_new_public_rooms
         )
 
-    @cachedInlineCallbacks(max_entries=10000)
-    def get_ratelimit_for_user(self, user_id):
-        """Check if there are any overrides for ratelimiting for the given
-        user
-
-        Args:
-            user_id (str)
-
-        Returns:
-            RatelimitOverride if there is an override, else None. If the contents
-            of RatelimitOverride are None or 0 then ratelimitng has been
-            disabled for that user entirely.
-        """
-        row = yield self._simple_select_one(
-            table="ratelimit_override",
-            keyvalues={"user_id": user_id},
-            retcols=("messages_per_second", "burst_count"),
-            allow_none=True,
-            desc="get_ratelimit_for_user",
-        )
-
-        if row:
-            defer.returnValue(RatelimitOverride(
-                messages_per_second=row["messages_per_second"],
-                burst_count=row["burst_count"],
-            ))
-        else:
-            defer.returnValue(None)
-
     @defer.inlineCallbacks
     def block_room(self, room_id, user_id):
         yield self._simple_insert(
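Moving get_ratelimit_for_user onto RoomWorkerStore makes the override readable from worker processes. A sketch of an assumed call site (names and defaults are illustrative):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def effective_rate_limit(store, user_id, default_mps, default_burst):
        override = yield store.get_ratelimit_for_user(user_id)
        if override is not None:
            # fields of None or 0 mean ratelimiting is disabled entirely
            defer.returnValue(
                (override.messages_per_second, override.burst_count)
            )
        defer.returnValue((default_mps, default_burst))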
diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py
index 10dce21cea..9b4e6d6aa8 100644
--- a/synapse/storage/roommember.py
+++ b/synapse/storage/roommember.py
@@ -26,7 +26,7 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.storage.events_worker import EventsWorkerStore
 from synapse.types import get_domain_from_id
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.caches import intern_string
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 from synapse.util.stringutils import to_ascii
diff --git a/synapse/storage/schema/delta/51/monthly_active_users.sql b/synapse/storage/schema/delta/51/monthly_active_users.sql
new file mode 100644
index 0000000000..c9d537d5a3
--- /dev/null
+++ b/synapse/storage/schema/delta/51/monthly_active_users.sql
@@ -0,0 +1,27 @@
+/* Copyright 2018 New Vector Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- a table of monthly active users, for use when blocking based on MAU limits
+CREATE TABLE monthly_active_users (
+    user_id TEXT NOT NULL,
+    -- Last time we saw the user. Not guaranteed to be accurate due to rate limiting
+    -- on updates; granularity of updates is governed by
+    -- synapse.storage.monthly_active_users.LAST_SEEN_GRANULARITY
+    -- Measured in ms since epoch.
+    timestamp BIGINT NOT NULL
+);
+
+CREATE UNIQUE INDEX monthly_active_users_users ON monthly_active_users(user_id);
+CREATE INDEX monthly_active_users_time_stamp ON monthly_active_users(timestamp);
diff --git a/synapse/storage/state.py b/synapse/storage/state.py
index b27b3ae144..4b971efdba 100644
--- a/synapse/storage/state.py
+++ b/synapse/storage/state.py
@@ -21,15 +21,17 @@ from six.moves import range
 
 from twisted.internet import defer
 
+from synapse.api.constants import EventTypes
+from synapse.api.errors import NotFoundError
+from synapse.storage._base import SQLBaseStore
 from synapse.storage.background_updates import BackgroundUpdateStore
 from synapse.storage.engines import PostgresEngine
+from synapse.storage.events_worker import EventsWorkerStore
 from synapse.util.caches import get_cache_factor_for, intern_string
 from synapse.util.caches.descriptors import cached, cachedList
 from synapse.util.caches.dictionary_cache import DictionaryCache
 from synapse.util.stringutils import to_ascii
 
-from ._base import SQLBaseStore
-
 logger = logging.getLogger(__name__)
 
 
@@ -46,7 +48,8 @@ class _GetStateGroupDelta(namedtuple("_GetStateGroupDelta", ("prev_group", "delt
         return len(self.delta_ids) if self.delta_ids else 0
 
 
-class StateGroupWorkerStore(SQLBaseStore):
+# this inherits from EventsWorkerStore because it calls self.get_events
+class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
     """The parts of StateGroupStore that can be called from workers.
     """
 
@@ -57,10 +60,69 @@ class StateGroupWorkerStore(SQLBaseStore):
     def __init__(self, db_conn, hs):
         super(StateGroupWorkerStore, self).__init__(db_conn, hs)
 
+        # Originally the state store used a single DictionaryCache to cache the
+        # event IDs for the state types in a given state group to avoid hammering
+        # on the state_group* tables.
+        #
+        # The point of using a DictionaryCache is that it can cache a subset
+        # of the state events for a given state group (i.e. a subset of the keys for a
+        # given dict which is an entry in the cache for a given state group ID).
+        #
+        # However, this poses problems when performing complicated queries
+        # on the store - for instance: "give me all the state for this group, but
+        # limit members to this subset of users", as DictionaryCache's API isn't
+        # rich enough to say "please cache any of these fields, apart from this subset".
+        # This is problematic when lazy loading members, which requires this behaviour,
+        # as without it the cache has no choice but to speculatively load all
+        # state events for the group, which negates the efficiency being sought.
+        #
+        # Rather than overcomplicating DictionaryCache's API, we instead split the
+        # state_group_cache into two halves - one for tracking non-member events,
+        # and the other for tracking member_events.  This means that lazy loading
+        # queries can be made in a cache-friendly manner by querying both caches
+        # separately and then merging the result.  So for the example above, you
+        # would query the members cache for a specific subset of state keys
+        # (which DictionaryCache will handle efficiently and fine) and the non-members
+        # cache for all state (which DictionaryCache will similarly handle fine)
+        # and then just merge the results together.
+        #
+        # We size the non-members cache to be smaller than the members cache as the
+        # vast majority of state in Matrix (today) is member events.
+
         self._state_group_cache = DictionaryCache(
-            "*stateGroupCache*", 500000 * get_cache_factor_for("stateGroupCache")
+            "*stateGroupCache*",
+            # TODO: this hasn't been tuned yet
+            50000 * get_cache_factor_for("stateGroupCache")
+        )
+        self._state_group_members_cache = DictionaryCache(
+            "*stateGroupMembersCache*",
+            500000 * get_cache_factor_for("stateGroupMembersCache")
         )
 
+    @defer.inlineCallbacks
+    def get_room_version(self, room_id):
+        """Get the room_version of a given room
+
+        Args:
+            room_id (str)
+
+        Returns:
+            Deferred[str]
+
+        Raises:
+            NotFoundError if the room is unknown
+        """
+        # for now we do this by looking at the create event. We may want to cache this
+        # more intelligently in future.
+        state_ids = yield self.get_current_state_ids(room_id)
+        create_id = state_ids.get((EventTypes.Create, ""))
+
+        if not create_id:
+            raise NotFoundError("Unknown room")
+
+        create_event = yield self.get_event(create_id)
+        defer.returnValue(create_event.content.get("room_version", "1"))
+
     @cached(max_entries=100000, iterable=True)
     def get_current_state_ids(self, room_id):
         """Get the current state event ids for a room based on the
@@ -89,6 +151,69 @@ class StateGroupWorkerStore(SQLBaseStore):
             _get_current_state_ids_txn,
         )
 
+    # FIXME: how should this be cached?
+    def get_filtered_current_state_ids(self, room_id, types, filtered_types=None):
+        """Get the current state event of a given type for a room based on the
+        current_state_events table.  This may not be as up-to-date as the result
+        of doing a fresh state resolution as per state_handler.get_current_state
+        Args:
+            room_id (str)
+            types (list[(str, str|None)]): List of (type, state_key) tuples
+                which are used to filter the state fetched. `state_key` may be
+                None, which matches any `state_key`
+            filtered_types (list[str]|None): List of types to apply the above
+                filter to.
+        Returns:
+            Deferred[dict[(str, str), str]]: map from (type, state_key) to
+                event id
+        """
+
+        include_other_types = filtered_types is not None
+
+        def _get_filtered_current_state_ids_txn(txn):
+            results = {}
+            sql = """SELECT type, state_key, event_id FROM current_state_events
+                     WHERE room_id = ? %s"""
+            # Turns out that postgres doesn't like doing a list of ORs and
+            # is about 1000x slower, so we just issue a query for each specific
+            # type separately.
+            if types:
+                clause_to_args = [
+                    (
+                        "AND type = ? AND state_key = ?",
+                        (etype, state_key)
+                    ) if state_key is not None else (
+                        "AND type = ?",
+                        (etype,)
+                    )
+                    for etype, state_key in types
+                ]
+
+                if include_other_types:
+                    unique_types = set(filtered_types)
+                    clause_to_args.append(
+                        (
+                            "AND type <> ? " * len(unique_types),
+                            list(unique_types)
+                        )
+                    )
+            else:
+                # If types is empty or None we fetch all the state, and so just
+                # use an empty where clause with no extra args.
+                clause_to_args = [("", [])]
+            for where_clause, where_args in clause_to_args:
+                args = [room_id]
+                args.extend(where_args)
+                txn.execute(sql % (where_clause,), args)
+                for row in txn:
+                    typ, state_key, event_id = row
+                    key = (intern_string(typ), intern_string(state_key))
+                    results[key] = event_id
+            return results
+
+        return self.runInteraction(
+            "get_filtered_current_state_ids",
+            _get_filtered_current_state_ids_txn,
+        )
+
     @cached(max_entries=10000, iterable=True)
     def get_state_group_delta(self, state_group):
         """Given a state group try to return a previous group and a delta between
@@ -185,7 +310,7 @@ class StateGroupWorkerStore(SQLBaseStore):
         })
 
     @defer.inlineCallbacks
-    def _get_state_groups_from_groups(self, groups, types):
+    def _get_state_groups_from_groups(self, groups, types, members=None):
         """Returns the state groups for a given set of groups, filtering on
         types of state events.
 
@@ -194,6 +319,9 @@ class StateGroupWorkerStore(SQLBaseStore):
             types (Iterable[str, str|None]|None): list of 2-tuples of the form
                 (`type`, `state_key`), where a `state_key` of `None` matches all
                 state_keys for the `type`. If None, all types are returned.
+            members (bool|None): If not None, then, in addition to any filtering
+                implied by types, the results are also filtered to only include
+                member events (if True), or to exclude member events (if False)
 
         Returns:
             dictionary state_group -> (dict of (type, state_key) -> event id)
@@ -204,14 +332,14 @@ class StateGroupWorkerStore(SQLBaseStore):
         for chunk in chunks:
             res = yield self.runInteraction(
                 "_get_state_groups_from_groups",
-                self._get_state_groups_from_groups_txn, chunk, types,
+                self._get_state_groups_from_groups_txn, chunk, types, members,
             )
             results.update(res)
 
         defer.returnValue(results)
 
     def _get_state_groups_from_groups_txn(
-        self, txn, groups, types=None,
+        self, txn, groups, types=None, members=None,
     ):
         results = {group: {} for group in groups}
 
@@ -249,6 +377,11 @@ class StateGroupWorkerStore(SQLBaseStore):
                 %s
             """)
 
+            if members is True:
+                sql += " AND type = '%s'" % (EventTypes.Member,)
+            elif members is False:
+                sql += " AND type <> '%s'" % (EventTypes.Member,)
+
             # Turns out that postgres doesn't like doing a list of OR's and
             # is about 1000x slower, so we just issue a query for each specific
             # type separately.
@@ -296,6 +429,11 @@ class StateGroupWorkerStore(SQLBaseStore):
             else:
                 where_clause = ""
 
+            if members is True:
+                where_clause += " AND type = '%s'" % EventTypes.Member
+            elif members is False:
+                where_clause += " AND type <> '%s'" % EventTypes.Member
+
             # We don't use WITH RECURSIVE on sqlite3 as there are distributions
             # that ship with an sqlite3 version that doesn't support it (e.g. wheezy)
             for group in groups:
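
[Editorial note: the `members` argument added above is a tri-state flag: None leaves the query alone, True restricts it to membership rows, False excludes them. A self-contained sketch follows; the constant mirrors EventTypes.Member and the helper name is illustrative.]

    MEMBER = "m.room.member"

    def add_members_clause(sql, members=None):
        # the type name is a trusted constant, so direct interpolation is safe
        if members is True:
            sql += " AND type = '%s'" % (MEMBER,)
        elif members is False:
            sql += " AND type <> '%s'" % (MEMBER,)
        return sql

    base = "SELECT event_id FROM state_groups_state WHERE state_group = ?"
    for flag in (None, True, False):
        print(flag, "->", add_members_clause(base, flag))
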
@@ -362,8 +500,7 @@ class StateGroupWorkerStore(SQLBaseStore):
                 If None, `types` filtering is applied to all events.
 
         Returns:
-            deferred: A list of dicts corresponding to the event_ids given.
-            The dicts are mappings from (type, state_key) -> state_events
+            deferred: A dict of event_id -> (type, state_key) -> state_event
         """
         event_to_groups = yield self._get_state_group_for_events(
             event_ids,
@@ -391,7 +528,8 @@ class StateGroupWorkerStore(SQLBaseStore):
     @defer.inlineCallbacks
     def get_state_ids_for_events(self, event_ids, types=None, filtered_types=None):
         """
-        Get the state dicts corresponding to a list of events
+        Get the state dicts corresponding to a list of events, containing the event_ids
+        of the state events (as opposed to the events themselves)
 
         Args:
             event_ids(list(str)): events whose state should be returned
@@ -404,7 +542,7 @@ class StateGroupWorkerStore(SQLBaseStore):
                 If None, `types` filtering is applied to all events.
 
         Returns:
-            A deferred dict from event_id -> (type, state_key) -> state_event
+            A deferred dict from event_id -> (type, state_key) -> event_id
         """
         event_to_groups = yield self._get_state_group_for_events(
             event_ids,
@@ -490,10 +628,11 @@ class StateGroupWorkerStore(SQLBaseStore):
 
         defer.returnValue({row["event_id"]: row["state_group"] for row in rows})
 
-    def _get_some_state_from_cache(self, group, types, filtered_types=None):
+    def _get_some_state_from_cache(self, cache, group, types, filtered_types=None):
         """Checks if group is in cache. See `_get_state_for_groups`
 
         Args:
+            cache(DictionaryCache): the state group cache to use
             group(int): The state group to lookup
             types(list[str, str|None]): List of 2-tuples of the form
                 (`type`, `state_key`), where a `state_key` of `None` matches all
@@ -507,11 +646,11 @@ class StateGroupWorkerStore(SQLBaseStore):
         requests state from the cache, if False we need to query the DB for the
         missing state.
         """
-        is_all, known_absent, state_dict_ids = self._state_group_cache.get(group)
+        is_all, known_absent, state_dict_ids = cache.get(group)
 
         type_to_key = {}
 
-        # tracks whether any of ourrequested types are missing from the cache
+        # tracks whether any of our requested types are missing from the cache
         missing_types = False
 
         for typ, state_key in types:
@@ -558,7 +697,7 @@ class StateGroupWorkerStore(SQLBaseStore):
             if include(k[0], k[1])
         }, got_all
 
-    def _get_all_state_from_cache(self, group):
+    def _get_all_state_from_cache(self, cache, group):
         """Checks if group is in cache. See `_get_state_for_groups`
 
         Returns 2-tuple (`state_dict`, `got_all`). `got_all` is a bool
@@ -566,9 +705,10 @@ class StateGroupWorkerStore(SQLBaseStore):
         cache, if False we need to query the DB for the missing state.
 
         Args:
+            cache(DictionaryCache): the state group cache to use
             group: The state group to lookup
         """
-        is_all, _, state_dict_ids = self._state_group_cache.get(group)
+        is_all, _, state_dict_ids = cache.get(group)
 
         return state_dict_ids, is_all
 
@@ -595,6 +735,62 @@ class StateGroupWorkerStore(SQLBaseStore):
             Deferred[dict[int, dict[(type, state_key), EventBase]]]
                 a dictionary mapping from state group to state dictionary.
         """
+        if types is not None:
+            non_member_types = [t for t in types if t[0] != EventTypes.Member]
+
+            if filtered_types is not None and EventTypes.Member not in filtered_types:
+                # we want all of the membership events
+                member_types = None
+            else:
+                member_types = [t for t in types if t[0] == EventTypes.Member]
+
+        else:
+            non_member_types = None
+            member_types = None
+
+        non_member_state = yield self._get_state_for_groups_using_cache(
+            groups, self._state_group_cache, non_member_types, filtered_types,
+        )
+        # XXX: we could skip this entirely if member_types is []
+        member_state = yield self._get_state_for_groups_using_cache(
+            # we set filtered_types=None as member_state only ever contains members.
+            groups, self._state_group_members_cache, member_types, None,
+        )
+
+        state = non_member_state
+        for group in groups:
+            state[group].update(member_state[group])
+
+        defer.returnValue(state)
+
+    @defer.inlineCallbacks
+    def _get_state_for_groups_using_cache(
+        self, groups, cache, types=None, filtered_types=None
+    ):
+        """Gets the state at each of a list of state groups, optionally
+        filtering by type/state_key, querying from a specific cache.
+
+        Args:
+            groups (iterable[int]): list of state groups for which we want
+                to get the state.
+            cache (DictionaryCache): the cache of group ids to state dicts which
+                we will pass through - either the normal state cache or the specific
+                members state cache.
+            types (None|iterable[(str, None|str)]):
+                indicates the state type/keys required. If None, the whole
+                state is fetched and returned.
+
+                Otherwise, each entry should be a `(type, state_key)` tuple to
+                include in the response. A `state_key` of None is a wildcard
+                meaning that we require all state with that type.
+            filtered_types(list[str]|None): Only apply filtering via `types` to this
+                list of event types.  Other types of events are returned unfiltered.
+                If None, `types` filtering is applied to all events.
+
+        Returns:
+            Deferred[dict[int, dict[(type, state_key), EventBase]]]
+                a dictionary mapping from state group to state dictionary.
+        """
         if types:
             types = frozenset(types)
         results = {}
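
[Editorial note: a sketch of how the new _get_state_for_groups entry point splits a requested `types` filter between the two caches, mirroring the logic above. Note the wildcard case: if m.room.member is not in filtered_types, membership events are unfiltered, so the member-side filter collapses to None ("everything"). Names are illustrative.]

    MEMBER = "m.room.member"

    def split_types(types, filtered_types=None):
        if types is None:
            return None, None  # no filtering on either cache
        non_member_types = [t for t in types if t[0] != MEMBER]
        if filtered_types is not None and MEMBER not in filtered_types:
            member_types = None  # caller wants all membership events
        else:
            member_types = [t for t in types if t[0] == MEMBER]
        return non_member_types, member_types

    print(split_types([("m.room.name", ""), (MEMBER, "@a:hs")]))
    # -> ([('m.room.name', '')], [('m.room.member', '@a:hs')])
    print(split_types([("m.room.name", "")], filtered_types=["m.room.name"]))
    # -> ([('m.room.name', '')], None)
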
@@ -602,7 +798,7 @@ class StateGroupWorkerStore(SQLBaseStore):
         if types is not None:
             for group in set(groups):
                 state_dict_ids, got_all = self._get_some_state_from_cache(
-                    group, types, filtered_types
+                    cache, group, types, filtered_types
                 )
                 results[group] = state_dict_ids
 
@@ -611,7 +807,7 @@ class StateGroupWorkerStore(SQLBaseStore):
         else:
             for group in set(groups):
                 state_dict_ids, got_all = self._get_all_state_from_cache(
-                    group
+                    cache, group
                 )
 
                 results[group] = state_dict_ids
@@ -620,8 +816,8 @@ class StateGroupWorkerStore(SQLBaseStore):
                     missing_groups.append(group)
 
         if missing_groups:
-            # Okay, so we have some missing_types, lets fetch them.
-            cache_seq_num = self._state_group_cache.sequence
+            # Okay, so we have some missing_types, let's fetch them.
+            cache_seq_num = cache.sequence
 
             # the DictionaryCache knows if it has *all* the state, but
             # does not know if it has all of the keys of a particular type,
@@ -635,7 +831,7 @@ class StateGroupWorkerStore(SQLBaseStore):
                 types_to_fetch = types
 
             group_to_state_dict = yield self._get_state_groups_from_groups(
-                missing_groups, types_to_fetch
+                missing_groups, types_to_fetch, members=(cache is self._state_group_members_cache),
             )
 
             for group, group_state_dict in iteritems(group_to_state_dict):
@@ -655,7 +851,7 @@ class StateGroupWorkerStore(SQLBaseStore):
 
                 # update the cache with all the things we fetched from the
                 # database.
-                self._state_group_cache.update(
+                cache.update(
                     cache_seq_num,
                     key=group,
                     value=group_state_dict,
@@ -757,15 +953,33 @@ class StateGroupWorkerStore(SQLBaseStore):
                     ],
                 )
 
-            # Prefill the state group cache with this group.
+            # Prefill the state group caches with this group.
             # It's fine to use the sequence like this as the state group map
             # is immutable. (If the map wasn't immutable then this prefill could
             # race with another update)
+
+            current_member_state_ids = {
+                s: ev
+                for (s, ev) in iteritems(current_state_ids)
+                if s[0] == EventTypes.Member
+            }
+            txn.call_after(
+                self._state_group_members_cache.update,
+                self._state_group_members_cache.sequence,
+                key=state_group,
+                value=dict(current_member_state_ids),
+            )
+
+            current_non_member_state_ids = {
+                s: ev
+                for (s, ev) in iteritems(current_state_ids)
+                if s[0] != EventTypes.Member
+            }
             txn.call_after(
                 self._state_group_cache.update,
                 self._state_group_cache.sequence,
                 key=state_group,
-                value=dict(current_state_ids),
+                value=dict(current_non_member_state_ids),
             )
 
             return state_group
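
[Editorial note: the prefill above partitions the freshly-persisted state map so each half lands in the matching cache. A standalone sketch of that partitioning, with illustrative names:]

    MEMBER = "m.room.member"

    def partition_state(current_state_ids):
        members = {
            k: v for k, v in current_state_ids.items() if k[0] == MEMBER
        }
        non_members = {
            k: v for k, v in current_state_ids.items() if k[0] != MEMBER
        }
        return members, non_members

    state = {(MEMBER, "@a:hs"): "$a", ("m.room.create", ""): "$create"}
    print(partition_state(state))
    # -> ({('m.room.member', '@a:hs'): '$a'}, {('m.room.create', ''): '$create'})
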
diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py
index b9f2b74ac6..4c296d72c0 100644
--- a/synapse/storage/stream.py
+++ b/synapse/storage/stream.py
@@ -348,7 +348,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
             end_token (str): The stream token representing now.
 
         Returns:
-            Deferred[tuple[list[FrozenEvent],  str]]: Returns a list of
+            Deferred[tuple[list[FrozenEvent], str]]: Returns a list of
             events and a token pointing to the start of the returned
             events.
             The events returned are in ascending order.
@@ -379,7 +379,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
             end_token (str): The stream token representing now.
 
         Returns:
-            Deferred[tuple[list[_EventDictReturn],  str]]: Returns a list of
+            Deferred[tuple[list[_EventDictReturn], str]]: Returns a list of
             _EventDictReturn and a token pointing to the start of the returned
             events.
             The events returned are in ascending order.
diff --git a/synapse/util/async.py b/synapse/util/async_helpers.py
index a7094e2fb4..9b3f2f4b96 100644
--- a/synapse/util/async.py
+++ b/synapse/util/async_helpers.py
@@ -188,62 +188,30 @@ class Linearizer(object):
         # things blocked from executing.
         self.key_to_defer = {}
 
-    @defer.inlineCallbacks
     def queue(self, key):
+        # we avoid doing defer.inlineCallbacks here, so that cancellation works correctly.
+        # (https://twistedmatrix.com/trac/ticket/4632 meant that cancellations were not
+        # propagated inside inlineCallbacks until Twisted 18.7)
         entry = self.key_to_defer.setdefault(key, [0, collections.OrderedDict()])
 
         # If the number of things executing is greater than the maximum
         # then add a deferred to the list of blocked items
-        # When on of the things currently executing finishes it will callback
+        # When one of the things currently executing finishes it will callback
         # this item so that it can continue executing.
         if entry[0] >= self.max_count:
-            new_defer = defer.Deferred()
-            entry[1][new_defer] = 1
-
-            logger.info(
-                "Waiting to acquire linearizer lock %r for key %r", self.name, key,
-            )
-            try:
-                yield make_deferred_yieldable(new_defer)
-            except Exception as e:
-                if isinstance(e, CancelledError):
-                    logger.info(
-                        "Cancelling wait for linearizer lock %r for key %r",
-                        self.name, key,
-                    )
-                else:
-                    logger.warn(
-                        "Unexpected exception waiting for linearizer lock %r for key %r",
-                        self.name, key,
-                    )
-
-                # we just have to take ourselves back out of the queue.
-                del entry[1][new_defer]
-                raise
-
-            logger.info("Acquired linearizer lock %r for key %r", self.name, key)
-            entry[0] += 1
-
-            # if the code holding the lock completes synchronously, then it
-            # will recursively run the next claimant on the list. That can
-            # relatively rapidly lead to stack exhaustion. This is essentially
-            # the same problem as http://twistedmatrix.com/trac/ticket/9304.
-            #
-            # In order to break the cycle, we add a cheeky sleep(0) here to
-            # ensure that we fall back to the reactor between each iteration.
-            #
-            # (This needs to happen while we hold the lock, and the context manager's exit
-            # code must be synchronous, so this is the only sensible place.)
-            yield self._clock.sleep(0)
-
+            res = self._await_lock(key)
         else:
             logger.info(
                 "Acquired uncontended linearizer lock %r for key %r", self.name, key,
             )
             entry[0] += 1
+            res = defer.succeed(None)
+
+        # once we successfully get the lock, we need to return a context manager which
+        # will release the lock.
 
         @contextmanager
-        def _ctx_manager():
+        def _ctx_manager(_):
             try:
                 yield
             finally:
@@ -264,7 +232,64 @@ class Linearizer(object):
                     # map.
                     del self.key_to_defer[key]
 
-        defer.returnValue(_ctx_manager())
+        res.addCallback(_ctx_manager)
+        return res
+
+    def _await_lock(self, key):
+        """Helper for queue: adds a deferred to the queue
+
+        Assumes that we've already checked that we've reached the limit of the number
+        of lock-holders we allow. Creates a new deferred which is added to the list, and
+        adds some management around cancellations.
+
+        Returns the deferred, which will callback once we have secured the lock.
+
+        """
+        entry = self.key_to_defer[key]
+
+        logger.info(
+            "Waiting to acquire linearizer lock %r for key %r", self.name, key,
+        )
+
+        new_defer = make_deferred_yieldable(defer.Deferred())
+        entry[1][new_defer] = 1
+
+        def cb(_r):
+            logger.info("Acquired linearizer lock %r for key %r", self.name, key)
+            entry[0] += 1
+
+            # if the code holding the lock completes synchronously, then it
+            # will recursively run the next claimant on the list. That can
+            # relatively rapidly lead to stack exhaustion. This is essentially
+            # the same problem as http://twistedmatrix.com/trac/ticket/9304.
+            #
+            # In order to break the cycle, we add a cheeky sleep(0) here to
+            # ensure that we fall back to the reactor between each iteration.
+            #
+            # (This needs to happen while we hold the lock, and the context manager's exit
+            # code must be synchronous, so this is the only sensible place.)
+            return self._clock.sleep(0)
+
+        def eb(e):
+            logger.info("defer %r got err %r", new_defer, e)
+            if isinstance(e, CancelledError):
+                logger.info(
+                    "Cancelling wait for linearizer lock %r for key %r",
+                    self.name, key,
+                )
+
+            else:
+                logger.warn(
+                    "Unexpected exception waiting for linearizer lock %r for key %r",
+                    self.name, key,
+                )
+
+            # we just have to take ourselves back out of the queue.
+            del entry[1][new_defer]
+            return e
+
+        new_defer.addCallbacks(cb, eb)
+        return new_defer
 
 
 class ReadWriteLock(object):
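
[Editorial note: a usage sketch for the rewritten queue(). It now returns a plain Deferred (no inlineCallbacks), so cancelling that Deferred while blocked removes the waiter via the errback path above. The work function here is invented; Linearizer itself is assumed importable from synapse.util.async_helpers per the rename in this commit.]

    from twisted.internet import defer

    def some_work():
        return defer.succeed(None)  # stand-in for the real serialised work

    @defer.inlineCallbacks
    def do_serialised_work(linearizer, key):
        # queue() resolves to a context manager once the lock is acquired;
        # leaving the `with` block releases it and wakes the next waiter.
        with (yield linearizer.queue(key)):
            yield some_work()
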
diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py
index 861c24809c..187510576a 100644
--- a/synapse/util/caches/descriptors.py
+++ b/synapse/util/caches/descriptors.py
@@ -25,7 +25,7 @@ from six import itervalues, string_types
 from twisted.internet import defer
 
 from synapse.util import logcontext, unwrapFirstError
-from synapse.util.async import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches import get_cache_factor_for
 from synapse.util.caches.lrucache import LruCache
 from synapse.util.caches.treecache import TreeCache, iterate_tree_cache_entry
diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py
index a8491b42d5..afb03b2e1b 100644
--- a/synapse/util/caches/response_cache.py
+++ b/synapse/util/caches/response_cache.py
@@ -16,7 +16,7 @@ import logging
 
 from twisted.internet import defer
 
-from synapse.util.async import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches import register_cache
 from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
diff --git a/synapse/util/caches/snapshot_cache.py b/synapse/util/caches/snapshot_cache.py
index d03678b8c8..8318db8d2c 100644
--- a/synapse/util/caches/snapshot_cache.py
+++ b/synapse/util/caches/snapshot_cache.py
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from synapse.util.async import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred
 
 
 class SnapshotCache(object):
diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py
index 8dcae50b39..a0c2d37610 100644
--- a/synapse/util/logcontext.py
+++ b/synapse/util/logcontext.py
@@ -385,7 +385,13 @@ class LoggingContextFilter(logging.Filter):
         context = LoggingContext.current_context()
         for key, value in self.defaults.items():
             setattr(record, key, value)
-        context.copy_to(record)
+
+        # context should never be None, but if it somehow ends up being None,
+        # we can end up in a death spiral of infinite loops, so let's check,
+        # for robustness' sake.
+        if context is not None:
+            context.copy_to(record)
+
         return True
 
 
@@ -396,7 +402,9 @@ class PreserveLoggingContext(object):
 
     __slots__ = ["current_context", "new_context", "has_parent"]
 
-    def __init__(self, new_context=LoggingContext.sentinel):
+    def __init__(self, new_context=None):
+        if new_context is None:
+            new_context = LoggingContext.sentinel
         self.new_context = new_context
 
     def __enter__(self):
@@ -526,7 +534,7 @@ _to_ignore = [
     "synapse.util.logcontext",
     "synapse.http.server",
     "synapse.storage._base",
-    "synapse.util.async",
+    "synapse.util.async_helpers",
 ]
 
 
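[Editorial note: the None-guard added to LoggingContextFilter above can be shown in isolation. The following is a hedged, self-contained analogue, not synapse's class: a logging.Filter that copies attributes from a "current context" onto each record, but tolerates the context being absent instead of recursing into more logging.]

    import logging

    class ContextFilter(logging.Filter):
        def __init__(self, get_context, **defaults):
            super(ContextFilter, self).__init__()
            self.get_context = get_context
            self.defaults = defaults

        def filter(self, record):
            for key, value in self.defaults.items():
                setattr(record, key, value)
            context = self.get_context()
            if context is not None:  # never assume a context is set
                for key, value in context.items():
                    setattr(record, key, value)
            return True

    logging.basicConfig(format="%(request)s %(message)s")
    log = logging.getLogger("sketch")
    log.addFilter(ContextFilter(lambda: {"request": "GET-42"}, request="-"))
    log.warning("hello")  # -> "GET-42 hello"
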
diff --git a/synapse/util/logutils.py b/synapse/util/logutils.py
index 62a00189cc..ef31458226 100644
--- a/synapse/util/logutils.py
+++ b/synapse/util/logutils.py
@@ -20,6 +20,8 @@ import time
 from functools import wraps
 from inspect import getcallargs
 
+from six import PY3
+
 _TIME_FUNC_ID = 0
 
 
@@ -28,8 +30,12 @@ def _log_debug_as_f(f, msg, msg_args):
     logger = logging.getLogger(name)
 
     if logger.isEnabledFor(logging.DEBUG):
-        lineno = f.func_code.co_firstlineno
-        pathname = f.func_code.co_filename
+        if PY3:
+            lineno = f.__code__.co_firstlineno
+            pathname = f.__code__.co_filename
+        else:
+            lineno = f.func_code.co_firstlineno
+            pathname = f.func_code.co_filename
 
         record = logging.LogRecord(
             name=name,
diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py
index 43d9db67ec..6f318c6a29 100644
--- a/synapse/util/stringutils.py
+++ b/synapse/util/stringutils.py
@@ -16,6 +16,7 @@
 import random
 import string
 
+from six import PY3
 from six.moves import range
 
 _string_with_symbols = (
@@ -34,6 +35,17 @@ def random_string_with_symbols(length):
 
 
 def is_ascii(s):
+
+    if PY3:
+        if isinstance(s, bytes):
+            try:
+                s.decode('ascii').encode('ascii')
+            except UnicodeDecodeError:
+                return False
+            except UnicodeEncodeError:
+                return False
+            return True
+
     try:
         s.encode("ascii")
     except UnicodeEncodeError:
@@ -49,6 +61,9 @@ def to_ascii(s):
 
     If given None then will return None.
     """
+    if PY3:
+        return s
+
     if s is None:
         return None
 
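[Editorial note: the PY3 branches above exist because bytes and text need opposite checks: on Python 3 a bytes value must decode as ASCII, while text must encode. A unified sketch of the same check, illustrative rather than the synapse function:]

    def check_ascii(s):
        if isinstance(s, bytes):
            try:
                s.decode('ascii')
            except UnicodeDecodeError:
                return False
            return True
        try:
            s.encode('ascii')
        except UnicodeEncodeError:
            return False
        return True

    print(check_ascii(b"abc"), check_ascii(u"caf\xe9"))  # -> True False
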
diff --git a/synapse/util/versionstring.py b/synapse/util/versionstring.py
index 1fbcd41115..3baba3225a 100644
--- a/synapse/util/versionstring.py
+++ b/synapse/util/versionstring.py
@@ -30,7 +30,7 @@ def get_version_string(module):
                 ['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
                 stderr=null,
                 cwd=cwd,
-            ).strip()
+            ).strip().decode('ascii')
             git_branch = "b=" + git_branch
         except subprocess.CalledProcessError:
             git_branch = ""
@@ -40,7 +40,7 @@ def get_version_string(module):
                 ['git', 'describe', '--exact-match'],
                 stderr=null,
                 cwd=cwd,
-            ).strip()
+            ).strip().decode('ascii')
             git_tag = "t=" + git_tag
         except subprocess.CalledProcessError:
             git_tag = ""
@@ -50,7 +50,7 @@ def get_version_string(module):
                 ['git', 'rev-parse', '--short', 'HEAD'],
                 stderr=null,
                 cwd=cwd,
-            ).strip()
+            ).strip().decode('ascii')
         except subprocess.CalledProcessError:
             git_commit = ""
 
@@ -60,7 +60,7 @@ def get_version_string(module):
                 ['git', 'describe', '--dirty=' + dirty_string],
                 stderr=null,
                 cwd=cwd,
-            ).strip().endswith(dirty_string)
+            ).strip().decode('ascii').endswith(dirty_string)
 
             git_dirty = "dirty" if is_dirty else ""
         except subprocess.CalledProcessError:
@@ -77,8 +77,8 @@ def get_version_string(module):
                 "%s (%s)" % (
                     module.__version__, git_version,
                 )
-            ).encode("ascii")
+            )
     except Exception as e:
         logger.info("Failed to check for git repository: %s", e)
 
-    return module.__version__.encode("ascii")
+    return module.__version__
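
[Editorial note: the .decode('ascii') calls above are needed because, on Python 3, subprocess.check_output returns bytes rather than str, so anything interpolated into the (now text) version string must be decoded first. A minimal demonstration, requiring git on the PATH:]

    import subprocess

    out = subprocess.check_output(['git', '--version'])
    print(type(out))                    # <class 'bytes'> on Python 3
    print(out.strip().decode('ascii'))  # e.g. 'git version 2.20.1'
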
diff --git a/tests/__init__.py b/tests/__init__.py
index 24006c949e..9d9ca22829 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -15,4 +15,7 @@
 
 from twisted.trial import util
 
+from tests import utils
+
 util.DEFAULT_TIMEOUT_DURATION = 10
+utils.setupdb()
diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py
index a82d737e71..ed960090c4 100644
--- a/tests/api/test_auth.py
+++ b/tests/api/test_auth.py
@@ -21,7 +21,7 @@ from twisted.internet import defer
 
 import synapse.handlers.auth
 from synapse.api.auth import Auth
-from synapse.api.errors import AuthError
+from synapse.api.errors import AuthError, Codes, ResourceLimitError
 from synapse.types import UserID
 
 from tests import unittest
@@ -34,13 +34,12 @@ class TestHandlers(object):
 
 
 class AuthTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def setUp(self):
         self.state_handler = Mock()
         self.store = Mock()
 
-        self.hs = yield setup_test_homeserver(handlers=None)
+        self.hs = yield setup_test_homeserver(self.addCleanup, handlers=None)
         self.hs.get_datastore = Mock(return_value=self.store)
         self.hs.handlers = TestHandlers(self.hs)
         self.auth = Auth(self.hs)
@@ -53,11 +52,7 @@ class AuthTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_get_user_by_req_user_valid_token(self):
-        user_info = {
-            "name": self.test_user,
-            "token_id": "ditto",
-            "device_id": "device",
-        }
+        user_info = {"name": self.test_user, "token_id": "ditto", "device_id": "device"}
         self.store.get_user_by_access_token = Mock(return_value=user_info)
 
         request = Mock(args={})
@@ -76,10 +71,7 @@ class AuthTestCase(unittest.TestCase):
         self.failureResultOf(d, AuthError)
 
     def test_get_user_by_req_user_missing_token(self):
-        user_info = {
-            "name": self.test_user,
-            "token_id": "ditto",
-        }
+        user_info = {"name": self.test_user, "token_id": "ditto"}
         self.store.get_user_by_access_token = Mock(return_value=user_info)
 
         request = Mock(args={})
@@ -90,8 +82,7 @@ class AuthTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def test_get_user_by_req_appservice_valid_token(self):
         app_service = Mock(
-            token="foobar", url="a_url", sender=self.test_user,
-            ip_range_whitelist=None,
+            token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None
         )
         self.store.get_app_service_by_token = Mock(return_value=app_service)
         self.store.get_user_by_access_token = Mock(return_value=None)
@@ -106,8 +97,11 @@ class AuthTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def test_get_user_by_req_appservice_valid_token_good_ip(self):
         from netaddr import IPSet
+
         app_service = Mock(
-            token="foobar", url="a_url", sender=self.test_user,
+            token="foobar",
+            url="a_url",
+            sender=self.test_user,
             ip_range_whitelist=IPSet(["192.168/16"]),
         )
         self.store.get_app_service_by_token = Mock(return_value=app_service)
@@ -122,8 +116,11 @@ class AuthTestCase(unittest.TestCase):
 
     def test_get_user_by_req_appservice_valid_token_bad_ip(self):
         from netaddr import IPSet
+
         app_service = Mock(
-            token="foobar", url="a_url", sender=self.test_user,
+            token="foobar",
+            url="a_url",
+            sender=self.test_user,
             ip_range_whitelist=IPSet(["192.168/16"]),
         )
         self.store.get_app_service_by_token = Mock(return_value=app_service)
@@ -160,8 +157,7 @@ class AuthTestCase(unittest.TestCase):
     def test_get_user_by_req_appservice_valid_token_valid_user_id(self):
         masquerading_user_id = b"@doppelganger:matrix.org"
         app_service = Mock(
-            token="foobar", url="a_url", sender=self.test_user,
-            ip_range_whitelist=None,
+            token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None
         )
         app_service.is_interested_in_user = Mock(return_value=True)
         self.store.get_app_service_by_token = Mock(return_value=app_service)
@@ -174,15 +170,13 @@ class AuthTestCase(unittest.TestCase):
         request.requestHeaders.getRawHeaders = mock_getRawHeaders()
         requester = yield self.auth.get_user_by_req(request)
         self.assertEquals(
-            requester.user.to_string(),
-            masquerading_user_id.decode('utf8')
+            requester.user.to_string(), masquerading_user_id.decode('utf8')
         )
 
     def test_get_user_by_req_appservice_valid_token_bad_user_id(self):
         masquerading_user_id = b"@doppelganger:matrix.org"
         app_service = Mock(
-            token="foobar", url="a_url", sender=self.test_user,
-            ip_range_whitelist=None,
+            token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None
         )
         app_service.is_interested_in_user = Mock(return_value=False)
         self.store.get_app_service_by_token = Mock(return_value=app_service)
@@ -201,17 +195,15 @@ class AuthTestCase(unittest.TestCase):
         # TODO(danielwh): Remove this mock when we remove the
         # get_user_by_access_token fallback.
         self.store.get_user_by_access_token = Mock(
-            return_value={
-                "name": "@baldrick:matrix.org",
-                "device_id": "device",
-            }
+            return_value={"name": "@baldrick:matrix.org", "device_id": "device"}
         )
 
         user_id = "@baldrick:matrix.org"
         macaroon = pymacaroons.Macaroon(
             location=self.hs.config.server_name,
             identifier="key",
-            key=self.hs.config.macaroon_secret_key)
+            key=self.hs.config.macaroon_secret_key,
+        )
         macaroon.add_first_party_caveat("gen = 1")
         macaroon.add_first_party_caveat("type = access")
         macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
@@ -225,15 +217,14 @@ class AuthTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_get_guest_user_from_macaroon(self):
-        self.store.get_user_by_id = Mock(return_value={
-            "is_guest": True,
-        })
+        self.store.get_user_by_id = Mock(return_value={"is_guest": True})
 
         user_id = "@baldrick:matrix.org"
         macaroon = pymacaroons.Macaroon(
             location=self.hs.config.server_name,
             identifier="key",
-            key=self.hs.config.macaroon_secret_key)
+            key=self.hs.config.macaroon_secret_key,
+        )
         macaroon.add_first_party_caveat("gen = 1")
         macaroon.add_first_party_caveat("type = access")
         macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
@@ -257,7 +248,8 @@ class AuthTestCase(unittest.TestCase):
         macaroon = pymacaroons.Macaroon(
             location=self.hs.config.server_name,
             identifier="key",
-            key=self.hs.config.macaroon_secret_key)
+            key=self.hs.config.macaroon_secret_key,
+        )
         macaroon.add_first_party_caveat("gen = 1")
         macaroon.add_first_party_caveat("type = access")
         macaroon.add_first_party_caveat("user_id = %s" % (user,))
@@ -277,7 +269,8 @@ class AuthTestCase(unittest.TestCase):
         macaroon = pymacaroons.Macaroon(
             location=self.hs.config.server_name,
             identifier="key",
-            key=self.hs.config.macaroon_secret_key)
+            key=self.hs.config.macaroon_secret_key,
+        )
         macaroon.add_first_party_caveat("gen = 1")
         macaroon.add_first_party_caveat("type = access")
 
@@ -298,7 +291,8 @@ class AuthTestCase(unittest.TestCase):
         macaroon = pymacaroons.Macaroon(
             location=self.hs.config.server_name,
             identifier="key",
-            key=self.hs.config.macaroon_secret_key + "wrong")
+            key=self.hs.config.macaroon_secret_key + "wrong",
+        )
         macaroon.add_first_party_caveat("gen = 1")
         macaroon.add_first_party_caveat("type = access")
         macaroon.add_first_party_caveat("user_id = %s" % (user,))
@@ -320,7 +314,8 @@ class AuthTestCase(unittest.TestCase):
         macaroon = pymacaroons.Macaroon(
             location=self.hs.config.server_name,
             identifier="key",
-            key=self.hs.config.macaroon_secret_key)
+            key=self.hs.config.macaroon_secret_key,
+        )
         macaroon.add_first_party_caveat("gen = 1")
         macaroon.add_first_party_caveat("type = access")
         macaroon.add_first_party_caveat("user_id = %s" % (user,))
@@ -347,7 +342,8 @@ class AuthTestCase(unittest.TestCase):
         macaroon = pymacaroons.Macaroon(
             location=self.hs.config.server_name,
             identifier="key",
-            key=self.hs.config.macaroon_secret_key)
+            key=self.hs.config.macaroon_secret_key,
+        )
         macaroon.add_first_party_caveat("gen = 1")
         macaroon.add_first_party_caveat("type = access")
         macaroon.add_first_party_caveat("user_id = %s" % (user,))
@@ -380,7 +376,8 @@ class AuthTestCase(unittest.TestCase):
         macaroon = pymacaroons.Macaroon(
             location=self.hs.config.server_name,
             identifier="key",
-            key=self.hs.config.macaroon_secret_key)
+            key=self.hs.config.macaroon_secret_key,
+        )
         macaroon.add_first_party_caveat("gen = 1")
         macaroon.add_first_party_caveat("type = access")
         macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
@@ -401,9 +398,7 @@ class AuthTestCase(unittest.TestCase):
         token = yield self.hs.handlers.auth_handler.issue_access_token(
             USER_ID, "DEVICE"
         )
-        self.store.add_access_token_to_user.assert_called_with(
-            USER_ID, token, "DEVICE"
-        )
+        self.store.add_access_token_to_user.assert_called_with(USER_ID, token, "DEVICE")
 
         def get_user(tok):
             if token != tok:
@@ -414,10 +409,9 @@ class AuthTestCase(unittest.TestCase):
                 "token_id": 1234,
                 "device_id": "DEVICE",
             }
+
         self.store.get_user_by_access_token = get_user
-        self.store.get_user_by_id = Mock(return_value={
-            "is_guest": False,
-        })
+        self.store.get_user_by_id = Mock(return_value={"is_guest": False})
 
         # check the token works
         request = Mock(args={})
@@ -444,3 +438,49 @@ class AuthTestCase(unittest.TestCase):
         self.assertEqual("Guest access token used for regular user", cm.exception.msg)
 
         self.store.get_user_by_id.assert_called_with(USER_ID)
+
+    @defer.inlineCallbacks
+    def test_blocking_mau(self):
+        self.hs.config.limit_usage_by_mau = False
+        self.hs.config.max_mau_value = 50
+        lots_of_users = 100
+        small_number_of_users = 1
+
+        # Ensure no error thrown
+        yield self.auth.check_auth_blocking()
+
+        self.hs.config.limit_usage_by_mau = True
+
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(lots_of_users)
+        )
+
+        with self.assertRaises(ResourceLimitError) as e:
+            yield self.auth.check_auth_blocking()
+        self.assertEquals(e.exception.admin_uri, self.hs.config.admin_uri)
+        self.assertEquals(e.exception.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
+        self.assertEquals(e.exception.code, 403)
+
+        # Ensure does not throw an error
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(small_number_of_users)
+        )
+        yield self.auth.check_auth_blocking()
+
+    @defer.inlineCallbacks
+    def test_hs_disabled(self):
+        self.hs.config.hs_disabled = True
+        self.hs.config.hs_disabled_message = "Reason for being disabled"
+        with self.assertRaises(ResourceLimitError) as e:
+            yield self.auth.check_auth_blocking()
+        self.assertEquals(e.exception.admin_uri, self.hs.config.admin_uri)
+        self.assertEquals(e.exception.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
+        self.assertEquals(e.exception.code, 403)
+
+    @defer.inlineCallbacks
+    def test_server_notices_mxid_special_cased(self):
+        self.hs.config.hs_disabled = True
+        user = "@user:server"
+        self.hs.config.server_notices_mxid = user
+        self.hs.config.hs_disabled_message = "Reason for being disabled"
+        yield self.auth.check_auth_blocking(user)
diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py
index 836a23fb54..48b2d3d663 100644
--- a/tests/api/test_filtering.py
+++ b/tests/api/test_filtering.py
@@ -38,7 +38,6 @@ def MockEvent(**kwargs):
 
 
 class FilteringTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def setUp(self):
         self.mock_federation_resource = MockHttpResource()
@@ -47,6 +46,7 @@ class FilteringTestCase(unittest.TestCase):
         self.mock_http_client.put_json = DeferredMockCallable()
 
         hs = yield setup_test_homeserver(
+            self.addCleanup,
             handlers=None,
             http_client=self.mock_http_client,
             keyring=Mock(),
@@ -64,7 +64,7 @@ class FilteringTestCase(unittest.TestCase):
             {"room": {"timeline": {"limit": 0}, "state": {"not_bars": ["*"]}}},
             {"event_format": "other"},
             {"room": {"not_rooms": ["#foo:pik-test"]}},
-            {"presence": {"senders": ["@bar;pik.test.com"]}}
+            {"presence": {"senders": ["@bar;pik.test.com"]}},
         ]
         for filter in invalid_filters:
             with self.assertRaises(SynapseError) as check_filter_error:
@@ -81,34 +81,34 @@ class FilteringTestCase(unittest.TestCase):
                     "include_leave": False,
                     "rooms": ["!dee:pik-test"],
                     "not_rooms": ["!gee:pik-test"],
-                    "account_data": {"limit": 0, "types": ["*"]}
+                    "account_data": {"limit": 0, "types": ["*"]},
                 }
             },
             {
                 "room": {
                     "state": {
                         "types": ["m.room.*"],
-                        "not_rooms": ["!726s6s6q:example.com"]
+                        "not_rooms": ["!726s6s6q:example.com"],
                     },
                     "timeline": {
                         "limit": 10,
                         "types": ["m.room.message"],
                         "not_rooms": ["!726s6s6q:example.com"],
-                        "not_senders": ["@spam:example.com"]
+                        "not_senders": ["@spam:example.com"],
                     },
                     "ephemeral": {
                         "types": ["m.receipt", "m.typing"],
                         "not_rooms": ["!726s6s6q:example.com"],
-                        "not_senders": ["@spam:example.com"]
-                    }
+                        "not_senders": ["@spam:example.com"],
+                    },
                 },
                 "presence": {
                     "types": ["m.presence"],
-                    "not_senders": ["@alice:example.com"]
+                    "not_senders": ["@alice:example.com"],
                 },
                 "event_format": "client",
-                "event_fields": ["type", "content", "sender"]
-            }
+                "event_fields": ["type", "content", "sender"],
+            },
         ]
         for filter in valid_filters:
             try:
@@ -121,229 +121,131 @@ class FilteringTestCase(unittest.TestCase):
         pass
 
     def test_definition_types_works_with_literals(self):
-        definition = {
-            "types": ["m.room.message", "org.matrix.foo.bar"]
-        }
-        event = MockEvent(
-            sender="@foo:bar",
-            type="m.room.message",
-            room_id="!foo:bar"
-        )
+        definition = {"types": ["m.room.message", "org.matrix.foo.bar"]}
+        event = MockEvent(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
 
-        self.assertTrue(
-            Filter(definition).check(event)
-        )
+        self.assertTrue(Filter(definition).check(event))
 
     def test_definition_types_works_with_wildcards(self):
-        definition = {
-            "types": ["m.*", "org.matrix.foo.bar"]
-        }
-        event = MockEvent(
-            sender="@foo:bar",
-            type="m.room.message",
-            room_id="!foo:bar"
-        )
-        self.assertTrue(
-            Filter(definition).check(event)
-        )
+        definition = {"types": ["m.*", "org.matrix.foo.bar"]}
+        event = MockEvent(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
+        self.assertTrue(Filter(definition).check(event))
 
     def test_definition_types_works_with_unknowns(self):
-        definition = {
-            "types": ["m.room.message", "org.matrix.foo.bar"]
-        }
+        definition = {"types": ["m.room.message", "org.matrix.foo.bar"]}
         event = MockEvent(
             sender="@foo:bar",
             type="now.for.something.completely.different",
-            room_id="!foo:bar"
-        )
-        self.assertFalse(
-            Filter(definition).check(event)
+            room_id="!foo:bar",
         )
+        self.assertFalse(Filter(definition).check(event))
 
     def test_definition_not_types_works_with_literals(self):
-        definition = {
-            "not_types": ["m.room.message", "org.matrix.foo.bar"]
-        }
-        event = MockEvent(
-            sender="@foo:bar",
-            type="m.room.message",
-            room_id="!foo:bar"
-        )
-        self.assertFalse(
-            Filter(definition).check(event)
-        )
+        definition = {"not_types": ["m.room.message", "org.matrix.foo.bar"]}
+        event = MockEvent(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
+        self.assertFalse(Filter(definition).check(event))
 
     def test_definition_not_types_works_with_wildcards(self):
-        definition = {
-            "not_types": ["m.room.message", "org.matrix.*"]
-        }
+        definition = {"not_types": ["m.room.message", "org.matrix.*"]}
         event = MockEvent(
-            sender="@foo:bar",
-            type="org.matrix.custom.event",
-            room_id="!foo:bar"
-        )
-        self.assertFalse(
-            Filter(definition).check(event)
+            sender="@foo:bar", type="org.matrix.custom.event", room_id="!foo:bar"
         )
+        self.assertFalse(Filter(definition).check(event))
 
     def test_definition_not_types_works_with_unknowns(self):
-        definition = {
-            "not_types": ["m.*", "org.*"]
-        }
-        event = MockEvent(
-            sender="@foo:bar",
-            type="com.nom.nom.nom",
-            room_id="!foo:bar"
-        )
-        self.assertTrue(
-            Filter(definition).check(event)
-        )
+        definition = {"not_types": ["m.*", "org.*"]}
+        event = MockEvent(sender="@foo:bar", type="com.nom.nom.nom", room_id="!foo:bar")
+        self.assertTrue(Filter(definition).check(event))
 
     def test_definition_not_types_takes_priority_over_types(self):
         definition = {
             "not_types": ["m.*", "org.*"],
-            "types": ["m.room.message", "m.room.topic"]
+            "types": ["m.room.message", "m.room.topic"],
         }
-        event = MockEvent(
-            sender="@foo:bar",
-            type="m.room.topic",
-            room_id="!foo:bar"
-        )
-        self.assertFalse(
-            Filter(definition).check(event)
-        )
+        event = MockEvent(sender="@foo:bar", type="m.room.topic", room_id="!foo:bar")
+        self.assertFalse(Filter(definition).check(event))
 
     def test_definition_senders_works_with_literals(self):
-        definition = {
-            "senders": ["@flibble:wibble"]
-        }
+        definition = {"senders": ["@flibble:wibble"]}
         event = MockEvent(
-            sender="@flibble:wibble",
-            type="com.nom.nom.nom",
-            room_id="!foo:bar"
-        )
-        self.assertTrue(
-            Filter(definition).check(event)
+            sender="@flibble:wibble", type="com.nom.nom.nom", room_id="!foo:bar"
         )
+        self.assertTrue(Filter(definition).check(event))
 
     def test_definition_senders_works_with_unknowns(self):
-        definition = {
-            "senders": ["@flibble:wibble"]
-        }
+        definition = {"senders": ["@flibble:wibble"]}
         event = MockEvent(
-            sender="@challenger:appears",
-            type="com.nom.nom.nom",
-            room_id="!foo:bar"
-        )
-        self.assertFalse(
-            Filter(definition).check(event)
+            sender="@challenger:appears", type="com.nom.nom.nom", room_id="!foo:bar"
         )
+        self.assertFalse(Filter(definition).check(event))
 
     def test_definition_not_senders_works_with_literals(self):
-        definition = {
-            "not_senders": ["@flibble:wibble"]
-        }
+        definition = {"not_senders": ["@flibble:wibble"]}
         event = MockEvent(
-            sender="@flibble:wibble",
-            type="com.nom.nom.nom",
-            room_id="!foo:bar"
-        )
-        self.assertFalse(
-            Filter(definition).check(event)
+            sender="@flibble:wibble", type="com.nom.nom.nom", room_id="!foo:bar"
         )
+        self.assertFalse(Filter(definition).check(event))
 
     def test_definition_not_senders_works_with_unknowns(self):
-        definition = {
-            "not_senders": ["@flibble:wibble"]
-        }
+        definition = {"not_senders": ["@flibble:wibble"]}
         event = MockEvent(
-            sender="@challenger:appears",
-            type="com.nom.nom.nom",
-            room_id="!foo:bar"
-        )
-        self.assertTrue(
-            Filter(definition).check(event)
+            sender="@challenger:appears", type="com.nom.nom.nom", room_id="!foo:bar"
         )
+        self.assertTrue(Filter(definition).check(event))
 
     def test_definition_not_senders_takes_priority_over_senders(self):
         definition = {
             "not_senders": ["@misspiggy:muppets"],
-            "senders": ["@kermit:muppets", "@misspiggy:muppets"]
+            "senders": ["@kermit:muppets", "@misspiggy:muppets"],
         }
         event = MockEvent(
-            sender="@misspiggy:muppets",
-            type="m.room.topic",
-            room_id="!foo:bar"
-        )
-        self.assertFalse(
-            Filter(definition).check(event)
+            sender="@misspiggy:muppets", type="m.room.topic", room_id="!foo:bar"
         )
+        self.assertFalse(Filter(definition).check(event))
 
     def test_definition_rooms_works_with_literals(self):
-        definition = {
-            "rooms": ["!secretbase:unknown"]
-        }
+        definition = {"rooms": ["!secretbase:unknown"]}
         event = MockEvent(
-            sender="@foo:bar",
-            type="m.room.message",
-            room_id="!secretbase:unknown"
-        )
-        self.assertTrue(
-            Filter(definition).check(event)
+            sender="@foo:bar", type="m.room.message", room_id="!secretbase:unknown"
         )
+        self.assertTrue(Filter(definition).check(event))
 
     def test_definition_rooms_works_with_unknowns(self):
-        definition = {
-            "rooms": ["!secretbase:unknown"]
-        }
+        definition = {"rooms": ["!secretbase:unknown"]}
         event = MockEvent(
             sender="@foo:bar",
             type="m.room.message",
-            room_id="!anothersecretbase:unknown"
-        )
-        self.assertFalse(
-            Filter(definition).check(event)
+            room_id="!anothersecretbase:unknown",
         )
+        self.assertFalse(Filter(definition).check(event))
 
     def test_definition_not_rooms_works_with_literals(self):
-        definition = {
-            "not_rooms": ["!anothersecretbase:unknown"]
-        }
+        definition = {"not_rooms": ["!anothersecretbase:unknown"]}
         event = MockEvent(
             sender="@foo:bar",
             type="m.room.message",
-            room_id="!anothersecretbase:unknown"
-        )
-        self.assertFalse(
-            Filter(definition).check(event)
+            room_id="!anothersecretbase:unknown",
         )
+        self.assertFalse(Filter(definition).check(event))
 
     def test_definition_not_rooms_works_with_unknowns(self):
-        definition = {
-            "not_rooms": ["!secretbase:unknown"]
-        }
+        definition = {"not_rooms": ["!secretbase:unknown"]}
         event = MockEvent(
             sender="@foo:bar",
             type="m.room.message",
-            room_id="!anothersecretbase:unknown"
-        )
-        self.assertTrue(
-            Filter(definition).check(event)
+            room_id="!anothersecretbase:unknown",
         )
+        self.assertTrue(Filter(definition).check(event))
 
     def test_definition_not_rooms_takes_priority_over_rooms(self):
         definition = {
             "not_rooms": ["!secretbase:unknown"],
-            "rooms": ["!secretbase:unknown"]
+            "rooms": ["!secretbase:unknown"],
         }
         event = MockEvent(
-            sender="@foo:bar",
-            type="m.room.message",
-            room_id="!secretbase:unknown"
-        )
-        self.assertFalse(
-            Filter(definition).check(event)
+            sender="@foo:bar", type="m.room.message", room_id="!secretbase:unknown"
         )
+        self.assertFalse(Filter(definition).check(event))
 
     def test_definition_combined_event(self):
         definition = {
@@ -352,16 +254,14 @@ class FilteringTestCase(unittest.TestCase):
             "rooms": ["!stage:unknown"],
             "not_rooms": ["!piggyshouse:muppets"],
             "types": ["m.room.message", "muppets.kermit.*"],
-            "not_types": ["muppets.misspiggy.*"]
+            "not_types": ["muppets.misspiggy.*"],
         }
         event = MockEvent(
             sender="@kermit:muppets",  # yup
             type="m.room.message",  # yup
-            room_id="!stage:unknown"  # yup
-        )
-        self.assertTrue(
-            Filter(definition).check(event)
+            room_id="!stage:unknown",  # yup
         )
+        self.assertTrue(Filter(definition).check(event))
 
     def test_definition_combined_event_bad_sender(self):
         definition = {
@@ -370,16 +270,14 @@ class FilteringTestCase(unittest.TestCase):
             "rooms": ["!stage:unknown"],
             "not_rooms": ["!piggyshouse:muppets"],
             "types": ["m.room.message", "muppets.kermit.*"],
-            "not_types": ["muppets.misspiggy.*"]
+            "not_types": ["muppets.misspiggy.*"],
         }
         event = MockEvent(
             sender="@misspiggy:muppets",  # nope
             type="m.room.message",  # yup
-            room_id="!stage:unknown"  # yup
-        )
-        self.assertFalse(
-            Filter(definition).check(event)
+            room_id="!stage:unknown",  # yup
         )
+        self.assertFalse(Filter(definition).check(event))
 
     def test_definition_combined_event_bad_room(self):
         definition = {
@@ -388,16 +286,14 @@ class FilteringTestCase(unittest.TestCase):
             "rooms": ["!stage:unknown"],
             "not_rooms": ["!piggyshouse:muppets"],
             "types": ["m.room.message", "muppets.kermit.*"],
-            "not_types": ["muppets.misspiggy.*"]
+            "not_types": ["muppets.misspiggy.*"],
         }
         event = MockEvent(
             sender="@kermit:muppets",  # yup
             type="m.room.message",  # yup
-            room_id="!piggyshouse:muppets"  # nope
-        )
-        self.assertFalse(
-            Filter(definition).check(event)
+            room_id="!piggyshouse:muppets",  # nope
         )
+        self.assertFalse(Filter(definition).check(event))
 
     def test_definition_combined_event_bad_type(self):
         definition = {
@@ -406,37 +302,26 @@ class FilteringTestCase(unittest.TestCase):
             "rooms": ["!stage:unknown"],
             "not_rooms": ["!piggyshouse:muppets"],
             "types": ["m.room.message", "muppets.kermit.*"],
-            "not_types": ["muppets.misspiggy.*"]
+            "not_types": ["muppets.misspiggy.*"],
         }
         event = MockEvent(
             sender="@kermit:muppets",  # yup
             type="muppets.misspiggy.kisses",  # nope
-            room_id="!stage:unknown"  # yup
-        )
-        self.assertFalse(
-            Filter(definition).check(event)
+            room_id="!stage:unknown",  # yup
         )
+        self.assertFalse(Filter(definition).check(event))
 
     @defer.inlineCallbacks
     def test_filter_presence_match(self):
-        user_filter_json = {
-            "presence": {
-                "types": ["m.*"]
-            }
-        }
+        user_filter_json = {"presence": {"types": ["m.*"]}}
         filter_id = yield self.datastore.add_user_filter(
-            user_localpart=user_localpart,
-            user_filter=user_filter_json,
-        )
-        event = MockEvent(
-            sender="@foo:bar",
-            type="m.profile",
+            user_localpart=user_localpart, user_filter=user_filter_json
         )
+        event = MockEvent(sender="@foo:bar", type="m.profile")
         events = [event]
 
         user_filter = yield self.filtering.get_user_filter(
-            user_localpart=user_localpart,
-            filter_id=filter_id,
+            user_localpart=user_localpart, filter_id=filter_id
         )
 
         results = user_filter.filter_presence(events=events)
@@ -444,15 +329,10 @@ class FilteringTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_filter_presence_no_match(self):
-        user_filter_json = {
-            "presence": {
-                "types": ["m.*"]
-            }
-        }
+        user_filter_json = {"presence": {"types": ["m.*"]}}
 
         filter_id = yield self.datastore.add_user_filter(
-            user_localpart=user_localpart + "2",
-            user_filter=user_filter_json,
+            user_localpart=user_localpart + "2", user_filter=user_filter_json
         )
         event = MockEvent(
             event_id="$asdasd:localhost",
@@ -462,8 +342,7 @@ class FilteringTestCase(unittest.TestCase):
         events = [event]
 
         user_filter = yield self.filtering.get_user_filter(
-            user_localpart=user_localpart + "2",
-            filter_id=filter_id,
+            user_localpart=user_localpart + "2", filter_id=filter_id
         )
 
         results = user_filter.filter_presence(events=events)
@@ -471,27 +350,15 @@ class FilteringTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_filter_room_state_match(self):
-        user_filter_json = {
-            "room": {
-                "state": {
-                    "types": ["m.*"]
-                }
-            }
-        }
+        user_filter_json = {"room": {"state": {"types": ["m.*"]}}}
         filter_id = yield self.datastore.add_user_filter(
-            user_localpart=user_localpart,
-            user_filter=user_filter_json,
-        )
-        event = MockEvent(
-            sender="@foo:bar",
-            type="m.room.topic",
-            room_id="!foo:bar"
+            user_localpart=user_localpart, user_filter=user_filter_json
         )
+        event = MockEvent(sender="@foo:bar", type="m.room.topic", room_id="!foo:bar")
         events = [event]
 
         user_filter = yield self.filtering.get_user_filter(
-            user_localpart=user_localpart,
-            filter_id=filter_id,
+            user_localpart=user_localpart, filter_id=filter_id
         )
 
         results = user_filter.filter_room_state(events=events)
@@ -499,27 +366,17 @@ class FilteringTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_filter_room_state_no_match(self):
-        user_filter_json = {
-            "room": {
-                "state": {
-                    "types": ["m.*"]
-                }
-            }
-        }
+        user_filter_json = {"room": {"state": {"types": ["m.*"]}}}
         filter_id = yield self.datastore.add_user_filter(
-            user_localpart=user_localpart,
-            user_filter=user_filter_json,
+            user_localpart=user_localpart, user_filter=user_filter_json
         )
         event = MockEvent(
-            sender="@foo:bar",
-            type="org.matrix.custom.event",
-            room_id="!foo:bar"
+            sender="@foo:bar", type="org.matrix.custom.event", room_id="!foo:bar"
         )
         events = [event]
 
         user_filter = yield self.filtering.get_user_filter(
-            user_localpart=user_localpart,
-            filter_id=filter_id,
+            user_localpart=user_localpart, filter_id=filter_id
         )
 
         results = user_filter.filter_room_state(events)
@@ -543,45 +400,32 @@ class FilteringTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_add_filter(self):
-        user_filter_json = {
-            "room": {
-                "state": {
-                    "types": ["m.*"]
-                }
-            }
-        }
+        user_filter_json = {"room": {"state": {"types": ["m.*"]}}}
 
         filter_id = yield self.filtering.add_user_filter(
-            user_localpart=user_localpart,
-            user_filter=user_filter_json,
+            user_localpart=user_localpart, user_filter=user_filter_json
         )
 
         self.assertEquals(filter_id, 0)
-        self.assertEquals(user_filter_json, (
-            yield self.datastore.get_user_filter(
-                user_localpart=user_localpart,
-                filter_id=0,
-            )
-        ))
+        self.assertEquals(
+            user_filter_json,
+            (
+                yield self.datastore.get_user_filter(
+                    user_localpart=user_localpart, filter_id=0
+                )
+            ),
+        )
 
     @defer.inlineCallbacks
     def test_get_filter(self):
-        user_filter_json = {
-            "room": {
-                "state": {
-                    "types": ["m.*"]
-                }
-            }
-        }
+        user_filter_json = {"room": {"state": {"types": ["m.*"]}}}
 
         filter_id = yield self.datastore.add_user_filter(
-            user_localpart=user_localpart,
-            user_filter=user_filter_json,
+            user_localpart=user_localpart, user_filter=user_filter_json
         )
 
         filter = yield self.filtering.get_user_filter(
-            user_localpart=user_localpart,
-            filter_id=filter_id,
+            user_localpart=user_localpart, filter_id=filter_id
         )
 
         self.assertEquals(filter.get_filter_json(), user_filter_json)
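
The filtering tests above pin down the precedence rules for filter definitions: any `not_*` list rejects an event outright, even when the corresponding allow list matches it, and event types may use trailing-`*` globs. A minimal stand-alone sketch of those rules (illustrative only — the real logic is `synapse.api.filtering.Filter.check`; `matches` and `check` here are hypothetical helpers):

```python
import fnmatch


def matches(value, patterns):
    """True if value matches any glob-style pattern (e.g. "m.*")."""
    return any(fnmatch.fnmatchcase(value, p) for p in patterns)


def check(definition, sender, event_type, room_id=None):
    # Rejection ("not_*") lists are evaluated first and always win.
    if matches(event_type, definition.get("not_types", [])):
        return False
    if matches(sender, definition.get("not_senders", [])):
        return False
    if room_id is not None and matches(room_id, definition.get("not_rooms", [])):
        return False
    # Allow lists, when present, must match.
    if "types" in definition and not matches(event_type, definition["types"]):
        return False
    if "senders" in definition and not matches(sender, definition["senders"]):
        return False
    if "rooms" in definition and (
        room_id is None or not matches(room_id, definition["rooms"])
    ):
        return False
    return True


# not_rooms beats rooms, as in test_definition_not_rooms_takes_priority_over_rooms:
assert not check(
    {"not_rooms": ["!secretbase:unknown"], "rooms": ["!secretbase:unknown"]},
    sender="@foo:bar",
    event_type="m.room.message",
    room_id="!secretbase:unknown",
)
```
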
diff --git a/tests/api/test_ratelimiting.py b/tests/api/test_ratelimiting.py
index c45b59b36c..8933fe3b72 100644
--- a/tests/api/test_ratelimiting.py
+++ b/tests/api/test_ratelimiting.py
@@ -4,17 +4,16 @@ from tests import unittest
 
 
 class TestRatelimiter(unittest.TestCase):
-
     def test_allowed(self):
         limiter = Ratelimiter()
         allowed, time_allowed = limiter.send_message(
-            user_id="test_id", time_now_s=0, msg_rate_hz=0.1, burst_count=1,
+            user_id="test_id", time_now_s=0, msg_rate_hz=0.1, burst_count=1
         )
         self.assertTrue(allowed)
         self.assertEquals(10., time_allowed)
 
         allowed, time_allowed = limiter.send_message(
-            user_id="test_id", time_now_s=5, msg_rate_hz=0.1, burst_count=1,
+            user_id="test_id", time_now_s=5, msg_rate_hz=0.1, burst_count=1
         )
         self.assertFalse(allowed)
         self.assertEquals(10., time_allowed)
@@ -28,7 +27,7 @@ class TestRatelimiter(unittest.TestCase):
     def test_pruning(self):
         limiter = Ratelimiter()
         allowed, time_allowed = limiter.send_message(
-            user_id="test_id_1", time_now_s=0, msg_rate_hz=0.1, burst_count=1,
+            user_id="test_id_1", time_now_s=0, msg_rate_hz=0.1, burst_count=1
         )
 
         self.assertIn("test_id_1", limiter.message_counts)
diff --git a/tests/app/__init__.py b/tests/app/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/tests/app/__init__.py
diff --git a/tests/app/test_frontend_proxy.py b/tests/app/test_frontend_proxy.py
new file mode 100644
index 0000000000..76b5090fff
--- /dev/null
+++ b/tests/app/test_frontend_proxy.py
@@ -0,0 +1,88 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.app.frontend_proxy import FrontendProxyServer
+
+from tests.unittest import HomeserverTestCase
+
+
+class FrontendProxyTests(HomeserverTestCase):
+    def make_homeserver(self, reactor, clock):
+
+        hs = self.setup_test_homeserver(
+            http_client=None, homeserverToUse=FrontendProxyServer
+        )
+
+        return hs
+
+    def test_listen_http_with_presence_enabled(self):
+        """
+        When presence is on, the stub servlet will not register.
+        """
+        # Presence is on
+        self.hs.config.use_presence = True
+
+        config = {
+            "port": 8080,
+            "bind_addresses": ["0.0.0.0"],
+            "resources": [{"names": ["client"]}],
+        }
+
+        # Listen with the config
+        self.hs._listen_http(config)
+
+        # Grab the resource from the site that was told to listen
+        self.assertEqual(len(self.reactor.tcpServers), 1)
+        site = self.reactor.tcpServers[0][1]
+        self.resource = (
+            site.resource.children["_matrix"].children["client"].children["r0"]
+        )
+
+        request, channel = self.make_request("PUT", "presence/a/status")
+        self.render(request)
+
+        # 400 + unrecognised, because nothing is registered
+        self.assertEqual(channel.code, 400)
+        self.assertEqual(channel.json_body["errcode"], "M_UNRECOGNIZED")
+
+    def test_listen_http_with_presence_disabled(self):
+        """
+        When presence is disabled, the stub servlet will register.
+        """
+        # Presence is off
+        self.hs.config.use_presence = False
+
+        config = {
+            "port": 8080,
+            "bind_addresses": ["0.0.0.0"],
+            "resources": [{"names": ["client"]}],
+        }
+
+        # Listen with the config
+        self.hs._listen_http(config)
+
+        # Grab the resource from the site that was told to listen
+        self.assertEqual(len(self.reactor.tcpServers), 1)
+        site = self.reactor.tcpServers[0][1]
+        self.resource = (
+            site.resource.children["_matrix"].children["client"].children["r0"]
+        )
+
+        request, channel = self.make_request("PUT", "presence/a/status")
+        self.render(request)
+
+        # 401, because the stub servlet still checks authentication
+        self.assertEqual(channel.code, 401)
+        self.assertEqual(channel.json_body["errcode"], "M_MISSING_TOKEN")
diff --git a/tests/appservice/test_appservice.py b/tests/appservice/test_appservice.py
index 891e0cc973..4003869ed6 100644
--- a/tests/appservice/test_appservice.py
+++ b/tests/appservice/test_appservice.py
@@ -24,14 +24,10 @@ from tests import unittest
 
 
 def _regex(regex, exclusive=True):
-    return {
-        "regex": re.compile(regex),
-        "exclusive": exclusive
-    }
+    return {"regex": re.compile(regex), "exclusive": exclusive}
 
 
 class ApplicationServiceTestCase(unittest.TestCase):
-
     def setUp(self):
         self.service = ApplicationService(
             id="unique_identifier",
@@ -41,8 +37,8 @@ class ApplicationServiceTestCase(unittest.TestCase):
             namespaces={
                 ApplicationService.NS_USERS: [],
                 ApplicationService.NS_ROOMS: [],
-                ApplicationService.NS_ALIASES: []
-            }
+                ApplicationService.NS_ALIASES: [],
+            },
         )
         self.event = Mock(
             type="m.something", room_id="!foo:bar", sender="@someone:somewhere"
@@ -52,25 +48,19 @@ class ApplicationServiceTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_regex_user_id_prefix_match(self):
-        self.service.namespaces[ApplicationService.NS_USERS].append(
-            _regex("@irc_.*")
-        )
+        self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*"))
         self.event.sender = "@irc_foobar:matrix.org"
         self.assertTrue((yield self.service.is_interested(self.event)))
 
     @defer.inlineCallbacks
     def test_regex_user_id_prefix_no_match(self):
-        self.service.namespaces[ApplicationService.NS_USERS].append(
-            _regex("@irc_.*")
-        )
+        self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*"))
         self.event.sender = "@someone_else:matrix.org"
         self.assertFalse((yield self.service.is_interested(self.event)))
 
     @defer.inlineCallbacks
     def test_regex_room_member_is_checked(self):
-        self.service.namespaces[ApplicationService.NS_USERS].append(
-            _regex("@irc_.*")
-        )
+        self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*"))
         self.event.sender = "@someone_else:matrix.org"
         self.event.type = "m.room.member"
         self.event.state_key = "@irc_foobar:matrix.org"
@@ -98,60 +88,47 @@ class ApplicationServiceTestCase(unittest.TestCase):
             _regex("#irc_.*:matrix.org")
         )
         self.store.get_aliases_for_room.return_value = [
-            "#irc_foobar:matrix.org", "#athing:matrix.org"
+            "#irc_foobar:matrix.org",
+            "#athing:matrix.org",
         ]
         self.store.get_users_in_room.return_value = []
-        self.assertTrue((yield self.service.is_interested(
-            self.event, self.store
-        )))
+        self.assertTrue((yield self.service.is_interested(self.event, self.store)))
 
     def test_non_exclusive_alias(self):
         self.service.namespaces[ApplicationService.NS_ALIASES].append(
             _regex("#irc_.*:matrix.org", exclusive=False)
         )
-        self.assertFalse(self.service.is_exclusive_alias(
-            "#irc_foobar:matrix.org"
-        ))
+        self.assertFalse(self.service.is_exclusive_alias("#irc_foobar:matrix.org"))
 
     def test_non_exclusive_room(self):
         self.service.namespaces[ApplicationService.NS_ROOMS].append(
             _regex("!irc_.*:matrix.org", exclusive=False)
         )
-        self.assertFalse(self.service.is_exclusive_room(
-            "!irc_foobar:matrix.org"
-        ))
+        self.assertFalse(self.service.is_exclusive_room("!irc_foobar:matrix.org"))
 
     def test_non_exclusive_user(self):
         self.service.namespaces[ApplicationService.NS_USERS].append(
             _regex("@irc_.*:matrix.org", exclusive=False)
         )
-        self.assertFalse(self.service.is_exclusive_user(
-            "@irc_foobar:matrix.org"
-        ))
+        self.assertFalse(self.service.is_exclusive_user("@irc_foobar:matrix.org"))
 
     def test_exclusive_alias(self):
         self.service.namespaces[ApplicationService.NS_ALIASES].append(
             _regex("#irc_.*:matrix.org", exclusive=True)
         )
-        self.assertTrue(self.service.is_exclusive_alias(
-            "#irc_foobar:matrix.org"
-        ))
+        self.assertTrue(self.service.is_exclusive_alias("#irc_foobar:matrix.org"))
 
     def test_exclusive_user(self):
         self.service.namespaces[ApplicationService.NS_USERS].append(
             _regex("@irc_.*:matrix.org", exclusive=True)
         )
-        self.assertTrue(self.service.is_exclusive_user(
-            "@irc_foobar:matrix.org"
-        ))
+        self.assertTrue(self.service.is_exclusive_user("@irc_foobar:matrix.org"))
 
     def test_exclusive_room(self):
         self.service.namespaces[ApplicationService.NS_ROOMS].append(
             _regex("!irc_.*:matrix.org", exclusive=True)
         )
-        self.assertTrue(self.service.is_exclusive_room(
-            "!irc_foobar:matrix.org"
-        ))
+        self.assertTrue(self.service.is_exclusive_room("!irc_foobar:matrix.org"))
 
     @defer.inlineCallbacks
     def test_regex_alias_no_match(self):
@@ -159,47 +136,36 @@ class ApplicationServiceTestCase(unittest.TestCase):
             _regex("#irc_.*:matrix.org")
         )
         self.store.get_aliases_for_room.return_value = [
-            "#xmpp_foobar:matrix.org", "#athing:matrix.org"
+            "#xmpp_foobar:matrix.org",
+            "#athing:matrix.org",
         ]
         self.store.get_users_in_room.return_value = []
-        self.assertFalse((yield self.service.is_interested(
-            self.event, self.store
-        )))
+        self.assertFalse((yield self.service.is_interested(self.event, self.store)))
 
     @defer.inlineCallbacks
     def test_regex_multiple_matches(self):
         self.service.namespaces[ApplicationService.NS_ALIASES].append(
             _regex("#irc_.*:matrix.org")
         )
-        self.service.namespaces[ApplicationService.NS_USERS].append(
-            _regex("@irc_.*")
-        )
+        self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*"))
         self.event.sender = "@irc_foobar:matrix.org"
         self.store.get_aliases_for_room.return_value = ["#irc_barfoo:matrix.org"]
         self.store.get_users_in_room.return_value = []
-        self.assertTrue((yield self.service.is_interested(
-            self.event, self.store
-        )))
+        self.assertTrue((yield self.service.is_interested(self.event, self.store)))
 
     @defer.inlineCallbacks
     def test_interested_in_self(self):
         # make sure invites get through
         self.service.sender = "@appservice:name"
-        self.service.namespaces[ApplicationService.NS_USERS].append(
-            _regex("@irc_.*")
-        )
+        self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*"))
         self.event.type = "m.room.member"
-        self.event.content = {
-            "membership": "invite"
-        }
+        self.event.content = {"membership": "invite"}
         self.event.state_key = self.service.sender
         self.assertTrue((yield self.service.is_interested(self.event)))
 
     @defer.inlineCallbacks
     def test_member_list_match(self):
-        self.service.namespaces[ApplicationService.NS_USERS].append(
-            _regex("@irc_.*")
-        )
+        self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*"))
         self.store.get_users_in_room.return_value = [
             "@alice:here",
             "@irc_fo:here",  # AS user
@@ -208,6 +174,6 @@ class ApplicationServiceTestCase(unittest.TestCase):
         self.store.get_aliases_for_room.return_value = []
 
         self.event.sender = "@xmpp_foobar:matrix.org"
-        self.assertTrue((yield self.service.is_interested(
-            event=self.event, store=self.store
-        )))
+        self.assertTrue(
+            (yield self.service.is_interested(event=self.event, store=self.store))
+        )
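
The exclusivity tests above rely on each namespace entry pairing a compiled regex with an `exclusive` flag, as built by `_regex`: an entity ID is "exclusive" only if it matches an entry whose flag is set. A stand-alone model of just that check (`is_exclusive` is a hypothetical reduction of the `ApplicationService.is_exclusive_*` methods):

```python
import re


def _regex(regex, exclusive=True):
    return {"regex": re.compile(regex), "exclusive": exclusive}


def is_exclusive(namespace, entity_id):
    """An ID is exclusive iff some matching namespace entry is flagged so."""
    return any(
        entry["exclusive"] and entry["regex"].match(entity_id)
        for entry in namespace
    )


aliases = [_regex("#irc_.*:matrix.org", exclusive=False)]
assert not is_exclusive(aliases, "#irc_foobar:matrix.org")

aliases = [_regex("#irc_.*:matrix.org", exclusive=True)]
assert is_exclusive(aliases, "#irc_foobar:matrix.org")
```
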
diff --git a/tests/appservice/test_scheduler.py b/tests/appservice/test_scheduler.py
index b9f4863e9a..db9f86bdac 100644
--- a/tests/appservice/test_scheduler.py
+++ b/tests/appservice/test_scheduler.py
@@ -30,7 +30,6 @@ from ..utils import MockClock
 
 
 class ApplicationServiceSchedulerTransactionCtrlTestCase(unittest.TestCase):
-
     def setUp(self):
         self.clock = MockClock()
         self.store = Mock()
@@ -38,8 +37,10 @@ class ApplicationServiceSchedulerTransactionCtrlTestCase(unittest.TestCase):
         self.recoverer = Mock()
         self.recoverer_fn = Mock(return_value=self.recoverer)
         self.txnctrl = _TransactionController(
-            clock=self.clock, store=self.store, as_api=self.as_api,
-            recoverer_fn=self.recoverer_fn
+            clock=self.clock,
+            store=self.store,
+            as_api=self.as_api,
+            recoverer_fn=self.recoverer_fn,
         )
 
     def test_single_service_up_txn_sent(self):
@@ -54,9 +55,7 @@ class ApplicationServiceSchedulerTransactionCtrlTestCase(unittest.TestCase):
             return_value=defer.succeed(ApplicationServiceState.UP)
         )
         txn.send = Mock(return_value=defer.succeed(True))
-        self.store.create_appservice_txn = Mock(
-            return_value=defer.succeed(txn)
-        )
+        self.store.create_appservice_txn = Mock(return_value=defer.succeed(txn))
 
         # actual call
         self.txnctrl.send(service, events)
@@ -77,9 +76,7 @@ class ApplicationServiceSchedulerTransactionCtrlTestCase(unittest.TestCase):
         self.store.get_appservice_state = Mock(
             return_value=defer.succeed(ApplicationServiceState.DOWN)
         )
-        self.store.create_appservice_txn = Mock(
-            return_value=defer.succeed(txn)
-        )
+        self.store.create_appservice_txn = Mock(return_value=defer.succeed(txn))
 
         # actual call
         self.txnctrl.send(service, events)
@@ -104,9 +101,7 @@ class ApplicationServiceSchedulerTransactionCtrlTestCase(unittest.TestCase):
         )
         self.store.set_appservice_state = Mock(return_value=defer.succeed(True))
         txn.send = Mock(return_value=defer.succeed(False))  # fails to send
-        self.store.create_appservice_txn = Mock(
-            return_value=defer.succeed(txn)
-        )
+        self.store.create_appservice_txn = Mock(return_value=defer.succeed(txn))
 
         # actual call
         self.txnctrl.send(service, events)
@@ -124,7 +119,6 @@ class ApplicationServiceSchedulerTransactionCtrlTestCase(unittest.TestCase):
 
 
 class ApplicationServiceSchedulerRecovererTestCase(unittest.TestCase):
-
     def setUp(self):
         self.clock = MockClock()
         self.as_api = Mock()
@@ -146,6 +140,7 @@ class ApplicationServiceSchedulerRecovererTestCase(unittest.TestCase):
 
         def take_txn(*args, **kwargs):
             return defer.succeed(txns.pop(0))
+
         self.store.get_oldest_unsent_txn = Mock(side_effect=take_txn)
 
         self.recoverer.recover()
@@ -171,6 +166,7 @@ class ApplicationServiceSchedulerRecovererTestCase(unittest.TestCase):
                 return defer.succeed(txns.pop(0))
             else:
                 return defer.succeed(txn)
+
         self.store.get_oldest_unsent_txn = Mock(side_effect=take_txn)
 
         self.recoverer.recover()
@@ -197,7 +193,6 @@ class ApplicationServiceSchedulerRecovererTestCase(unittest.TestCase):
 
 
 class ApplicationServiceSchedulerQueuerTestCase(unittest.TestCase):
-
     def setUp(self):
         self.txn_ctrl = Mock()
         self.queuer = _ServiceQueuer(self.txn_ctrl, MockClock())
@@ -211,9 +206,7 @@ class ApplicationServiceSchedulerQueuerTestCase(unittest.TestCase):
 
     def test_send_single_event_with_queue(self):
         d = defer.Deferred()
-        self.txn_ctrl.send = Mock(
-            side_effect=lambda x, y: make_deferred_yieldable(d),
-        )
+        self.txn_ctrl.send = Mock(side_effect=lambda x, y: make_deferred_yieldable(d))
         service = Mock(id=4)
         event = Mock(event_id="first")
         event2 = Mock(event_id="second")
@@ -247,6 +240,7 @@ class ApplicationServiceSchedulerQueuerTestCase(unittest.TestCase):
 
         def do_send(x, y):
             return make_deferred_yieldable(send_return_list.pop(0))
+
         self.txn_ctrl.send = Mock(side_effect=do_send)
 
         # send events for different ASes and make sure they are sent
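
`test_send_single_event_with_queue` above fixes the queueing rule for `_ServiceQueuer`: while a transaction for a given appservice is outstanding, newly enqueued events are held back and flushed as a single batch once the send completes. Reduced to a toy (`MiniQueuer` is hypothetical; the real class drives this with Twisted deferreds):

```python
class MiniQueuer:
    def __init__(self, send):
        self.send = send        # callable(service_id, events)
        self.in_flight = set()  # service ids with a txn outstanding
        self.queues = {}        # service id -> pending events

    def enqueue(self, service_id, event):
        self.queues.setdefault(service_id, []).append(event)
        if service_id not in self.in_flight:
            self._send_next(service_id)

    def _send_next(self, service_id):
        events = self.queues.pop(service_id, [])
        if not events:
            self.in_flight.discard(service_id)
            return
        self.in_flight.add(service_id)
        self.send(service_id, events)

    def on_sent(self, service_id):
        self.in_flight.discard(service_id)
        self._send_next(service_id)


sent = []
q = MiniQueuer(lambda sid, evs: sent.append((sid, list(evs))))
q.enqueue(4, "first")   # sends immediately
q.enqueue(4, "second")  # queued while "first" is in flight
q.enqueue(4, "third")   # queued too
q.on_sent(4)            # completion flushes the queue as one batch
assert sent == [(4, ["first"]), (4, ["second", "third"])]
```
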
diff --git a/tests/config/test_generate.py b/tests/config/test_generate.py
index eb7f0ab12a..f88d28a19d 100644
--- a/tests/config/test_generate.py
+++ b/tests/config/test_generate.py
@@ -24,7 +24,6 @@ from tests import unittest
 
 
 class ConfigGenerationTestCase(unittest.TestCase):
-
     def setUp(self):
         self.dir = tempfile.mkdtemp()
         self.file = os.path.join(self.dir, "homeserver.yaml")
@@ -33,23 +32,30 @@ class ConfigGenerationTestCase(unittest.TestCase):
         shutil.rmtree(self.dir)
 
     def test_generate_config_generates_files(self):
-        HomeServerConfig.load_or_generate_config("", [
-            "--generate-config",
-            "-c", self.file,
-            "--report-stats=yes",
-            "-H", "lemurs.win"
-        ])
+        HomeServerConfig.load_or_generate_config(
+            "",
+            [
+                "--generate-config",
+                "-c",
+                self.file,
+                "--report-stats=yes",
+                "-H",
+                "lemurs.win",
+            ],
+        )
 
         self.assertSetEqual(
-            set([
-                "homeserver.yaml",
-                "lemurs.win.log.config",
-                "lemurs.win.signing.key",
-                "lemurs.win.tls.crt",
-                "lemurs.win.tls.dh",
-                "lemurs.win.tls.key",
-            ]),
-            set(os.listdir(self.dir))
+            set(
+                [
+                    "homeserver.yaml",
+                    "lemurs.win.log.config",
+                    "lemurs.win.signing.key",
+                    "lemurs.win.tls.crt",
+                    "lemurs.win.tls.dh",
+                    "lemurs.win.tls.key",
+                ]
+            ),
+            set(os.listdir(self.dir)),
         )
 
         self.assert_log_filename_is(
diff --git a/tests/config/test_load.py b/tests/config/test_load.py
index 5c422eff38..d5f1777093 100644
--- a/tests/config/test_load.py
+++ b/tests/config/test_load.py
@@ -24,7 +24,6 @@ from tests import unittest
 
 
 class ConfigLoadingTestCase(unittest.TestCase):
-
     def setUp(self):
         self.dir = tempfile.mkdtemp()
         print(self.dir)
@@ -43,15 +42,14 @@ class ConfigLoadingTestCase(unittest.TestCase):
     def test_generates_and_loads_macaroon_secret_key(self):
         self.generate_config()
 
-        with open(self.file,
-                  "r") as f:
+        with open(self.file, "r") as f:
             raw = yaml.load(f)
         self.assertIn("macaroon_secret_key", raw)
 
         config = HomeServerConfig.load_config("", ["-c", self.file])
         self.assertTrue(
             hasattr(config, "macaroon_secret_key"),
-            "Want config to have attr macaroon_secret_key"
+            "Want config to have attr macaroon_secret_key",
         )
         if len(config.macaroon_secret_key) < 5:
             self.fail(
@@ -62,7 +60,7 @@ class ConfigLoadingTestCase(unittest.TestCase):
         config = HomeServerConfig.load_or_generate_config("", ["-c", self.file])
         self.assertTrue(
             hasattr(config, "macaroon_secret_key"),
-            "Want config to have attr macaroon_secret_key"
+            "Want config to have attr macaroon_secret_key",
         )
         if len(config.macaroon_secret_key) < 5:
             self.fail(
@@ -80,10 +78,9 @@ class ConfigLoadingTestCase(unittest.TestCase):
 
     def test_disable_registration(self):
         self.generate_config()
-        self.add_lines_to_config([
-            "enable_registration: true",
-            "disable_registration: true",
-        ])
+        self.add_lines_to_config(
+            ["enable_registration: true", "disable_registration: true"]
+        )
         # Check that disable_registration clobbers enable_registration.
         config = HomeServerConfig.load_config("", ["-c", self.file])
         self.assertFalse(config.enable_registration)
@@ -92,18 +89,23 @@ class ConfigLoadingTestCase(unittest.TestCase):
         self.assertFalse(config.enable_registration)
 
         # Check that either config value is clobbered by the command line.
-        config = HomeServerConfig.load_or_generate_config("", [
-            "-c", self.file, "--enable-registration"
-        ])
+        config = HomeServerConfig.load_or_generate_config(
+            "", ["-c", self.file, "--enable-registration"]
+        )
         self.assertTrue(config.enable_registration)
 
     def generate_config(self):
-        HomeServerConfig.load_or_generate_config("", [
-            "--generate-config",
-            "-c", self.file,
-            "--report-stats=yes",
-            "-H", "lemurs.win"
-        ])
+        HomeServerConfig.load_or_generate_config(
+            "",
+            [
+                "--generate-config",
+                "-c",
+                self.file,
+                "--report-stats=yes",
+                "-H",
+                "lemurs.win",
+            ],
+        )
 
     def generate_config_and_remove_lines_containing(self, needle):
         self.generate_config()
diff --git a/tests/crypto/test_event_signing.py b/tests/crypto/test_event_signing.py
index cd11871b80..b2536c1e69 100644
--- a/tests/crypto/test_event_signing.py
+++ b/tests/crypto/test_event_signing.py
@@ -24,9 +24,7 @@ from tests import unittest
 
 # Perform these tests using given secret key so we get entirely deterministic
 # signatures output that we can test against.
-SIGNING_KEY_SEED = decode_base64(
-    "YJDBA9Xnr2sVqXD9Vj7XVUnmFZcZrlw8Md7kMW+3XA1"
-)
+SIGNING_KEY_SEED = decode_base64("YJDBA9Xnr2sVqXD9Vj7XVUnmFZcZrlw8Md7kMW+3XA1")
 
 KEY_ALG = "ed25519"
 KEY_VER = 1
@@ -36,7 +34,6 @@ HOSTNAME = "domain"
 
 
 class EventSigningTestCase(unittest.TestCase):
-
     def setUp(self):
         self.signing_key = nacl.signing.SigningKey(SIGNING_KEY_SEED)
         self.signing_key.alg = KEY_ALG
@@ -51,7 +48,7 @@ class EventSigningTestCase(unittest.TestCase):
                 'signatures': {},
                 'type': "X",
                 'unsigned': {'age_ts': 1000000},
-            },
+            }
         )
 
         add_hashes_and_signatures(builder, HOSTNAME, self.signing_key)
@@ -61,8 +58,7 @@ class EventSigningTestCase(unittest.TestCase):
         self.assertTrue(hasattr(event, 'hashes'))
         self.assertIn('sha256', event.hashes)
         self.assertEquals(
-            event.hashes['sha256'],
-            "6tJjLpXtggfke8UxFhAKg82QVkJzvKOVOOSjUDK4ZSI",
+            event.hashes['sha256'], "6tJjLpXtggfke8UxFhAKg82QVkJzvKOVOOSjUDK4ZSI"
         )
 
         self.assertTrue(hasattr(event, 'signatures'))
@@ -77,9 +73,7 @@ class EventSigningTestCase(unittest.TestCase):
     def test_sign_message(self):
         builder = EventBuilder(
             {
-                'content': {
-                    'body': "Here is the message content",
-                },
+                'content': {'body': "Here is the message content"},
                 'event_id': "$0:domain",
                 'origin': "domain",
                 'origin_server_ts': 1000000,
@@ -98,8 +92,7 @@ class EventSigningTestCase(unittest.TestCase):
         self.assertTrue(hasattr(event, 'hashes'))
         self.assertIn('sha256', event.hashes)
         self.assertEquals(
-            event.hashes['sha256'],
-            "onLKD1bGljeBWQhWZ1kaP9SorVmRQNdN5aM2JYU2n/g",
+            event.hashes['sha256'], "onLKD1bGljeBWQhWZ1kaP9SorVmRQNdN5aM2JYU2n/g"
         )
 
         self.assertTrue(hasattr(event, 'signatures'))
@@ -108,5 +101,5 @@ class EventSigningTestCase(unittest.TestCase):
         self.assertEquals(
             event.signatures[HOSTNAME][KEY_NAME],
             "Wm+VzmOUOz08Ds+0NTWb1d4CZrVsJSikkeRxh6aCcUw"
-            "u6pNC78FunoD7KNWzqFn241eYHYMGCA5McEiVPdhzBA"
+            "u6pNC78FunoD7KNWzqFn241eYHYMGCA5McEiVPdhzBA",
         )
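
The deterministic hashes asserted above come from hashing the event's canonical JSON. As a rough sketch of just the content-hash step (the real logic lives in `synapse.crypto.event_signing`, which also strips a few more transient fields and redacts the event before signing; `sketch_content_hash` is a hypothetical name, and `canonicaljson`/`unpaddedbase64` are the packages Synapse itself depends on):

```python
import hashlib

from canonicaljson import encode_canonical_json
from unpaddedbase64 import encode_base64


def sketch_content_hash(event_dict):
    # Drop the fields excluded from the content hash, then sha256 the
    # canonical (sorted-key, compact) JSON encoding of what remains.
    stripped = {
        k: v
        for k, v in event_dict.items()
        if k not in ("hashes", "signatures", "unsigned")
    }
    return encode_base64(hashlib.sha256(encode_canonical_json(stripped)).digest())


minimal = {
    'content': {},
    'event_id': "$0:domain",
    'origin': "domain",
    'origin_server_ts': 1000000,
    'signatures': {},
    'type': "X",
    'unsigned': {'age_ts': 1000000},
}
# Should reproduce "6tJjLpXtggfke8UxFhAKg82QVkJzvKOVOOSjUDK4ZSI" from the test.
print(sketch_content_hash(minimal))
```
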
diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py
index a9d37fe084..8299dc72c8 100644
--- a/tests/crypto/test_keyring.py
+++ b/tests/crypto/test_keyring.py
@@ -36,9 +36,7 @@ class MockPerspectiveServer(object):
 
     def get_verify_keys(self):
         vk = signedjson.key.get_verify_key(self.key)
-        return {
-            "%s:%s" % (vk.alg, vk.version): vk,
-        }
+        return {"%s:%s" % (vk.alg, vk.version): vk}
 
     def get_signed_key(self, server_name, verify_key):
         key_id = "%s:%s" % (verify_key.alg, verify_key.version)
@@ -47,10 +45,8 @@ class MockPerspectiveServer(object):
             "old_verify_keys": {},
             "valid_until_ts": time.time() * 1000 + 3600,
             "verify_keys": {
-                key_id: {
-                    "key": signedjson.key.encode_verify_key_base64(verify_key)
-                }
-            }
+                key_id: {"key": signedjson.key.encode_verify_key_base64(verify_key)}
+            },
         }
         signedjson.sign.sign_json(res, self.server_name, self.key)
         return res
@@ -62,18 +58,14 @@ class KeyringTestCase(unittest.TestCase):
         self.mock_perspective_server = MockPerspectiveServer()
         self.http_client = Mock()
         self.hs = yield utils.setup_test_homeserver(
-            handlers=None,
-            http_client=self.http_client,
+            self.addCleanup, handlers=None, http_client=self.http_client
         )
-        self.hs.config.perspectives = {
-            self.mock_perspective_server.server_name:
-                self.mock_perspective_server.get_verify_keys()
-        }
+        keys = self.mock_perspective_server.get_verify_keys()
+        self.hs.config.perspectives = {self.mock_perspective_server.server_name: keys}
 
     def check_context(self, _, expected):
         self.assertEquals(
-            getattr(LoggingContext.current_context(), "request", None),
-            expected
+            getattr(LoggingContext.current_context(), "request", None), expected
         )
 
     @defer.inlineCallbacks
@@ -89,8 +81,7 @@ class KeyringTestCase(unittest.TestCase):
             context_one.request = "one"
 
             wait_1_deferred = kr.wait_for_previous_lookups(
-                ["server1"],
-                {"server1": lookup_1_deferred},
+                ["server1"], {"server1": lookup_1_deferred}
             )
 
             # there were no previous lookups, so the deferred should be ready
@@ -105,8 +96,7 @@ class KeyringTestCase(unittest.TestCase):
             # set off another wait. It should block because the first lookup
             # hasn't yet completed.
             wait_2_deferred = kr.wait_for_previous_lookups(
-                ["server1"],
-                {"server1": lookup_2_deferred},
+                ["server1"], {"server1": lookup_2_deferred}
             )
             self.assertFalse(wait_2_deferred.called)
             # ... so we should have reset the LoggingContext.
@@ -132,21 +122,19 @@ class KeyringTestCase(unittest.TestCase):
         persp_resp = {
             "server_keys": [
                 self.mock_perspective_server.get_signed_key(
-                    "server10",
-                    signedjson.key.get_verify_key(key1)
-                ),
+                    "server10", signedjson.key.get_verify_key(key1)
+                )
             ]
         }
         persp_deferred = defer.Deferred()
 
         @defer.inlineCallbacks
         def get_perspectives(**kwargs):
-            self.assertEquals(
-                LoggingContext.current_context().request, "11",
-            )
+            self.assertEquals(LoggingContext.current_context().request, "11")
             with logcontext.PreserveLoggingContext():
                 yield persp_deferred
             defer.returnValue(persp_resp)
+
         self.http_client.post_json.side_effect = get_perspectives
 
         with LoggingContext("11") as context_11:
@@ -154,9 +142,7 @@ class KeyringTestCase(unittest.TestCase):
 
             # start off a first set of lookups
             res_deferreds = kr.verify_json_objects_for_server(
-                [("server10", json1),
-                 ("server11", {})
-                 ]
+                [("server10", json1), ("server11", {})]
             )
 
             # the unsigned json should be rejected pretty quickly
@@ -172,7 +158,7 @@ class KeyringTestCase(unittest.TestCase):
 
             # wait a tick for it to send the request to the perspectives server
             # (it first tries the datastore)
-            yield clock.sleep(1)   # XXX find out why this takes so long!
+            yield clock.sleep(1)  # XXX find out why this takes so long!
             self.http_client.post_json.assert_called_once()
 
             self.assertIs(LoggingContext.current_context(), context_11)
@@ -186,7 +172,7 @@ class KeyringTestCase(unittest.TestCase):
                 self.http_client.post_json.return_value = defer.Deferred()
 
                 res_deferreds_2 = kr.verify_json_objects_for_server(
-                    [("server10", json1)],
+                    [("server10", json1)]
                 )
                 yield clock.sleep(1)
                 self.http_client.post_json.assert_not_called()
@@ -207,8 +193,7 @@ class KeyringTestCase(unittest.TestCase):
 
         key1 = signedjson.key.generate_signing_key(1)
         yield self.hs.datastore.store_server_verify_key(
-            "server9", "", time.time() * 1000,
-            signedjson.key.get_verify_key(key1),
+            "server9", "", time.time() * 1000, signedjson.key.get_verify_key(key1)
         )
         json1 = {}
         signedjson.sign.sign_json(json1, "server9", key1)
diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py
index f51d99419e..ff217ca8b9 100644
--- a/tests/events/test_utils.py
+++ b/tests/events/test_utils.py
@@ -31,25 +31,20 @@ def MockEvent(**kwargs):
 class PruneEventTestCase(unittest.TestCase):
     """ Asserts that a new event constructed with `evdict` will look like
     `matchdict` when it is redacted. """
+
     def run_test(self, evdict, matchdict):
-        self.assertEquals(
-            prune_event(FrozenEvent(evdict)).get_dict(),
-            matchdict
-        )
+        self.assertEquals(prune_event(FrozenEvent(evdict)).get_dict(), matchdict)
 
     def test_minimal(self):
         self.run_test(
-            {
-                'type': 'A',
-                'event_id': '$test:domain',
-            },
+            {'type': 'A', 'event_id': '$test:domain'},
             {
                 'type': 'A',
                 'event_id': '$test:domain',
                 'content': {},
                 'signatures': {},
                 'unsigned': {},
-            }
+            },
         )
 
     def test_basic_keys(self):
@@ -70,23 +65,19 @@ class PruneEventTestCase(unittest.TestCase):
                 'content': {},
                 'signatures': {},
                 'unsigned': {},
-            }
+            },
         )
 
     def test_unsigned_age_ts(self):
         self.run_test(
-            {
-                'type': 'B',
-                'event_id': '$test:domain',
-                'unsigned': {'age_ts': 20},
-            },
+            {'type': 'B', 'event_id': '$test:domain', 'unsigned': {'age_ts': 20}},
             {
                 'type': 'B',
                 'event_id': '$test:domain',
                 'content': {},
                 'signatures': {},
                 'unsigned': {'age_ts': 20},
-            }
+            },
         )
 
         self.run_test(
@@ -101,23 +92,19 @@ class PruneEventTestCase(unittest.TestCase):
                 'content': {},
                 'signatures': {},
                 'unsigned': {},
-            }
+            },
         )
 
     def test_content(self):
         self.run_test(
-            {
-                'type': 'C',
-                'event_id': '$test:domain',
-                'content': {'things': 'here'},
-            },
+            {'type': 'C', 'event_id': '$test:domain', 'content': {'things': 'here'}},
             {
                 'type': 'C',
                 'event_id': '$test:domain',
                 'content': {},
                 'signatures': {},
                 'unsigned': {},
-            }
+            },
         )
 
         self.run_test(
@@ -132,27 +119,20 @@ class PruneEventTestCase(unittest.TestCase):
                 'content': {'creator': '@2:domain'},
                 'signatures': {},
                 'unsigned': {},
-            }
+            },
         )
 
 
 class SerializeEventTestCase(unittest.TestCase):
-
     def serialize(self, ev, fields):
         return serialize_event(ev, 1479807801915, only_event_fields=fields)
 
     def test_event_fields_works_with_keys(self):
         self.assertEquals(
             self.serialize(
-                MockEvent(
-                    sender="@alice:localhost",
-                    room_id="!foo:bar"
-                ),
-                ["room_id"]
+                MockEvent(sender="@alice:localhost", room_id="!foo:bar"), ["room_id"]
             ),
-            {
-                "room_id": "!foo:bar",
-            }
+            {"room_id": "!foo:bar"},
         )
 
     def test_event_fields_works_with_nested_keys(self):
@@ -161,17 +141,11 @@ class SerializeEventTestCase(unittest.TestCase):
                 MockEvent(
                     sender="@alice:localhost",
                     room_id="!foo:bar",
-                    content={
-                        "body": "A message",
-                    },
+                    content={"body": "A message"},
                 ),
-                ["content.body"]
+                ["content.body"],
             ),
-            {
-                "content": {
-                    "body": "A message",
-                }
-            }
+            {"content": {"body": "A message"}},
         )
 
     def test_event_fields_works_with_dot_keys(self):
@@ -180,17 +154,11 @@ class SerializeEventTestCase(unittest.TestCase):
                 MockEvent(
                     sender="@alice:localhost",
                     room_id="!foo:bar",
-                    content={
-                        "key.with.dots": {},
-                    },
+                    content={"key.with.dots": {}},
                 ),
-                ["content.key\.with\.dots"]
+                ["content.key\.with\.dots"],
             ),
-            {
-                "content": {
-                    "key.with.dots": {},
-                }
-            }
+            {"content": {"key.with.dots": {}}},
         )
 
     def test_event_fields_works_with_nested_dot_keys(self):
@@ -201,21 +169,12 @@ class SerializeEventTestCase(unittest.TestCase):
                     room_id="!foo:bar",
                     content={
                         "not_me": 1,
-                        "nested.dot.key": {
-                            "leaf.key": 42,
-                            "not_me_either": 1,
-                        },
+                        "nested.dot.key": {"leaf.key": 42, "not_me_either": 1},
                     },
                 ),
-                ["content.nested\.dot\.key.leaf\.key"]
+                ["content.nested\.dot\.key.leaf\.key"],
             ),
-            {
-                "content": {
-                    "nested.dot.key": {
-                        "leaf.key": 42,
-                    },
-                }
-            }
+            {"content": {"nested.dot.key": {"leaf.key": 42}}},
         )
 
     def test_event_fields_nops_with_unknown_keys(self):
@@ -224,17 +183,11 @@ class SerializeEventTestCase(unittest.TestCase):
                 MockEvent(
                     sender="@alice:localhost",
                     room_id="!foo:bar",
-                    content={
-                        "foo": "bar",
-                    },
+                    content={"foo": "bar"},
                 ),
-                ["content.foo", "content.notexists"]
+                ["content.foo", "content.notexists"],
             ),
-            {
-                "content": {
-                    "foo": "bar",
-                }
-            }
+            {"content": {"foo": "bar"}},
         )
 
     def test_event_fields_nops_with_non_dict_keys(self):
@@ -243,13 +196,11 @@ class SerializeEventTestCase(unittest.TestCase):
                 MockEvent(
                     sender="@alice:localhost",
                     room_id="!foo:bar",
-                    content={
-                        "foo": ["I", "am", "an", "array"],
-                    },
+                    content={"foo": ["I", "am", "an", "array"]},
                 ),
-                ["content.foo.am"]
+                ["content.foo.am"],
             ),
-            {}
+            {},
         )
 
     def test_event_fields_nops_with_array_keys(self):
@@ -258,13 +209,11 @@ class SerializeEventTestCase(unittest.TestCase):
                 MockEvent(
                     sender="@alice:localhost",
                     room_id="!foo:bar",
-                    content={
-                        "foo": ["I", "am", "an", "array"],
-                    },
+                    content={"foo": ["I", "am", "an", "array"]},
                 ),
-                ["content.foo.1"]
+                ["content.foo.1"],
             ),
-            {}
+            {},
         )
 
     def test_event_fields_all_fields_if_empty(self):
@@ -274,31 +223,21 @@ class SerializeEventTestCase(unittest.TestCase):
                     type="foo",
                     event_id="test",
                     room_id="!foo:bar",
-                    content={
-                        "foo": "bar",
-                    },
+                    content={"foo": "bar"},
                 ),
-                []
+                [],
             ),
             {
                 "type": "foo",
                 "event_id": "test",
                 "room_id": "!foo:bar",
-                "content": {
-                    "foo": "bar",
-                },
-                "unsigned": {}
-            }
+                "content": {"foo": "bar"},
+                "unsigned": {},
+            },
         )
 
     def test_event_fields_fail_if_fields_not_str(self):
         with self.assertRaises(TypeError):
             self.serialize(
-                MockEvent(
-                    room_id="!foo:bar",
-                    content={
-                        "foo": "bar",
-                    },
-                ),
-                ["room_id", 4]
+                MockEvent(room_id="!foo:bar", content={"foo": "bar"}), ["room_id", 4]
             )
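
The `only_event_fields` tests above define dotted-path selection: a path like `content.body` picks a nested key, `\.` escapes a literal dot inside a key name, and any path that traverses a non-dict (or a missing key) is silently dropped. A self-contained approximation (not Synapse's actual `serialize_event`; `split_field` and `only_fields` are hypothetical helpers):

```python
import re


def split_field(field):
    # Split on unescaped dots, then unescape the literal ones.
    return [p.replace("\\.", ".") for p in re.split(r"(?<!\\)\.", field)]


def only_fields(event, fields):
    out = {}
    for field in fields:
        node, keys = event, split_field(field)
        for key in keys:
            if not isinstance(node, dict) or key not in node:
                break  # path hits a non-dict or a missing key: drop it
            node = node[key]
        else:
            dest = out
            for key in keys[:-1]:
                dest = dest.setdefault(key, {})
            dest[keys[-1]] = node
    return out


event = {"content": {"nested.dot.key": {"leaf.key": 42, "not_me_either": 1}}}
assert only_fields(event, ["content.nested\\.dot\\.key.leaf\\.key"]) == {
    "content": {"nested.dot.key": {"leaf.key": 42}}
}
assert only_fields({"content": {"foo": ["an", "array"]}}, ["content.foo.am"]) == {}
```
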
diff --git a/tests/federation/test_federation_server.py b/tests/federation/test_federation_server.py
index c91e25f54f..af15f4cc5a 100644
--- a/tests/federation/test_federation_server.py
+++ b/tests/federation/test_federation_server.py
@@ -23,10 +23,7 @@ from tests import unittest
 @unittest.DEBUG
 class ServerACLsTestCase(unittest.TestCase):
     def test_blacklisted_server(self):
-        e = _create_acl_event({
-            "allow": ["*"],
-            "deny": ["evil.com"],
-        })
+        e = _create_acl_event({"allow": ["*"], "deny": ["evil.com"]})
         logging.info("ACL event: %s", e.content)
 
         self.assertFalse(server_matches_acl_event("evil.com", e))
@@ -36,10 +33,7 @@ class ServerACLsTestCase(unittest.TestCase):
         self.assertTrue(server_matches_acl_event("honestly.not.evil.com", e))
 
     def test_block_ip_literals(self):
-        e = _create_acl_event({
-            "allow_ip_literals": False,
-            "allow": ["*"],
-        })
+        e = _create_acl_event({"allow_ip_literals": False, "allow": ["*"]})
         logging.info("ACL event: %s", e.content)
 
         self.assertFalse(server_matches_acl_event("1.2.3.4", e))
@@ -49,10 +43,12 @@ class ServerACLsTestCase(unittest.TestCase):
 
 
 def _create_acl_event(content):
-    return FrozenEvent({
-        "room_id": "!a:b",
-        "event_id": "$a:b",
-        "type": "m.room.server_acls",
-        "sender": "@a:b",
-        "content": content
-    })
+    return FrozenEvent(
+        {
+            "room_id": "!a:b",
+            "event_id": "$a:b",
+            "type": "m.room.server_acls",
+            "sender": "@a:b",
+            "content": content,
+        }
+    )
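
The ACL tests above assert three behaviours of `server_matches_acl_event`: IP literals can be refused outright via `allow_ip_literals: false`, `deny` globs are checked before `allow` globs, and a server must match an `allow` entry to pass. A stand-alone approximation (hypothetical `server_matches_acl` helper, simplified — no port or IPv6-bracket handling, unlike the real parser):

```python
import fnmatch
from ipaddress import ip_address


def server_matches_acl(server_name, content):
    if not content.get("allow_ip_literals", True):
        try:
            ip_address(server_name)
            return False  # bare IP literals are refused
        except ValueError:
            pass  # not an IP literal, carry on
    if any(fnmatch.fnmatchcase(server_name, p) for p in content.get("deny", [])):
        return False
    return any(fnmatch.fnmatchcase(server_name, p) for p in content.get("allow", []))


assert not server_matches_acl("evil.com", {"allow": ["*"], "deny": ["evil.com"]})
assert server_matches_acl("honestly.not.evil.com", {"allow": ["*"], "deny": ["evil.com"]})
assert not server_matches_acl("1.2.3.4", {"allow_ip_literals": False, "allow": ["*"]})
```
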
diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py
index 57c0771cf3..ba7148ec01 100644
--- a/tests/handlers/test_appservice.py
+++ b/tests/handlers/test_appservice.py
@@ -45,20 +45,18 @@ class AppServiceHandlerTestCase(unittest.TestCase):
         services = [
             self._mkservice(is_interested=False),
             interested_service,
-            self._mkservice(is_interested=False)
+            self._mkservice(is_interested=False),
         ]
 
         self.mock_store.get_app_services = Mock(return_value=services)
         self.mock_store.get_user_by_id = Mock(return_value=[])
 
         event = Mock(
-            sender="@someone:anywhere",
-            type="m.room.message",
-            room_id="!foo:bar"
+            sender="@someone:anywhere", type="m.room.message", room_id="!foo:bar"
         )
         self.mock_store.get_new_events_for_appservice.side_effect = [
             (0, [event]),
-            (0, [])
+            (0, []),
         ]
         self.mock_as_api.push = Mock()
         yield self.handler.notify_interested_services(0)
@@ -74,21 +72,15 @@ class AppServiceHandlerTestCase(unittest.TestCase):
         self.mock_store.get_app_services = Mock(return_value=services)
         self.mock_store.get_user_by_id = Mock(return_value=None)
 
-        event = Mock(
-            sender=user_id,
-            type="m.room.message",
-            room_id="!foo:bar"
-        )
+        event = Mock(sender=user_id, type="m.room.message", room_id="!foo:bar")
         self.mock_as_api.push = Mock()
         self.mock_as_api.query_user = Mock()
         self.mock_store.get_new_events_for_appservice.side_effect = [
             (0, [event]),
-            (0, [])
+            (0, []),
         ]
         yield self.handler.notify_interested_services(0)
-        self.mock_as_api.query_user.assert_called_once_with(
-            services[0], user_id
-        )
+        self.mock_as_api.query_user.assert_called_once_with(services[0], user_id)
 
     @defer.inlineCallbacks
     def test_query_user_exists_known_user(self):
@@ -96,25 +88,19 @@ class AppServiceHandlerTestCase(unittest.TestCase):
         services = [self._mkservice(is_interested=True)]
         services[0].is_interested_in_user = Mock(return_value=True)
         self.mock_store.get_app_services = Mock(return_value=services)
-        self.mock_store.get_user_by_id = Mock(return_value={
-            "name": user_id
-        })
+        self.mock_store.get_user_by_id = Mock(return_value={"name": user_id})
 
-        event = Mock(
-            sender=user_id,
-            type="m.room.message",
-            room_id="!foo:bar"
-        )
+        event = Mock(sender=user_id, type="m.room.message", room_id="!foo:bar")
         self.mock_as_api.push = Mock()
         self.mock_as_api.query_user = Mock()
         self.mock_store.get_new_events_for_appservice.side_effect = [
             (0, [event]),
-            (0, [])
+            (0, []),
         ]
         yield self.handler.notify_interested_services(0)
         self.assertFalse(
             self.mock_as_api.query_user.called,
-            "query_user called when it shouldn't have been."
+            "query_user called when it shouldn't have been.",
         )
 
     @defer.inlineCallbacks
@@ -129,7 +115,7 @@ class AppServiceHandlerTestCase(unittest.TestCase):
         services = [
             self._mkservice_alias(is_interested_in_alias=False),
             interested_service,
-            self._mkservice_alias(is_interested_in_alias=False)
+            self._mkservice_alias(is_interested_in_alias=False),
         ]
 
         self.mock_store.get_app_services = Mock(return_value=services)
@@ -140,8 +126,7 @@ class AppServiceHandlerTestCase(unittest.TestCase):
         result = yield self.handler.query_room_alias_exists(room_alias)
 
         self.mock_as_api.query_alias.assert_called_once_with(
-            interested_service,
-            room_alias_str
+            interested_service, room_alias_str
         )
         self.assertEquals(result.room_id, room_id)
         self.assertEquals(result.servers, servers)
diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py
index 55eab9e9cf..1e39fe0ec2 100644
--- a/tests/handlers/test_auth.py
+++ b/tests/handlers/test_auth.py
@@ -20,7 +20,7 @@ from twisted.internet import defer
 
 import synapse
 import synapse.api.errors
-from synapse.api.errors import AuthError
+from synapse.api.errors import ResourceLimitError
 from synapse.handlers.auth import AuthHandler
 
 from tests import unittest
@@ -35,7 +35,7 @@ class AuthHandlers(object):
 class AuthTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def setUp(self):
-        self.hs = yield setup_test_homeserver(handlers=None)
+        self.hs = yield setup_test_homeserver(self.addCleanup, handlers=None)
         self.hs.handlers = AuthHandlers(self.hs)
         self.auth_handler = self.hs.handlers.auth_handler
         self.macaroon_generator = self.hs.get_macaroon_generator()
@@ -81,9 +81,7 @@ class AuthTestCase(unittest.TestCase):
     def test_short_term_login_token_gives_user_id(self):
         self.hs.clock.now = 1000
 
-        token = self.macaroon_generator.generate_short_term_login_token(
-            "a_user", 5000
-        )
+        token = self.macaroon_generator.generate_short_term_login_token("a_user", 5000)
         user_id = yield self.auth_handler.validate_short_term_login_token_and_get_user_id(
             token
         )
@@ -98,17 +96,13 @@ class AuthTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_short_term_login_token_cannot_replace_user_id(self):
-        token = self.macaroon_generator.generate_short_term_login_token(
-            "a_user", 5000
-        )
+        token = self.macaroon_generator.generate_short_term_login_token("a_user", 5000)
         macaroon = pymacaroons.Macaroon.deserialize(token)
 
         user_id = yield self.auth_handler.validate_short_term_login_token_and_get_user_id(
             macaroon.serialize()
         )
-        self.assertEqual(
-            "a_user", user_id
-        )
+        self.assertEqual("a_user", user_id)
 
         # add another "user_id" caveat, which might allow us to override the
         # user_id.
@@ -130,34 +124,70 @@ class AuthTestCase(unittest.TestCase):
         )
 
     @defer.inlineCallbacks
-    def test_mau_limits_exceeded(self):
+    def test_mau_limits_exceeded_large(self):
         self.hs.config.limit_usage_by_mau = True
-        self.hs.get_datastore().count_monthly_users = Mock(
+        self.hs.get_datastore().get_monthly_active_count = Mock(
             return_value=defer.succeed(self.large_number_of_users)
         )
 
-        with self.assertRaises(AuthError):
+        with self.assertRaises(ResourceLimitError):
             yield self.auth_handler.get_access_token_for_user_id('user_a')
 
-        self.hs.get_datastore().count_monthly_users = Mock(
+        self.hs.get_datastore().get_monthly_active_count = Mock(
             return_value=defer.succeed(self.large_number_of_users)
         )
-        with self.assertRaises(AuthError):
+        with self.assertRaises(ResourceLimitError):
+            yield self.auth_handler.validate_short_term_login_token_and_get_user_id(
+                self._get_macaroon().serialize()
+            )
+
+    @defer.inlineCallbacks
+    def test_mau_limits_parity(self):
+        self.hs.config.limit_usage_by_mau = True
+
+        # If not in monthly active cohort
+        self.hs.get_datastore().get_monthly_active_count = Mock(
+            return_value=defer.succeed(self.hs.config.max_mau_value)
+        )
+        with self.assertRaises(ResourceLimitError):
+            yield self.auth_handler.get_access_token_for_user_id('user_a')
+
+        self.hs.get_datastore().get_monthly_active_count = Mock(
+            return_value=defer.succeed(self.hs.config.max_mau_value)
+        )
+        with self.assertRaises(ResourceLimitError):
             yield self.auth_handler.validate_short_term_login_token_and_get_user_id(
                 self._get_macaroon().serialize()
             )
+        # If in monthly active cohort
+        self.hs.get_datastore().user_last_seen_monthly_active = Mock(
+            return_value=defer.succeed(self.hs.get_clock().time_msec())
+        )
+        self.hs.get_datastore().get_monthly_active_count = Mock(
+            return_value=defer.succeed(self.hs.config.max_mau_value)
+        )
+        yield self.auth_handler.get_access_token_for_user_id('user_a')
+        self.hs.get_datastore().user_last_seen_monthly_active = Mock(
+            return_value=defer.succeed(self.hs.get_clock().time_msec())
+        )
+        self.hs.get_datastore().get_monthly_active_count = Mock(
+            return_value=defer.succeed(self.hs.config.max_mau_value)
+        )
+        yield self.auth_handler.validate_short_term_login_token_and_get_user_id(
+            self._get_macaroon().serialize()
+        )
 
     @defer.inlineCallbacks
     def test_mau_limits_not_exceeded(self):
         self.hs.config.limit_usage_by_mau = True
 
-        self.hs.get_datastore().count_monthly_users = Mock(
+        self.hs.get_datastore().get_monthly_active_count = Mock(
             return_value=defer.succeed(self.small_number_of_users)
         )
         # Ensure does not raise exception
         yield self.auth_handler.get_access_token_for_user_id('user_a')
 
-        self.hs.get_datastore().count_monthly_users = Mock(
+        self.hs.get_datastore().get_monthly_active_count = Mock(
             return_value=defer.succeed(self.small_number_of_users)
         )
         yield self.auth_handler.validate_short_term_login_token_and_get_user_id(
@@ -165,7 +195,5 @@ class AuthTestCase(unittest.TestCase):
         )
 
     def _get_macaroon(self):
-        token = self.macaroon_generator.generate_short_term_login_token(
-            "user_a", 5000
-        )
+        token = self.macaroon_generator.generate_short_term_login_token("user_a", 5000)
         return pymacaroons.Macaroon.deserialize(token)
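
The auth-handler changes above replace the old `count_monthly_users` store method with `get_monthly_active_count`, and tighten the expected exception from a generic `AuthError` to `ResourceLimitError`. The new `test_mau_limits_parity` case additionally stubs `user_last_seen_monthly_active`, checking that a user already counted in the monthly-active cohort is let through even when the count sits exactly at `max_mau_value`. A minimal sketch of the blocking logic these tests exercise (the function name and arguments are assumptions for illustration, not the handler's real internals):

    from twisted.internet import defer
    from synapse.api.errors import ResourceLimitError

    @defer.inlineCallbacks
    def check_auth_blocking(store, config, user_id):
        # Sketch only: limit enforcement as exercised by the tests above.
        if not config.limit_usage_by_mau:
            return
        # A user already in the monthly-active cohort is exempt; this is
        # the branch test_mau_limits_parity covers by stubbing
        # user_last_seen_monthly_active with a recent timestamp.
        last_seen = yield store.user_last_seen_monthly_active(user_id)
        if last_seen:
            return
        current_mau = yield store.get_monthly_active_count()
        if current_mau >= config.max_mau_value:
            raise ResourceLimitError(403, "Monthly Active User Limit Exceeded")
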
diff --git a/tests/handlers/test_device.py b/tests/handlers/test_device.py
index 633a0b7f36..56e7acd37c 100644
--- a/tests/handlers/test_device.py
+++ b/tests/handlers/test_device.py
@@ -28,13 +28,13 @@ user2 = "@theresa:bbb"
 class DeviceTestCase(unittest.TestCase):
     def __init__(self, *args, **kwargs):
         super(DeviceTestCase, self).__init__(*args, **kwargs)
-        self.store = None    # type: synapse.storage.DataStore
+        self.store = None  # type: synapse.storage.DataStore
         self.handler = None  # type: synapse.handlers.device.DeviceHandler
-        self.clock = None    # type: utils.MockClock
+        self.clock = None  # type: utils.MockClock
 
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield utils.setup_test_homeserver()
+        hs = yield utils.setup_test_homeserver(self.addCleanup)
         self.handler = hs.get_device_handler()
         self.store = hs.get_datastore()
         self.clock = hs.get_clock()
@@ -44,7 +44,7 @@ class DeviceTestCase(unittest.TestCase):
         res = yield self.handler.check_device_registered(
             user_id="@boris:foo",
             device_id="fco",
-            initial_device_display_name="display name"
+            initial_device_display_name="display name",
         )
         self.assertEqual(res, "fco")
 
@@ -56,14 +56,14 @@ class DeviceTestCase(unittest.TestCase):
         res1 = yield self.handler.check_device_registered(
             user_id="@boris:foo",
             device_id="fco",
-            initial_device_display_name="display name"
+            initial_device_display_name="display name",
         )
         self.assertEqual(res1, "fco")
 
         res2 = yield self.handler.check_device_registered(
             user_id="@boris:foo",
             device_id="fco",
-            initial_device_display_name="new display name"
+            initial_device_display_name="new display name",
         )
         self.assertEqual(res2, "fco")
 
@@ -75,7 +75,7 @@ class DeviceTestCase(unittest.TestCase):
         device_id = yield self.handler.check_device_registered(
             user_id="@theresa:foo",
             device_id=None,
-            initial_device_display_name="display"
+            initial_device_display_name="display",
         )
 
         dev = yield self.handler.store.get_device("@theresa:foo", device_id)
@@ -87,43 +87,53 @@ class DeviceTestCase(unittest.TestCase):
 
         res = yield self.handler.get_devices_by_user(user1)
         self.assertEqual(3, len(res))
-        device_map = {
-            d["device_id"]: d for d in res
-        }
-        self.assertDictContainsSubset({
-            "user_id": user1,
-            "device_id": "xyz",
-            "display_name": "display 0",
-            "last_seen_ip": None,
-            "last_seen_ts": None,
-        }, device_map["xyz"])
-        self.assertDictContainsSubset({
-            "user_id": user1,
-            "device_id": "fco",
-            "display_name": "display 1",
-            "last_seen_ip": "ip1",
-            "last_seen_ts": 1000000,
-        }, device_map["fco"])
-        self.assertDictContainsSubset({
-            "user_id": user1,
-            "device_id": "abc",
-            "display_name": "display 2",
-            "last_seen_ip": "ip3",
-            "last_seen_ts": 3000000,
-        }, device_map["abc"])
+        device_map = {d["device_id"]: d for d in res}
+        self.assertDictContainsSubset(
+            {
+                "user_id": user1,
+                "device_id": "xyz",
+                "display_name": "display 0",
+                "last_seen_ip": None,
+                "last_seen_ts": None,
+            },
+            device_map["xyz"],
+        )
+        self.assertDictContainsSubset(
+            {
+                "user_id": user1,
+                "device_id": "fco",
+                "display_name": "display 1",
+                "last_seen_ip": "ip1",
+                "last_seen_ts": 1000000,
+            },
+            device_map["fco"],
+        )
+        self.assertDictContainsSubset(
+            {
+                "user_id": user1,
+                "device_id": "abc",
+                "display_name": "display 2",
+                "last_seen_ip": "ip3",
+                "last_seen_ts": 3000000,
+            },
+            device_map["abc"],
+        )
 
     @defer.inlineCallbacks
     def test_get_device(self):
         yield self._record_users()
 
         res = yield self.handler.get_device(user1, "abc")
-        self.assertDictContainsSubset({
-            "user_id": user1,
-            "device_id": "abc",
-            "display_name": "display 2",
-            "last_seen_ip": "ip3",
-            "last_seen_ts": 3000000,
-        }, res)
+        self.assertDictContainsSubset(
+            {
+                "user_id": user1,
+                "device_id": "abc",
+                "display_name": "display 2",
+                "last_seen_ip": "ip3",
+                "last_seen_ts": 3000000,
+            },
+            res,
+        )
 
     @defer.inlineCallbacks
     def test_delete_device(self):
@@ -153,8 +163,7 @@ class DeviceTestCase(unittest.TestCase):
     def test_update_unknown_device(self):
         update = {"display_name": "new_display"}
         with self.assertRaises(synapse.api.errors.NotFoundError):
-            yield self.handler.update_device("user_id", "unknown_device_id",
-                                             update)
+            yield self.handler.update_device("user_id", "unknown_device_id", update)
 
     @defer.inlineCallbacks
     def _record_users(self):
@@ -168,16 +177,17 @@ class DeviceTestCase(unittest.TestCase):
         yield self._record_user(user2, "def", "dispkay", "token4", "ip4")
 
     @defer.inlineCallbacks
-    def _record_user(self, user_id, device_id, display_name,
-                     access_token=None, ip=None):
+    def _record_user(
+        self, user_id, device_id, display_name, access_token=None, ip=None
+    ):
         device_id = yield self.handler.check_device_registered(
             user_id=user_id,
             device_id=device_id,
-            initial_device_display_name=display_name
+            initial_device_display_name=display_name,
         )
 
         if ip is not None:
             yield self.store.insert_client_ip(
-                user_id,
-                access_token, ip, "user_agent", device_id)
+                user_id, access_token, ip, "user_agent", device_id
+            )
             self.clock.advance_time(1000)
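
A change that recurs throughout this patch is that `setup_test_homeserver` now takes the test case's `addCleanup` as its first argument, so each test homeserver can register its own teardown (temporary database, pools, and so on) with the unittest machinery instead of leaking state across tests. Roughly, and only as a sketch (the internal helper and parameter names here are made up):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def setup_test_homeserver(cleanup_func, *args, **kwargs):
        # cleanup_func is unittest.TestCase.addCleanup in every caller above.
        hs = yield _make_homeserver(*args, **kwargs)  # hypothetical builder

        def cleanup():
            # e.g. drop the per-test database, stop background timers
            hs.get_db_pool().close()

        cleanup_func(cleanup)
        defer.returnValue(hs)
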
diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py
index a353070316..ec7355688b 100644
--- a/tests/handlers/test_directory.py
+++ b/tests/handlers/test_directory.py
@@ -42,9 +42,11 @@ class DirectoryTestCase(unittest.TestCase):
 
         def register_query_handler(query_type, handler):
             self.query_handlers[query_type] = handler
+
         self.mock_registry.register_query_handler = register_query_handler
 
         hs = yield setup_test_homeserver(
+            self.addCleanup,
             http_client=None,
             resource_for_federation=Mock(),
             federation_client=self.mock_federation,
@@ -68,10 +70,7 @@ class DirectoryTestCase(unittest.TestCase):
 
         result = yield self.handler.get_association(self.my_room)
 
-        self.assertEquals({
-            "room_id": "!8765qwer:test",
-            "servers": ["test"],
-        }, result)
+        self.assertEquals({"room_id": "!8765qwer:test", "servers": ["test"]}, result)
 
     @defer.inlineCallbacks
     def test_get_remote_association(self):
@@ -81,16 +80,13 @@ class DirectoryTestCase(unittest.TestCase):
 
         result = yield self.handler.get_association(self.remote_room)
 
-        self.assertEquals({
-            "room_id": "!8765qwer:test",
-            "servers": ["test", "remote"],
-        }, result)
+        self.assertEquals(
+            {"room_id": "!8765qwer:test", "servers": ["test", "remote"]}, result
+        )
         self.mock_federation.make_query.assert_called_with(
             destination="remote",
             query_type="directory",
-            args={
-                "room_alias": "#another:remote",
-            },
+            args={"room_alias": "#another:remote"},
             retry_on_dns_fail=False,
             ignore_backoff=True,
         )
@@ -105,7 +101,4 @@ class DirectoryTestCase(unittest.TestCase):
             {"room_alias": "#your-room:test"}
         )
 
-        self.assertEquals({
-            "room_id": "!8765asdf:test",
-            "servers": ["test"],
-        }, response)
+        self.assertEquals({"room_id": "!8765asdf:test", "servers": ["test"]}, response)
diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py
index ca1542236d..8dccc6826e 100644
--- a/tests/handlers/test_e2e_keys.py
+++ b/tests/handlers/test_e2e_keys.py
@@ -28,14 +28,13 @@ from tests import unittest, utils
 class E2eKeysHandlerTestCase(unittest.TestCase):
     def __init__(self, *args, **kwargs):
         super(E2eKeysHandlerTestCase, self).__init__(*args, **kwargs)
-        self.hs = None       # type: synapse.server.HomeServer
+        self.hs = None  # type: synapse.server.HomeServer
         self.handler = None  # type: synapse.handlers.e2e_keys.E2eKeysHandler
 
     @defer.inlineCallbacks
     def setUp(self):
         self.hs = yield utils.setup_test_homeserver(
-            handlers=None,
-            federation_client=mock.Mock(),
+            self.addCleanup, handlers=None, federation_client=mock.Mock()
         )
         self.handler = synapse.handlers.e2e_keys.E2eKeysHandler(self.hs)
 
@@ -54,30 +53,21 @@ class E2eKeysHandlerTestCase(unittest.TestCase):
         device_id = "xyz"
         keys = {
             "alg1:k1": "key1",
-            "alg2:k2": {
-                "key": "key2",
-                "signatures": {"k1": "sig1"}
-            },
-            "alg2:k3": {
-                "key": "key3",
-            },
+            "alg2:k2": {"key": "key2", "signatures": {"k1": "sig1"}},
+            "alg2:k3": {"key": "key3"},
         }
 
         res = yield self.handler.upload_keys_for_user(
-            local_user, device_id, {"one_time_keys": keys},
+            local_user, device_id, {"one_time_keys": keys}
         )
-        self.assertDictEqual(res, {
-            "one_time_key_counts": {"alg1": 1, "alg2": 2}
-        })
+        self.assertDictEqual(res, {"one_time_key_counts": {"alg1": 1, "alg2": 2}})
 
         # we should be able to change the signature without a problem
         keys["alg2:k2"]["signatures"]["k1"] = "sig2"
         res = yield self.handler.upload_keys_for_user(
-            local_user, device_id, {"one_time_keys": keys},
+            local_user, device_id, {"one_time_keys": keys}
         )
-        self.assertDictEqual(res, {
-            "one_time_key_counts": {"alg1": 1, "alg2": 2}
-        })
+        self.assertDictEqual(res, {"one_time_key_counts": {"alg1": 1, "alg2": 2}})
 
     @defer.inlineCallbacks
     def test_change_one_time_keys(self):
@@ -87,25 +77,18 @@ class E2eKeysHandlerTestCase(unittest.TestCase):
         device_id = "xyz"
         keys = {
             "alg1:k1": "key1",
-            "alg2:k2": {
-                "key": "key2",
-                "signatures": {"k1": "sig1"}
-            },
-            "alg2:k3": {
-                "key": "key3",
-            },
+            "alg2:k2": {"key": "key2", "signatures": {"k1": "sig1"}},
+            "alg2:k3": {"key": "key3"},
         }
 
         res = yield self.handler.upload_keys_for_user(
-            local_user, device_id, {"one_time_keys": keys},
+            local_user, device_id, {"one_time_keys": keys}
         )
-        self.assertDictEqual(res, {
-            "one_time_key_counts": {"alg1": 1, "alg2": 2}
-        })
+        self.assertDictEqual(res, {"one_time_key_counts": {"alg1": 1, "alg2": 2}})
 
         try:
             yield self.handler.upload_keys_for_user(
-                local_user, device_id, {"one_time_keys": {"alg1:k1": "key2"}},
+                local_user, device_id, {"one_time_keys": {"alg1:k1": "key2"}}
             )
             self.fail("No error when changing string key")
         except errors.SynapseError:
@@ -113,7 +96,7 @@ class E2eKeysHandlerTestCase(unittest.TestCase):
 
         try:
             yield self.handler.upload_keys_for_user(
-                local_user, device_id, {"one_time_keys": {"alg2:k3": "key2"}},
+                local_user, device_id, {"one_time_keys": {"alg2:k3": "key2"}}
             )
             self.fail("No error when replacing dict key with string")
         except errors.SynapseError:
@@ -121,9 +104,7 @@ class E2eKeysHandlerTestCase(unittest.TestCase):
 
         try:
             yield self.handler.upload_keys_for_user(
-                local_user, device_id, {
-                    "one_time_keys": {"alg1:k1": {"key": "key"}}
-                },
+                local_user, device_id, {"one_time_keys": {"alg1:k1": {"key": "key"}}}
             )
             self.fail("No error when replacing string key with dict")
         except errors.SynapseError:
@@ -131,13 +112,12 @@ class E2eKeysHandlerTestCase(unittest.TestCase):
 
         try:
             yield self.handler.upload_keys_for_user(
-                local_user, device_id, {
+                local_user,
+                device_id,
+                {
                     "one_time_keys": {
-                        "alg2:k2": {
-                            "key": "key3",
-                            "signatures": {"k1": "sig1"},
-                        }
-                    },
+                        "alg2:k2": {"key": "key3", "signatures": {"k1": "sig1"}}
+                    }
                 },
             )
             self.fail("No error when replacing dict key")
@@ -148,31 +128,20 @@ class E2eKeysHandlerTestCase(unittest.TestCase):
     def test_claim_one_time_key(self):
         local_user = "@boris:" + self.hs.hostname
         device_id = "xyz"
-        keys = {
-            "alg1:k1": "key1",
-        }
+        keys = {"alg1:k1": "key1"}
 
         res = yield self.handler.upload_keys_for_user(
-            local_user, device_id, {"one_time_keys": keys},
+            local_user, device_id, {"one_time_keys": keys}
+        )
+        self.assertDictEqual(res, {"one_time_key_counts": {"alg1": 1}})
+
+        res2 = yield self.handler.claim_one_time_keys(
+            {"one_time_keys": {local_user: {device_id: "alg1"}}}, timeout=None
+        )
+        self.assertEqual(
+            res2,
+            {
+                "failures": {},
+                "one_time_keys": {local_user: {device_id: {"alg1:k1": "key1"}}},
+            },
         )
-        self.assertDictEqual(res, {
-            "one_time_key_counts": {"alg1": 1}
-        })
-
-        res2 = yield self.handler.claim_one_time_keys({
-            "one_time_keys": {
-                local_user: {
-                    device_id: "alg1"
-                }
-            }
-        }, timeout=None)
-        self.assertEqual(res2, {
-            "failures": {},
-            "one_time_keys": {
-                local_user: {
-                    device_id: {
-                        "alg1:k1": "key1"
-                    }
-                }
-            }
-        })
diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py
index 121ce78634..fc2b646ba2 100644
--- a/tests/handlers/test_presence.py
+++ b/tests/handlers/test_presence.py
@@ -39,8 +39,7 @@ class PresenceUpdateTestCase(unittest.TestCase):
 
         prev_state = UserPresenceState.default(user_id)
         new_state = prev_state.copy_and_replace(
-            state=PresenceState.ONLINE,
-            last_active_ts=now,
+            state=PresenceState.ONLINE, last_active_ts=now
         )
 
         state, persist_and_notify, federation_ping = handle_update(
@@ -54,23 +53,22 @@ class PresenceUpdateTestCase(unittest.TestCase):
         self.assertEquals(state.last_federation_update_ts, now)
 
         self.assertEquals(wheel_timer.insert.call_count, 3)
-        wheel_timer.insert.assert_has_calls([
-            call(
-                now=now,
-                obj=user_id,
-                then=new_state.last_active_ts + IDLE_TIMER
-            ),
-            call(
-                now=now,
-                obj=user_id,
-                then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT
-            ),
-            call(
-                now=now,
-                obj=user_id,
-                then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY
-            ),
-        ], any_order=True)
+        wheel_timer.insert.assert_has_calls(
+            [
+                call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER),
+                call(
+                    now=now,
+                    obj=user_id,
+                    then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
+                ),
+                call(
+                    now=now,
+                    obj=user_id,
+                    then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY,
+                ),
+            ],
+            any_order=True,
+        )
 
     def test_online_to_online(self):
         wheel_timer = Mock()
@@ -79,14 +77,11 @@ class PresenceUpdateTestCase(unittest.TestCase):
 
         prev_state = UserPresenceState.default(user_id)
         prev_state = prev_state.copy_and_replace(
-            state=PresenceState.ONLINE,
-            last_active_ts=now,
-            currently_active=True,
+            state=PresenceState.ONLINE, last_active_ts=now, currently_active=True
         )
 
         new_state = prev_state.copy_and_replace(
-            state=PresenceState.ONLINE,
-            last_active_ts=now,
+            state=PresenceState.ONLINE, last_active_ts=now
         )
 
         state, persist_and_notify, federation_ping = handle_update(
@@ -101,23 +96,22 @@ class PresenceUpdateTestCase(unittest.TestCase):
         self.assertEquals(state.last_federation_update_ts, now)
 
         self.assertEquals(wheel_timer.insert.call_count, 3)
-        wheel_timer.insert.assert_has_calls([
-            call(
-                now=now,
-                obj=user_id,
-                then=new_state.last_active_ts + IDLE_TIMER
-            ),
-            call(
-                now=now,
-                obj=user_id,
-                then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT
-            ),
-            call(
-                now=now,
-                obj=user_id,
-                then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY
-            ),
-        ], any_order=True)
+        wheel_timer.insert.assert_has_calls(
+            [
+                call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER),
+                call(
+                    now=now,
+                    obj=user_id,
+                    then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
+                ),
+                call(
+                    now=now,
+                    obj=user_id,
+                    then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY,
+                ),
+            ],
+            any_order=True,
+        )
 
     def test_online_to_online_last_active_noop(self):
         wheel_timer = Mock()
@@ -132,8 +126,7 @@ class PresenceUpdateTestCase(unittest.TestCase):
         )
 
         new_state = prev_state.copy_and_replace(
-            state=PresenceState.ONLINE,
-            last_active_ts=now,
+            state=PresenceState.ONLINE, last_active_ts=now
         )
 
         state, persist_and_notify, federation_ping = handle_update(
@@ -148,23 +141,22 @@ class PresenceUpdateTestCase(unittest.TestCase):
         self.assertEquals(state.last_federation_update_ts, now)
 
         self.assertEquals(wheel_timer.insert.call_count, 3)
-        wheel_timer.insert.assert_has_calls([
-            call(
-                now=now,
-                obj=user_id,
-                then=new_state.last_active_ts + IDLE_TIMER
-            ),
-            call(
-                now=now,
-                obj=user_id,
-                then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT
-            ),
-            call(
-                now=now,
-                obj=user_id,
-                then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY
-            ),
-        ], any_order=True)
+        wheel_timer.insert.assert_has_calls(
+            [
+                call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER),
+                call(
+                    now=now,
+                    obj=user_id,
+                    then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
+                ),
+                call(
+                    now=now,
+                    obj=user_id,
+                    then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY,
+                ),
+            ],
+            any_order=True,
+        )
 
     def test_online_to_online_last_active(self):
         wheel_timer = Mock()
@@ -178,9 +170,7 @@ class PresenceUpdateTestCase(unittest.TestCase):
             currently_active=True,
         )
 
-        new_state = prev_state.copy_and_replace(
-            state=PresenceState.ONLINE,
-        )
+        new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE)
 
         state, persist_and_notify, federation_ping = handle_update(
             prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
@@ -193,18 +183,17 @@ class PresenceUpdateTestCase(unittest.TestCase):
         self.assertEquals(state.last_federation_update_ts, now)
 
         self.assertEquals(wheel_timer.insert.call_count, 2)
-        wheel_timer.insert.assert_has_calls([
-            call(
-                now=now,
-                obj=user_id,
-                then=new_state.last_active_ts + IDLE_TIMER
-            ),
-            call(
-                now=now,
-                obj=user_id,
-                then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT
-            )
-        ], any_order=True)
+        wheel_timer.insert.assert_has_calls(
+            [
+                call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER),
+                call(
+                    now=now,
+                    obj=user_id,
+                    then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
+                ),
+            ],
+            any_order=True,
+        )
 
     def test_remote_ping_timer(self):
         wheel_timer = Mock()
@@ -213,13 +202,10 @@ class PresenceUpdateTestCase(unittest.TestCase):
 
         prev_state = UserPresenceState.default(user_id)
         prev_state = prev_state.copy_and_replace(
-            state=PresenceState.ONLINE,
-            last_active_ts=now,
+            state=PresenceState.ONLINE, last_active_ts=now
         )
 
-        new_state = prev_state.copy_and_replace(
-            state=PresenceState.ONLINE,
-        )
+        new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE)
 
         state, persist_and_notify, federation_ping = handle_update(
             prev_state, new_state, is_mine=False, wheel_timer=wheel_timer, now=now
@@ -232,13 +218,16 @@ class PresenceUpdateTestCase(unittest.TestCase):
         self.assertEquals(new_state.status_msg, state.status_msg)
 
         self.assertEquals(wheel_timer.insert.call_count, 1)
-        wheel_timer.insert.assert_has_calls([
-            call(
-                now=now,
-                obj=user_id,
-                then=new_state.last_federation_update_ts + FEDERATION_TIMEOUT
-            ),
-        ], any_order=True)
+        wheel_timer.insert.assert_has_calls(
+            [
+                call(
+                    now=now,
+                    obj=user_id,
+                    then=new_state.last_federation_update_ts + FEDERATION_TIMEOUT,
+                )
+            ],
+            any_order=True,
+        )
 
     def test_online_to_offline(self):
         wheel_timer = Mock()
@@ -247,14 +236,10 @@ class PresenceUpdateTestCase(unittest.TestCase):
 
         prev_state = UserPresenceState.default(user_id)
         prev_state = prev_state.copy_and_replace(
-            state=PresenceState.ONLINE,
-            last_active_ts=now,
-            currently_active=True,
+            state=PresenceState.ONLINE, last_active_ts=now, currently_active=True
         )
 
-        new_state = prev_state.copy_and_replace(
-            state=PresenceState.OFFLINE,
-        )
+        new_state = prev_state.copy_and_replace(state=PresenceState.OFFLINE)
 
         state, persist_and_notify, federation_ping = handle_update(
             prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
@@ -273,14 +258,10 @@ class PresenceUpdateTestCase(unittest.TestCase):
 
         prev_state = UserPresenceState.default(user_id)
         prev_state = prev_state.copy_and_replace(
-            state=PresenceState.ONLINE,
-            last_active_ts=now,
-            currently_active=True,
+            state=PresenceState.ONLINE, last_active_ts=now, currently_active=True
         )
 
-        new_state = prev_state.copy_and_replace(
-            state=PresenceState.UNAVAILABLE,
-        )
+        new_state = prev_state.copy_and_replace(state=PresenceState.UNAVAILABLE)
 
         state, persist_and_notify, federation_ping = handle_update(
             prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
@@ -293,13 +274,16 @@ class PresenceUpdateTestCase(unittest.TestCase):
         self.assertEquals(new_state.status_msg, state.status_msg)
 
         self.assertEquals(wheel_timer.insert.call_count, 1)
-        wheel_timer.insert.assert_has_calls([
-            call(
-                now=now,
-                obj=user_id,
-                then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT
-            )
-        ], any_order=True)
+        wheel_timer.insert.assert_has_calls(
+            [
+                call(
+                    now=now,
+                    obj=user_id,
+                    then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
+                )
+            ],
+            any_order=True,
+        )
 
 
 class PresenceTimeoutTestCase(unittest.TestCase):
@@ -314,9 +298,7 @@ class PresenceTimeoutTestCase(unittest.TestCase):
             last_user_sync_ts=now,
         )
 
-        new_state = handle_timeout(
-            state, is_mine=True, syncing_user_ids=set(), now=now
-        )
+        new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
 
         self.assertIsNotNone(new_state)
         self.assertEquals(new_state.state, PresenceState.UNAVAILABLE)
@@ -332,9 +314,7 @@ class PresenceTimeoutTestCase(unittest.TestCase):
             last_user_sync_ts=now - SYNC_ONLINE_TIMEOUT - 1,
         )
 
-        new_state = handle_timeout(
-            state, is_mine=True, syncing_user_ids=set(), now=now
-        )
+        new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
 
         self.assertIsNotNone(new_state)
         self.assertEquals(new_state.state, PresenceState.OFFLINE)
@@ -369,9 +349,7 @@ class PresenceTimeoutTestCase(unittest.TestCase):
             last_federation_update_ts=now - FEDERATION_PING_INTERVAL - 1,
         )
 
-        new_state = handle_timeout(
-            state, is_mine=True, syncing_user_ids=set(), now=now
-        )
+        new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
 
         self.assertIsNotNone(new_state)
         self.assertEquals(new_state, new_state)
@@ -388,9 +366,7 @@ class PresenceTimeoutTestCase(unittest.TestCase):
             last_federation_update_ts=now,
         )
 
-        new_state = handle_timeout(
-            state, is_mine=True, syncing_user_ids=set(), now=now
-        )
+        new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
 
         self.assertIsNone(new_state)
 
@@ -425,9 +401,7 @@ class PresenceTimeoutTestCase(unittest.TestCase):
             last_federation_update_ts=now,
         )
 
-        new_state = handle_timeout(
-            state, is_mine=True, syncing_user_ids=set(), now=now
-        )
+        new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
 
         self.assertIsNotNone(new_state)
         self.assertEquals(state, new_state)
diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py
index dc17918a3d..80da1c8954 100644
--- a/tests/handlers/test_profile.py
+++ b/tests/handlers/test_profile.py
@@ -20,7 +20,7 @@ from twisted.internet import defer
 
 import synapse.types
 from synapse.api.errors import AuthError
-from synapse.handlers.profile import ProfileHandler
+from synapse.handlers.profile import MasterProfileHandler
 from synapse.types import UserID
 
 from tests import unittest
@@ -29,7 +29,7 @@ from tests.utils import setup_test_homeserver
 
 class ProfileHandlers(object):
     def __init__(self, hs):
-        self.profile_handler = ProfileHandler(hs)
+        self.profile_handler = MasterProfileHandler(hs)
 
 
 class ProfileTestCase(unittest.TestCase):
@@ -48,15 +48,14 @@ class ProfileTestCase(unittest.TestCase):
         self.mock_registry.register_query_handler = register_query_handler
 
         hs = yield setup_test_homeserver(
+            self.addCleanup,
             http_client=None,
             handlers=None,
             resource_for_federation=Mock(),
             federation_client=self.mock_federation,
             federation_server=Mock(),
             federation_registry=self.mock_registry,
-            ratelimiter=NonCallableMock(spec_set=[
-                "send_message",
-            ])
+            ratelimiter=NonCallableMock(spec_set=["send_message"]),
         )
 
         self.ratelimiter = hs.get_ratelimiter()
@@ -74,9 +73,7 @@ class ProfileTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_get_my_name(self):
-        yield self.store.set_profile_displayname(
-            self.frank.localpart, "Frank"
-        )
+        yield self.store.set_profile_displayname(self.frank.localpart, "Frank")
 
         displayname = yield self.handler.get_displayname(self.frank)
 
@@ -85,22 +82,18 @@ class ProfileTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def test_set_my_name(self):
         yield self.handler.set_displayname(
-            self.frank,
-            synapse.types.create_requester(self.frank),
-            "Frank Jr."
+            self.frank, synapse.types.create_requester(self.frank), "Frank Jr."
         )
 
         self.assertEquals(
             (yield self.store.get_profile_displayname(self.frank.localpart)),
-            "Frank Jr."
+            "Frank Jr.",
         )
 
     @defer.inlineCallbacks
     def test_set_my_name_noauth(self):
         d = self.handler.set_displayname(
-            self.frank,
-            synapse.types.create_requester(self.bob),
-            "Frank Jr."
+            self.frank, synapse.types.create_requester(self.bob), "Frank Jr."
         )
 
         yield self.assertFailure(d, AuthError)
@@ -145,11 +138,12 @@ class ProfileTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def test_set_my_avatar(self):
         yield self.handler.set_avatar_url(
-            self.frank, synapse.types.create_requester(self.frank),
-            "http://my.server/pic.gif"
+            self.frank,
+            synapse.types.create_requester(self.frank),
+            "http://my.server/pic.gif",
         )
 
         self.assertEquals(
             (yield self.store.get_profile_avatar_url(self.frank.localpart)),
-            "http://my.server/pic.gif"
+            "http://my.server/pic.gif",
         )
diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py
index 0937d71cf6..7b4ade3dfb 100644
--- a/tests/handlers/test_register.py
+++ b/tests/handlers/test_register.py
@@ -17,7 +17,7 @@ from mock import Mock
 
 from twisted.internet import defer
 
-from synapse.api.errors import RegistrationError
+from synapse.api.errors import ResourceLimitError
 from synapse.handlers.register import RegistrationHandler
 from synapse.types import UserID, create_requester
 
@@ -40,16 +40,22 @@ class RegistrationTestCase(unittest.TestCase):
         self.mock_distributor.declare("registered_user")
         self.mock_captcha_client = Mock()
         self.hs = yield setup_test_homeserver(
+            self.addCleanup,
             handlers=None,
             http_client=None,
             expire_access_token=True,
             profile_handler=Mock(),
         )
         self.macaroon_generator = Mock(
-            generate_access_token=Mock(return_value='secret'))
+            generate_access_token=Mock(return_value='secret')
+        )
         self.hs.get_macaroon_generator = Mock(return_value=self.macaroon_generator)
         self.hs.handlers = RegistrationHandlers(self.hs)
         self.handler = self.hs.get_handlers().registration_handler
+        self.store = self.hs.get_datastore()
+        self.hs.config.max_mau_value = 50
+        self.lots_of_users = 100
+        self.small_number_of_users = 1
 
     @defer.inlineCallbacks
     def test_user_is_created_and_logged_in_if_doesnt_exist(self):
@@ -58,7 +64,8 @@ class RegistrationTestCase(unittest.TestCase):
         user_id = "@someone:test"
         requester = create_requester("@as:test")
         result_user_id, result_token = yield self.handler.get_or_create_user(
-            requester, local_part, display_name)
+            requester, local_part, display_name
+        )
         self.assertEquals(result_user_id, user_id)
         self.assertEquals(result_token, 'secret')
 
@@ -69,62 +76,74 @@ class RegistrationTestCase(unittest.TestCase):
         yield store.register(
             user_id=frank.to_string(),
             token="jkv;g498752-43gj['eamb!-5",
-            password_hash=None)
+            password_hash=None,
+        )
         local_part = "frank"
         display_name = "Frank"
         user_id = "@frank:test"
         requester = create_requester("@as:test")
         result_user_id, result_token = yield self.handler.get_or_create_user(
-            requester, local_part, display_name)
+            requester, local_part, display_name
+        )
         self.assertEquals(result_user_id, user_id)
         self.assertEquals(result_token, 'secret')
 
     @defer.inlineCallbacks
-    def test_cannot_register_when_mau_limits_exceeded(self):
-        local_part = "someone"
-        display_name = "someone"
-        requester = create_requester("@as:test")
-        store = self.hs.get_datastore()
+    def test_mau_limits_when_disabled(self):
         self.hs.config.limit_usage_by_mau = False
-        self.hs.config.max_mau_value = 50
-        lots_of_users = 100
-        small_number_users = 1
-
-        store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users))
-
         # Ensure does not throw exception
-        yield self.handler.get_or_create_user(requester, 'a', display_name)
+        yield self.handler.get_or_create_user("requester", 'a', "display_name")
 
+    @defer.inlineCallbacks
+    def test_get_or_create_user_mau_not_blocked(self):
         self.hs.config.limit_usage_by_mau = True
-
-        with self.assertRaises(RegistrationError):
-            yield self.handler.get_or_create_user(requester, 'b', display_name)
-
-        store.count_monthly_users = Mock(return_value=defer.succeed(small_number_users))
-
-        self._macaroon_mock_generator("another_secret")
-
+        self.store.count_monthly_users = Mock(
+            return_value=defer.succeed(self.hs.config.max_mau_value - 1)
+        )
         # Ensure does not throw exception
-        yield self.handler.get_or_create_user("@neil:matrix.org", 'c', "Neil")
+        yield self.handler.get_or_create_user("@user:server", 'c', "User")
+
+    @defer.inlineCallbacks
+    def test_get_or_create_user_mau_blocked(self):
+        self.hs.config.limit_usage_by_mau = True
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(self.lots_of_users)
+        )
+        with self.assertRaises(ResourceLimitError):
+            yield self.handler.get_or_create_user("requester", 'b', "display_name")
 
-        self._macaroon_mock_generator("another another secret")
-        store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users))
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(self.hs.config.max_mau_value)
+        )
+        with self.assertRaises(ResourceLimitError):
+            yield self.handler.get_or_create_user("requester", 'b', "display_name")
 
-        with self.assertRaises(RegistrationError):
-            yield self.handler.register(localpart=local_part)
+    @defer.inlineCallbacks
+    def test_register_mau_blocked(self):
+        self.hs.config.limit_usage_by_mau = True
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(self.lots_of_users)
+        )
+        with self.assertRaises(ResourceLimitError):
+            yield self.handler.register(localpart="local_part")
 
-        self._macaroon_mock_generator("another another secret")
-        store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users))
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(self.hs.config.max_mau_value)
+        )
+        with self.assertRaises(ResourceLimitError):
+            yield self.handler.register(localpart="local_part")
 
-        with self.assertRaises(RegistrationError):
-            yield self.handler.register_saml2(local_part)
+    @defer.inlineCallbacks
+    def test_register_saml2_mau_blocked(self):
+        self.hs.config.limit_usage_by_mau = True
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(self.lots_of_users)
+        )
+        with self.assertRaises(ResourceLimitError):
+            yield self.handler.register_saml2(localpart="local_part")
 
-    def _macaroon_mock_generator(self, secret):
-        """
-        Reset macaroon generator in the case where the test creates multiple users
-        """
-        macaroon_generator = Mock(
-            generate_access_token=Mock(return_value=secret))
-        self.hs.get_macaroon_generator = Mock(return_value=macaroon_generator)
-        self.hs.handlers = RegistrationHandlers(self.hs)
-        self.handler = self.hs.get_handlers().registration_handler
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(self.hs.config.max_mau_value)
+        )
+        with self.assertRaises(ResourceLimitError):
+            yield self.handler.register_saml2(localpart="local_part")
diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py
new file mode 100644
index 0000000000..31f54bbd7d
--- /dev/null
+++ b/tests/handlers/test_sync.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from twisted.internet import defer
+
+from synapse.api.errors import Codes, ResourceLimitError
+from synapse.api.filtering import DEFAULT_FILTER_COLLECTION
+from synapse.handlers.sync import SyncConfig, SyncHandler
+from synapse.types import UserID
+
+import tests.unittest
+import tests.utils
+from tests.utils import setup_test_homeserver
+
+
+class SyncTestCase(tests.unittest.TestCase):
+    """ Tests Sync Handler. """
+
+    @defer.inlineCallbacks
+    def setUp(self):
+        self.hs = yield setup_test_homeserver(self.addCleanup)
+        self.sync_handler = SyncHandler(self.hs)
+        self.store = self.hs.get_datastore()
+
+    @defer.inlineCallbacks
+    def test_wait_for_sync_for_user_auth_blocking(self):
+
+        user_id1 = "@user1:server"
+        user_id2 = "@user2:server"
+        sync_config = self._generate_sync_config(user_id1)
+
+        self.hs.config.limit_usage_by_mau = True
+        self.hs.config.max_mau_value = 1
+
+        # Check that the happy case does not throw errors
+        yield self.store.upsert_monthly_active_user(user_id1)
+        yield self.sync_handler.wait_for_sync_for_user(sync_config)
+
+        # Test that global lock works
+        self.hs.config.hs_disabled = True
+        with self.assertRaises(ResourceLimitError) as e:
+            yield self.sync_handler.wait_for_sync_for_user(sync_config)
+        self.assertEquals(e.exception.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
+
+        self.hs.config.hs_disabled = False
+
+        sync_config = self._generate_sync_config(user_id2)
+
+        with self.assertRaises(ResourceLimitError) as e:
+            yield self.sync_handler.wait_for_sync_for_user(sync_config)
+        self.assertEquals(e.exception.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
+
+    def _generate_sync_config(self, user_id):
+        return SyncConfig(
+            user=UserID(user_id.split(":")[0][1:], user_id.split(":")[1]),
+            filter_collection=DEFAULT_FILTER_COLLECTION,
+            is_guest=False,
+            request_key="request_key",
+            device_id="device_id",
+        )
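
The new `tests/handlers/test_sync.py` covers both blocking paths in `wait_for_sync_for_user`: the `hs_disabled` global switch and the per-user MAU cap, asserting `Codes.RESOURCE_LIMIT_EXCEEDED` in each case. Its `_generate_sync_config` splits the user ID string by hand; the equivalent construction with the usual helper would be:

    from synapse.types import UserID

    # "@user1:server" -> localpart "user1", domain "server"
    user = UserID.from_string("@user1:server")
    assert user.localpart == "user1"
    assert user.domain == "server"
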
diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py
index 2c263af1a3..ad58073a14 100644
--- a/tests/handlers/test_typing.py
+++ b/tests/handlers/test_typing.py
@@ -38,21 +38,19 @@ def _expect_edu(destination, edu_type, content, origin="test"):
         "origin": origin,
         "origin_server_ts": 1000000,
         "pdus": [],
-        "edus": [
-            {
-                "edu_type": edu_type,
-                "content": content,
-            }
-        ],
+        "edus": [{"edu_type": edu_type, "content": content}],
     }
 
 
 def _make_edu_json(origin, edu_type, content):
-    return json.dumps(_expect_edu("test", edu_type, content, origin=origin))
+    return json.dumps(_expect_edu("test", edu_type, content, origin=origin)).encode(
+        'utf8'
+    )
 
 
 class TypingNotificationsTestCase(unittest.TestCase):
     """Tests typing notifications to rooms."""
+
     @defer.inlineCallbacks
     def setUp(self):
         self.clock = MockClock()
@@ -69,21 +67,24 @@ class TypingNotificationsTestCase(unittest.TestCase):
         self.state_handler = Mock()
 
         hs = yield setup_test_homeserver(
+            self.addCleanup,
             "test",
             auth=self.auth,
             clock=self.clock,
-            datastore=Mock(spec=[
-                # Bits that Federation needs
-                "prep_send_transaction",
-                "delivered_txn",
-                "get_received_txn_response",
-                "set_received_txn_response",
-                "get_destination_retry_timings",
-                "get_devices_by_remote",
-                # Bits that user_directory needs
-                "get_user_directory_stream_pos",
-                "get_current_state_deltas",
-            ]),
+            datastore=Mock(
+                spec=[
+                    # Bits that Federation needs
+                    "prep_send_transaction",
+                    "delivered_txn",
+                    "get_received_txn_response",
+                    "set_received_txn_response",
+                    "get_destination_retry_timings",
+                    "get_devices_by_remote",
+                    # Bits that user_directory needs
+                    "get_user_directory_stream_pos",
+                    "get_current_state_deltas",
+                ]
+            ),
             state_handler=self.state_handler,
             handlers=Mock(),
             notifier=mock_notifier,
@@ -98,19 +99,16 @@ class TypingNotificationsTestCase(unittest.TestCase):
         self.event_source = hs.get_event_sources().sources["typing"]
 
         self.datastore = hs.get_datastore()
-        retry_timings_res = {
-            "destination": "",
-            "retry_last_ts": 0,
-            "retry_interval": 0,
-        }
-        self.datastore.get_destination_retry_timings.return_value = (
-            defer.succeed(retry_timings_res)
+        retry_timings_res = {"destination": "", "retry_last_ts": 0, "retry_interval": 0}
+        self.datastore.get_destination_retry_timings.return_value = defer.succeed(
+            retry_timings_res
         )
 
         self.datastore.get_devices_by_remote.return_value = (0, [])
 
         def get_received_txn_response(*args):
             return defer.succeed(None)
+
         self.datastore.get_received_txn_response = get_received_txn_response
 
         self.room_id = "a-room"
@@ -123,10 +121,12 @@ class TypingNotificationsTestCase(unittest.TestCase):
 
         def get_joined_hosts_for_room(room_id):
             return set(member.domain for member in self.room_members)
+
         self.datastore.get_joined_hosts_for_room = get_joined_hosts_for_room
 
         def get_current_user_in_room(room_id):
             return set(str(u) for u in self.room_members)
+
         self.state_handler.get_current_user_in_room = get_current_user_in_room
 
         self.datastore.get_user_directory_stream_pos.return_value = (
@@ -134,19 +134,13 @@ class TypingNotificationsTestCase(unittest.TestCase):
             defer.succeed(1)
         )
 
-        self.datastore.get_current_state_deltas.return_value = (
-            None
-        )
+        self.datastore.get_current_state_deltas.return_value = None
 
         self.auth.check_joined_room = check_joined_room
 
         self.datastore.get_to_device_stream_token = lambda: 0
-        self.datastore.get_new_device_msgs_for_remote = (
-            lambda *args, **kargs: ([], 0)
-        )
-        self.datastore.delete_device_msgs_for_remote = (
-            lambda *args, **kargs: None
-        )
+        self.datastore.get_new_device_msgs_for_remote = lambda *args, **kargs: ([], 0)
+        self.datastore.delete_device_msgs_for_remote = lambda *args, **kargs: None
 
         # Some local users to test with
         self.u_apple = UserID.from_string("@apple:test")
@@ -168,24 +162,23 @@ class TypingNotificationsTestCase(unittest.TestCase):
             timeout=20000,
         )
 
-        self.on_new_event.assert_has_calls([
-            call('typing_key', 1, rooms=[self.room_id]),
-        ])
+        self.on_new_event.assert_has_calls(
+            [call('typing_key', 1, rooms=[self.room_id])]
+        )
 
         self.assertEquals(self.event_source.get_current_key(), 1)
         events = yield self.event_source.get_new_events(
-            room_ids=[self.room_id],
-            from_key=0,
+            room_ids=[self.room_id], from_key=0
         )
         self.assertEquals(
             events[0],
             [
-                {"type": "m.typing",
-                 "room_id": self.room_id,
-                 "content": {
-                     "user_ids": [self.u_apple.to_string()],
-                 }},
-            ]
+                {
+                    "type": "m.typing",
+                    "room_id": self.room_id,
+                    "content": {"user_ids": [self.u_apple.to_string()]},
+                }
+            ],
         )
 
     @defer.inlineCallbacks
@@ -204,13 +197,13 @@ class TypingNotificationsTestCase(unittest.TestCase):
                         "room_id": self.room_id,
                         "user_id": self.u_apple.to_string(),
                         "typing": True,
-                    }
+                    },
                 ),
                 json_data_callback=ANY,
                 long_retries=True,
                 backoff_on_404=True,
             ),
-            defer.succeed((200, "OK"))
+            defer.succeed((200, "OK")),
         )
 
         yield self.handler.started_typing(
@@ -238,27 +231,29 @@ class TypingNotificationsTestCase(unittest.TestCase):
                     "room_id": self.room_id,
                     "user_id": self.u_onion.to_string(),
                     "typing": True,
-                }
+                },
             ),
             federation_auth=True,
         )
 
-        self.on_new_event.assert_has_calls([
-            call('typing_key', 1, rooms=[self.room_id]),
-        ])
+        self.on_new_event.assert_has_calls(
+            [call('typing_key', 1, rooms=[self.room_id])]
+        )
 
         self.assertEquals(self.event_source.get_current_key(), 1)
         events = yield self.event_source.get_new_events(
-            room_ids=[self.room_id],
-            from_key=0
+            room_ids=[self.room_id], from_key=0
+        )
+        self.assertEquals(
+            events[0],
+            [
+                {
+                    "type": "m.typing",
+                    "room_id": self.room_id,
+                    "content": {"user_ids": [self.u_onion.to_string()]},
+                }
+            ],
         )
-        self.assertEquals(events[0], [{
-            "type": "m.typing",
-            "room_id": self.room_id,
-            "content": {
-                "user_ids": [self.u_onion.to_string()],
-            },
-        }])
 
     @defer.inlineCallbacks
     def test_stopped_typing(self):
@@ -276,17 +271,18 @@ class TypingNotificationsTestCase(unittest.TestCase):
                         "room_id": self.room_id,
                         "user_id": self.u_apple.to_string(),
                         "typing": False,
-                    }
+                    },
                 ),
                 json_data_callback=ANY,
                 long_retries=True,
                 backoff_on_404=True,
             ),
-            defer.succeed((200, "OK"))
+            defer.succeed((200, "OK")),
         )
 
         # Gut-wrenching
         from synapse.handlers.typing import RoomMember
+
         member = RoomMember(self.room_id, self.u_apple.to_string())
         self.handler._member_typing_until[member] = 1002000
         self.handler._room_typing[self.room_id] = set([self.u_apple.to_string()])
@@ -294,29 +290,29 @@ class TypingNotificationsTestCase(unittest.TestCase):
         self.assertEquals(self.event_source.get_current_key(), 0)
 
         yield self.handler.stopped_typing(
-            target_user=self.u_apple,
-            auth_user=self.u_apple,
-            room_id=self.room_id,
+            target_user=self.u_apple, auth_user=self.u_apple, room_id=self.room_id
         )
 
-        self.on_new_event.assert_has_calls([
-            call('typing_key', 1, rooms=[self.room_id]),
-        ])
+        self.on_new_event.assert_has_calls(
+            [call('typing_key', 1, rooms=[self.room_id])]
+        )
 
         yield put_json.await_calls()
 
         self.assertEquals(self.event_source.get_current_key(), 1)
         events = yield self.event_source.get_new_events(
-            room_ids=[self.room_id],
-            from_key=0,
+            room_ids=[self.room_id], from_key=0
+        )
+        self.assertEquals(
+            events[0],
+            [
+                {
+                    "type": "m.typing",
+                    "room_id": self.room_id,
+                    "content": {"user_ids": []},
+                }
+            ],
         )
-        self.assertEquals(events[0], [{
-            "type": "m.typing",
-            "room_id": self.room_id,
-            "content": {
-                "user_ids": [],
-            },
-        }])
 
     @defer.inlineCallbacks
     def test_typing_timeout(self):
@@ -331,42 +327,46 @@ class TypingNotificationsTestCase(unittest.TestCase):
             timeout=10000,
         )
 
-        self.on_new_event.assert_has_calls([
-            call('typing_key', 1, rooms=[self.room_id]),
-        ])
+        self.on_new_event.assert_has_calls(
+            [call('typing_key', 1, rooms=[self.room_id])]
+        )
         self.on_new_event.reset_mock()
 
         self.assertEquals(self.event_source.get_current_key(), 1)
         events = yield self.event_source.get_new_events(
-            room_ids=[self.room_id],
-            from_key=0,
+            room_ids=[self.room_id], from_key=0
+        )
+        self.assertEquals(
+            events[0],
+            [
+                {
+                    "type": "m.typing",
+                    "room_id": self.room_id,
+                    "content": {"user_ids": [self.u_apple.to_string()]},
+                }
+            ],
         )
-        self.assertEquals(events[0], [{
-            "type": "m.typing",
-            "room_id": self.room_id,
-            "content": {
-                "user_ids": [self.u_apple.to_string()],
-            },
-        }])
 
         self.clock.advance_time(16)
 
-        self.on_new_event.assert_has_calls([
-            call('typing_key', 2, rooms=[self.room_id]),
-        ])
+        self.on_new_event.assert_has_calls(
+            [call('typing_key', 2, rooms=[self.room_id])]
+        )
 
         self.assertEquals(self.event_source.get_current_key(), 2)
         events = yield self.event_source.get_new_events(
-            room_ids=[self.room_id],
-            from_key=1,
+            room_ids=[self.room_id], from_key=1
+        )
+        self.assertEquals(
+            events[0],
+            [
+                {
+                    "type": "m.typing",
+                    "room_id": self.room_id,
+                    "content": {"user_ids": []},
+                }
+            ],
         )
-        self.assertEquals(events[0], [{
-            "type": "m.typing",
-            "room_id": self.room_id,
-            "content": {
-                "user_ids": [],
-            },
-        }])
 
         # SYN-230 - see if we can still set after timeout
 
@@ -377,20 +377,22 @@ class TypingNotificationsTestCase(unittest.TestCase):
             timeout=10000,
         )
 
-        self.on_new_event.assert_has_calls([
-            call('typing_key', 3, rooms=[self.room_id]),
-        ])
+        self.on_new_event.assert_has_calls(
+            [call('typing_key', 3, rooms=[self.room_id])]
+        )
         self.on_new_event.reset_mock()
 
         self.assertEquals(self.event_source.get_current_key(), 3)
         events = yield self.event_source.get_new_events(
-            room_ids=[self.room_id],
-            from_key=0,
+            room_ids=[self.room_id], from_key=0
+        )
+        self.assertEquals(
+            events[0],
+            [
+                {
+                    "type": "m.typing",
+                    "room_id": self.room_id,
+                    "content": {"user_ids": [self.u_apple.to_string()]},
+                }
+            ],
         )
-        self.assertEquals(events[0], [{
-            "type": "m.typing",
-            "room_id": self.room_id,
-            "content": {
-                "user_ids": [self.u_apple.to_string()],
-            },
-        }])
diff --git a/tests/http/test_endpoint.py b/tests/http/test_endpoint.py
index 60e6a75953..3b0155ed03 100644
--- a/tests/http/test_endpoint.py
+++ b/tests/http/test_endpoint.py
@@ -39,15 +39,13 @@ class ServerNameTestCase(unittest.TestCase):
             "[1234",
             "underscore_.com",
             "percent%65.com",
-            "1234:5678:80",   # too many colons
+            "1234:5678:80",  # too many colons
         ]
         for i in test_data:
             try:
                 parse_and_validate_server_name(i)
                 self.fail(
-                    "Expected parse_and_validate_server_name('%s') to throw" % (
-                        i,
-                    ),
+                    "Expected parse_and_validate_server_name('%s') to throw" % (i,)
                 )
             except ValueError:
                 pass
diff --git a/tests/replication/slave/storage/_base.py b/tests/replication/slave/storage/_base.py
index a103e7be80..65df116efc 100644
--- a/tests/replication/slave/storage/_base.py
+++ b/tests/replication/slave/storage/_base.py
@@ -31,6 +31,7 @@ from tests.utils import setup_test_homeserver
 
 class TestReplicationClientHandler(ReplicationClientHandler):
     """Overrides on_rdata so that we can wait for it to happen"""
+
     def __init__(self, store):
         super(TestReplicationClientHandler, self).__init__(store)
         self._rdata_awaiters = []
@@ -53,12 +54,11 @@ class BaseSlavedStoreTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def setUp(self):
         self.hs = yield setup_test_homeserver(
+            self.addCleanup,
             "blue",
             http_client=None,
             federation_client=Mock(),
-            ratelimiter=NonCallableMock(spec_set=[
-                "send_message",
-            ]),
+            ratelimiter=NonCallableMock(spec_set=["send_message"]),
         )
         self.hs.get_ratelimiter().send_message.return_value = (True, 0)
 
diff --git a/tests/replication/slave/storage/test_account_data.py b/tests/replication/slave/storage/test_account_data.py
index adf226404e..87cc2b2fba 100644
--- a/tests/replication/slave/storage/test_account_data.py
+++ b/tests/replication/slave/storage/test_account_data.py
@@ -29,20 +29,14 @@ class SlavedAccountDataStoreTestCase(BaseSlavedStoreTestCase):
 
     @defer.inlineCallbacks
     def test_user_account_data(self):
-        yield self.master_store.add_account_data_for_user(
-            USER_ID, TYPE, {"a": 1}
-        )
+        yield self.master_store.add_account_data_for_user(USER_ID, TYPE, {"a": 1})
         yield self.replicate()
         yield self.check(
-            "get_global_account_data_by_type_for_user",
-            [TYPE, USER_ID], {"a": 1}
+            "get_global_account_data_by_type_for_user", [TYPE, USER_ID], {"a": 1}
         )
 
-        yield self.master_store.add_account_data_for_user(
-            USER_ID, TYPE, {"a": 2}
-        )
+        yield self.master_store.add_account_data_for_user(USER_ID, TYPE, {"a": 2})
         yield self.replicate()
         yield self.check(
-            "get_global_account_data_by_type_for_user",
-            [TYPE, USER_ID], {"a": 2}
+            "get_global_account_data_by_type_for_user", [TYPE, USER_ID], {"a": 2}
         )
diff --git a/tests/replication/slave/storage/test_events.py b/tests/replication/slave/storage/test_events.py
index f5b47f5ec0..2ba80ccdcf 100644
--- a/tests/replication/slave/storage/test_events.py
+++ b/tests/replication/slave/storage/test_events.py
@@ -38,6 +38,7 @@ def patch__eq__(cls):
     def unpatch():
         if eq is not None:
             cls.__eq__ = eq
+
     return unpatch
 
 
@@ -48,10 +49,7 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase):
     def setUp(self):
         # Patch up the equality operator for events so that we can check
         # whether lists of events match using assertEquals
-        self.unpatches = [
-            patch__eq__(_EventInternalMetadata),
-            patch__eq__(FrozenEvent),
-        ]
+        self.unpatches = [patch__eq__(_EventInternalMetadata), patch__eq__(FrozenEvent)]
         return super(SlavedEventStoreTestCase, self).setUp()
 
     def tearDown(self):
@@ -61,33 +59,27 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase):
     def test_get_latest_event_ids_in_room(self):
         create = yield self.persist(type="m.room.create", key="", creator=USER_ID)
         yield self.replicate()
-        yield self.check(
-            "get_latest_event_ids_in_room", (ROOM_ID,), [create.event_id]
-        )
+        yield self.check("get_latest_event_ids_in_room", (ROOM_ID,), [create.event_id])
 
         join = yield self.persist(
-            type="m.room.member", key=USER_ID, membership="join",
+            type="m.room.member",
+            key=USER_ID,
+            membership="join",
             prev_events=[(create.event_id, {})],
         )
         yield self.replicate()
-        yield self.check(
-            "get_latest_event_ids_in_room", (ROOM_ID,), [join.event_id]
-        )
+        yield self.check("get_latest_event_ids_in_room", (ROOM_ID,), [join.event_id])
 
     @defer.inlineCallbacks
     def test_redactions(self):
         yield self.persist(type="m.room.create", key="", creator=USER_ID)
         yield self.persist(type="m.room.member", key=USER_ID, membership="join")
 
-        msg = yield self.persist(
-            type="m.room.message", msgtype="m.text", body="Hello"
-        )
+        msg = yield self.persist(type="m.room.message", msgtype="m.text", body="Hello")
         yield self.replicate()
         yield self.check("get_event", [msg.event_id], msg)
 
-        redaction = yield self.persist(
-            type="m.room.redaction", redacts=msg.event_id
-        )
+        redaction = yield self.persist(type="m.room.redaction", redacts=msg.event_id)
         yield self.replicate()
 
         msg_dict = msg.get_dict()
@@ -102,9 +94,7 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase):
         yield self.persist(type="m.room.create", key="", creator=USER_ID)
         yield self.persist(type="m.room.member", key=USER_ID, membership="join")
 
-        msg = yield self.persist(
-            type="m.room.message", msgtype="m.text", body="Hello"
-        )
+        msg = yield self.persist(type="m.room.message", msgtype="m.text", body="Hello")
         yield self.replicate()
         yield self.check("get_event", [msg.event_id], msg)
 
@@ -122,19 +112,29 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase):
 
     @defer.inlineCallbacks
     def test_invites(self):
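+        # Create the room first, so the invite below has a room to refer to.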
+        yield self.persist(type="m.room.create", key="", creator=USER_ID)
         yield self.check("get_invited_rooms_for_user", [USER_ID_2], [])
         event = yield self.persist(
             type="m.room.member", key=USER_ID_2, membership="invite"
         )
         yield self.replicate()
-        yield self.check("get_invited_rooms_for_user", [USER_ID_2], [RoomsForUser(
-            ROOM_ID, USER_ID, "invite", event.event_id,
-            event.internal_metadata.stream_ordering
-        )])
+        yield self.check(
+            "get_invited_rooms_for_user",
+            [USER_ID_2],
+            [
+                RoomsForUser(
+                    ROOM_ID,
+                    USER_ID,
+                    "invite",
+                    event.event_id,
+                    event.internal_metadata.stream_ordering,
+                )
+            ],
+        )
 
     @defer.inlineCallbacks
     def test_push_actions_for_user(self):
-        yield self.persist(type="m.room.create", creator=USER_ID)
+        yield self.persist(type="m.room.create", key="", creator=USER_ID)
         yield self.persist(type="m.room.join", key=USER_ID, membership="join")
         yield self.persist(
             type="m.room.join", sender=USER_ID, key=USER_ID_2, membership="join"
@@ -146,40 +146,55 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase):
         yield self.check(
             "get_unread_event_push_actions_by_room_for_user",
             [ROOM_ID, USER_ID_2, event1.event_id],
-            {"highlight_count": 0, "notify_count": 0}
+            {"highlight_count": 0, "notify_count": 0},
         )
 
         yield self.persist(
-            type="m.room.message", msgtype="m.text", body="world",
+            type="m.room.message",
+            msgtype="m.text",
+            body="world",
             push_actions=[(USER_ID_2, ["notify"])],
         )
         yield self.replicate()
         yield self.check(
             "get_unread_event_push_actions_by_room_for_user",
             [ROOM_ID, USER_ID_2, event1.event_id],
-            {"highlight_count": 0, "notify_count": 1}
+            {"highlight_count": 0, "notify_count": 1},
         )
 
         yield self.persist(
-            type="m.room.message", msgtype="m.text", body="world",
-            push_actions=[(USER_ID_2, [
-                "notify", {"set_tweak": "highlight", "value": True}
-            ])],
+            type="m.room.message",
+            msgtype="m.text",
+            body="world",
+            push_actions=[
+                (USER_ID_2, ["notify", {"set_tweak": "highlight", "value": True}])
+            ],
         )
         yield self.replicate()
         yield self.check(
             "get_unread_event_push_actions_by_room_for_user",
             [ROOM_ID, USER_ID_2, event1.event_id],
-            {"highlight_count": 1, "notify_count": 2}
+            {"highlight_count": 1, "notify_count": 2},
         )
 
     event_id = 0
 
     @defer.inlineCallbacks
     def persist(
-        self, sender=USER_ID, room_id=ROOM_ID, type={}, key=None, internal={},
-        state=None, reset_state=False, backfill=False,
-        depth=None, prev_events=[], auth_events=[], prev_state=[], redacts=None,
+        self,
+        sender=USER_ID,
+        room_id=ROOM_ID,
+        type={},
+        key=None,
+        internal={},
+        state=None,
+        reset_state=False,
+        backfill=False,
+        depth=None,
+        prev_events=[],
+        auth_events=[],
+        prev_state=[],
+        redacts=None,
         push_actions=[],
         **content
     ):
@@ -219,34 +234,23 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase):
         self.event_id += 1
 
         if state is not None:
-            state_ids = {
-                key: e.event_id for key, e in state.items()
-            }
+            state_ids = {key: e.event_id for key, e in state.items()}
             context = EventContext.with_state(
-                state_group=None,
-                current_state_ids=state_ids,
-                prev_state_ids=state_ids
+                state_group=None, current_state_ids=state_ids, prev_state_ids=state_ids
             )
         else:
             state_handler = self.hs.get_state_handler()
             context = yield state_handler.compute_event_context(event)
 
         yield self.master_store.add_push_actions_to_staging(
-            event.event_id, {
-                user_id: actions
-                for user_id, actions in push_actions
-            },
+            event.event_id, {user_id: actions for user_id, actions in push_actions}
         )
 
         ordering = None
         if backfill:
-            yield self.master_store.persist_events(
-                [(event, context)], backfilled=True
-            )
+            yield self.master_store.persist_events([(event, context)], backfilled=True)
         else:
-            ordering, _ = yield self.master_store.persist_event(
-                event, context,
-            )
+            ordering, _ = yield self.master_store.persist_event(event, context)
 
         if ordering:
             event.internal_metadata.stream_ordering = ordering
diff --git a/tests/replication/slave/storage/test_receipts.py b/tests/replication/slave/storage/test_receipts.py
index e6d670cc1f..ae1adeded1 100644
--- a/tests/replication/slave/storage/test_receipts.py
+++ b/tests/replication/slave/storage/test_receipts.py
@@ -34,6 +34,6 @@ class SlavedReceiptTestCase(BaseSlavedStoreTestCase):
             ROOM_ID, "m.read", USER_ID, [EVENT_ID], {}
         )
         yield self.replicate()
-        yield self.check("get_receipts_for_user", [USER_ID, "m.read"], {
-            ROOM_ID: EVENT_ID
-        })
+        yield self.check(
+            "get_receipts_for_user", [USER_ID, "m.read"], {ROOM_ID: EVENT_ID}
+        )
diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py
index 34e68ae82f..708dc26e61 100644
--- a/tests/rest/client/test_transactions.py
+++ b/tests/rest/client/test_transactions.py
@@ -11,7 +11,6 @@ from tests.utils import MockClock
 
 
 class HttpTransactionCacheTestCase(unittest.TestCase):
-
     def setUp(self):
         self.clock = MockClock()
         self.hs = Mock()
@@ -24,9 +23,7 @@ class HttpTransactionCacheTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_executes_given_function(self):
-        cb = Mock(
-            return_value=defer.succeed(self.mock_http_response)
-        )
+        cb = Mock(return_value=defer.succeed(self.mock_http_response))
         res = yield self.cache.fetch_or_execute(
             self.mock_key, cb, "some_arg", keyword="arg"
         )
@@ -35,9 +32,7 @@ class HttpTransactionCacheTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_deduplicates_based_on_key(self):
-        cb = Mock(
-            return_value=defer.succeed(self.mock_http_response)
-        )
+        cb = Mock(return_value=defer.succeed(self.mock_http_response))
         for i in range(3):  # invoke multiple times
             res = yield self.cache.fetch_or_execute(
                 self.mock_key, cb, "some_arg", keyword="arg", changing_args=i
@@ -85,7 +80,7 @@ class HttpTransactionCacheTestCase(unittest.TestCase):
             try:
                 yield self.cache.fetch_or_execute(self.mock_key, cb)
             except Exception as e:
-                self.assertEqual(e.message, "boo")
+                self.assertEqual(e.args[0], "boo")
             self.assertIs(LoggingContext.current_context(), test_context)
 
             res = yield self.cache.fetch_or_execute(self.mock_key, cb)
@@ -111,7 +106,7 @@ class HttpTransactionCacheTestCase(unittest.TestCase):
             try:
                 yield self.cache.fetch_or_execute(self.mock_key, cb)
             except Exception as e:
-                self.assertEqual(e.message, "boo")
+                self.assertEqual(e.args[0], "boo")
             self.assertIs(LoggingContext.current_context(), test_context)
 
             res = yield self.cache.fetch_or_execute(self.mock_key, cb)
@@ -120,29 +115,18 @@ class HttpTransactionCacheTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_cleans_up(self):
-        cb = Mock(
-            return_value=defer.succeed(self.mock_http_response)
-        )
-        yield self.cache.fetch_or_execute(
-            self.mock_key, cb, "an arg"
-        )
+        cb = Mock(return_value=defer.succeed(self.mock_http_response))
+        yield self.cache.fetch_or_execute(self.mock_key, cb, "an arg")
         # should NOT have cleaned up yet
         self.clock.advance_time_msec(CLEANUP_PERIOD_MS / 2)
 
-        yield self.cache.fetch_or_execute(
-            self.mock_key, cb, "an arg"
-        )
+        yield self.cache.fetch_or_execute(self.mock_key, cb, "an arg")
         # still using cache
         cb.assert_called_once_with("an arg")
 
         self.clock.advance_time_msec(CLEANUP_PERIOD_MS)
 
-        yield self.cache.fetch_or_execute(
-            self.mock_key, cb, "an arg"
-        )
+        yield self.cache.fetch_or_execute(self.mock_key, cb, "an arg")
         # no longer using cache
         self.assertEqual(cb.call_count, 2)
-        self.assertEqual(
-            cb.call_args_list,
-            [call("an arg",), call("an arg",)]
-        )
+        self.assertEqual(cb.call_args_list, [call("an arg"), call("an arg")])
diff --git a/tests/rest/client/v1/test_admin.py b/tests/rest/client/v1/test_admin.py
index 8c90145601..1a553fa3f9 100644
--- a/tests/rest/client/v1/test_admin.py
+++ b/tests/rest/client/v1/test_admin.py
@@ -51,7 +51,7 @@ class UserRegisterTestCase(unittest.TestCase):
         self.secrets = Mock()
 
         self.hs = setup_test_homeserver(
-            http_client=None, clock=self.hs_clock, reactor=self.clock
+            self.addCleanup, http_client=None, clock=self.hs_clock, reactor=self.clock
         )
 
         self.hs.config.registration_shared_secret = u"shared"
@@ -140,7 +140,7 @@ class UserRegisterTestCase(unittest.TestCase):
                 "admin": True,
                 "mac": want_mac,
             }
-        ).encode('utf8')
+        )
         request, channel = make_request("POST", self.url, body.encode('utf8'))
         render(request, self.resource, self.clock)
 
@@ -168,7 +168,7 @@ class UserRegisterTestCase(unittest.TestCase):
                 "admin": True,
                 "mac": want_mac,
             }
-        ).encode('utf8')
+        )
         request, channel = make_request("POST", self.url, body.encode('utf8'))
         render(request, self.resource, self.clock)
 
@@ -195,7 +195,7 @@ class UserRegisterTestCase(unittest.TestCase):
                 "admin": True,
                 "mac": want_mac,
             }
-        ).encode('utf8')
+        )
         request, channel = make_request("POST", self.url, body.encode('utf8'))
         render(request, self.resource, self.clock)
 
@@ -215,6 +215,7 @@ class UserRegisterTestCase(unittest.TestCase):
         mac.  Admin is optional.  Additional checks are done for length and
         type.
         """
+
         def nonce():
             request, channel = make_request("GET", self.url)
             render(request, self.resource, self.clock)
@@ -253,7 +254,7 @@ class UserRegisterTestCase(unittest.TestCase):
         self.assertEqual('Invalid username', channel.json_body["error"])
 
         # Must not have null bytes
-        body = json.dumps({"nonce": nonce(), "username": b"abcd\x00"})
+        body = json.dumps({"nonce": nonce(), "username": u"abcd\u0000"})
         request, channel = make_request("POST", self.url, body.encode('utf8'))
         render(request, self.resource, self.clock)
 
@@ -289,7 +290,9 @@ class UserRegisterTestCase(unittest.TestCase):
         self.assertEqual('Invalid password', channel.json_body["error"])
 
         # Must not have null bytes
-        body = json.dumps({"nonce": nonce(), "username": "a", "password": b"abcd\x00"})
+        body = json.dumps(
+            {"nonce": nonce(), "username": "a", "password": u"abcd\u0000"}
+        )
         request, channel = make_request("POST", self.url, body.encode('utf8'))
         render(request, self.resource, self.clock)
 
diff --git a/tests/rest/client/v1/test_events.py b/tests/rest/client/v1/test_events.py
index 50418153fa..956f7fc4c4 100644
--- a/tests/rest/client/v1/test_events.py
+++ b/tests/rest/client/v1/test_events.py
@@ -41,11 +41,10 @@ class EventStreamPermissionsTestCase(RestTestCase):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
 
         hs = yield setup_test_homeserver(
+            self.addCleanup,
             http_client=None,
             federation_client=Mock(),
-            ratelimiter=NonCallableMock(spec_set=[
-                "send_message",
-            ]),
+            ratelimiter=NonCallableMock(spec_set=["send_message"]),
         )
         self.ratelimiter = hs.get_ratelimiter()
         self.ratelimiter.send_message.return_value = (True, 0)
@@ -83,7 +82,7 @@ class EventStreamPermissionsTestCase(RestTestCase):
         # behaviour is used instead to be consistent with the r0 spec.
         # see issue #2602
         (code, response) = yield self.mock_resource.trigger_get(
-            "/events?access_token=%s" % ("invalid" + self.token, )
+            "/events?access_token=%s" % ("invalid" + self.token,)
         )
         self.assertEquals(401, code, msg=str(response))
 
@@ -98,18 +97,12 @@ class EventStreamPermissionsTestCase(RestTestCase):
 
     @defer.inlineCallbacks
     def test_stream_room_permissions(self):
-        room_id = yield self.create_room_as(
-            self.other_user,
-            tok=self.other_token
-        )
+        room_id = yield self.create_room_as(self.other_user, tok=self.other_token)
         yield self.send(room_id, tok=self.other_token)
 
         # invited to room (expect no content for room)
         yield self.invite(
-            room_id,
-            src=self.other_user,
-            targ=self.user_id,
-            tok=self.other_token
+            room_id, src=self.other_user, targ=self.user_id, tok=self.other_token
         )
 
         (code, response) = yield self.mock_resource.trigger_get(
@@ -120,13 +113,16 @@ class EventStreamPermissionsTestCase(RestTestCase):
         # We may get a presence event for ourselves down
         self.assertEquals(
             0,
-            len([
-                c for c in response["chunk"]
-                if not (
-                    c.get("type") == "m.presence"
-                    and c["content"].get("user_id") == self.user_id
-                )
-            ])
+            len(
+                [
+                    c
+                    for c in response["chunk"]
+                    if not (
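+    # The status code of the most recent response, as an int. Like json_body,
+    # this raises if no response has been written yet.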
+                        c.get("type") == "m.presence"
+                        and c["content"].get("user_id") == self.user_id
+                    )
+                ]
+            ),
         )
 
         # joined room (expect all content for room)
diff --git a/tests/rest/client/v1/test_presence.py b/tests/rest/client/v1/test_presence.py
new file mode 100644
index 0000000000..66c2b68707
--- /dev/null
+++ b/tests/rest/client/v1/test_presence.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mock import Mock
+
+from synapse.rest.client.v1 import presence
+from synapse.types import UserID
+
+from tests import unittest
+
+
+class PresenceTestCase(unittest.HomeserverTestCase):
+    """ Tests presence REST API. """
+
+    user_id = "@sid:red"
+
+    user = UserID.from_string(user_id)
+    servlets = [presence.register_servlets]
+
+    def make_homeserver(self, reactor, clock):
+
+        hs = self.setup_test_homeserver(
+            "red", http_client=None, federation_client=Mock()
+        )
+
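+        # Stub out the presence handler so the tests can simply count calls
+        # to set_state rather than exercising the real presence machinery.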
+        hs.presence_handler = Mock()
+
+        return hs
+
+    def test_put_presence(self):
+        """
+        PUT to the status endpoint with use_presence enabled will call
+        set_state on the presence handler.
+        """
+        self.hs.config.use_presence = True
+
+        body = {"presence": "here", "status_msg": "beep boop"}
+        request, channel = self.make_request(
+            "PUT", "/presence/%s/status" % (self.user_id,), body
+        )
+        self.render(request)
+
+        self.assertEqual(channel.code, 200)
+        self.assertEqual(self.hs.presence_handler.set_state.call_count, 1)
+
+    def test_put_presence_disabled(self):
+        """
+        PUT to the status endpoint with use_presence disabled will NOT call
+        set_state on the presence handler.
+        """
+        self.hs.config.use_presence = False
+
+        body = {"presence": "here", "status_msg": "beep boop"}
+        request, channel = self.make_request(
+            "PUT", "/presence/%s/status" % (self.user_id,), body
+        )
+        self.render(request)
+
+        self.assertEqual(channel.code, 200)
+        self.assertEqual(self.hs.presence_handler.set_state.call_count, 0)
diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py
index d71cc8e0db..1eab9c3bdb 100644
--- a/tests/rest/client/v1/test_profile.py
+++ b/tests/rest/client/v1/test_profile.py
@@ -36,20 +36,23 @@ class ProfileTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def setUp(self):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
-        self.mock_handler = Mock(spec=[
-            "get_displayname",
-            "set_displayname",
-            "get_avatar_url",
-            "set_avatar_url",
-        ])
+        self.mock_handler = Mock(
+            spec=[
+                "get_displayname",
+                "set_displayname",
+                "get_avatar_url",
+                "set_avatar_url",
+            ]
+        )
 
         hs = yield setup_test_homeserver(
+            self.addCleanup,
             "test",
             http_client=None,
             resource_for_client=self.mock_resource,
             federation=Mock(),
             federation_client=Mock(),
-            profile_handler=self.mock_handler
+            profile_handler=self.mock_handler,
         )
 
         def _get_user_by_req(request=None, allow_guest=False):
@@ -78,9 +81,7 @@ class ProfileTestCase(unittest.TestCase):
         mocked_set.return_value = defer.succeed(())
 
         (code, response) = yield self.mock_resource.trigger(
-            "PUT",
-            "/profile/%s/displayname" % (myid),
-            '{"displayname": "Frank Jr."}'
+            "PUT", "/profile/%s/displayname" % (myid), b'{"displayname": "Frank Jr."}'
         )
 
         self.assertEquals(200, code)
@@ -94,14 +95,12 @@ class ProfileTestCase(unittest.TestCase):
         mocked_set.side_effect = AuthError(400, "message")
 
         (code, response) = yield self.mock_resource.trigger(
-            "PUT", "/profile/%s/displayname" % ("@4567:test"),
-            '{"displayname": "Frank Jr."}'
+            "PUT",
+            "/profile/%s/displayname" % ("@4567:test"),
+            b'{"displayname": "Frank Jr."}',
         )
 
-        self.assertTrue(
-            400 <= code < 499,
-            msg="code %d is in the 4xx range" % (code)
-        )
+        self.assertTrue(400 <= code < 499, msg="code %d is in the 4xx range" % (code))
 
     @defer.inlineCallbacks
     def test_get_other_name(self):
@@ -121,14 +120,12 @@ class ProfileTestCase(unittest.TestCase):
         mocked_set.side_effect = SynapseError(400, "message")
 
         (code, response) = yield self.mock_resource.trigger(
-            "PUT", "/profile/%s/displayname" % ("@opaque:elsewhere"),
-            '{"displayname":"bob"}'
+            "PUT",
+            "/profile/%s/displayname" % ("@opaque:elsewhere"),
+            b'{"displayname":"bob"}',
         )
 
-        self.assertTrue(
-            400 <= code <= 499,
-            msg="code %d is in the 4xx range" % (code)
-        )
+        self.assertTrue(400 <= code <= 499, msg="code %d is in the 4xx range" % (code))
 
     @defer.inlineCallbacks
     def test_get_my_avatar(self):
@@ -151,7 +148,7 @@ class ProfileTestCase(unittest.TestCase):
         (code, response) = yield self.mock_resource.trigger(
             "PUT",
             "/profile/%s/avatar_url" % (myid),
-            '{"avatar_url": "http://my.server/pic.gif"}'
+            b'{"avatar_url": "http://my.server/pic.gif"}',
         )
 
         self.assertEquals(200, code)
diff --git a/tests/rest/client/v1/test_register.py b/tests/rest/client/v1/test_register.py
index 83a23cd8fe..6b7ff813d5 100644
--- a/tests/rest/client/v1/test_register.py
+++ b/tests/rest/client/v1/test_register.py
@@ -25,13 +25,14 @@ from synapse.rest.client.v1_only.register import register_servlets
 from synapse.util import Clock
 
 from tests import unittest
-from tests.server import make_request, setup_test_homeserver
+from tests.server import make_request, render, setup_test_homeserver
 
 
 class CreateUserServletTestCase(unittest.TestCase):
     """
     Tests for CreateUserRestServlet.
     """
+
     if PY3:
         skip = "Not ported to Python 3."
 
@@ -48,7 +49,7 @@ class CreateUserServletTestCase(unittest.TestCase):
         self.hs_clock = Clock(self.clock)
 
         self.hs = self.hs = setup_test_homeserver(
-            http_client=None, clock=self.hs_clock, reactor=self.clock
+            self.addCleanup, http_client=None, clock=self.hs_clock, reactor=self.clock
         )
         self.hs.get_datastore = Mock(return_value=self.datastore)
         self.hs.get_handlers = Mock(return_value=handlers)
@@ -76,10 +77,7 @@ class CreateUserServletTestCase(unittest.TestCase):
         )
 
         request, channel = make_request(b"POST", url, request_data)
-        request.render(res)
-
-        # Advance the clock because it waits
-        self.clock.advance(1)
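+        # render() now drives the reactor for us until the request completes.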
+        render(request, res, self.clock)
 
         self.assertEquals(channel.result["code"], b"200")
 
diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py
index 00fc796787..9fe0760496 100644
--- a/tests/rest/client/v1/test_rooms.py
+++ b/tests/rest/client/v1/test_rooms.py
@@ -50,6 +50,7 @@ class RoomBase(unittest.TestCase):
         self.hs_clock = Clock(self.clock)
 
         self.hs = setup_test_homeserver(
+            self.addCleanup,
             "red",
             http_client=None,
             clock=self.hs_clock,
diff --git a/tests/rest/client/v1/test_typing.py b/tests/rest/client/v1/test_typing.py
index bddb3302e4..0ad814c5e5 100644
--- a/tests/rest/client/v1/test_typing.py
+++ b/tests/rest/client/v1/test_typing.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2018 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,41 +18,32 @@
 
 from mock import Mock, NonCallableMock
 
-# twisted imports
 from twisted.internet import defer
 
-import synapse.rest.client.v1.room
+from synapse.rest.client.v1 import room
 from synapse.types import UserID
 
-from ....utils import MockClock, MockHttpResource, setup_test_homeserver
-from .utils import RestTestCase
+from tests import unittest
 
 PATH_PREFIX = "/_matrix/client/api/v1"
 
 
-class RoomTypingTestCase(RestTestCase):
+class RoomTypingTestCase(unittest.HomeserverTestCase):
     """ Tests /rooms/$room_id/typing/$user_id REST API. """
+
     user_id = "@sid:red"
 
     user = UserID.from_string(user_id)
+    servlets = [room.register_servlets]
 
-    @defer.inlineCallbacks
-    def setUp(self):
-        self.clock = MockClock()
-
-        self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
-        self.auth_user_id = self.user_id
+    def make_homeserver(self, reactor, clock):
 
-        hs = yield setup_test_homeserver(
+        hs = self.setup_test_homeserver(
             "red",
-            clock=self.clock,
             http_client=None,
             federation_client=Mock(),
-            ratelimiter=NonCallableMock(spec_set=[
-                "send_message",
-            ]),
+            ratelimiter=NonCallableMock(spec_set=["send_message"]),
         )
-        self.hs = hs
 
         self.event_source = hs.get_event_sources().sources["typing"]
 
@@ -71,6 +63,7 @@ class RoomTypingTestCase(RestTestCase):
 
         def _insert_client_ip(*args, **kwargs):
             return defer.succeed(None)
+
         hs.get_datastore().insert_client_ip = _insert_client_ip
 
         def get_room_members(room_id):
@@ -94,63 +87,70 @@ class RoomTypingTestCase(RestTestCase):
                 else:
                     if remotedomains is not None:
                         remotedomains.add(member.domain)
+
         hs.get_room_member_handler().fetch_room_distributions_into = (
             fetch_room_distributions_into
         )
 
-        synapse.rest.client.v1.room.register_servlets(hs, self.mock_resource)
+        return hs
 
-        self.room_id = yield self.create_room_as(self.user_id)
+    def prepare(self, reactor, clock, hs):
+        self.room_id = self.helper.create_room_as(self.user_id)
         # Need another user to make notifications actually work
-        yield self.join(self.room_id, user="@jim:red")
+        self.helper.join(self.room_id, user="@jim:red")
 
-    @defer.inlineCallbacks
     def test_set_typing(self):
-        (code, _) = yield self.mock_resource.trigger(
-            "PUT", "/rooms/%s/typing/%s" % (self.room_id, self.user_id),
-            '{"typing": true, "timeout": 30000}'
+        request, channel = self.make_request(
+            "PUT",
+            "/rooms/%s/typing/%s" % (self.room_id, self.user_id),
+            b'{"typing": true, "timeout": 30000}',
         )
-        self.assertEquals(200, code)
+        self.render(request)
+        self.assertEquals(200, channel.code)
 
         self.assertEquals(self.event_source.get_current_key(), 1)
-        events = yield self.event_source.get_new_events(
-            from_key=0,
-            room_ids=[self.room_id],
+        events = self.event_source.get_new_events(from_key=0, room_ids=[self.room_id])
+        self.assertEquals(
+            events[0],
+            [
+                {
+                    "type": "m.typing",
+                    "room_id": self.room_id,
+                    "content": {"user_ids": [self.user_id]},
+                }
+            ],
         )
-        self.assertEquals(events[0], [{
-            "type": "m.typing",
-            "room_id": self.room_id,
-            "content": {
-                "user_ids": [self.user_id],
-            }
-        }])
 
-    @defer.inlineCallbacks
     def test_set_not_typing(self):
-        (code, _) = yield self.mock_resource.trigger(
-            "PUT", "/rooms/%s/typing/%s" % (self.room_id, self.user_id),
-            '{"typing": false}'
+        request, channel = self.make_request(
+            "PUT",
+            "/rooms/%s/typing/%s" % (self.room_id, self.user_id),
+            b'{"typing": false}',
         )
-        self.assertEquals(200, code)
+        self.render(request)
+        self.assertEquals(200, channel.code)
 
-    @defer.inlineCallbacks
     def test_typing_timeout(self):
-        (code, _) = yield self.mock_resource.trigger(
-            "PUT", "/rooms/%s/typing/%s" % (self.room_id, self.user_id),
-            '{"typing": true, "timeout": 30000}'
+        request, channel = self.make_request(
+            "PUT",
+            "/rooms/%s/typing/%s" % (self.room_id, self.user_id),
+            b'{"typing": true, "timeout": 30000}',
         )
-        self.assertEquals(200, code)
+        self.render(request)
+        self.assertEquals(200, channel.code)
 
         self.assertEquals(self.event_source.get_current_key(), 1)
 
-        self.clock.advance_time(36)
+        self.reactor.advance(36)
 
         self.assertEquals(self.event_source.get_current_key(), 2)
 
-        (code, _) = yield self.mock_resource.trigger(
-            "PUT", "/rooms/%s/typing/%s" % (self.room_id, self.user_id),
-            '{"typing": true, "timeout": 30000}'
+        request, channel = self.make_request(
+            "PUT",
+            "/rooms/%s/typing/%s" % (self.room_id, self.user_id),
+            b'{"typing": true, "timeout": 30000}',
         )
-        self.assertEquals(200, code)
+        self.render(request)
+        self.assertEquals(200, channel.code)
 
         self.assertEquals(self.event_source.get_current_key(), 3)
diff --git a/tests/rest/client/v1/utils.py b/tests/rest/client/v1/utils.py
index 41de8e0762..40dc4ea256 100644
--- a/tests/rest/client/v1/utils.py
+++ b/tests/rest/client/v1/utils.py
@@ -23,7 +23,7 @@ from twisted.internet import defer
 from synapse.api.constants import Membership
 
 from tests import unittest
-from tests.server import make_request, wait_until_result
+from tests.server import make_request, render
 
 
 class RestTestCase(unittest.TestCase):
@@ -55,25 +55,39 @@ class RestTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def invite(self, room=None, src=None, targ=None, expect_code=200, tok=None):
-        yield self.change_membership(room=room, src=src, targ=targ, tok=tok,
-                                     membership=Membership.INVITE,
-                                     expect_code=expect_code)
+        yield self.change_membership(
+            room=room,
+            src=src,
+            targ=targ,
+            tok=tok,
+            membership=Membership.INVITE,
+            expect_code=expect_code,
+        )
 
     @defer.inlineCallbacks
     def join(self, room=None, user=None, expect_code=200, tok=None):
-        yield self.change_membership(room=room, src=user, targ=user, tok=tok,
-                                     membership=Membership.JOIN,
-                                     expect_code=expect_code)
+        yield self.change_membership(
+            room=room,
+            src=user,
+            targ=user,
+            tok=tok,
+            membership=Membership.JOIN,
+            expect_code=expect_code,
+        )
 
     @defer.inlineCallbacks
     def leave(self, room=None, user=None, expect_code=200, tok=None):
-        yield self.change_membership(room=room, src=user, targ=user, tok=tok,
-                                     membership=Membership.LEAVE,
-                                     expect_code=expect_code)
+        yield self.change_membership(
+            room=room,
+            src=user,
+            targ=user,
+            tok=tok,
+            membership=Membership.LEAVE,
+            expect_code=expect_code,
+        )
 
     @defer.inlineCallbacks
-    def change_membership(self, room, src, targ, membership, tok=None,
-                          expect_code=200):
+    def change_membership(self, room, src, targ, membership, tok=None, expect_code=200):
         temp_id = self.auth_user_id
         self.auth_user_id = src
 
@@ -81,16 +95,15 @@ class RestTestCase(unittest.TestCase):
         if tok:
             path = path + "?access_token=%s" % tok
 
-        data = {
-            "membership": membership
-        }
+        data = {"membership": membership}
 
         (code, response) = yield self.mock_resource.trigger(
             "PUT", path, json.dumps(data)
         )
         self.assertEquals(
-            expect_code, code,
-            msg="Expected: %d, got: %d, resp: %r" % (expect_code, code, response)
+            expect_code,
+            code,
+            msg="Expected: %d, got: %d, resp: %r" % (expect_code, code, response),
         )
 
         self.auth_user_id = temp_id
@@ -100,17 +113,15 @@ class RestTestCase(unittest.TestCase):
         (code, response) = yield self.mock_resource.trigger(
             "POST",
             "/register",
-            json.dumps({
-                "user": user_id,
-                "password": "test",
-                "type": "m.login.password"
-            }))
-        self.assertEquals(200, code)
+            json.dumps(
+                {"user": user_id, "password": "test", "type": "m.login.password"}
+            ),
+        )
+        self.assertEquals(200, code, msg=response)
         defer.returnValue(response)
 
     @defer.inlineCallbacks
-    def send(self, room_id, body=None, txn_id=None, tok=None,
-             expect_code=200):
+    def send(self, room_id, body=None, txn_id=None, tok=None, expect_code=200):
         if txn_id is None:
             txn_id = "m%s" % (str(time.time()))
         if body is None:
@@ -132,8 +143,9 @@ class RestTestCase(unittest.TestCase):
             actual (dict): The test result. Extra keys will not be checked.
         """
         for key in required:
-            self.assertEquals(required[key], actual[key],
-                              msg="%s mismatch. %s" % (key, actual))
+            self.assertEquals(
+                required[key], actual[key], msg="%s mismatch. %s" % (key, actual)
+            )
 
 
 @attr.s
@@ -149,16 +161,17 @@ class RestHelper(object):
     def create_room_as(self, room_creator, is_public=True, tok=None):
         temp_id = self.auth_user_id
         self.auth_user_id = room_creator
-        path = b"/_matrix/client/r0/createRoom"
+        path = "/_matrix/client/r0/createRoom"
         content = {}
         if not is_public:
             content["visibility"] = "private"
         if tok:
-            path = path + b"?access_token=%s" % tok.encode('ascii')
+            path = path + "?access_token=%s" % tok
 
-        request, channel = make_request(b"POST", path, json.dumps(content).encode('utf8'))
-        request.render(self.resource)
-        wait_until_result(self.hs.get_reactor(), channel)
+        request, channel = make_request(
+            "POST", path, json.dumps(content).encode('utf8')
+        )
+        render(request, self.resource, self.hs.get_reactor())
 
         assert channel.result["code"] == b"200", channel.result
         self.auth_user_id = temp_id
@@ -204,12 +217,9 @@ class RestHelper(object):
 
         data = {"membership": membership}
 
-        request, channel = make_request(
-            b"PUT", path.encode('ascii'), json.dumps(data).encode('utf8')
-        )
+        request, channel = make_request("PUT", path, json.dumps(data).encode('utf8'))
 
-        request.render(self.resource)
-        wait_until_result(self.hs.get_reactor(), channel)
+        render(request, self.resource, self.hs.get_reactor())
 
         assert int(channel.result["code"]) == expect_code, (
             "Expected: %d, got: %d, resp: %r"
diff --git a/tests/rest/client/v2_alpha/test_filter.py b/tests/rest/client/v2_alpha/test_filter.py
index e890f0feac..6a886ee3b8 100644
--- a/tests/rest/client/v2_alpha/test_filter.py
+++ b/tests/rest/client/v2_alpha/test_filter.py
@@ -24,8 +24,8 @@ from tests import unittest
 from tests.server import (
     ThreadedMemoryReactorClock as MemoryReactorClock,
     make_request,
+    render,
     setup_test_homeserver,
-    wait_until_result,
 )
 
 PATH_PREFIX = "/_matrix/client/v2_alpha"
@@ -33,7 +33,7 @@ PATH_PREFIX = "/_matrix/client/v2_alpha"
 
 class FilterTestCase(unittest.TestCase):
 
-    USER_ID = b"@apple:test"
+    USER_ID = "@apple:test"
     EXAMPLE_FILTER = {"room": {"timeline": {"types": ["m.room.message"]}}}
     EXAMPLE_FILTER_JSON = b'{"room": {"timeline": {"types": ["m.room.message"]}}}'
     TO_REGISTER = [filter]
@@ -43,7 +43,7 @@ class FilterTestCase(unittest.TestCase):
         self.hs_clock = Clock(self.clock)
 
         self.hs = setup_test_homeserver(
-            http_client=None, clock=self.hs_clock, reactor=self.clock
+            self.addCleanup, http_client=None, clock=self.hs_clock, reactor=self.clock
         )
 
         self.auth = self.hs.get_auth()
@@ -72,12 +72,11 @@ class FilterTestCase(unittest.TestCase):
 
     def test_add_filter(self):
         request, channel = make_request(
-            b"POST",
-            b"/_matrix/client/r0/user/%s/filter" % (self.USER_ID),
+            "POST",
+            "/_matrix/client/r0/user/%s/filter" % (self.USER_ID),
             self.EXAMPLE_FILTER_JSON,
         )
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         self.assertEqual(channel.result["code"], b"200")
         self.assertEqual(channel.json_body, {"filter_id": "0"})
@@ -87,12 +86,11 @@ class FilterTestCase(unittest.TestCase):
 
     def test_add_filter_for_other_user(self):
         request, channel = make_request(
-            b"POST",
-            b"/_matrix/client/r0/user/%s/filter" % (b"@watermelon:test"),
+            "POST",
+            "/_matrix/client/r0/user/%s/filter" % ("@watermelon:test"),
             self.EXAMPLE_FILTER_JSON,
         )
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         self.assertEqual(channel.result["code"], b"403")
         self.assertEquals(channel.json_body["errcode"], Codes.FORBIDDEN)
@@ -101,12 +99,11 @@ class FilterTestCase(unittest.TestCase):
         _is_mine = self.hs.is_mine
         self.hs.is_mine = lambda target_user: False
         request, channel = make_request(
-            b"POST",
-            b"/_matrix/client/r0/user/%s/filter" % (self.USER_ID),
+            "POST",
+            "/_matrix/client/r0/user/%s/filter" % (self.USER_ID),
             self.EXAMPLE_FILTER_JSON,
         )
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         self.hs.is_mine = _is_mine
         self.assertEqual(channel.result["code"], b"403")
@@ -119,20 +116,18 @@ class FilterTestCase(unittest.TestCase):
         self.clock.advance(1)
         filter_id = filter_id.result
         request, channel = make_request(
-            b"GET", b"/_matrix/client/r0/user/%s/filter/%s" % (self.USER_ID, filter_id)
+            "GET", "/_matrix/client/r0/user/%s/filter/%s" % (self.USER_ID, filter_id)
         )
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         self.assertEqual(channel.result["code"], b"200")
         self.assertEquals(channel.json_body, self.EXAMPLE_FILTER)
 
     def test_get_filter_non_existant(self):
         request, channel = make_request(
-            b"GET", "/_matrix/client/r0/user/%s/filter/12382148321" % (self.USER_ID)
+            "GET", "/_matrix/client/r0/user/%s/filter/12382148321" % (self.USER_ID)
         )
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         self.assertEqual(channel.result["code"], b"400")
         self.assertEquals(channel.json_body["errcode"], Codes.NOT_FOUND)
@@ -141,19 +136,17 @@ class FilterTestCase(unittest.TestCase):
     # in errors.py
     def test_get_filter_invalid_id(self):
         request, channel = make_request(
-            b"GET", "/_matrix/client/r0/user/%s/filter/foobar" % (self.USER_ID)
+            "GET", "/_matrix/client/r0/user/%s/filter/foobar" % (self.USER_ID)
         )
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         self.assertEqual(channel.result["code"], b"400")
 
     # No ID also returns an invalid_id error
     def test_get_filter_no_id(self):
         request, channel = make_request(
-            b"GET", "/_matrix/client/r0/user/%s/filter/" % (self.USER_ID)
+            "GET", "/_matrix/client/r0/user/%s/filter/" % (self.USER_ID)
         )
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         self.assertEqual(channel.result["code"], b"400")
diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py
index e004d8fc73..1c128e81f5 100644
--- a/tests/rest/client/v2_alpha/test_register.py
+++ b/tests/rest/client/v2_alpha/test_register.py
@@ -11,7 +11,7 @@ from synapse.rest.client.v2_alpha.register import register_servlets
 from synapse.util import Clock
 
 from tests import unittest
-from tests.server import make_request, setup_test_homeserver, wait_until_result
+from tests.server import make_request, render, setup_test_homeserver
 
 
 class RegisterRestServletTestCase(unittest.TestCase):
@@ -47,7 +47,7 @@ class RegisterRestServletTestCase(unittest.TestCase):
             login_handler=self.login_handler,
         )
         self.hs = setup_test_homeserver(
-            http_client=None, clock=self.hs_clock, reactor=self.clock
+            self.addCleanup, http_client=None, clock=self.hs_clock, reactor=self.clock
         )
         self.hs.get_auth = Mock(return_value=self.auth)
         self.hs.get_handlers = Mock(return_value=self.handlers)
@@ -72,8 +72,7 @@ class RegisterRestServletTestCase(unittest.TestCase):
         request, channel = make_request(
             b"POST", self.url + b"?access_token=i_am_an_app_service", request_data
         )
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         self.assertEquals(channel.result["code"], b"200", channel.result)
         det_data = {
@@ -81,7 +80,7 @@ class RegisterRestServletTestCase(unittest.TestCase):
             "access_token": token,
             "home_server": self.hs.hostname,
         }
-        self.assertDictContainsSubset(det_data, json.loads(channel.result["body"]))
+        self.assertDictContainsSubset(det_data, channel.json_body)
 
     def test_POST_appservice_registration_invalid(self):
         self.appservice = None  # no application service exists
@@ -89,32 +88,25 @@ class RegisterRestServletTestCase(unittest.TestCase):
         request, channel = make_request(
             b"POST", self.url + b"?access_token=i_am_an_app_service", request_data
         )
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         self.assertEquals(channel.result["code"], b"401", channel.result)
 
     def test_POST_bad_password(self):
         request_data = json.dumps({"username": "kermit", "password": 666})
         request, channel = make_request(b"POST", self.url, request_data)
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         self.assertEquals(channel.result["code"], b"400", channel.result)
-        self.assertEquals(
-            json.loads(channel.result["body"])["error"], "Invalid password"
-        )
+        self.assertEquals(channel.json_body["error"], "Invalid password")
 
     def test_POST_bad_username(self):
         request_data = json.dumps({"username": 777, "password": "monkey"})
         request, channel = make_request(b"POST", self.url, request_data)
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         self.assertEquals(channel.result["code"], b"400", channel.result)
-        self.assertEquals(
-            json.loads(channel.result["body"])["error"], "Invalid username"
-        )
+        self.assertEquals(channel.json_body["error"], "Invalid username")
 
     def test_POST_user_valid(self):
         user_id = "@kermit:muppet"
@@ -130,8 +122,7 @@ class RegisterRestServletTestCase(unittest.TestCase):
         self.device_handler.check_device_registered = Mock(return_value=device_id)
 
         request, channel = make_request(b"POST", self.url, request_data)
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         det_data = {
             "user_id": user_id,
@@ -140,7 +131,7 @@ class RegisterRestServletTestCase(unittest.TestCase):
             "device_id": device_id,
         }
         self.assertEquals(channel.result["code"], b"200", channel.result)
-        self.assertDictContainsSubset(det_data, json.loads(channel.result["body"]))
+        self.assertDictContainsSubset(det_data, channel.json_body)
         self.auth_handler.get_login_tuple_for_user_id(
             user_id, device_id=device_id, initial_device_display_name=None
         )
@@ -153,14 +144,10 @@ class RegisterRestServletTestCase(unittest.TestCase):
         self.registration_handler.register = Mock(return_value=("@user:id", "t"))
 
         request, channel = make_request(b"POST", self.url, request_data)
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         self.assertEquals(channel.result["code"], b"403", channel.result)
-        self.assertEquals(
-            json.loads(channel.result["body"])["error"],
-            "Registration has been disabled",
-        )
+        self.assertEquals(channel.json_body["error"], "Registration has been disabled")
 
     def test_POST_guest_registration(self):
         user_id = "a@b"
@@ -169,8 +156,7 @@ class RegisterRestServletTestCase(unittest.TestCase):
         self.registration_handler.register = Mock(return_value=(user_id, None))
 
         request, channel = make_request(b"POST", self.url + b"?kind=guest", b"{}")
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         det_data = {
             "user_id": user_id,
@@ -178,16 +164,13 @@ class RegisterRestServletTestCase(unittest.TestCase):
             "device_id": "guest_device",
         }
         self.assertEquals(channel.result["code"], b"200", channel.result)
-        self.assertDictContainsSubset(det_data, json.loads(channel.result["body"]))
+        self.assertDictContainsSubset(det_data, channel.json_body)
 
     def test_POST_disabled_guest_registration(self):
         self.hs.config.allow_guest_access = False
 
         request, channel = make_request(b"POST", self.url + b"?kind=guest", b"{}")
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        render(request, self.resource, self.clock)
 
         self.assertEquals(channel.result["code"], b"403", channel.result)
-        self.assertEquals(
-            json.loads(channel.result["body"])["error"], "Guest access is disabled"
-        )
+        self.assertEquals(channel.json_body["error"], "Guest access is disabled")
diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py
index 03ec3993b2..560b1fba96 100644
--- a/tests/rest/client/v2_alpha/test_sync.py
+++ b/tests/rest/client/v2_alpha/test_sync.py
@@ -13,72 +13,58 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import synapse.types
-from synapse.http.server import JsonResource
+from mock import Mock
+
 from synapse.rest.client.v2_alpha import sync
-from synapse.types import UserID
-from synapse.util import Clock
 
 from tests import unittest
-from tests.server import (
-    ThreadedMemoryReactorClock as MemoryReactorClock,
-    make_request,
-    setup_test_homeserver,
-    wait_until_result,
-)
-
-PATH_PREFIX = "/_matrix/client/v2_alpha"
 
 
-class FilterTestCase(unittest.TestCase):
+class FilterTestCase(unittest.HomeserverTestCase):
 
-    USER_ID = b"@apple:test"
-    TO_REGISTER = [sync]
+    user_id = "@apple:test"
+    servlets = [sync.register_servlets]
 
-    def setUp(self):
-        self.clock = MemoryReactorClock()
-        self.hs_clock = Clock(self.clock)
+    def make_homeserver(self, reactor, clock):
 
-        self.hs = setup_test_homeserver(
-            http_client=None, clock=self.hs_clock, reactor=self.clock
+        hs = self.setup_test_homeserver(
+            "red", http_client=None, federation_client=Mock()
         )
+        return hs
 
-        self.auth = self.hs.get_auth()
-
-        def get_user_by_access_token(token=None, allow_guest=False):
-            return {
-                "user": UserID.from_string(self.USER_ID),
-                "token_id": 1,
-                "is_guest": False,
-            }
-
-        def get_user_by_req(request, allow_guest=False, rights="access"):
-            return synapse.types.create_requester(
-                UserID.from_string(self.USER_ID), 1, False, None
-            )
-
-        self.auth.get_user_by_access_token = get_user_by_access_token
-        self.auth.get_user_by_req = get_user_by_req
+    def test_sync_argless(self):
+        request, channel = self.make_request("GET", "/sync")
+        self.render(request)
 
-        self.store = self.hs.get_datastore()
-        self.filtering = self.hs.get_filtering()
-        self.resource = JsonResource(self.hs)
+        self.assertEqual(channel.code, 200)
+        self.assertTrue(
+            set(
+                [
+                    "next_batch",
+                    "rooms",
+                    "presence",
+                    "account_data",
+                    "to_device",
+                    "device_lists",
+                ]
+            ).issubset(set(channel.json_body.keys()))
+        )
 
-        for r in self.TO_REGISTER:
-            r.register_servlets(self.hs, self.resource)
+    def test_sync_presence_disabled(self):
+        """
+        When presence is disabled, the key does not appear in /sync.
+        """
+        self.hs.config.use_presence = False
 
-    def test_sync_argless(self):
-        request, channel = make_request(b"GET", b"/_matrix/client/r0/sync")
-        request.render(self.resource)
-        wait_until_result(self.clock, channel)
+        request, channel = self.make_request("GET", "/sync")
+        self.render(request)
 
-        self.assertEqual(channel.result["code"], b"200")
+        self.assertEqual(channel.code, 200)
         self.assertTrue(
             set(
                 [
                     "next_batch",
                     "rooms",
-                    "presence",
                     "account_data",
                     "to_device",
                     "device_lists",
diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py
index bf254a260d..a86901c2d8 100644
--- a/tests/rest/media/v1/test_media_storage.py
+++ b/tests/rest/media/v1/test_media_storage.py
@@ -41,13 +41,11 @@ class MediaStorageTests(unittest.TestCase):
         hs.get_reactor = Mock(return_value=reactor)
         hs.config.media_store_path = self.primary_base_path
 
-        storage_providers = [FileStorageProviderBackend(
-            hs, self.secondary_base_path
-        )]
+        storage_providers = [FileStorageProviderBackend(hs, self.secondary_base_path)]
 
         self.filepaths = MediaFilePaths(self.primary_base_path)
         self.media_storage = MediaStorage(
-            hs, self.primary_base_path, self.filepaths, storage_providers,
+            hs, self.primary_base_path, self.filepaths, storage_providers
         )
 
     def tearDown(self):
diff --git a/tests/server.py b/tests/server.py
index c611dd6059..7dbdb7f8ea 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -5,12 +5,13 @@ from six import text_type
 
 import attr
 
-from twisted.internet import threads
+from twisted.internet import address, threads
 from twisted.internet.defer import Deferred
 from twisted.python.failure import Failure
 from twisted.test.proto_helpers import MemoryReactorClock
 
 from synapse.http.site import SynapseRequest
+from synapse.util import Clock
 
 from tests.utils import setup_test_homeserver as _sth
 
@@ -23,12 +24,19 @@ class FakeChannel(object):
     """
 
     result = attr.ib(default=attr.Factory(dict))
+    _producer = None
 
     @property
     def json_body(self):
         if not self.result:
             raise Exception("No result yet.")
-        return json.loads(self.result["body"])
+        return json.loads(self.result["body"].decode('utf8'))
+
+    @property
+    def code(self):
+        if not self.result:
+            raise Exception("No result yet.")
+        return int(self.result["code"])
 
     def writeHeaders(self, version, code, reason, headers):
         self.result["version"] = version
@@ -42,11 +50,22 @@ class FakeChannel(object):
 
         self.result["body"] += content
 
+    def registerProducer(self, producer, streaming):
+        self._producer = producer
+
+    def unregisterProducer(self):
+        if self._producer is None:
+            return
+
+        self._producer = None
+
     def requestDone(self, _self):
         self.result["done"] = True
 
     def getPeer(self):
-        return None
+        # We give an address so that getClientIP returns a non-null entry,
+        # causing the monthly active user tracking to record this request
+        return address.IPv4Address("TCP", "127.0.0.1", 3423)
 
     def getHost(self):
         return None
@@ -74,16 +93,21 @@ class FakeSite:
         return FakeLogger()
 
 
-def make_request(method, path, content=b""):
+def make_request(method, path, content=b"", access_token=None):
     """
     Make a web request using the given method and path, feed it the
     content, and return the Request and the Channel underneath.
     """
+    if not isinstance(method, bytes):
+        method = method.encode('ascii')
+
+    if not isinstance(path, bytes):
+        path = path.encode('ascii')
 
     # Decorate it to be the full path
     if not path.startswith(b"/_matrix"):
         path = b"/_matrix/client/r0/" + path
-        path = path.replace("//", "/")
+        path = path.replace(b"//", b"/")
 
     if isinstance(content, text_type):
         content = content.encode('utf8')
@@ -94,19 +118,29 @@ def make_request(method, path, content=b""):
     req = SynapseRequest(site, channel)
     req.process = lambda: b""
     req.content = BytesIO(content)
+
+    if access_token:
+        if not isinstance(access_token, bytes):
+            access_token = access_token.encode('ascii')
+        req.requestHeaders.addRawHeader(b"Authorization", b"Bearer " + access_token)
+
+    req.requestHeaders.addRawHeader(b"X-Forwarded-For", b"127.0.0.1")
     req.requestReceived(method, path, b"1.1")
 
     return req, channel
 
 
-def wait_until_result(clock, channel, timeout=100):
+def wait_until_result(clock, request, timeout=100):
     """
-    Wait until the channel has a result.
+    Wait until the request is finished.
     """
     clock.run()
     x = 0
 
-    while not channel.result:
+    while not request.finished:
+
+        # If there's a producer, tell it to resume producing so we get content
+        if request._channel._producer:
+            request._channel._producer.resumeProducing()
+
         x += 1
 
         if x > timeout:
@@ -117,13 +151,14 @@ def wait_until_result(clock, channel, timeout=100):
 
 def render(request, resource, clock):
     request.render(resource)
-    wait_until_result(clock, request._channel)
+    wait_until_result(clock, request)
 
 
 class ThreadedMemoryReactorClock(MemoryReactorClock):
     """
     A MemoryReactorClock that supports callFromThread.
     """
+
     def callFromThread(self, callback, *args, **kwargs):
         """
         Make the callback fire in the next reactor iteration.
@@ -134,12 +169,15 @@ class ThreadedMemoryReactorClock(MemoryReactorClock):
         return d
 
 
-def setup_test_homeserver(*args, **kwargs):
+def setup_test_homeserver(cleanup_func, *args, **kwargs):
     """
     Set up a synchronous test server, driven by the reactor used by
     the homeserver.
     """
-    d = _sth(*args, **kwargs).result
+    d = _sth(cleanup_func, *args, **kwargs).result
+
+    if isinstance(d, Failure):
+        d.raiseException()
 
     # Make the thread pool synchronous.
     clock = d.get_clock()
@@ -172,9 +210,13 @@ def setup_test_homeserver(*args, **kwargs):
         """
         Threadless thread pool.
         """
+
         def start(self):
             pass
 
+        def stop(self):
+            pass
+
         def callInThreadWithCallback(self, onResult, function, *args, **kwargs):
             def _(res):
                 if isinstance(res, Failure):
@@ -191,3 +233,9 @@ def setup_test_homeserver(*args, **kwargs):
     clock.threadpool = ThreadPool()
     pool.threadpool = ThreadPool()
     return d
+
+
+def get_clock():
+    clock = ThreadedMemoryReactorClock()
+    hs_clock = Clock(clock)
+    return (clock, hs_clock)
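
As a rough usage sketch of the updated helpers (the resource and token below
are stand-ins for whatever the calling test provides, not defined by this
patch):

    from tests.server import get_clock, make_request, render

    # get_clock() returns the fake reactor together with the homeserver
    # Clock that wraps it.
    reactor, clock = get_clock()

    # Methods and paths may now be native strings; an access token, if
    # given, is attached as a Bearer Authorization header.
    request, channel = make_request("GET", "/sync", access_token=token)

    # render() pumps the reactor (resuming any registered producer)
    # until the request has finished.
    render(request, resource, reactor)

    assert channel.code == 200
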
diff --git a/tests/server_notices/__init__.py b/tests/server_notices/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/tests/server_notices/__init__.py
diff --git a/tests/server_notices/test_resource_limits_server_notices.py b/tests/server_notices/test_resource_limits_server_notices.py
new file mode 100644
index 0000000000..fede3f52a9
--- /dev/null
+++ b/tests/server_notices/test_resource_limits_server_notices.py
@@ -0,0 +1,212 @@
+from mock import Mock
+
+from twisted.internet import defer
+
+from synapse.api.constants import EventTypes, ServerNoticeMsgType
+from synapse.api.errors import ResourceLimitError
+from synapse.handlers.auth import AuthHandler
+from synapse.server_notices.resource_limits_server_notices import (
+    ResourceLimitsServerNotices,
+)
+
+from tests import unittest
+from tests.utils import setup_test_homeserver
+
+
+class AuthHandlers(object):
+    def __init__(self, hs):
+        self.auth_handler = AuthHandler(hs)
+
+
+class TestResourceLimitsServerNotices(unittest.TestCase):
+    @defer.inlineCallbacks
+    def setUp(self):
+        self.hs = yield setup_test_homeserver(self.addCleanup, handlers=None)
+        self.hs.handlers = AuthHandlers(self.hs)
+        self.auth_handler = self.hs.handlers.auth_handler
+        self.server_notices_sender = self.hs.get_server_notices_sender()
+
+        # Relying on the index [1] is far from ideal, but this test is the
+        # only case where the ResourceLimitsServerNotices class needs to be
+        # isolated; general code should never have a reason to do so ...
+        self._rlsn = self.server_notices_sender._server_notices[1]
+        if not isinstance(self._rlsn, ResourceLimitsServerNotices):
+            raise Exception("Failed to find reference to ResourceLimitsServerNotices")
+
+        self._rlsn._store.user_last_seen_monthly_active = Mock(
+            return_value=defer.succeed(1000)
+        )
+        self._rlsn._server_notices_manager.send_notice = Mock()
+        self._rlsn._state.get_current_state = Mock(return_value=defer.succeed(None))
+        self._rlsn._store.get_events = Mock(return_value=defer.succeed({}))
+
+        # Keep a reference to the send_notice Mock for call assertions
+        self._send_notice = self._rlsn._server_notices_manager.send_notice
+
+        self.hs.config.limit_usage_by_mau = True
+        self.user_id = "@user_id:test"
+
+        # self.server_notices_mxid = "@server:test"
+        # self.server_notices_mxid_display_name = None
+        # self.server_notices_mxid_avatar_url = None
+        # self.server_notices_room_name = "Server Notices"
+
+        self._rlsn._server_notices_manager.get_notice_room_for_user = Mock(
+            return_value=""
+        )
+        self._rlsn._store.add_tag_to_room = Mock()
+        self.hs.config.admin_uri = "mailto:user@test.com"
+
+    @defer.inlineCallbacks
+    def test_maybe_send_server_notice_to_user_flag_off(self):
+        """Tests cases where the flags indicate nothing to do"""
+        # test hs disabled case
+        self.hs.config.hs_disabled = True
+
+        yield self._rlsn.maybe_send_server_notice_to_user(self.user_id)
+
+        self._send_notice.assert_not_called()
+        # Test when mau limiting disabled
+        self.hs.config.hs_disabled = False
+        self.hs.config.limit_usage_by_mau = False
+        yield self._rlsn.maybe_send_server_notice_to_user(self.user_id)
+
+        self._send_notice.assert_not_called()
+
+    @defer.inlineCallbacks
+    def test_maybe_send_server_notice_to_user_remove_blocked_notice(self):
+        """Test when user has blocked notice, but should have it removed"""
+
+        self._rlsn._auth.check_auth_blocking = Mock()
+        mock_event = Mock(
+            type=EventTypes.Message, content={"msgtype": ServerNoticeMsgType}
+        )
+        self._rlsn._store.get_events = Mock(
+            return_value=defer.succeed({"123": mock_event})
+        )
+
+        yield self._rlsn.maybe_send_server_notice_to_user(self.user_id)
+        # It would be better to check the content, but a single call
+        # means the blocking event was removed
+        self._send_notice.assert_called_once()
+
+    @defer.inlineCallbacks
+    def test_maybe_send_server_notice_to_user_remove_blocked_notice_noop(self):
+        """Test when user has blocked notice, but notice ought to be there (NOOP)"""
+        self._rlsn._auth.check_auth_blocking = Mock(
+            side_effect=ResourceLimitError(403, 'foo')
+        )
+
+        mock_event = Mock(
+            type=EventTypes.Message, content={"msgtype": ServerNoticeMsgType}
+        )
+        self._rlsn._store.get_events = Mock(
+            return_value=defer.succeed({"123": mock_event})
+        )
+        yield self._rlsn.maybe_send_server_notice_to_user(self.user_id)
+
+        self._send_notice.assert_not_called()
+
+    @defer.inlineCallbacks
+    def test_maybe_send_server_notice_to_user_add_blocked_notice(self):
+        """Test when user does not have blocked notice, but should have one"""
+
+        self._rlsn._auth.check_auth_blocking = Mock(
+            side_effect=ResourceLimitError(403, 'foo')
+        )
+        yield self._rlsn.maybe_send_server_notice_to_user(self.user_id)
+
+        # It would be better to check the contents, but two calls mean
+        # the blocking event was set
+        self.assertEqual(self._send_notice.call_count, 2)
+
+    @defer.inlineCallbacks
+    def test_maybe_send_server_notice_to_user_add_blocked_notice_noop(self):
+        """Test when user does not have blocked notice, nor should they (NOOP)"""
+
+        self._rlsn._auth.check_auth_blocking = Mock()
+
+        yield self._rlsn.maybe_send_server_notice_to_user(self.user_id)
+
+        self._send_notice.assert_not_called()
+
+    @defer.inlineCallbacks
+    def test_maybe_send_server_notice_to_user_not_in_mau_cohort(self):
+        """Test when the user is not part of the MAU cohort - this should
+        never happen, but exercise the defensive path anyway.
+        """
+
+        self._rlsn._auth.check_auth_blocking = Mock()
+        self._rlsn._store.user_last_seen_monthly_active = Mock(
+            return_value=defer.succeed(None)
+        )
+        yield self._rlsn.maybe_send_server_notice_to_user(self.user_id)
+
+        self._send_notice.assert_not_called()
+
+
+class TestResourceLimitsServerNoticesWithRealRooms(unittest.TestCase):
+    @defer.inlineCallbacks
+    def setUp(self):
+        self.hs = yield setup_test_homeserver(self.addCleanup)
+        self.store = self.hs.get_datastore()
+        self.server_notices_sender = self.hs.get_server_notices_sender()
+        self.server_notices_manager = self.hs.get_server_notices_manager()
+        self.event_source = self.hs.get_event_sources()
+
+        # Relying on the index [1] is far from ideal, but this test is the
+        # only case where the ResourceLimitsServerNotices class needs to be
+        # isolated; general code should never have a reason to do so ...
+        self._rlsn = self.server_notices_sender._server_notices[1]
+        if not isinstance(self._rlsn, ResourceLimitsServerNotices):
+            raise Exception("Failed to find reference to ResourceLimitsServerNotices")
+
+        self.hs.config.limit_usage_by_mau = True
+        self.hs.config.hs_disabled = False
+        self.hs.config.max_mau_value = 5
+        self.hs.config.server_notices_mxid = "@server:test"
+        self.hs.config.server_notices_mxid_display_name = None
+        self.hs.config.server_notices_mxid_avatar_url = None
+        self.hs.config.server_notices_room_name = "Test Server Notice Room"
+
+        self.user_id = "@user_id:test"
+
+        self.hs.config.admin_uri = "mailto:user@test.com"
+
+    @defer.inlineCallbacks
+    def test_server_notice_only_sent_once(self):
+        self.store.get_monthly_active_count = Mock(return_value=1000)
+
+        self.store.user_last_seen_monthly_active = Mock(return_value=1000)
+
+        # Call the function multiple times to ensure we only send the notice once
+        yield self._rlsn.maybe_send_server_notice_to_user(self.user_id)
+        yield self._rlsn.maybe_send_server_notice_to_user(self.user_id)
+        yield self._rlsn.maybe_send_server_notice_to_user(self.user_id)
+
+        # Now let's fetch the most recent messages in the server notices
+        # room and check that there is only one server notice
+        room_id = yield self.server_notices_manager.get_notice_room_for_user(
+            self.user_id,
+        )
+
+        token = yield self.event_source.get_current_token()
+        events, _ = yield self.store.get_recent_events_for_room(
+            room_id, limit=100, end_token=token.room_key,
+        )
+
+        count = 0
+        for event in events:
+            if event.type != EventTypes.Message:
+                continue
+            if event.content.get("msgtype") != ServerNoticeMsgType:
+                continue
+
+            count += 1
+
+        self.assertEqual(count, 1)
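
The stubbing pattern used throughout this new file is worth calling out:
asynchronous store methods are replaced with Mocks that return already-fired
Deferreds, so the inlineCallbacks tests can yield on them without waiting on
a real reactor. A minimal sketch, where store stands in for the object being
stubbed:

    from mock import Mock

    from twisted.internet import defer

    # The stub resolves immediately with a canned value when yielded on.
    store.user_last_seen_monthly_active = Mock(
        return_value=defer.succeed(1000)
    )
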
diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py
index 6d6f00c5c5..52eb05bfbf 100644
--- a/tests/storage/test__base.py
+++ b/tests/storage/test__base.py
@@ -18,14 +18,13 @@ from mock import Mock
 
 from twisted.internet import defer
 
-from synapse.util.async import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches.descriptors import Cache, cached
 
 from tests import unittest
 
 
 class CacheTestCase(unittest.TestCase):
-
     def setUp(self):
         self.cache = Cache("test")
 
@@ -97,7 +96,6 @@ class CacheTestCase(unittest.TestCase):
 
 
 class CacheDecoratorTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def test_passthrough(self):
         class A(object):
@@ -180,8 +178,7 @@ class CacheDecoratorTestCase(unittest.TestCase):
             yield a.func(k)
 
         self.assertTrue(
-            callcount[0] >= 14,
-            msg="Expected callcount >= 14, got %d" % (callcount[0])
+            callcount[0] >= 14, msg="Expected callcount >= 14, got %d" % (callcount[0])
         )
 
     def test_prefill(self):
diff --git a/tests/storage/test__init__.py b/tests/storage/test__init__.py
deleted file mode 100644
index f19cb1265c..0000000000
--- a/tests/storage/test__init__.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2018 New Vector Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from twisted.internet import defer
-
-import tests.utils
-
-
-class InitTestCase(tests.unittest.TestCase):
-    def __init__(self, *args, **kwargs):
-        super(InitTestCase, self).__init__(*args, **kwargs)
-        self.store = None  # type: synapse.storage.DataStore
-
-    @defer.inlineCallbacks
-    def setUp(self):
-        hs = yield tests.utils.setup_test_homeserver()
-
-        hs.config.max_mau_value = 50
-        hs.config.limit_usage_by_mau = True
-        self.store = hs.get_datastore()
-        self.clock = hs.get_clock()
-
-    @defer.inlineCallbacks
-    def test_count_monthly_users(self):
-        count = yield self.store.count_monthly_users()
-        self.assertEqual(0, count)
-
-        yield self._insert_user_ips("@user:server1")
-        yield self._insert_user_ips("@user:server2")
-
-        count = yield self.store.count_monthly_users()
-        self.assertEqual(2, count)
-
-    @defer.inlineCallbacks
-    def _insert_user_ips(self, user):
-        """
-        Helper function to populate user_ips without using batch insertion infra
-        args:
-            user (str):  specify username i.e. @user:server.com
-        """
-        yield self.store._simple_upsert(
-            table="user_ips",
-            keyvalues={
-                "user_id": user,
-                "access_token": "access_token",
-                "ip": "ip",
-                "user_agent": "user_agent",
-                "device_id": "device_id",
-            },
-            values={
-                "last_seen": self.clock.time_msec(),
-            }
-        )
diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py
index 099861b27c..c893990454 100644
--- a/tests/storage/test_appservice.py
+++ b/tests/storage/test_appservice.py
@@ -34,7 +34,6 @@ from tests.utils import setup_test_homeserver
 
 
 class ApplicationServiceStoreTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def setUp(self):
         self.as_yaml_files = []
@@ -44,6 +43,7 @@ class ApplicationServiceStoreTestCase(unittest.TestCase):
             password_providers=[],
         )
         hs = yield setup_test_homeserver(
+            self.addCleanup,
             config=config,
             federation_sender=Mock(),
             federation_client=Mock(),
@@ -53,11 +53,7 @@ class ApplicationServiceStoreTestCase(unittest.TestCase):
         self.as_url = "some_url"
         self.as_id = "as1"
         self._add_appservice(
-            self.as_token,
-            self.as_id,
-            self.as_url,
-            "some_hs_token",
-            "bob"
+            self.as_token, self.as_id, self.as_url, "some_hs_token", "bob"
         )
         self._add_appservice("token2", "as2", "some_url", "some_hs_token", "bob")
         self._add_appservice("token3", "as3", "some_url", "some_hs_token", "bob")
@@ -73,8 +69,14 @@ class ApplicationServiceStoreTestCase(unittest.TestCase):
                 pass
 
     def _add_appservice(self, as_token, id, url, hs_token, sender):
-        as_yaml = dict(url=url, as_token=as_token, hs_token=hs_token,
-                       id=id, sender_localpart=sender, namespaces={})
+        as_yaml = dict(
+            url=url,
+            as_token=as_token,
+            hs_token=hs_token,
+            id=id,
+            sender_localpart=sender,
+            namespaces={},
+        )
         # use the token as the filename
         with open(as_token, 'w') as outfile:
             outfile.write(yaml.dump(as_yaml))
@@ -85,24 +87,13 @@ class ApplicationServiceStoreTestCase(unittest.TestCase):
         self.assertEquals(service, None)
 
     def test_retrieval_of_service(self):
-        stored_service = self.store.get_app_service_by_token(
-            self.as_token
-        )
+        stored_service = self.store.get_app_service_by_token(self.as_token)
         self.assertEquals(stored_service.token, self.as_token)
         self.assertEquals(stored_service.id, self.as_id)
         self.assertEquals(stored_service.url, self.as_url)
-        self.assertEquals(
-            stored_service.namespaces[ApplicationService.NS_ALIASES],
-            []
-        )
-        self.assertEquals(
-            stored_service.namespaces[ApplicationService.NS_ROOMS],
-            []
-        )
-        self.assertEquals(
-            stored_service.namespaces[ApplicationService.NS_USERS],
-            []
-        )
+        self.assertEquals(stored_service.namespaces[ApplicationService.NS_ALIASES], [])
+        self.assertEquals(stored_service.namespaces[ApplicationService.NS_ROOMS], [])
+        self.assertEquals(stored_service.namespaces[ApplicationService.NS_USERS], [])
 
     def test_retrieval_of_all_services(self):
         services = self.store.get_app_services()
@@ -110,7 +101,6 @@ class ApplicationServiceStoreTestCase(unittest.TestCase):
 
 
 class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def setUp(self):
         self.as_yaml_files = []
@@ -121,6 +111,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
             password_providers=[],
         )
         hs = yield setup_test_homeserver(
+            self.addCleanup,
             config=config,
             federation_sender=Mock(),
             federation_client=Mock(),
@@ -128,26 +119,10 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
         self.db_pool = hs.get_db_pool()
 
         self.as_list = [
-            {
-                "token": "token1",
-                "url": "https://matrix-as.org",
-                "id": "id_1"
-            },
-            {
-                "token": "alpha_tok",
-                "url": "https://alpha.com",
-                "id": "id_alpha"
-            },
-            {
-                "token": "beta_tok",
-                "url": "https://beta.com",
-                "id": "id_beta"
-            },
-            {
-                "token": "gamma_tok",
-                "url": "https://gamma.com",
-                "id": "id_gamma"
-            },
+            {"token": "token1", "url": "https://matrix-as.org", "id": "id_1"},
+            {"token": "alpha_tok", "url": "https://alpha.com", "id": "id_alpha"},
+            {"token": "beta_tok", "url": "https://beta.com", "id": "id_beta"},
+            {"token": "gamma_tok", "url": "https://gamma.com", "id": "id_gamma"},
         ]
         for s in self.as_list:
             yield self._add_service(s["url"], s["token"], s["id"])
@@ -157,8 +132,14 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
         self.store = TestTransactionStore(None, hs)
 
     def _add_service(self, url, as_token, id):
-        as_yaml = dict(url=url, as_token=as_token, hs_token="something",
-                       id=id, sender_localpart="a_sender", namespaces={})
+        as_yaml = dict(
+            url=url,
+            as_token=as_token,
+            hs_token="something",
+            id=id,
+            sender_localpart="a_sender",
+            namespaces={},
+        )
         # use the token as the filename
         with open(as_token, 'w') as outfile:
             outfile.write(yaml.dump(as_yaml))
@@ -168,21 +149,21 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
         return self.db_pool.runQuery(
             "INSERT INTO application_services_state(as_id, state, last_txn) "
             "VALUES(?,?,?)",
-            (id, state, txn)
+            (id, state, txn),
         )
 
     def _insert_txn(self, as_id, txn_id, events):
         return self.db_pool.runQuery(
             "INSERT INTO application_services_txns(as_id, txn_id, event_ids) "
             "VALUES(?,?,?)",
-            (as_id, txn_id, json.dumps([e.event_id for e in events]))
+            (as_id, txn_id, json.dumps([e.event_id for e in events])),
         )
 
     def _set_last_txn(self, as_id, txn_id):
         return self.db_pool.runQuery(
             "INSERT INTO application_services_state(as_id, last_txn, state) "
             "VALUES(?,?,?)",
-            (as_id, txn_id, ApplicationServiceState.UP)
+            (as_id, txn_id, ApplicationServiceState.UP),
         )
 
     @defer.inlineCallbacks
@@ -193,24 +174,16 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_get_appservice_state_up(self):
-        yield self._set_state(
-            self.as_list[0]["id"], ApplicationServiceState.UP
-        )
+        yield self._set_state(self.as_list[0]["id"], ApplicationServiceState.UP)
         service = Mock(id=self.as_list[0]["id"])
         state = yield self.store.get_appservice_state(service)
         self.assertEquals(ApplicationServiceState.UP, state)
 
     @defer.inlineCallbacks
     def test_get_appservice_state_down(self):
-        yield self._set_state(
-            self.as_list[0]["id"], ApplicationServiceState.UP
-        )
-        yield self._set_state(
-            self.as_list[1]["id"], ApplicationServiceState.DOWN
-        )
-        yield self._set_state(
-            self.as_list[2]["id"], ApplicationServiceState.DOWN
-        )
+        yield self._set_state(self.as_list[0]["id"], ApplicationServiceState.UP)
+        yield self._set_state(self.as_list[1]["id"], ApplicationServiceState.DOWN)
+        yield self._set_state(self.as_list[2]["id"], ApplicationServiceState.DOWN)
         service = Mock(id=self.as_list[1]["id"])
         state = yield self.store.get_appservice_state(service)
         self.assertEquals(ApplicationServiceState.DOWN, state)
@@ -225,34 +198,22 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def test_set_appservices_state_down(self):
         service = Mock(id=self.as_list[1]["id"])
-        yield self.store.set_appservice_state(
-            service,
-            ApplicationServiceState.DOWN
-        )
+        yield self.store.set_appservice_state(service, ApplicationServiceState.DOWN)
         rows = yield self.db_pool.runQuery(
             "SELECT as_id FROM application_services_state WHERE state=?",
-            (ApplicationServiceState.DOWN,)
+            (ApplicationServiceState.DOWN,),
         )
         self.assertEquals(service.id, rows[0][0])
 
     @defer.inlineCallbacks
     def test_set_appservices_state_multiple_up(self):
         service = Mock(id=self.as_list[1]["id"])
-        yield self.store.set_appservice_state(
-            service,
-            ApplicationServiceState.UP
-        )
-        yield self.store.set_appservice_state(
-            service,
-            ApplicationServiceState.DOWN
-        )
-        yield self.store.set_appservice_state(
-            service,
-            ApplicationServiceState.UP
-        )
+        yield self.store.set_appservice_state(service, ApplicationServiceState.UP)
+        yield self.store.set_appservice_state(service, ApplicationServiceState.DOWN)
+        yield self.store.set_appservice_state(service, ApplicationServiceState.UP)
         rows = yield self.db_pool.runQuery(
             "SELECT as_id FROM application_services_state WHERE state=?",
-            (ApplicationServiceState.UP,)
+            (ApplicationServiceState.UP,),
         )
         self.assertEquals(service.id, rows[0][0])
 
@@ -319,14 +280,13 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
 
         res = yield self.db_pool.runQuery(
             "SELECT last_txn FROM application_services_state WHERE as_id=?",
-            (service.id,)
+            (service.id,),
         )
         self.assertEquals(1, len(res))
         self.assertEquals(txn_id, res[0][0])
 
         res = yield self.db_pool.runQuery(
-            "SELECT * FROM application_services_txns WHERE txn_id=?",
-            (txn_id,)
+            "SELECT * FROM application_services_txns WHERE txn_id=?", (txn_id,)
         )
         self.assertEquals(0, len(res))
 
@@ -340,17 +300,15 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
         yield self.store.complete_appservice_txn(txn_id=txn_id, service=service)
 
         res = yield self.db_pool.runQuery(
-            "SELECT last_txn, state FROM application_services_state WHERE "
-            "as_id=?",
-            (service.id,)
+            "SELECT last_txn, state FROM application_services_state WHERE " "as_id=?",
+            (service.id,),
         )
         self.assertEquals(1, len(res))
         self.assertEquals(txn_id, res[0][0])
         self.assertEquals(ApplicationServiceState.UP, res[0][1])
 
         res = yield self.db_pool.runQuery(
-            "SELECT * FROM application_services_txns WHERE txn_id=?",
-            (txn_id,)
+            "SELECT * FROM application_services_txns WHERE txn_id=?", (txn_id,)
         )
         self.assertEquals(0, len(res))
 
@@ -382,12 +340,8 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_get_appservices_by_state_single(self):
-        yield self._set_state(
-            self.as_list[0]["id"], ApplicationServiceState.DOWN
-        )
-        yield self._set_state(
-            self.as_list[1]["id"], ApplicationServiceState.UP
-        )
+        yield self._set_state(self.as_list[0]["id"], ApplicationServiceState.DOWN)
+        yield self._set_state(self.as_list[1]["id"], ApplicationServiceState.UP)
 
         services = yield self.store.get_appservices_by_state(
             ApplicationServiceState.DOWN
@@ -397,18 +351,10 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_get_appservices_by_state_multiple(self):
-        yield self._set_state(
-            self.as_list[0]["id"], ApplicationServiceState.DOWN
-        )
-        yield self._set_state(
-            self.as_list[1]["id"], ApplicationServiceState.UP
-        )
-        yield self._set_state(
-            self.as_list[2]["id"], ApplicationServiceState.DOWN
-        )
-        yield self._set_state(
-            self.as_list[3]["id"], ApplicationServiceState.UP
-        )
+        yield self._set_state(self.as_list[0]["id"], ApplicationServiceState.DOWN)
+        yield self._set_state(self.as_list[1]["id"], ApplicationServiceState.UP)
+        yield self._set_state(self.as_list[2]["id"], ApplicationServiceState.DOWN)
+        yield self._set_state(self.as_list[3]["id"], ApplicationServiceState.UP)
 
         services = yield self.store.get_appservices_by_state(
             ApplicationServiceState.DOWN
@@ -416,20 +362,17 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
         self.assertEquals(2, len(services))
         self.assertEquals(
             set([self.as_list[2]["id"], self.as_list[0]["id"]]),
-            set([services[0].id, services[1].id])
+            set([services[0].id, services[1].id]),
         )
 
 
 # required for ApplicationServiceTransactionStoreTestCase tests
-class TestTransactionStore(ApplicationServiceTransactionStore,
-                           ApplicationServiceStore):
-
+class TestTransactionStore(ApplicationServiceTransactionStore, ApplicationServiceStore):
     def __init__(self, db_conn, hs):
         super(TestTransactionStore, self).__init__(db_conn, hs)
 
 
 class ApplicationServiceStoreConfigTestCase(unittest.TestCase):
-
     def _write_config(self, suffix, **kwargs):
         vals = {
             "id": "id" + suffix,
@@ -452,10 +395,10 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase):
         f2 = self._write_config(suffix="2")
 
         config = Mock(
-            app_service_config_files=[f1, f2], event_cache_size=1,
-            password_providers=[]
+            app_service_config_files=[f1, f2], event_cache_size=1, password_providers=[]
         )
         hs = yield setup_test_homeserver(
+            self.addCleanup,
             config=config,
             datastore=Mock(),
             federation_sender=Mock(),
@@ -470,10 +413,10 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase):
         f2 = self._write_config(id="id", suffix="2")
 
         config = Mock(
-            app_service_config_files=[f1, f2], event_cache_size=1,
-            password_providers=[]
+            app_service_config_files=[f1, f2], event_cache_size=1, password_providers=[]
         )
         hs = yield setup_test_homeserver(
+            self.addCleanup,
             config=config,
             datastore=Mock(),
             federation_sender=Mock(),
@@ -494,10 +437,10 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase):
         f2 = self._write_config(as_token="as_token", suffix="2")
 
         config = Mock(
-            app_service_config_files=[f1, f2], event_cache_size=1,
-            password_providers=[]
+            app_service_config_files=[f1, f2], event_cache_size=1, password_providers=[]
         )
         hs = yield setup_test_homeserver(
+            self.addCleanup,
             config=config,
             datastore=Mock(),
             federation_sender=Mock(),
diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py
index ab1f310572..81403727c5 100644
--- a/tests/storage/test_background_update.py
+++ b/tests/storage/test_background_update.py
@@ -7,10 +7,11 @@ from tests.utils import setup_test_homeserver
 
 
 class BackgroundUpdateTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield setup_test_homeserver()  # type: synapse.server.HomeServer
+        hs = yield setup_test_homeserver(
+            self.addCleanup
+        )  # type: synapse.server.HomeServer
         self.store = hs.get_datastore()
         self.clock = hs.get_clock()
 
@@ -51,9 +52,7 @@ class BackgroundUpdateTestCase(unittest.TestCase):
         yield self.store.start_background_update("test_update", {"my_key": 1})
 
         self.update_handler.reset_mock()
-        result = yield self.store.do_next_background_update(
-            duration_ms * desired_count
-        )
+        result = yield self.store.do_next_background_update(duration_ms * desired_count)
         self.assertIsNotNone(result)
         self.update_handler.assert_called_once_with(
             {"my_key": 1}, self.store.DEFAULT_BACKGROUND_BATCH_SIZE
@@ -67,18 +66,12 @@ class BackgroundUpdateTestCase(unittest.TestCase):
 
         self.update_handler.side_effect = update
         self.update_handler.reset_mock()
-        result = yield self.store.do_next_background_update(
-            duration_ms * desired_count
-        )
+        result = yield self.store.do_next_background_update(duration_ms * desired_count)
         self.assertIsNotNone(result)
-        self.update_handler.assert_called_once_with(
-            {"my_key": 2}, desired_count
-        )
+        self.update_handler.assert_called_once_with({"my_key": 2}, desired_count)
 
         # third step: we don't expect to be called any more
         self.update_handler.reset_mock()
-        result = yield self.store.do_next_background_update(
-            duration_ms * desired_count
-        )
+        result = yield self.store.do_next_background_update(duration_ms * desired_count)
         self.assertIsNone(result)
         self.assertFalse(self.update_handler.called)
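
The recurring change in these storage tests is the new first argument to
setup_test_homeserver: a cleanup-registration callable. Passing the standard
unittest addCleanup hook means the homeserver's resources are torn down
automatically when each test finishes, as in this sketch of the pattern:

    @defer.inlineCallbacks
    def setUp(self):
        # addCleanup registers teardown callbacks that run after the
        # test completes, whether it passes or fails.
        hs = yield setup_test_homeserver(self.addCleanup)
        self.store = hs.get_datastore()
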
diff --git a/tests/storage/test_base.py b/tests/storage/test_base.py
index 1d1234ee39..7cb5f0e4cf 100644
--- a/tests/storage/test_base.py
+++ b/tests/storage/test_base.py
@@ -40,10 +40,12 @@ class SQLBaseStoreTestCase(unittest.TestCase):
 
         def runInteraction(func, *args, **kwargs):
             return defer.succeed(func(self.mock_txn, *args, **kwargs))
+
         self.db_pool.runInteraction = runInteraction
 
         def runWithConnection(func, *args, **kwargs):
             return defer.succeed(func(self.mock_conn, *args, **kwargs))
+
         self.db_pool.runWithConnection = runWithConnection
 
         config = Mock()
@@ -63,8 +65,7 @@ class SQLBaseStoreTestCase(unittest.TestCase):
         self.mock_txn.rowcount = 1
 
         yield self.datastore._simple_insert(
-            table="tablename",
-            values={"columname": "Value"}
+            table="tablename", values={"columname": "Value"}
         )
 
         self.mock_txn.execute.assert_called_with(
@@ -78,12 +79,11 @@ class SQLBaseStoreTestCase(unittest.TestCase):
         yield self.datastore._simple_insert(
             table="tablename",
             # Use OrderedDict() so we can assert on the SQL generated
-            values=OrderedDict([("colA", 1), ("colB", 2), ("colC", 3)])
+            values=OrderedDict([("colA", 1), ("colB", 2), ("colC", 3)]),
         )
 
         self.mock_txn.execute.assert_called_with(
-            "INSERT INTO tablename (colA, colB, colC) VALUES(?, ?, ?)",
-            (1, 2, 3,)
+            "INSERT INTO tablename (colA, colB, colC) VALUES(?, ?, ?)", (1, 2, 3)
         )
 
     @defer.inlineCallbacks
@@ -92,9 +92,7 @@ class SQLBaseStoreTestCase(unittest.TestCase):
         self.mock_txn.__iter__ = Mock(return_value=iter([("Value",)]))
 
         value = yield self.datastore._simple_select_one_onecol(
-            table="tablename",
-            keyvalues={"keycol": "TheKey"},
-            retcol="retcol"
+            table="tablename", keyvalues={"keycol": "TheKey"}, retcol="retcol"
         )
 
         self.assertEquals("Value", value)
@@ -110,13 +108,12 @@ class SQLBaseStoreTestCase(unittest.TestCase):
         ret = yield self.datastore._simple_select_one(
             table="tablename",
             keyvalues={"keycol": "TheKey"},
-            retcols=["colA", "colB", "colC"]
+            retcols=["colA", "colB", "colC"],
         )
 
         self.assertEquals({"colA": 1, "colB": 2, "colC": 3}, ret)
         self.mock_txn.execute.assert_called_with(
-            "SELECT colA, colB, colC FROM tablename WHERE keycol = ?",
-            ["TheKey"]
+            "SELECT colA, colB, colC FROM tablename WHERE keycol = ?", ["TheKey"]
         )
 
     @defer.inlineCallbacks
@@ -128,7 +125,7 @@ class SQLBaseStoreTestCase(unittest.TestCase):
             table="tablename",
             keyvalues={"keycol": "Not here"},
             retcols=["colA"],
-            allow_none=True
+            allow_none=True,
         )
 
         self.assertFalse(ret)
@@ -137,20 +134,15 @@ class SQLBaseStoreTestCase(unittest.TestCase):
     def test_select_list(self):
         self.mock_txn.rowcount = 3
         self.mock_txn.__iter__ = Mock(return_value=iter([(1,), (2,), (3,)]))
-        self.mock_txn.description = (
-            ("colA", None, None, None, None, None, None),
-        )
+        self.mock_txn.description = (("colA", None, None, None, None, None, None),)
 
         ret = yield self.datastore._simple_select_list(
-            table="tablename",
-            keyvalues={"keycol": "A set"},
-            retcols=["colA"],
+            table="tablename", keyvalues={"keycol": "A set"}, retcols=["colA"]
         )
 
         self.assertEquals([{"colA": 1}, {"colA": 2}, {"colA": 3}], ret)
         self.mock_txn.execute.assert_called_with(
-            "SELECT colA FROM tablename WHERE keycol = ?",
-            ["A set"]
+            "SELECT colA FROM tablename WHERE keycol = ?", ["A set"]
         )
 
     @defer.inlineCallbacks
@@ -160,12 +152,12 @@ class SQLBaseStoreTestCase(unittest.TestCase):
         yield self.datastore._simple_update_one(
             table="tablename",
             keyvalues={"keycol": "TheKey"},
-            updatevalues={"columnname": "New Value"}
+            updatevalues={"columnname": "New Value"},
         )
 
         self.mock_txn.execute.assert_called_with(
             "UPDATE tablename SET columnname = ? WHERE keycol = ?",
-            ["New Value", "TheKey"]
+            ["New Value", "TheKey"],
         )
 
     @defer.inlineCallbacks
@@ -175,13 +167,12 @@ class SQLBaseStoreTestCase(unittest.TestCase):
         yield self.datastore._simple_update_one(
             table="tablename",
             keyvalues=OrderedDict([("colA", 1), ("colB", 2)]),
-            updatevalues=OrderedDict([("colC", 3), ("colD", 4)])
+            updatevalues=OrderedDict([("colC", 3), ("colD", 4)]),
         )
 
         self.mock_txn.execute.assert_called_with(
-            "UPDATE tablename SET colC = ?, colD = ? WHERE"
-            " colA = ? AND colB = ?",
-            [3, 4, 1, 2]
+            "UPDATE tablename SET colC = ?, colD = ? WHERE" " colA = ? AND colB = ?",
+            [3, 4, 1, 2],
         )
 
     @defer.inlineCallbacks
@@ -189,8 +180,7 @@ class SQLBaseStoreTestCase(unittest.TestCase):
         self.mock_txn.rowcount = 1
 
         yield self.datastore._simple_delete_one(
-            table="tablename",
-            keyvalues={"keycol": "Go away"},
+            table="tablename", keyvalues={"keycol": "Go away"}
         )
 
         self.mock_txn.execute.assert_called_with(
diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py
index bd6fda6cb1..c2e88bdbaf 100644
--- a/tests/storage/test_client_ips.py
+++ b/tests/storage/test_client_ips.py
@@ -12,6 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from mock import Mock
 
 from twisted.internet import defer
 
@@ -27,17 +28,16 @@ class ClientIpStoreTestCase(tests.unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield tests.utils.setup_test_homeserver()
-        self.store = hs.get_datastore()
-        self.clock = hs.get_clock()
+        self.hs = yield tests.utils.setup_test_homeserver(self.addCleanup)
+        self.store = self.hs.get_datastore()
+        self.clock = self.hs.get_clock()
 
     @defer.inlineCallbacks
     def test_insert_new_client_ip(self):
         self.clock.now = 12345678
         user_id = "@user:id"
         yield self.store.insert_client_ip(
-            user_id,
-            "access_token", "ip", "user_agent", "device_id",
+            user_id, "access_token", "ip", "user_agent", "device_id"
         )
 
         result = yield self.store.get_last_client_ip_by_device(user_id, "device_id")
@@ -52,5 +52,64 @@ class ClientIpStoreTestCase(tests.unittest.TestCase):
                 "user_agent": "user_agent",
                 "last_seen": 12345678000,
             },
-            r
+            r,
         )
+
+    @defer.inlineCallbacks
+    def test_disabled_monthly_active_user(self):
+        self.hs.config.limit_usage_by_mau = False
+        self.hs.config.max_mau_value = 50
+        user_id = "@user:server"
+        yield self.store.insert_client_ip(
+            user_id, "access_token", "ip", "user_agent", "device_id"
+        )
+        active = yield self.store.user_last_seen_monthly_active(user_id)
+        self.assertFalse(active)
+
+    @defer.inlineCallbacks
+    def test_adding_monthly_active_user_when_full(self):
+        self.hs.config.limit_usage_by_mau = True
+        self.hs.config.max_mau_value = 50
+        lots_of_users = 100
+        user_id = "@user:server"
+
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(lots_of_users)
+        )
+        yield self.store.insert_client_ip(
+            user_id, "access_token", "ip", "user_agent", "device_id"
+        )
+        active = yield self.store.user_last_seen_monthly_active(user_id)
+        self.assertFalse(active)
+
+    @defer.inlineCallbacks
+    def test_adding_monthly_active_user_when_space(self):
+        self.hs.config.limit_usage_by_mau = True
+        self.hs.config.max_mau_value = 50
+        user_id = "@user:server"
+        active = yield self.store.user_last_seen_monthly_active(user_id)
+        self.assertFalse(active)
+
+        yield self.store.insert_client_ip(
+            user_id, "access_token", "ip", "user_agent", "device_id"
+        )
+        active = yield self.store.user_last_seen_monthly_active(user_id)
+        self.assertTrue(active)
+
+    @defer.inlineCallbacks
+    def test_updating_monthly_active_user_when_space(self):
+        self.hs.config.limit_usage_by_mau = True
+        self.hs.config.max_mau_value = 50
+        user_id = "@user:server"
+
+        active = yield self.store.user_last_seen_monthly_active(user_id)
+        self.assertFalse(active)
+
+        yield self.store.insert_client_ip(
+            user_id, "access_token", "ip", "user_agent", "device_id"
+        )
+        yield self.store.insert_client_ip(
+            user_id, "access_token", "ip", "user_agent", "device_id"
+        )
+        active = yield self.store.user_last_seen_monthly_active(user_id)
+        self.assertTrue(active)
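
Taken together, the new monthly-active-user tests above pin down the gating
behaviour around insert_client_ip. A sketch of that logic as the tests imply
it — the helper name and the store methods here are assumptions for
illustration, not the actual implementation:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def _maybe_record_mau(store, config, user_id):
        # With limiting disabled, nothing is tracked at all.
        if not config.limit_usage_by_mau:
            return
        # When the cap is already reached, the user is not recorded.
        current = yield store.get_monthly_active_count()
        if current < config.max_mau_value:
            yield store.upsert_monthly_active_user(user_id)
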
diff --git a/tests/storage/test_devices.py b/tests/storage/test_devices.py
index a54cc6bc32..aef4dfaf57 100644
--- a/tests/storage/test_devices.py
+++ b/tests/storage/test_devices.py
@@ -28,68 +28,64 @@ class DeviceStoreTestCase(tests.unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield tests.utils.setup_test_homeserver()
+        hs = yield tests.utils.setup_test_homeserver(self.addCleanup)
 
         self.store = hs.get_datastore()
 
     @defer.inlineCallbacks
     def test_store_new_device(self):
-        yield self.store.store_device(
-            "user_id", "device_id", "display_name"
-        )
+        yield self.store.store_device("user_id", "device_id", "display_name")
 
         res = yield self.store.get_device("user_id", "device_id")
-        self.assertDictContainsSubset({
-            "user_id": "user_id",
-            "device_id": "device_id",
-            "display_name": "display_name",
-        }, res)
+        self.assertDictContainsSubset(
+            {
+                "user_id": "user_id",
+                "device_id": "device_id",
+                "display_name": "display_name",
+            },
+            res,
+        )
 
     @defer.inlineCallbacks
     def test_get_devices_by_user(self):
-        yield self.store.store_device(
-            "user_id", "device1", "display_name 1"
-        )
-        yield self.store.store_device(
-            "user_id", "device2", "display_name 2"
-        )
-        yield self.store.store_device(
-            "user_id2", "device3", "display_name 3"
-        )
+        yield self.store.store_device("user_id", "device1", "display_name 1")
+        yield self.store.store_device("user_id", "device2", "display_name 2")
+        yield self.store.store_device("user_id2", "device3", "display_name 3")
 
         res = yield self.store.get_devices_by_user("user_id")
         self.assertEqual(2, len(res.keys()))
-        self.assertDictContainsSubset({
-            "user_id": "user_id",
-            "device_id": "device1",
-            "display_name": "display_name 1",
-        }, res["device1"])
-        self.assertDictContainsSubset({
-            "user_id": "user_id",
-            "device_id": "device2",
-            "display_name": "display_name 2",
-        }, res["device2"])
+        self.assertDictContainsSubset(
+            {
+                "user_id": "user_id",
+                "device_id": "device1",
+                "display_name": "display_name 1",
+            },
+            res["device1"],
+        )
+        self.assertDictContainsSubset(
+            {
+                "user_id": "user_id",
+                "device_id": "device2",
+                "display_name": "display_name 2",
+            },
+            res["device2"],
+        )
 
     @defer.inlineCallbacks
     def test_update_device(self):
-        yield self.store.store_device(
-            "user_id", "device_id", "display_name 1"
-        )
+        yield self.store.store_device("user_id", "device_id", "display_name 1")
 
         res = yield self.store.get_device("user_id", "device_id")
         self.assertEqual("display_name 1", res["display_name"])
 
         # do a no-op first
-        yield self.store.update_device(
-            "user_id", "device_id",
-        )
+        yield self.store.update_device("user_id", "device_id")
         res = yield self.store.get_device("user_id", "device_id")
         self.assertEqual("display_name 1", res["display_name"])
 
         # do the update
         yield self.store.update_device(
-            "user_id", "device_id",
-            new_display_name="display_name 2",
+            "user_id", "device_id", new_display_name="display_name 2"
         )
 
         # check it worked
@@ -100,7 +96,6 @@ class DeviceStoreTestCase(tests.unittest.TestCase):
     def test_update_unknown_device(self):
         with self.assertRaises(synapse.api.errors.StoreError) as cm:
             yield self.store.update_device(
-                "user_id", "unknown_device_id",
-                new_display_name="display_name 2",
+                "user_id", "unknown_device_id", new_display_name="display_name 2"
             )
         self.assertEqual(404, cm.exception.code)
diff --git a/tests/storage/test_directory.py b/tests/storage/test_directory.py
index 129ebaf343..b4510c1c8d 100644
--- a/tests/storage/test_directory.py
+++ b/tests/storage/test_directory.py
@@ -24,10 +24,9 @@ from tests.utils import setup_test_homeserver
 
 
 class DirectoryStoreTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield setup_test_homeserver()
+        hs = yield setup_test_homeserver(self.addCleanup)
 
         self.store = DirectoryStore(None, hs)
 
@@ -37,38 +36,29 @@ class DirectoryStoreTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def test_room_to_alias(self):
         yield self.store.create_room_alias_association(
-            room_alias=self.alias,
-            room_id=self.room.to_string(),
-            servers=["test"],
+            room_alias=self.alias, room_id=self.room.to_string(), servers=["test"]
         )
 
         self.assertEquals(
             ["#my-room:test"],
-            (yield self.store.get_aliases_for_room(self.room.to_string()))
+            (yield self.store.get_aliases_for_room(self.room.to_string())),
         )
 
     @defer.inlineCallbacks
     def test_alias_to_room(self):
         yield self.store.create_room_alias_association(
-            room_alias=self.alias,
-            room_id=self.room.to_string(),
-            servers=["test"],
+            room_alias=self.alias, room_id=self.room.to_string(), servers=["test"]
         )
 
         self.assertObjectHasAttributes(
-            {
-                "room_id": self.room.to_string(),
-                "servers": ["test"],
-            },
-            (yield self.store.get_association_from_room_alias(self.alias))
+            {"room_id": self.room.to_string(), "servers": ["test"]},
+            (yield self.store.get_association_from_room_alias(self.alias)),
         )
 
     @defer.inlineCallbacks
     def test_delete_alias(self):
         yield self.store.create_room_alias_association(
-            room_alias=self.alias,
-            room_id=self.room.to_string(),
-            servers=["test"],
+            room_alias=self.alias, room_id=self.room.to_string(), servers=["test"]
         )
 
         room_id = yield self.store.delete_room_alias(self.alias)
diff --git a/tests/storage/test_end_to_end_keys.py b/tests/storage/test_end_to_end_keys.py
index 84ce492a2c..8f0aaece40 100644
--- a/tests/storage/test_end_to_end_keys.py
+++ b/tests/storage/test_end_to_end_keys.py
@@ -26,8 +26,7 @@ class EndToEndKeyStoreTestCase(tests.unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield tests.utils.setup_test_homeserver()
-
+        hs = yield tests.utils.setup_test_homeserver(self.addCleanup)
         self.store = hs.get_datastore()
 
     @defer.inlineCallbacks
@@ -35,70 +34,49 @@ class EndToEndKeyStoreTestCase(tests.unittest.TestCase):
         now = 1470174257070
         json = {"key": "value"}
 
-        yield self.store.store_device(
-            "user", "device", None
-        )
+        yield self.store.store_device("user", "device", None)
 
-        yield self.store.set_e2e_device_keys(
-            "user", "device", now, json)
+        yield self.store.set_e2e_device_keys("user", "device", now, json)
 
         res = yield self.store.get_e2e_device_keys((("user", "device"),))
         self.assertIn("user", res)
         self.assertIn("device", res["user"])
         dev = res["user"]["device"]
-        self.assertDictContainsSubset({
-            "keys": json,
-            "device_display_name": None,
-        }, dev)
+        self.assertDictContainsSubset({"keys": json, "device_display_name": None}, dev)
 
     @defer.inlineCallbacks
     def test_get_key_with_device_name(self):
         now = 1470174257070
         json = {"key": "value"}
 
-        yield self.store.set_e2e_device_keys(
-            "user", "device", now, json)
-        yield self.store.store_device(
-            "user", "device", "display_name"
-        )
+        yield self.store.set_e2e_device_keys("user", "device", now, json)
+        yield self.store.store_device("user", "device", "display_name")
 
         res = yield self.store.get_e2e_device_keys((("user", "device"),))
         self.assertIn("user", res)
         self.assertIn("device", res["user"])
         dev = res["user"]["device"]
-        self.assertDictContainsSubset({
-            "keys": json,
-            "device_display_name": "display_name",
-        }, dev)
+        self.assertDictContainsSubset(
+            {"keys": json, "device_display_name": "display_name"}, dev
+        )
 
     @defer.inlineCallbacks
     def test_multiple_devices(self):
         now = 1470174257070
 
-        yield self.store.store_device(
-            "user1", "device1", None
-        )
-        yield self.store.store_device(
-            "user1", "device2", None
-        )
-        yield self.store.store_device(
-            "user2", "device1", None
-        )
-        yield self.store.store_device(
-            "user2", "device2", None
-        )
+        yield self.store.store_device("user1", "device1", None)
+        yield self.store.store_device("user1", "device2", None)
+        yield self.store.store_device("user2", "device1", None)
+        yield self.store.store_device("user2", "device2", None)
 
-        yield self.store.set_e2e_device_keys(
-            "user1", "device1", now, 'json11')
-        yield self.store.set_e2e_device_keys(
-            "user1", "device2", now, 'json12')
-        yield self.store.set_e2e_device_keys(
-            "user2", "device1", now, 'json21')
-        yield self.store.set_e2e_device_keys(
-            "user2", "device2", now, 'json22')
-
-        res = yield self.store.get_e2e_device_keys((("user1", "device1"),
-                                                    ("user2", "device2")))
+        yield self.store.set_e2e_device_keys("user1", "device1", now, 'json11')
+        yield self.store.set_e2e_device_keys("user1", "device2", now, 'json12')
+        yield self.store.set_e2e_device_keys("user2", "device1", now, 'json21')
+        yield self.store.set_e2e_device_keys("user2", "device2", now, 'json22')
+
+        res = yield self.store.get_e2e_device_keys(
+            (("user1", "device1"), ("user2", "device2"))
+        )
         self.assertIn("user1", res)
         self.assertIn("device1", res["user1"])
         self.assertNotIn("device2", res["user1"])
diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py
index 30683e7888..2fdf34fdf6 100644
--- a/tests/storage/test_event_federation.py
+++ b/tests/storage/test_event_federation.py
@@ -22,7 +22,7 @@ import tests.utils
 class EventFederationWorkerStoreTestCase(tests.unittest.TestCase):
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield tests.utils.setup_test_homeserver()
+        hs = yield tests.utils.setup_test_homeserver(self.addCleanup)
         self.store = hs.get_datastore()
 
     @defer.inlineCallbacks
@@ -33,23 +33,32 @@ class EventFederationWorkerStoreTestCase(tests.unittest.TestCase):
         def insert_event(txn, i):
             event_id = '$event_%i:local' % i
 
-            txn.execute((
-                "INSERT INTO events ("
-                "   room_id, event_id, type, depth, topological_ordering,"
-                "   content, processed, outlier) "
-                "VALUES (?, ?, 'm.test', ?, ?, 'test', ?, ?)"
-            ), (room_id, event_id, i, i, True, False))
+            txn.execute(
+                (
+                    "INSERT INTO events ("
+                    "   room_id, event_id, type, depth, topological_ordering,"
+                    "   content, processed, outlier) "
+                    "VALUES (?, ?, 'm.test', ?, ?, 'test', ?, ?)"
+                ),
+                (room_id, event_id, i, i, True, False),
+            )
 
-            txn.execute((
-                'INSERT INTO event_forward_extremities (room_id, event_id) '
-                'VALUES (?, ?)'
-            ), (room_id, event_id))
+            txn.execute(
+                (
+                    'INSERT INTO event_forward_extremities (room_id, event_id) '
+                    'VALUES (?, ?)'
+                ),
+                (room_id, event_id),
+            )
 
-            txn.execute((
-                'INSERT INTO event_reference_hashes '
-                '(event_id, algorithm, hash) '
-                "VALUES (?, 'sha256', ?)"
-            ), (event_id, 'ffff'))
+            txn.execute(
+                (
+                    'INSERT INTO event_reference_hashes '
+                    '(event_id, algorithm, hash) '
+                    "VALUES (?, 'sha256', ?)"
+                ),
+                (event_id, b'ffff'),
+            )
 
         for i in range(0, 11):
             yield self.store.runInteraction("insert", insert_event, i)
diff --git a/tests/storage/test_event_push_actions.py b/tests/storage/test_event_push_actions.py
index 8430fc7ba6..b114c6fb1d 100644
--- a/tests/storage/test_event_push_actions.py
+++ b/tests/storage/test_event_push_actions.py
@@ -24,15 +24,16 @@ USER_ID = "@user:example.com"
 
 PlAIN_NOTIF = ["notify", {"set_tweak": "highlight", "value": False}]
 HIGHLIGHT = [
-    "notify", {"set_tweak": "sound", "value": "default"}, {"set_tweak": "highlight"}
+    "notify",
+    {"set_tweak": "sound", "value": "default"},
+    {"set_tweak": "highlight"},
 ]
 
 
 class EventPushActionsStoreTestCase(tests.unittest.TestCase):
-
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield tests.utils.setup_test_homeserver()
+        hs = yield tests.utils.setup_test_homeserver(self.addCleanup)
         self.store = hs.get_datastore()
 
     @defer.inlineCallbacks
@@ -55,12 +56,11 @@ class EventPushActionsStoreTestCase(tests.unittest.TestCase):
         @defer.inlineCallbacks
-        def _assert_counts(noitf_count, highlight_count):
+        def _assert_counts(notif_count, highlight_count):
             counts = yield self.store.runInteraction(
-                "", self.store._get_unread_counts_by_pos_txn,
-                room_id, user_id, 0
+                "", self.store._get_unread_counts_by_pos_txn, room_id, user_id, 0
             )
             self.assertEquals(
                 counts,
-                {"notify_count": noitf_count, "highlight_count": highlight_count}
+                {"notify_count": noitf_count, "highlight_count": highlight_count},
             )
 
         @defer.inlineCallbacks
@@ -72,11 +72,13 @@ class EventPushActionsStoreTestCase(tests.unittest.TestCase):
             event.depth = stream
 
             yield self.store.add_push_actions_to_staging(
-                event.event_id, {user_id: action},
+                event.event_id, {user_id: action}
             )
             yield self.store.runInteraction(
-                "", self.store._set_push_actions_for_event_and_users_txn,
-                [(event, None)], [(event, None)],
+                "",
+                self.store._set_push_actions_for_event_and_users_txn,
+                [(event, None)],
+                [(event, None)],
             )
 
         def _rotate(stream):
@@ -86,8 +88,11 @@ class EventPushActionsStoreTestCase(tests.unittest.TestCase):
 
         def _mark_read(stream, depth):
             return self.store.runInteraction(
-                "", self.store._remove_old_push_actions_before_txn,
-                room_id, user_id, stream
+                "",
+                self.store._remove_old_push_actions_before_txn,
+                room_id,
+                user_id,
+                stream,
             )
 
         yield _assert_counts(0, 0)
@@ -112,9 +117,7 @@ class EventPushActionsStoreTestCase(tests.unittest.TestCase):
         yield _rotate(7)
 
         yield self.store._simple_delete(
-            table="event_push_actions",
-            keyvalues={"1": 1},
-            desc="",
+            table="event_push_actions", keyvalues={"1": 1}, desc=""
         )
 
         yield _assert_counts(1, 0)
@@ -132,18 +135,21 @@ class EventPushActionsStoreTestCase(tests.unittest.TestCase):
     @defer.inlineCallbacks
     def test_find_first_stream_ordering_after_ts(self):
         def add_event(so, ts):
-            return self.store._simple_insert("events", {
-                "stream_ordering": so,
-                "received_ts": ts,
-                "event_id": "event%i" % so,
-                "type": "",
-                "room_id": "",
-                "content": "",
-                "processed": True,
-                "outlier": False,
-                "topological_ordering": 0,
-                "depth": 0,
-            })
+            return self.store._simple_insert(
+                "events",
+                {
+                    "stream_ordering": so,
+                    "received_ts": ts,
+                    "event_id": "event%i" % so,
+                    "type": "",
+                    "room_id": "",
+                    "content": "",
+                    "processed": True,
+                    "outlier": False,
+                    "topological_ordering": 0,
+                    "depth": 0,
+                },
+            )
 
         # start with the base case where there are no events in the table
         r = yield self.store.find_first_stream_ordering_after_ts(11)
@@ -160,31 +166,27 @@ class EventPushActionsStoreTestCase(tests.unittest.TestCase):
 
         # add a bunch of dummy events to the events table
         for (stream_ordering, ts) in (
-                (3, 110),
-                (4, 120),
-                (5, 120),
-                (10, 130),
-                (20, 140),
+            (3, 110),
+            (4, 120),
+            (5, 120),
+            (10, 130),
+            (20, 140),
         ):
             yield add_event(stream_ordering, ts)
 
         r = yield self.store.find_first_stream_ordering_after_ts(110)
-        self.assertEqual(r, 3,
-                         "First event after 110ms should be 3, was %i" % r)
+        self.assertEqual(r, 3, "First event after 110ms should be 3, was %i" % r)
 
         # 4 and 5 are both after 120: we want 4 rather than 5
         r = yield self.store.find_first_stream_ordering_after_ts(120)
-        self.assertEqual(r, 4,
-                         "First event after 120ms should be 4, was %i" % r)
+        self.assertEqual(r, 4, "First event after 120ms should be 4, was %i" % r)
 
         r = yield self.store.find_first_stream_ordering_after_ts(129)
-        self.assertEqual(r, 10,
-                         "First event after 129ms should be 10, was %i" % r)
+        self.assertEqual(r, 10, "First event after 129ms should be 10, was %i" % r)
 
         # check we can get the last event
         r = yield self.store.find_first_stream_ordering_after_ts(140)
-        self.assertEqual(r, 20,
-                         "First event after 14ms should be 20, was %i" % r)
+        self.assertEqual(r, 20, "First event after 140ms should be 20, was %i" % r)
 
         # off the end
         r = yield self.store.find_first_stream_ordering_after_ts(160)
diff --git a/tests/storage/test_keys.py b/tests/storage/test_keys.py
index 3a3d002782..47f4a8ceac 100644
--- a/tests/storage/test_keys.py
+++ b/tests/storage/test_keys.py
@@ -28,7 +28,7 @@ class KeyStoreTestCase(tests.unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield tests.utils.setup_test_homeserver()
+        hs = yield tests.utils.setup_test_homeserver(self.addCleanup)
         self.store = hs.get_datastore()
 
     @defer.inlineCallbacks
@@ -39,15 +39,12 @@ class KeyStoreTestCase(tests.unittest.TestCase):
         key2 = signedjson.key.decode_verify_key_base64(
             "ed25519", "key2", "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
         )
-        yield self.store.store_server_verify_key(
-            "server1", "from_server", 0, key1
-        )
-        yield self.store.store_server_verify_key(
-            "server1", "from_server", 0, key2
-        )
+        yield self.store.store_server_verify_key("server1", "from_server", 0, key1)
+        yield self.store.store_server_verify_key("server1", "from_server", 0, key2)
 
         res = yield self.store.get_server_verify_keys(
-            "server1", ["ed25519:key1", "ed25519:key2", "ed25519:key3"])
+            "server1", ["ed25519:key1", "ed25519:key2", "ed25519:key3"]
+        )
 
         self.assertEqual(len(res.keys()), 2)
         self.assertEqual(res["ed25519:key1"].version, "key1")
diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py
new file mode 100644
index 0000000000..f2ed866ae7
--- /dev/null
+++ b/tests/storage/test_monthly_active_users.py
@@ -0,0 +1,135 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+import tests.unittest
+import tests.utils
+from tests.utils import setup_test_homeserver
+
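+# 40 days in seconds: comfortably longer than the 30-day monthly-active window,
+# so advancing the mock clock by this much should make every entry stale.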
+FORTY_DAYS = 40 * 24 * 60 * 60
+
+
+class MonthlyActiveUsersTestCase(tests.unittest.TestCase):
+    @defer.inlineCallbacks
+    def setUp(self):
+        self.hs = yield setup_test_homeserver(self.addCleanup)
+        self.store = self.hs.get_datastore()
+
+    @defer.inlineCallbacks
+    def test_initialise_reserved_users(self):
+        self.hs.config.max_mau_value = 5
+        user1 = "@user1:server"
+        user1_email = "user1@matrix.org"
+        user2 = "@user2:server"
+        user2_email = "user2@matrix.org"
+        threepids = [
+            {'medium': 'email', 'address': user1_email},
+            {'medium': 'email', 'address': user2_email},
+        ]
+        user_num = len(threepids)
+
+        yield self.store.register(user_id=user1, token="123", password_hash=None)
+        yield self.store.register(user_id=user2, token="456", password_hash=None)
+
+        now = int(self.hs.get_clock().time_msec())
+        yield self.store.user_add_threepid(user1, "email", user1_email, now, now)
+        yield self.store.user_add_threepid(user2, "email", user2_email, now, now)
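+        # Registering these threepids as reserved marks both users as monthly
+        # active and is expected to shield them from reaping below.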
+        yield self.store.initialise_reserved_users(threepids)
+
+        active_count = yield self.store.get_monthly_active_count()
+
+        # Test total counts
+        self.assertEquals(active_count, user_num)
+
+        # Test user is marked as active
+        timestamp = yield self.store.user_last_seen_monthly_active(user1)
+        self.assertTrue(timestamp)
+        timestamp = yield self.store.user_last_seen_monthly_active(user2)
+        self.assertTrue(timestamp)
+
+        # Test that users are never removed from the db.
+        self.hs.config.max_mau_value = 0
+
+        self.hs.get_clock().advance_time(FORTY_DAYS)
+
+        yield self.store.reap_monthly_active_users()
+
+        active_count = yield self.store.get_monthly_active_count()
+        self.assertEquals(active_count, user_num)
+
+        # Test that regular users are removed from the db
+        ru_count = 2
+        yield self.store.upsert_monthly_active_user("@ru1:server")
+        yield self.store.upsert_monthly_active_user("@ru2:server")
+        active_count = yield self.store.get_monthly_active_count()
+
+        self.assertEqual(active_count, user_num + ru_count)
+        self.hs.config.max_mau_value = user_num
+        yield self.store.reap_monthly_active_users()
+
+        active_count = yield self.store.get_monthly_active_count()
+        self.assertEquals(active_count, user_num)
+
+    @defer.inlineCallbacks
+    def test_can_insert_and_count_mau(self):
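+        # The table starts empty; a single upsert should count as one user.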
+        count = yield self.store.get_monthly_active_count()
+        self.assertEqual(0, count)
+
+        yield self.store.upsert_monthly_active_user("@user:server")
+        count = yield self.store.get_monthly_active_count()
+
+        self.assertEqual(1, count)
+
+    @defer.inlineCallbacks
+    def test_user_last_seen_monthly_active(self):
+        user_id1 = "@user1:server"
+        user_id2 = "@user2:server"
+        user_id3 = "@user3:server"
+
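+        # Before any upsert the lookup returns None, which compares unequal to 0.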
+        result = yield self.store.user_last_seen_monthly_active(user_id1)
+        self.assertFalse(result == 0)
+        yield self.store.upsert_monthly_active_user(user_id1)
+        yield self.store.upsert_monthly_active_user(user_id2)
+        result = yield self.store.user_last_seen_monthly_active(user_id1)
+        self.assertTrue(result > 0)
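+        # user3 was never upserted, so the lookup should again return None.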
+        result = yield self.store.user_last_seen_monthly_active(user_id3)
+        self.assertFalse(result == 0)
+
+    @defer.inlineCallbacks
+    def test_reap_monthly_active_users(self):
+        self.hs.config.max_mau_value = 5
+        initial_users = 10
+        for i in range(initial_users):
+            yield self.store.upsert_monthly_active_user("@user%d:server" % i)
+        count = yield self.store.get_monthly_active_count()
+        self.assertEqual(count, initial_users)
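+        # Reaping should trim the table back down to max_mau_value entries.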
+        yield self.store.reap_monthly_active_users()
+        count = yield self.store.get_monthly_active_count()
+        self.assertEquals(count, initial_users - self.hs.config.max_mau_value)
+
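+        # After 40 days every entry is stale, so a second reap empties the table.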
+        self.hs.get_clock().advance_time(FORTY_DAYS)
+        yield self.store.reap_monthly_active_users()
+        count = yield self.store.get_monthly_active_count()
+        self.assertEquals(count, 0)
diff --git a/tests/storage/test_presence.py b/tests/storage/test_presence.py
index 3276b39504..b5b58ff660 100644
--- a/tests/storage/test_presence.py
+++ b/tests/storage/test_presence.py
@@ -24,10 +24,9 @@ from tests.utils import MockClock, setup_test_homeserver
 
 
 class PresenceStoreTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield setup_test_homeserver(clock=MockClock())
+        hs = yield setup_test_homeserver(self.addCleanup, clock=MockClock())
 
         self.store = PresenceStore(None, hs)
 
@@ -38,16 +37,19 @@ class PresenceStoreTestCase(unittest.TestCase):
     def test_presence_list(self):
         self.assertEquals(
             [],
-            (yield self.store.get_presence_list(
-                observer_localpart=self.u_apple.localpart,
-            ))
+            (
+                yield self.store.get_presence_list(
+                    observer_localpart=self.u_apple.localpart
+                )
+            ),
         )
         self.assertEquals(
             [],
-            (yield self.store.get_presence_list(
-                observer_localpart=self.u_apple.localpart,
-                accepted=True,
-            ))
+            (
+                yield self.store.get_presence_list(
+                    observer_localpart=self.u_apple.localpart, accepted=True
+                )
+            ),
         )
 
         yield self.store.add_presence_list_pending(
@@ -57,16 +59,19 @@ class PresenceStoreTestCase(unittest.TestCase):
 
         self.assertEquals(
             [{"observed_user_id": "@banana:test", "accepted": 0}],
-            (yield self.store.get_presence_list(
-                observer_localpart=self.u_apple.localpart,
-            ))
+            (
+                yield self.store.get_presence_list(
+                    observer_localpart=self.u_apple.localpart
+                )
+            ),
         )
         self.assertEquals(
             [],
-            (yield self.store.get_presence_list(
-                observer_localpart=self.u_apple.localpart,
-                accepted=True,
-            ))
+            (
+                yield self.store.get_presence_list(
+                    observer_localpart=self.u_apple.localpart, accepted=True
+                )
+            ),
         )
 
         yield self.store.set_presence_list_accepted(
@@ -76,16 +81,19 @@ class PresenceStoreTestCase(unittest.TestCase):
 
         self.assertEquals(
             [{"observed_user_id": "@banana:test", "accepted": 1}],
-            (yield self.store.get_presence_list(
-                observer_localpart=self.u_apple.localpart,
-            ))
+            (
+                yield self.store.get_presence_list(
+                    observer_localpart=self.u_apple.localpart
+                )
+            ),
         )
         self.assertEquals(
             [{"observed_user_id": "@banana:test", "accepted": 1}],
-            (yield self.store.get_presence_list(
-                observer_localpart=self.u_apple.localpart,
-                accepted=True,
-            ))
+            (
+                yield self.store.get_presence_list(
+                    observer_localpart=self.u_apple.localpart, accepted=True
+                )
+            ),
         )
 
         yield self.store.del_presence_list(
@@ -95,14 +103,17 @@ class PresenceStoreTestCase(unittest.TestCase):
 
         self.assertEquals(
             [],
-            (yield self.store.get_presence_list(
-                observer_localpart=self.u_apple.localpart,
-            ))
+            (
+                yield self.store.get_presence_list(
+                    observer_localpart=self.u_apple.localpart
+                )
+            ),
         )
         self.assertEquals(
             [],
-            (yield self.store.get_presence_list(
-                observer_localpart=self.u_apple.localpart,
-                accepted=True,
-            ))
+            (
+                yield self.store.get_presence_list(
+                    observer_localpart=self.u_apple.localpart, accepted=True
+                )
+            ),
         )
diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py
index 2c95e5e95a..a1f6618bf9 100644
--- a/tests/storage/test_profile.py
+++ b/tests/storage/test_profile.py
@@ -24,10 +24,9 @@ from tests.utils import setup_test_homeserver
 
 
 class ProfileStoreTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield setup_test_homeserver()
+        hs = yield setup_test_homeserver(self.addCleanup)
 
         self.store = ProfileStore(None, hs)
 
@@ -35,24 +34,17 @@ class ProfileStoreTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_displayname(self):
-        yield self.store.create_profile(
-            self.u_frank.localpart
-        )
+        yield self.store.create_profile(self.u_frank.localpart)
 
-        yield self.store.set_profile_displayname(
-            self.u_frank.localpart, "Frank"
-        )
+        yield self.store.set_profile_displayname(self.u_frank.localpart, "Frank")
 
         self.assertEquals(
-            "Frank",
-            (yield self.store.get_profile_displayname(self.u_frank.localpart))
+            "Frank", (yield self.store.get_profile_displayname(self.u_frank.localpart))
         )
 
     @defer.inlineCallbacks
     def test_avatar_url(self):
-        yield self.store.create_profile(
-            self.u_frank.localpart
-        )
+        yield self.store.create_profile(self.u_frank.localpart)
 
         yield self.store.set_profile_avatar_url(
             self.u_frank.localpart, "http://my.site/here"
@@ -60,5 +52,5 @@ class ProfileStoreTestCase(unittest.TestCase):
 
         self.assertEquals(
             "http://my.site/here",
-            (yield self.store.get_profile_avatar_url(self.u_frank.localpart))
+            (yield self.store.get_profile_avatar_url(self.u_frank.localpart)),
         )
diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py
index 475ec900c4..02bf975fbf 100644
--- a/tests/storage/test_redaction.py
+++ b/tests/storage/test_redaction.py
@@ -22,16 +22,14 @@ from synapse.api.constants import EventTypes, Membership
 from synapse.types import RoomID, UserID
 
 from tests import unittest
-from tests.utils import setup_test_homeserver
+from tests.utils import create_room, setup_test_homeserver
 
 
 class RedactionTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def setUp(self):
         hs = yield setup_test_homeserver(
-            resource_for_federation=Mock(),
-            http_client=None,
+            self.addCleanup, resource_for_federation=Mock(), http_client=None
         )
 
         self.store = hs.get_datastore()
@@ -43,20 +41,25 @@ class RedactionTestCase(unittest.TestCase):
 
         self.room1 = RoomID.from_string("!abc123:test")
 
+        yield create_room(hs, self.room1.to_string(), self.u_alice.to_string())
+
         self.depth = 1
 
     @defer.inlineCallbacks
-    def inject_room_member(self, room, user, membership, replaces_state=None,
-                           extra_content={}):
+    def inject_room_member(
+        self, room, user, membership, replaces_state=None, extra_content={}
+    ):
         content = {"membership": membership}
         content.update(extra_content)
-        builder = self.event_builder_factory.new({
-            "type": EventTypes.Member,
-            "sender": user.to_string(),
-            "state_key": user.to_string(),
-            "room_id": room.to_string(),
-            "content": content,
-        })
+        builder = self.event_builder_factory.new(
+            {
+                "type": EventTypes.Member,
+                "sender": user.to_string(),
+                "state_key": user.to_string(),
+                "room_id": room.to_string(),
+                "content": content,
+            }
+        )
 
         event, context = yield self.event_creation_handler.create_new_client_event(
             builder
@@ -70,13 +73,15 @@ class RedactionTestCase(unittest.TestCase):
     def inject_message(self, room, user, body):
         self.depth += 1
 
-        builder = self.event_builder_factory.new({
-            "type": EventTypes.Message,
-            "sender": user.to_string(),
-            "state_key": user.to_string(),
-            "room_id": room.to_string(),
-            "content": {"body": body, "msgtype": u"message"},
-        })
+        builder = self.event_builder_factory.new(
+            {
+                "type": EventTypes.Message,
+                "sender": user.to_string(),
+                "state_key": user.to_string(),
+                "room_id": room.to_string(),
+                "content": {"body": body, "msgtype": u"message"},
+            }
+        )
 
         event, context = yield self.event_creation_handler.create_new_client_event(
             builder
@@ -88,14 +93,16 @@ class RedactionTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def inject_redaction(self, room, event_id, user, reason):
-        builder = self.event_builder_factory.new({
-            "type": EventTypes.Redaction,
-            "sender": user.to_string(),
-            "state_key": user.to_string(),
-            "room_id": room.to_string(),
-            "content": {"reason": reason},
-            "redacts": event_id,
-        })
+        builder = self.event_builder_factory.new(
+            {
+                "type": EventTypes.Redaction,
+                "sender": user.to_string(),
+                "state_key": user.to_string(),
+                "room_id": room.to_string(),
+                "content": {"reason": reason},
+                "redacts": event_id,
+            }
+        )
 
         event, context = yield self.event_creation_handler.create_new_client_event(
             builder
@@ -105,9 +112,7 @@ class RedactionTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_redact(self):
-        yield self.inject_room_member(
-            self.room1, self.u_alice, Membership.JOIN
-        )
+        yield self.inject_room_member(self.room1, self.u_alice, Membership.JOIN)
 
         msg_event = yield self.inject_message(self.room1, self.u_alice, u"t")
 
@@ -157,13 +162,10 @@ class RedactionTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_redact_join(self):
-        yield self.inject_room_member(
-            self.room1, self.u_alice, Membership.JOIN
-        )
+        yield self.inject_room_member(self.room1, self.u_alice, Membership.JOIN)
 
         msg_event = yield self.inject_room_member(
-            self.room1, self.u_bob, Membership.JOIN,
-            extra_content={"blue": "red"},
+            self.room1, self.u_bob, Membership.JOIN, extra_content={"blue": "red"}
         )
 
         event = yield self.store.get_event(msg_event.event_id)
diff --git a/tests/storage/test_registration.py b/tests/storage/test_registration.py
index 7821ea3fa3..3dfb7b903a 100644
--- a/tests/storage/test_registration.py
+++ b/tests/storage/test_registration.py
@@ -21,19 +21,15 @@ from tests.utils import setup_test_homeserver
 
 
 class RegistrationStoreTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield setup_test_homeserver()
+        hs = yield setup_test_homeserver(self.addCleanup)
         self.db_pool = hs.get_db_pool()
 
         self.store = hs.get_datastore()
 
         self.user_id = "@my-user:test"
-        self.tokens = [
-            "AbCdEfGhIjKlMnOpQrStUvWxYz",
-            "BcDeFgHiJkLmNoPqRsTuVwXyZa"
-        ]
+        self.tokens = ["AbCdEfGhIjKlMnOpQrStUvWxYz", "BcDeFgHiJkLmNoPqRsTuVwXyZa"]
         self.pwhash = "{xx1}123456789"
         self.device_id = "akgjhdjklgshg"
 
@@ -50,35 +46,28 @@ class RegistrationStoreTestCase(unittest.TestCase):
                 "consent_version": None,
                 "consent_server_notice_sent": None,
                 "appservice_id": None,
+                "creation_ts": 1000,
             },
-            (yield self.store.get_user_by_id(self.user_id))
+            (yield self.store.get_user_by_id(self.user_id)),
         )
 
         result = yield self.store.get_user_by_access_token(self.tokens[0])
 
-        self.assertDictContainsSubset(
-            {
-                "name": self.user_id,
-            },
-            result
-        )
+        self.assertDictContainsSubset({"name": self.user_id}, result)
 
         self.assertTrue("token_id" in result)
 
     @defer.inlineCallbacks
     def test_add_tokens(self):
         yield self.store.register(self.user_id, self.tokens[0], self.pwhash)
-        yield self.store.add_access_token_to_user(self.user_id, self.tokens[1],
-                                                  self.device_id)
+        yield self.store.add_access_token_to_user(
+            self.user_id, self.tokens[1], self.device_id
+        )
 
         result = yield self.store.get_user_by_access_token(self.tokens[1])
 
         self.assertDictContainsSubset(
-            {
-                "name": self.user_id,
-                "device_id": self.device_id,
-            },
-            result
+            {"name": self.user_id, "device_id": self.device_id}, result
         )
 
         self.assertTrue("token_id" in result)
@@ -87,12 +76,13 @@ class RegistrationStoreTestCase(unittest.TestCase):
     def test_user_delete_access_tokens(self):
         # add some tokens
         yield self.store.register(self.user_id, self.tokens[0], self.pwhash)
-        yield self.store.add_access_token_to_user(self.user_id, self.tokens[1],
-                                                  self.device_id)
+        yield self.store.add_access_token_to_user(
+            self.user_id, self.tokens[1], self.device_id
+        )
 
         # now delete some
         yield self.store.user_delete_access_tokens(
-            self.user_id, device_id=self.device_id,
+            self.user_id, device_id=self.device_id
         )
 
         # check they were deleted
@@ -107,8 +97,7 @@ class RegistrationStoreTestCase(unittest.TestCase):
         yield self.store.user_delete_access_tokens(self.user_id)
 
         user = yield self.store.get_user_by_access_token(self.tokens[0])
-        self.assertIsNone(user,
-                          "access token was not deleted without device_id")
+        self.assertIsNone(user, "access token was not deleted without device_id")
 
 
 class TokenGenerator:
@@ -117,4 +106,4 @@ class TokenGenerator:
 
     def generate(self, user_id):
         self._last_issued_token += 1
-        return u"%s-%d" % (user_id, self._last_issued_token,)
+        return u"%s-%d" % (user_id, self._last_issued_token)
diff --git a/tests/storage/test_room.py b/tests/storage/test_room.py
index ae8ae94b6d..a1ea23b068 100644
--- a/tests/storage/test_room.py
+++ b/tests/storage/test_room.py
@@ -24,10 +24,9 @@ from tests.utils import setup_test_homeserver
 
 
 class RoomStoreTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield setup_test_homeserver()
+        hs = yield setup_test_homeserver(self.addCleanup)
 
         # We can't test RoomStore on its own without the DirectoryStore, for
         # management of the 'room_aliases' table
@@ -40,7 +39,7 @@ class RoomStoreTestCase(unittest.TestCase):
         yield self.store.store_room(
             self.room.to_string(),
             room_creator_user_id=self.u_creator.to_string(),
-            is_public=True
+            is_public=True,
         )
 
     @defer.inlineCallbacks
@@ -49,17 +48,16 @@ class RoomStoreTestCase(unittest.TestCase):
             {
                 "room_id": self.room.to_string(),
                 "creator": self.u_creator.to_string(),
-                "is_public": True
+                "is_public": True,
             },
-            (yield self.store.get_room(self.room.to_string()))
+            (yield self.store.get_room(self.room.to_string())),
         )
 
 
 class RoomEventsStoreTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def setUp(self):
-        hs = setup_test_homeserver()
+        hs = setup_test_homeserver(self.addCleanup)
 
         # Room events need the full datastore, for persist_event() and
         # get_room_state()
@@ -69,18 +67,13 @@ class RoomEventsStoreTestCase(unittest.TestCase):
         self.room = RoomID.from_string("!abcde:test")
 
         yield self.store.store_room(
-            self.room.to_string(),
-            room_creator_user_id="@creator:text",
-            is_public=True
+            self.room.to_string(), room_creator_user_id="@creator:text", is_public=True
         )
 
     @defer.inlineCallbacks
     def inject_room_event(self, **kwargs):
         yield self.store.persist_event(
-            self.event_factory.create_event(
-                room_id=self.room.to_string(),
-                **kwargs
-            )
+            self.event_factory.create_event(room_id=self.room.to_string(), **kwargs)
         )
 
     @defer.inlineCallbacks
@@ -88,22 +81,15 @@ class RoomEventsStoreTestCase(unittest.TestCase):
         name = u"A-Room-Name"
 
         yield self.inject_room_event(
-            etype=EventTypes.Name,
-            name=name,
-            content={"name": name},
-            depth=1,
+            etype=EventTypes.Name, name=name, content={"name": name}, depth=1
         )
 
-        state = yield self.store.get_current_state(
-            room_id=self.room.to_string()
-        )
+        state = yield self.store.get_current_state(room_id=self.room.to_string())
 
         self.assertEquals(1, len(state))
         self.assertObjectHasAttributes(
-            {"type": "m.room.name",
-             "room_id": self.room.to_string(),
-             "name": name},
-            state[0]
+            {"type": "m.room.name", "room_id": self.room.to_string(), "name": name},
+            state[0],
         )
 
     @defer.inlineCallbacks
@@ -111,22 +97,15 @@ class RoomEventsStoreTestCase(unittest.TestCase):
         topic = u"A place for things"
 
         yield self.inject_room_event(
-            etype=EventTypes.Topic,
-            topic=topic,
-            content={"topic": topic},
-            depth=1,
+            etype=EventTypes.Topic, topic=topic, content={"topic": topic}, depth=1
         )
 
-        state = yield self.store.get_current_state(
-            room_id=self.room.to_string()
-        )
+        state = yield self.store.get_current_state(room_id=self.room.to_string())
 
         self.assertEquals(1, len(state))
         self.assertObjectHasAttributes(
-            {"type": "m.room.topic",
-             "room_id": self.room.to_string(),
-             "topic": topic},
-            state[0]
+            {"type": "m.room.topic", "room_id": self.room.to_string(), "topic": topic},
+            state[0],
         )
 
     # Not testing the various 'level' methods for now because there's lots
diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py
index c5fd54f67e..978c66133d 100644
--- a/tests/storage/test_roommember.py
+++ b/tests/storage/test_roommember.py
@@ -22,16 +22,14 @@ from synapse.api.constants import EventTypes, Membership
 from synapse.types import RoomID, UserID
 
 from tests import unittest
-from tests.utils import setup_test_homeserver
+from tests.utils import create_room, setup_test_homeserver
 
 
 class RoomMemberStoreTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def setUp(self):
         hs = yield setup_test_homeserver(
-            resource_for_federation=Mock(),
-            http_client=None,
+            self.addCleanup, resource_for_federation=Mock(), http_client=None
         )
         # We can't test the RoomMemberStore on its own without the other event
         # storage logic
@@ -47,15 +45,19 @@ class RoomMemberStoreTestCase(unittest.TestCase):
 
         self.room = RoomID.from_string("!abc123:test")
 
+        yield create_room(hs, self.room.to_string(), self.u_alice.to_string())
+
     @defer.inlineCallbacks
     def inject_room_member(self, room, user, membership, replaces_state=None):
-        builder = self.event_builder_factory.new({
-            "type": EventTypes.Member,
-            "sender": user.to_string(),
-            "state_key": user.to_string(),
-            "room_id": room.to_string(),
-            "content": {"membership": membership},
-        })
+        builder = self.event_builder_factory.new(
+            {
+                "type": EventTypes.Member,
+                "sender": user.to_string(),
+                "state_key": user.to_string(),
+                "room_id": room.to_string(),
+                "content": {"membership": membership},
+            }
+        )
 
         event, context = yield self.event_creation_handler.create_new_client_event(
             builder
@@ -71,9 +73,12 @@ class RoomMemberStoreTestCase(unittest.TestCase):
 
         self.assertEquals(
             [self.room.to_string()],
-            [m.room_id for m in (
-                yield self.store.get_rooms_for_user_where_membership_is(
-                    self.u_alice.to_string(), [Membership.JOIN]
+            [
+                m.room_id
+                for m in (
+                    yield self.store.get_rooms_for_user_where_membership_is(
+                        self.u_alice.to_string(), [Membership.JOIN]
+                    )
                 )
-            )]
+            ],
         )
diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py
index 7a76d67b8c..d717b9f94e 100644
--- a/tests/storage/test_state.py
+++ b/tests/storage/test_state.py
@@ -33,7 +33,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield tests.utils.setup_test_homeserver()
+        hs = yield tests.utils.setup_test_homeserver(self.addCleanup)
 
         self.store = hs.get_datastore()
         self.event_builder_factory = hs.get_event_builder_factory()
@@ -45,20 +45,20 @@ class StateStoreTestCase(tests.unittest.TestCase):
         self.room = RoomID.from_string("!abc123:test")
 
         yield self.store.store_room(
-            self.room.to_string(),
-            room_creator_user_id="@creator:text",
-            is_public=True
+            self.room.to_string(), room_creator_user_id="@creator:text", is_public=True
         )
 
     @defer.inlineCallbacks
     def inject_state_event(self, room, sender, typ, state_key, content):
-        builder = self.event_builder_factory.new({
-            "type": typ,
-            "sender": sender.to_string(),
-            "state_key": state_key,
-            "room_id": room.to_string(),
-            "content": content,
-        })
+        builder = self.event_builder_factory.new(
+            {
+                "type": typ,
+                "sender": sender.to_string(),
+                "state_key": state_key,
+                "room_id": room.to_string(),
+                "content": content,
+            }
+        )
 
         event, context = yield self.event_creation_handler.create_new_client_event(
             builder
@@ -80,27 +80,31 @@ class StateStoreTestCase(tests.unittest.TestCase):
         # this defaults to a linear DAG as each new injection defaults to whatever
         # forward extremities are currently in the DB for this room.
         e1 = yield self.inject_state_event(
-            self.room, self.u_alice, EventTypes.Create, '', {},
+            self.room, self.u_alice, EventTypes.Create, '', {}
         )
         e2 = yield self.inject_state_event(
-            self.room, self.u_alice, EventTypes.Name, '', {
-                "name": "test room"
-            },
+            self.room, self.u_alice, EventTypes.Name, '', {"name": "test room"}
         )
         e3 = yield self.inject_state_event(
-            self.room, self.u_alice, EventTypes.Member, self.u_alice.to_string(), {
-                "membership": Membership.JOIN
-            },
+            self.room,
+            self.u_alice,
+            EventTypes.Member,
+            self.u_alice.to_string(),
+            {"membership": Membership.JOIN},
         )
         e4 = yield self.inject_state_event(
-            self.room, self.u_bob, EventTypes.Member, self.u_bob.to_string(), {
-                "membership": Membership.JOIN
-            },
+            self.room,
+            self.u_bob,
+            EventTypes.Member,
+            self.u_bob.to_string(),
+            {"membership": Membership.JOIN},
         )
         e5 = yield self.inject_state_event(
-            self.room, self.u_bob, EventTypes.Member, self.u_bob.to_string(), {
-                "membership": Membership.LEAVE
-            },
+            self.room,
+            self.u_bob,
+            EventTypes.Member,
+            self.u_bob.to_string(),
+            {"membership": Membership.LEAVE},
         )
 
         # check we get the full state as of the final event
@@ -110,65 +114,66 @@ class StateStoreTestCase(tests.unittest.TestCase):
 
         self.assertIsNotNone(e4)
 
-        self.assertStateMapEqual({
-            (e1.type, e1.state_key): e1,
-            (e2.type, e2.state_key): e2,
-            (e3.type, e3.state_key): e3,
-            # e4 is overwritten by e5
-            (e5.type, e5.state_key): e5,
-        }, state)
+        self.assertStateMapEqual(
+            {
+                (e1.type, e1.state_key): e1,
+                (e2.type, e2.state_key): e2,
+                (e3.type, e3.state_key): e3,
+                # e4 is overwritten by e5
+                (e5.type, e5.state_key): e5,
+            },
+            state,
+        )
 
         # check we can filter to the m.room.name event (with a '' state key)
         state = yield self.store.get_state_for_event(
             e5.event_id, [(EventTypes.Name, '')], filtered_types=None
         )
 
-        self.assertStateMapEqual({
-            (e2.type, e2.state_key): e2,
-        }, state)
+        self.assertStateMapEqual({(e2.type, e2.state_key): e2}, state)
 
         # check we can filter to the m.room.name event (with a wildcard None state key)
         state = yield self.store.get_state_for_event(
             e5.event_id, [(EventTypes.Name, None)], filtered_types=None
         )
 
-        self.assertStateMapEqual({
-            (e2.type, e2.state_key): e2,
-        }, state)
+        self.assertStateMapEqual({(e2.type, e2.state_key): e2}, state)
 
         # check we can grab the m.room.member events (with a wildcard None state key)
         state = yield self.store.get_state_for_event(
             e5.event_id, [(EventTypes.Member, None)], filtered_types=None
         )
 
-        self.assertStateMapEqual({
-            (e3.type, e3.state_key): e3,
-            (e5.type, e5.state_key): e5,
-        }, state)
+        self.assertStateMapEqual(
+            {(e3.type, e3.state_key): e3, (e5.type, e5.state_key): e5}, state
+        )
 
-        # check we can use filter_types to grab a specific room member
+        # check we can use filtered_types to grab a specific room member
         # without filtering out the other event types
         state = yield self.store.get_state_for_event(
-            e5.event_id, [(EventTypes.Member, self.u_alice.to_string())],
+            e5.event_id,
+            [(EventTypes.Member, self.u_alice.to_string())],
             filtered_types=[EventTypes.Member],
         )
 
-        self.assertStateMapEqual({
-            (e1.type, e1.state_key): e1,
-            (e2.type, e2.state_key): e2,
-            (e3.type, e3.state_key): e3,
-        }, state)
+        self.assertStateMapEqual(
+            {
+                (e1.type, e1.state_key): e1,
+                (e2.type, e2.state_key): e2,
+                (e3.type, e3.state_key): e3,
+            },
+            state,
+        )
 
         # check that types=[], filtered_types=[EventTypes.Member]
         # doesn't return all members
         state = yield self.store.get_state_for_event(
-            e5.event_id, [], filtered_types=[EventTypes.Member],
+            e5.event_id, [], filtered_types=[EventTypes.Member]
         )
 
-        self.assertStateMapEqual({
-            (e1.type, e1.state_key): e1,
-            (e2.type, e2.state_key): e2,
-        }, state)
+        self.assertStateMapEqual(
+            {(e1.type, e1.state_key): e1, (e2.type, e2.state_key): e2}, state
+        )
 
         #######################################################
         # _get_some_state_from_cache tests against a full cache
@@ -176,70 +181,122 @@ class StateStoreTestCase(tests.unittest.TestCase):
 
         room_id = self.room.to_string()
         group_ids = yield self.store.get_state_groups_ids(room_id, [e5.event_id])
-        group = group_ids.keys()[0]
+        group = list(group_ids.keys())[0]
 
         # test _get_some_state_from_cache correctly filters out members with types=[]
         (state_dict, is_all) = yield self.store._get_some_state_from_cache(
+            self.store._state_group_cache,
             group, [], filtered_types=[EventTypes.Member]
         )
 
         self.assertEqual(is_all, True)
-        self.assertDictEqual({
-            (e1.type, e1.state_key): e1.event_id,
-            (e2.type, e2.state_key): e2.event_id,
-        }, state_dict)
+        self.assertDictEqual(
+            {
+                (e1.type, e1.state_key): e1.event_id,
+                (e2.type, e2.state_key): e2.event_id,
+            },
+            state_dict,
+        )
+
+        (state_dict, is_all) = yield self.store._get_some_state_from_cache(
+            self.store._state_group_members_cache,
+            group, [], filtered_types=[EventTypes.Member]
+        )
+
+        self.assertEqual(is_all, True)
+        self.assertDictEqual(
+            {},
+            state_dict,
+        )
 
         # test _get_some_state_from_cache correctly filters in members with wildcard types
         (state_dict, is_all) = yield self.store._get_some_state_from_cache(
+            self.store._state_group_cache,
+            group, [(EventTypes.Member, None)], filtered_types=[EventTypes.Member]
+        )
+
+        self.assertEqual(is_all, True)
+        self.assertDictEqual(
+            {
+                (e1.type, e1.state_key): e1.event_id,
+                (e2.type, e2.state_key): e2.event_id,
+            },
+            state_dict,
+        )
+
+        (state_dict, is_all) = yield self.store._get_some_state_from_cache(
+            self.store._state_group_members_cache,
             group, [(EventTypes.Member, None)], filtered_types=[EventTypes.Member]
         )
 
         self.assertEqual(is_all, True)
-        self.assertDictEqual({
-            (e1.type, e1.state_key): e1.event_id,
-            (e2.type, e2.state_key): e2.event_id,
-            (e3.type, e3.state_key): e3.event_id,
-            # e4 is overwritten by e5
-            (e5.type, e5.state_key): e5.event_id,
-        }, state_dict)
+        self.assertDictEqual(
+            {
+                (e3.type, e3.state_key): e3.event_id,
+                # e4 is overwritten by e5
+                (e5.type, e5.state_key): e5.event_id,
+            },
+            state_dict,
+        )
 
         # test _get_some_state_from_cache correctly filters in members with specific types
         (state_dict, is_all) = yield self.store._get_some_state_from_cache(
-            group, [(EventTypes.Member, e5.state_key)], filtered_types=[EventTypes.Member]
+            self.store._state_group_cache,
+            group,
+            [(EventTypes.Member, e5.state_key)],
+            filtered_types=[EventTypes.Member],
         )
 
         self.assertEqual(is_all, True)
-        self.assertDictEqual({
-            (e1.type, e1.state_key): e1.event_id,
-            (e2.type, e2.state_key): e2.event_id,
-            (e5.type, e5.state_key): e5.event_id,
-        }, state_dict)
+        self.assertDictEqual(
+            {
+                (e1.type, e1.state_key): e1.event_id,
+                (e2.type, e2.state_key): e2.event_id,
+            },
+            state_dict,
+        )
+
+        (state_dict, is_all) = yield self.store._get_some_state_from_cache(
+            self.store._state_group_members_cache,
+            group,
+            [(EventTypes.Member, e5.state_key)],
+            filtered_types=[EventTypes.Member],
+        )
+
+        self.assertEqual(is_all, True)
+        self.assertDictEqual(
+            {
+                (e5.type, e5.state_key): e5.event_id,
+            },
+            state_dict,
+        )
 
         # test _get_some_state_from_cache correctly filters in members with specific types
         # and no filtered_types
         (state_dict, is_all) = yield self.store._get_some_state_from_cache(
+            self.store._state_group_members_cache,
             group, [(EventTypes.Member, e5.state_key)], filtered_types=None
         )
 
         self.assertEqual(is_all, True)
-        self.assertDictEqual({
-            (e5.type, e5.state_key): e5.event_id,
-        }, state_dict)
+        self.assertDictEqual({(e5.type, e5.state_key): e5.event_id}, state_dict)
 
         #######################################################
         # deliberately remove e2 (room name) from the _state_group_cache
 
-        (is_all, known_absent, state_dict_ids) = self.store._state_group_cache.get(group)
+        (is_all, known_absent, state_dict_ids) = self.store._state_group_cache.get(
+            group
+        )
 
         self.assertEqual(is_all, True)
         self.assertEqual(known_absent, set())
-        self.assertDictEqual(state_dict_ids, {
-            (e1.type, e1.state_key): e1.event_id,
-            (e2.type, e2.state_key): e2.event_id,
-            (e3.type, e3.state_key): e3.event_id,
-            # e4 is overwritten by e5
-            (e5.type, e5.state_key): e5.event_id,
-        })
+        self.assertDictEqual(
+            state_dict_ids,
+            {
+                (e1.type, e1.state_key): e1.event_id,
+                (e2.type, e2.state_key): e2.event_id,
+            },
+        )
 
         state_dict_ids.pop((e2.type, e2.state_key))
         self.store._state_group_cache.invalidate(group)
@@ -250,24 +307,28 @@ class StateStoreTestCase(tests.unittest.TestCase):
             # list fetched keys so it knows it's partial
             fetched_keys=(
                 (e1.type, e1.state_key),
-                (e3.type, e3.state_key),
-                (e5.type, e5.state_key),
-            )
+            ),
         )
 
-        (is_all, known_absent, state_dict_ids) = self.store._state_group_cache.get(group)
+        (is_all, known_absent, state_dict_ids) = self.store._state_group_cache.get(
+            group
+        )
 
         self.assertEqual(is_all, False)
-        self.assertEqual(known_absent, set([
-            (e1.type, e1.state_key),
-            (e3.type, e3.state_key),
-            (e5.type, e5.state_key),
-        ]))
-        self.assertDictEqual(state_dict_ids, {
-            (e1.type, e1.state_key): e1.event_id,
-            (e3.type, e3.state_key): e3.event_id,
-            (e5.type, e5.state_key): e5.event_id,
-        })
+        self.assertEqual(
+            known_absent,
+            set(
+                [
+                    (e1.type, e1.state_key),
+                ]
+            ),
+        )
+        self.assertDictEqual(
+            state_dict_ids,
+            {
+                (e1.type, e1.state_key): e1.event_id,
+            },
+        )
 
         ############################################
         # test that things work with a partial cache
@@ -275,45 +336,100 @@ class StateStoreTestCase(tests.unittest.TestCase):
         # test _get_some_state_from_cache correctly filters out members with types=[]
         room_id = self.room.to_string()
         (state_dict, is_all) = yield self.store._get_some_state_from_cache(
+            self.store._state_group_cache,
             group, [], filtered_types=[EventTypes.Member]
         )
 
         self.assertEqual(is_all, False)
-        self.assertDictEqual({
-            (e1.type, e1.state_key): e1.event_id,
-        }, state_dict)
+        self.assertDictEqual({(e1.type, e1.state_key): e1.event_id}, state_dict)
+
+        room_id = self.room.to_string()
+        (state_dict, is_all) = yield self.store._get_some_state_from_cache(
+            self.store._state_group_members_cache,
+            group, [], filtered_types=[EventTypes.Member]
+        )
+
+        self.assertEqual(is_all, True)
+        self.assertDictEqual({}, state_dict)
 
         # test _get_some_state_from_cache correctly filters in members wildcard types
         (state_dict, is_all) = yield self.store._get_some_state_from_cache(
+            self.store._state_group_cache,
             group, [(EventTypes.Member, None)], filtered_types=[EventTypes.Member]
         )
 
         self.assertEqual(is_all, False)
-        self.assertDictEqual({
-            (e1.type, e1.state_key): e1.event_id,
-            (e3.type, e3.state_key): e3.event_id,
-            # e4 is overwritten by e5
-            (e5.type, e5.state_key): e5.event_id,
-        }, state_dict)
+        self.assertDictEqual(
+            {
+                (e1.type, e1.state_key): e1.event_id,
+            },
+            state_dict,
+        )
+
+        (state_dict, is_all) = yield self.store._get_some_state_from_cache(
+            self.store._state_group_members_cache,
+            group, [(EventTypes.Member, None)], filtered_types=[EventTypes.Member]
+        )
+
+        self.assertEqual(is_all, True)
+        self.assertDictEqual(
+            {
+                (e3.type, e3.state_key): e3.event_id,
+                (e5.type, e5.state_key): e5.event_id,
+            },
+            state_dict,
+        )
 
         # test _get_some_state_from_cache correctly filters in members with specific types
         (state_dict, is_all) = yield self.store._get_some_state_from_cache(
-            group, [(EventTypes.Member, e5.state_key)], filtered_types=[EventTypes.Member]
+            self.store._state_group_cache,
+            group,
+            [(EventTypes.Member, e5.state_key)],
+            filtered_types=[EventTypes.Member],
         )
 
         self.assertEqual(is_all, False)
-        self.assertDictEqual({
-            (e1.type, e1.state_key): e1.event_id,
-            (e5.type, e5.state_key): e5.event_id,
-        }, state_dict)
+        self.assertDictEqual(
+            {
+                (e1.type, e1.state_key): e1.event_id,
+            },
+            state_dict,
+        )
+
+        (state_dict, is_all) = yield self.store._get_some_state_from_cache(
+            self.store._state_group_members_cache,
+            group,
+            [(EventTypes.Member, e5.state_key)],
+            filtered_types=[EventTypes.Member],
+        )
+
+        self.assertEqual(is_all, True)
+        self.assertDictEqual(
+            {
+                (e5.type, e5.state_key): e5.event_id,
+            },
+            state_dict,
+        )
 
         # test _get_some_state_from_cache correctly filters in members with specific types
         # and no filtered_types
         (state_dict, is_all) = yield self.store._get_some_state_from_cache(
+            self.store._state_group_cache,
+            group, [(EventTypes.Member, e5.state_key)], filtered_types=None
+        )
+
+        self.assertEqual(is_all, False)
+        self.assertDictEqual({}, state_dict)
+
+        (state_dict, is_all) = yield self.store._get_some_state_from_cache(
+            self.store._state_group_members_cache,
             group, [(EventTypes.Member, e5.state_key)], filtered_types=None
         )
 
         self.assertEqual(is_all, True)
-        self.assertDictEqual({
-            (e5.type, e5.state_key): e5.event_id,
-        }, state_dict)
+        self.assertDictEqual(
+            {
+                (e5.type, e5.state_key): e5.event_id,
+            },
+            state_dict,
+        )
diff --git a/tests/storage/test_user_directory.py b/tests/storage/test_user_directory.py
index 23fad12bca..b46e0ea7e2 100644
--- a/tests/storage/test_user_directory.py
+++ b/tests/storage/test_user_directory.py
@@ -29,7 +29,7 @@ BOBBY = "@bobby:a"
 class UserDirectoryStoreTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def setUp(self):
-        self.hs = yield setup_test_homeserver()
+        self.hs = yield setup_test_homeserver(self.addCleanup)
         self.store = UserDirectoryStore(None, self.hs)
 
         # alice and bob are both in !room_id. bobby is not but shares
@@ -39,20 +39,12 @@ class UserDirectoryStoreTestCase(unittest.TestCase):
             {
                 ALICE: ProfileInfo(None, "alice"),
                 BOB: ProfileInfo(None, "bob"),
-                BOBBY: ProfileInfo(None, "bobby")
+                BOBBY: ProfileInfo(None, "bobby"),
             },
         )
-        yield self.store.add_users_to_public_room(
-            "!room:id",
-            [ALICE, BOB],
-        )
+        yield self.store.add_users_to_public_room("!room:id", [ALICE, BOB])
         yield self.store.add_users_who_share_room(
-            "!room:id",
-            False,
-            (
-                (ALICE, BOB),
-                (BOB, ALICE),
-            ),
+            "!room:id", False, ((ALICE, BOB), (BOB, ALICE))
         )
 
     @defer.inlineCallbacks
@@ -62,11 +54,9 @@ class UserDirectoryStoreTestCase(unittest.TestCase):
         r = yield self.store.search_user_dir(ALICE, "bob", 10)
         self.assertFalse(r["limited"])
         self.assertEqual(1, len(r["results"]))
-        self.assertDictEqual(r["results"][0], {
-            "user_id": BOB,
-            "display_name": "bob",
-            "avatar_url": None,
-        })
+        self.assertDictEqual(
+            r["results"][0], {"user_id": BOB, "display_name": "bob", "avatar_url": None}
+        )
 
     @defer.inlineCallbacks
     def test_search_user_dir_all_users(self):
@@ -75,15 +65,13 @@ class UserDirectoryStoreTestCase(unittest.TestCase):
             r = yield self.store.search_user_dir(ALICE, "bob", 10)
             self.assertFalse(r["limited"])
             self.assertEqual(2, len(r["results"]))
-            self.assertDictEqual(r["results"][0], {
-                "user_id": BOB,
-                "display_name": "bob",
-                "avatar_url": None,
-            })
-            self.assertDictEqual(r["results"][1], {
-                "user_id": BOBBY,
-                "display_name": "bobby",
-                "avatar_url": None,
-            })
+            self.assertDictEqual(
+                r["results"][0],
+                {"user_id": BOB, "display_name": "bob", "avatar_url": None},
+            )
+            self.assertDictEqual(
+                r["results"][1],
+                {"user_id": BOBBY, "display_name": "bobby", "avatar_url": None},
+            )
         finally:
             self.hs.config.user_directory_search_all_users = False
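
The `setup_test_homeserver(self.addCleanup)` change recurs throughout these files: the helper now takes a cleanup-registration callable as its first argument, so each test's homeserver is torn down automatically instead of leaking state into later cases. A hedged sketch of the shape (the real helper lives in tests/utils.py; the fake homeserver here is a stand-in):

class _FakeHomeserver(object):
    """Stand-in for the object tests/utils.py actually builds."""
    def cleanup(self):
        pass  # e.g. close the test database, stop threadpools

def setup_test_homeserver(cleanup_func, name="test", **kwargs):
    hs = _FakeHomeserver()
    # Register teardown with the calling TestCase via addCleanup, so the
    # homeserver is released even when the test fails.
    cleanup_func(hs.cleanup)
    return hs
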
diff --git a/tests/test_distributor.py b/tests/test_distributor.py
index 71d11cda77..b57f36e6ac 100644
--- a/tests/test_distributor.py
+++ b/tests/test_distributor.py
@@ -22,7 +22,6 @@ from . import unittest
 
 
 class DistributorTestCase(unittest.TestCase):
-
     def setUp(self):
         self.dist = Distributor()
 
@@ -44,18 +43,14 @@ class DistributorTestCase(unittest.TestCase):
 
         observers[0].side_effect = Exception("Awoogah!")
 
-        with patch(
-            "synapse.util.distributor.logger", spec=["warning"]
-        ) as mock_logger:
+        with patch("synapse.util.distributor.logger", spec=["warning"]) as mock_logger:
             self.dist.fire("alarm", "Go")
 
             observers[0].assert_called_once_with("Go")
             observers[1].assert_called_once_with("Go")
 
             self.assertEquals(mock_logger.warning.call_count, 1)
-            self.assertIsInstance(
-                mock_logger.warning.call_args[0][0], str
-            )
+            self.assertIsInstance(mock_logger.warning.call_args[0][0], str)
 
     def test_signal_prereg(self):
         observer = Mock()
@@ -69,4 +64,5 @@ class DistributorTestCase(unittest.TestCase):
     def test_signal_undeclared(self):
         def code():
             self.dist.fire("notification")
+
         self.assertRaises(KeyError, code)
diff --git a/tests/test_dns.py b/tests/test_dns.py
index b647d92697..90bd34be34 100644
--- a/tests/test_dns.py
+++ b/tests/test_dns.py
@@ -27,7 +27,6 @@ from . import unittest
 
 @unittest.DEBUG
 class DnsTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def test_resolve(self):
         dns_client_mock = Mock()
@@ -36,14 +35,11 @@ class DnsTestCase(unittest.TestCase):
         host_name = "example.com"
 
         answer_srv = dns.RRHeader(
-            type=dns.SRV,
-            payload=dns.Record_SRV(
-                target=host_name,
-            )
+            type=dns.SRV, payload=dns.Record_SRV(target=host_name)
         )
 
         dns_client_mock.lookupService.return_value = defer.succeed(
-            ([answer_srv], None, None),
+            ([answer_srv], None, None)
         )
 
         cache = {}
@@ -68,9 +64,7 @@ class DnsTestCase(unittest.TestCase):
         entry = Mock(spec_set=["expires"])
         entry.expires = 0
 
-        cache = {
-            service_name: [entry]
-        }
+        cache = {service_name: [entry]}
 
         servers = yield resolve_service(
             service_name, dns_client=dns_client_mock, cache=cache
@@ -93,12 +87,10 @@ class DnsTestCase(unittest.TestCase):
         entry = Mock(spec_set=["expires"])
         entry.expires = 999999999
 
-        cache = {
-            service_name: [entry]
-        }
+        cache = {service_name: [entry]}
 
         servers = yield resolve_service(
-            service_name, dns_client=dns_client_mock, cache=cache, clock=clock,
+            service_name, dns_client=dns_client_mock, cache=cache, clock=clock
         )
 
         self.assertFalse(dns_client_mock.lookupService.called)
@@ -117,9 +109,7 @@ class DnsTestCase(unittest.TestCase):
         cache = {}
 
         with self.assertRaises(error.DNSServerError):
-            yield resolve_service(
-                service_name, dns_client=dns_client_mock, cache=cache
-            )
+            yield resolve_service(service_name, dns_client=dns_client_mock, cache=cache)
 
     @defer.inlineCallbacks
     def test_name_error(self):
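
These DNS tests pin down the SRV cache semantics: entries whose `expires` lies in the past fall through to a real lookup, while unexpired entries are served from cache without touching the resolver. A compressed sketch of that behaviour, with the function shape inferred from the tests rather than copied from synapse/http/endpoint.py:

def resolve_service(service_name, dns_lookup, cache, now):
    entries = cache.get(service_name)
    if entries and all(e.expires > now for e in entries):
        return entries  # fresh cache hit: no DNS round-trip
    servers = dns_lookup(service_name)  # miss or expired entry: ask DNS
    cache[service_name] = servers
    return servers
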
diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py
index 06112430e5..411b4a9f86 100644
--- a/tests/test_event_auth.py
+++ b/tests/test_event_auth.py
@@ -35,10 +35,7 @@ class EventAuthTestCase(unittest.TestCase):
         }
 
         # creator should be able to send state
-        event_auth.check(
-            _random_state_event(creator), auth_events,
-            do_sig_check=False,
-        )
+        event_auth.check(_random_state_event(creator), auth_events, do_sig_check=False)
 
         # joiner should not be able to send state
         self.assertRaises(
@@ -61,13 +58,9 @@ class EventAuthTestCase(unittest.TestCase):
         auth_events = {
             ("m.room.create", ""): _create_event(creator),
             ("m.room.member", creator): _join_event(creator),
-            ("m.room.power_levels", ""): _power_levels_event(creator, {
-                "state_default": "30",
-                "users": {
-                    pleb: "29",
-                    king: "30",
-                },
-            }),
+            ("m.room.power_levels", ""): _power_levels_event(
+                creator, {"state_default": "30", "users": {pleb: "29", king: "30"}}
+            ),
             ("m.room.member", pleb): _join_event(pleb),
             ("m.room.member", king): _join_event(king),
         }
@@ -82,10 +75,7 @@ class EventAuthTestCase(unittest.TestCase):
         ),
 
         # king should be able to send state
-        event_auth.check(
-            _random_state_event(king), auth_events,
-            do_sig_check=False,
-        )
+        event_auth.check(_random_state_event(king), auth_events, do_sig_check=False)
 
 
 # helpers for making events
@@ -94,52 +84,54 @@ TEST_ROOM_ID = "!test:room"
 
 
 def _create_event(user_id):
-    return FrozenEvent({
-        "room_id": TEST_ROOM_ID,
-        "event_id": _get_event_id(),
-        "type": "m.room.create",
-        "sender": user_id,
-        "content": {
-            "creator": user_id,
-        },
-    })
+    return FrozenEvent(
+        {
+            "room_id": TEST_ROOM_ID,
+            "event_id": _get_event_id(),
+            "type": "m.room.create",
+            "sender": user_id,
+            "content": {"creator": user_id},
+        }
+    )
 
 
 def _join_event(user_id):
-    return FrozenEvent({
-        "room_id": TEST_ROOM_ID,
-        "event_id": _get_event_id(),
-        "type": "m.room.member",
-        "sender": user_id,
-        "state_key": user_id,
-        "content": {
-            "membership": "join",
-        },
-    })
+    return FrozenEvent(
+        {
+            "room_id": TEST_ROOM_ID,
+            "event_id": _get_event_id(),
+            "type": "m.room.member",
+            "sender": user_id,
+            "state_key": user_id,
+            "content": {"membership": "join"},
+        }
+    )
 
 
 def _power_levels_event(sender, content):
-    return FrozenEvent({
-        "room_id": TEST_ROOM_ID,
-        "event_id": _get_event_id(),
-        "type": "m.room.power_levels",
-        "sender": sender,
-        "state_key": "",
-        "content": content,
-    })
+    return FrozenEvent(
+        {
+            "room_id": TEST_ROOM_ID,
+            "event_id": _get_event_id(),
+            "type": "m.room.power_levels",
+            "sender": sender,
+            "state_key": "",
+            "content": content,
+        }
+    )
 
 
 def _random_state_event(sender):
-    return FrozenEvent({
-        "room_id": TEST_ROOM_ID,
-        "event_id": _get_event_id(),
-        "type": "test.state",
-        "sender": sender,
-        "state_key": "",
-        "content": {
-            "membership": "join",
-        },
-    })
+    return FrozenEvent(
+        {
+            "room_id": TEST_ROOM_ID,
+            "event_id": _get_event_id(),
+            "type": "test.state",
+            "sender": sender,
+            "state_key": "",
+            "content": {"membership": "join"},
+        }
+    )
 
 
 event_count = 0
@@ -149,4 +141,4 @@ def _get_event_id():
     global event_count
     c = event_count
     event_count += 1
-    return "!%i:example.com" % (c, )
+    return "!%i:example.com" % (c,)
diff --git a/tests/test_federation.py b/tests/test_federation.py
index f40ff29b52..2540604fcc 100644
--- a/tests/test_federation.py
+++ b/tests/test_federation.py
@@ -18,7 +18,10 @@ class MessageAcceptTests(unittest.TestCase):
         self.reactor = ThreadedMemoryReactorClock()
         self.hs_clock = Clock(self.reactor)
         self.homeserver = setup_test_homeserver(
-            http_client=self.http_client, clock=self.hs_clock, reactor=self.reactor
+            self.addCleanup,
+            http_client=self.http_client,
+            clock=self.hs_clock,
+            reactor=self.reactor,
         )
 
         user_id = UserID("us", "test")
diff --git a/tests/test_mau.py b/tests/test_mau.py
new file mode 100644
index 0000000000..0732615447
--- /dev/null
+++ b/tests/test_mau.py
@@ -0,0 +1,217 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests REST events for /rooms paths."""
+
+import json
+
+from mock import Mock, NonCallableMock
+
+from synapse.api.constants import LoginType
+from synapse.api.errors import Codes, HttpResponseException, SynapseError
+from synapse.http.server import JsonResource
+from synapse.rest.client.v2_alpha import register, sync
+from synapse.util import Clock
+
+from tests import unittest
+from tests.server import (
+    ThreadedMemoryReactorClock,
+    make_request,
+    render,
+    setup_test_homeserver,
+)
+
+
+class TestMauLimit(unittest.TestCase):
+    def setUp(self):
+        self.reactor = ThreadedMemoryReactorClock()
+        self.clock = Clock(self.reactor)
+
+        self.hs = setup_test_homeserver(
+            self.addCleanup,
+            "red",
+            http_client=None,
+            clock=self.clock,
+            reactor=self.reactor,
+            federation_client=Mock(),
+            ratelimiter=NonCallableMock(spec_set=["send_message"]),
+        )
+
+        self.store = self.hs.get_datastore()
+
+        self.hs.config.registrations_require_3pid = []
+        self.hs.config.enable_registration_captcha = False
+        self.hs.config.recaptcha_public_key = []
+
+        self.hs.config.limit_usage_by_mau = True
+        self.hs.config.hs_disabled = False
+        self.hs.config.max_mau_value = 2
+        self.hs.config.mau_trial_days = 0
+        self.hs.config.server_notices_mxid = "@server:red"
+        self.hs.config.server_notices_mxid_display_name = None
+        self.hs.config.server_notices_mxid_avatar_url = None
+        self.hs.config.server_notices_room_name = "Test Server Notice Room"
+
+        self.resource = JsonResource(self.hs)
+        register.register_servlets(self.hs, self.resource)
+        sync.register_servlets(self.hs, self.resource)
+
+    def test_simple_deny_mau(self):
+        # Create and sync so that the MAU counts get updated
+        token1 = self.create_user("kermit1")
+        self.do_sync_for_user(token1)
+        token2 = self.create_user("kermit2")
+        self.do_sync_for_user(token2)
+
+        # We've created and activated two users, so we shouldn't be able
+        # to register any more
+        with self.assertRaises(SynapseError) as cm:
+            self.create_user("kermit3")
+
+        e = cm.exception
+        self.assertEqual(e.code, 403)
+        self.assertEqual(e.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
+
+    def test_allowed_after_a_month_mau(self):
+        # Create and sync so that the MAU counts get updated
+        token1 = self.create_user("kermit1")
+        self.do_sync_for_user(token1)
+        token2 = self.create_user("kermit2")
+        self.do_sync_for_user(token2)
+
+        # Advance time by 31 days
+        self.reactor.advance(31 * 24 * 60 * 60)
+
+        self.store.reap_monthly_active_users()
+
+        self.reactor.advance(0)
+
+        # We should be able to register more users
+        token3 = self.create_user("kermit3")
+        self.do_sync_for_user(token3)
+
+    def test_trial_delay(self):
+        self.hs.config.mau_trial_days = 1
+
+        # We should be able to register more than the limit initially
+        token1 = self.create_user("kermit1")
+        self.do_sync_for_user(token1)
+        token2 = self.create_user("kermit2")
+        self.do_sync_for_user(token2)
+        token3 = self.create_user("kermit3")
+        self.do_sync_for_user(token3)
+
+        # Advance time by 2 days
+        self.reactor.advance(2 * 24 * 60 * 60)
+
+        # Two users should be able to sync
+        self.do_sync_for_user(token1)
+        self.do_sync_for_user(token2)
+
+        # But the third should fail
+        with self.assertRaises(SynapseError) as cm:
+            self.do_sync_for_user(token3)
+
+        e = cm.exception
+        self.assertEqual(e.code, 403)
+        self.assertEqual(e.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
+
+        # And new registrations are now denied too
+        with self.assertRaises(SynapseError) as cm:
+            self.create_user("kermit4")
+
+        e = cm.exception
+        self.assertEqual(e.code, 403)
+        self.assertEqual(e.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
+
+    def test_trial_users_cant_come_back(self):
+        self.hs.config.mau_trial_days = 1
+
+        # We should be able to register more than the limit initially
+        token1 = self.create_user("kermit1")
+        self.do_sync_for_user(token1)
+        token2 = self.create_user("kermit2")
+        self.do_sync_for_user(token2)
+        token3 = self.create_user("kermit3")
+        self.do_sync_for_user(token3)
+
+        # Advance time by 2 days
+        self.reactor.advance(2 * 24 * 60 * 60)
+
+        # Two users should be able to sync
+        self.do_sync_for_user(token1)
+        self.do_sync_for_user(token2)
+
+        # Advance by 2 months so everyone falls out of the MAU window
+        self.reactor.advance(60 * 24 * 60 * 60)
+        self.store.reap_monthly_active_users()
+        self.reactor.advance(0)
+
+        # We can create as many new users as we want
+        token4 = self.create_user("kermit4")
+        self.do_sync_for_user(token4)
+        token5 = self.create_user("kermit5")
+        self.do_sync_for_user(token5)
+        token6 = self.create_user("kermit6")
+        self.do_sync_for_user(token6)
+
+        # Users 2 and 3 can come back, bringing us back up to the MAU limit
+        self.do_sync_for_user(token2)
+        self.do_sync_for_user(token3)
+
+        # New trial users can still sync
+        self.do_sync_for_user(token4)
+        self.do_sync_for_user(token5)
+        self.do_sync_for_user(token6)
+
+        # But the old user, whose trial expired long ago, can't
+        with self.assertRaises(SynapseError) as cm:
+            self.do_sync_for_user(token1)
+
+        e = cm.exception
+        self.assertEqual(e.code, 403)
+        self.assertEqual(e.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
+
+    def create_user(self, localpart):
+        request_data = json.dumps({
+            "username": localpart,
+            "password": "monkey",
+            "auth": {"type": LoginType.DUMMY},
+        })
+
+        request, channel = make_request(b"POST", b"/register", request_data)
+        render(request, self.resource, self.reactor)
+
+        if channel.result["code"] != b"200":
+            raise HttpResponseException(
+                int(channel.result["code"]),
+                channel.result["reason"],
+                channel.result["body"],
+            ).to_synapse_error()
+
+        access_token = channel.json_body["access_token"]
+
+        return access_token
+
+    def do_sync_for_user(self, token):
+        request, channel = make_request(b"GET", b"/sync", access_token=token)
+        render(request, self.resource, self.reactor)
+
+        if channel.result["code"] != b"200":
+            raise HttpResponseException(
+                int(channel.result["code"]),
+                channel.result["reason"],
+                channel.result["body"],
+            ).to_synapse_error()
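
Taken together, the new tests encode the MAU policy: at most `max_mau_value` users count as monthly active, accounts younger than `mau_trial_days` may sync without counting, and an expired trial user competes for a slot like anyone else. A toy model consistent with the four tests above (the real enforcement is spread across the auth and store layers):

class MauGate(object):
    """Toy monthly-active-user limiter; not Synapse's implementation."""

    def __init__(self, max_mau_value, mau_trial_days, now):
        self.max_mau = max_mau_value
        self.trial_secs = mau_trial_days * 24 * 60 * 60
        self.now = now        # callable returning the current time
        self.created = {}     # user_id -> account creation time
        self.active = set()   # users counted this month

    def register(self, user_id):
        # Registration is refused once the cap is reached; the trial only
        # exempts syncing from the count, not this check.
        if len(self.active) >= self.max_mau:
            raise PermissionError("M_RESOURCE_LIMIT_EXCEEDED")
        self.created[user_id] = self.now()

    def sync(self, user_id):
        if self.now() - self.created[user_id] < self.trial_secs:
            return  # still inside the trial window: does not count
        if user_id not in self.active and len(self.active) >= self.max_mau:
            raise PermissionError("M_RESOURCE_LIMIT_EXCEEDED")
        self.active.add(user_id)

    # Reaping of users inactive for a month (reap_monthly_active_users)
    # is omitted for brevity.
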
diff --git a/tests/test_preview.py b/tests/test_preview.py
index 446843367e..84ef5e5ba4 100644
--- a/tests/test_preview.py
+++ b/tests/test_preview.py
@@ -22,7 +22,6 @@ from . import unittest
 
 
 class PreviewTestCase(unittest.TestCase):
-
     def test_long_summarize(self):
         example_paras = [
             u"""Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:
@@ -32,7 +31,6 @@ class PreviewTestCase(unittest.TestCase):
             alternative spellings of the city.Tromsø is considered the northernmost
             city in the world with a population above 50,000. The most populous town
             north of it is Alta, Norway, with a population of 14,272 (2013).""",
-
             u"""Tromsø lies in Northern Norway. The municipality has a population of
             (2015) 72,066, but with an annual influx of students it has over 75,000
             most of the year. It is the largest urban area in Northern Norway and the
@@ -46,7 +44,6 @@ class PreviewTestCase(unittest.TestCase):
             Sandnessund Bridge. Tromsø Airport connects the city to many destinations
             in Europe. The city is warmer than most other places located on the same
             latitude, due to the warming effect of the Gulf Stream.""",
-
             u"""The city centre of Tromsø contains the highest number of old wooden
             houses in Northern Norway, the oldest house dating from 1789. The Arctic
             Cathedral, a modern church from 1965, is probably the most famous landmark
@@ -67,7 +64,7 @@ class PreviewTestCase(unittest.TestCase):
             u" the city of Tromsø. Outside of Norway, Tromso and Tromsö are"
             u" alternative spellings of the city.Tromsø is considered the northernmost"
             u" city in the world with a population above 50,000. The most populous town"
-            u" north of it is Alta, Norway, with a population of 14,272 (2013)."
+            u" north of it is Alta, Norway, with a population of 14,272 (2013).",
         )
 
         desc = summarize_paragraphs(example_paras[1:], min_size=200, max_size=500)
@@ -80,7 +77,7 @@ class PreviewTestCase(unittest.TestCase):
             u" third largest north of the Arctic Circle (following Murmansk and Norilsk)."
             u" Most of Tromsø, including the city centre, is located on the island of"
             u" Tromsøya, 350 kilometres (217 mi) north of the Arctic Circle. In 2012,"
-            u" Tromsøya had a population of 36,088. Substantial parts of the urban…"
+            u" Tromsøya had a population of 36,088. Substantial parts of the urban…",
         )
 
     def test_short_summarize(self):
@@ -88,11 +85,9 @@ class PreviewTestCase(unittest.TestCase):
             u"Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:"
             u" Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in"
             u" Troms county, Norway.",
-
             u"Tromsø lies in Northern Norway. The municipality has a population of"
             u" (2015) 72,066, but with an annual influx of students it has over 75,000"
             u" most of the year.",
-
             u"The city centre of Tromsø contains the highest number of old wooden"
             u" houses in Northern Norway, the oldest house dating from 1789. The Arctic"
             u" Cathedral, a modern church from 1965, is probably the most famous landmark"
@@ -109,7 +104,7 @@ class PreviewTestCase(unittest.TestCase):
             u"\n"
             u"Tromsø lies in Northern Norway. The municipality has a population of"
             u" (2015) 72,066, but with an annual influx of students it has over 75,000"
-            u" most of the year."
+            u" most of the year.",
         )
 
     def test_small_then_large_summarize(self):
@@ -117,7 +112,6 @@ class PreviewTestCase(unittest.TestCase):
             u"Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:"
             u" Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in"
             u" Troms county, Norway.",
-
             u"Tromsø lies in Northern Norway. The municipality has a population of"
             u" (2015) 72,066, but with an annual influx of students it has over 75,000"
             u" most of the year."
@@ -138,7 +132,7 @@ class PreviewTestCase(unittest.TestCase):
             u" (2015) 72,066, but with an annual influx of students it has over 75,000"
             u" most of the year. The city centre of Tromsø contains the highest number"
             u" of old wooden houses in Northern Norway, the oldest house dating from"
-            u" 1789. The Arctic Cathedral, a modern church from…"
+            u" 1789. The Arctic Cathedral, a modern church from…",
         )
 
 
@@ -155,10 +149,7 @@ class PreviewUrlTestCase(unittest.TestCase):
 
         og = decode_and_calc_og(html, "http://example.com/test.html")
 
-        self.assertEquals(og, {
-            u"og:title": u"Foo",
-            u"og:description": u"Some text."
-        })
+        self.assertEquals(og, {u"og:title": u"Foo", u"og:description": u"Some text."})
 
     def test_comment(self):
         html = u"""
@@ -173,10 +164,7 @@ class PreviewUrlTestCase(unittest.TestCase):
 
         og = decode_and_calc_og(html, "http://example.com/test.html")
 
-        self.assertEquals(og, {
-            u"og:title": u"Foo",
-            u"og:description": u"Some text."
-        })
+        self.assertEquals(og, {u"og:title": u"Foo", u"og:description": u"Some text."})
 
     def test_comment2(self):
         html = u"""
@@ -194,10 +182,13 @@ class PreviewUrlTestCase(unittest.TestCase):
 
         og = decode_and_calc_og(html, "http://example.com/test.html")
 
-        self.assertEquals(og, {
-            u"og:title": u"Foo",
-            u"og:description": u"Some text.\n\nSome more text.\n\nText\n\nMore text"
-        })
+        self.assertEquals(
+            og,
+            {
+                u"og:title": u"Foo",
+                u"og:description": u"Some text.\n\nSome more text.\n\nText\n\nMore text",
+            },
+        )
 
     def test_script(self):
         html = u"""
@@ -212,10 +203,7 @@ class PreviewUrlTestCase(unittest.TestCase):
 
         og = decode_and_calc_og(html, "http://example.com/test.html")
 
-        self.assertEquals(og, {
-            u"og:title": u"Foo",
-            u"og:description": u"Some text."
-        })
+        self.assertEquals(og, {u"og:title": u"Foo", u"og:description": u"Some text."})
 
     def test_missing_title(self):
         html = u"""
@@ -228,10 +216,7 @@ class PreviewUrlTestCase(unittest.TestCase):
 
         og = decode_and_calc_og(html, "http://example.com/test.html")
 
-        self.assertEquals(og, {
-            u"og:title": None,
-            u"og:description": u"Some text."
-        })
+        self.assertEquals(og, {u"og:title": None, u"og:description": u"Some text."})
 
     def test_h1_as_title(self):
         html = u"""
@@ -245,10 +230,7 @@ class PreviewUrlTestCase(unittest.TestCase):
 
         og = decode_and_calc_og(html, "http://example.com/test.html")
 
-        self.assertEquals(og, {
-            u"og:title": u"Title",
-            u"og:description": u"Some text."
-        })
+        self.assertEquals(og, {u"og:title": u"Title", u"og:description": u"Some text."})
 
     def test_missing_title_and_broken_h1(self):
         html = u"""
@@ -262,7 +244,4 @@ class PreviewUrlTestCase(unittest.TestCase):
 
         og = decode_and_calc_og(html, "http://example.com/test.html")
 
-        self.assertEquals(og, {
-            u"og:title": None,
-            u"og:description": u"Some text."
-        })
+        self.assertEquals(og, {u"og:title": None, u"og:description": u"Some text."})
diff --git a/tests/test_server.py b/tests/test_server.py
index 7e063c0290..ef74544e93 100644
--- a/tests/test_server.py
+++ b/tests/test_server.py
@@ -1,4 +1,3 @@
-import json
 import re
 
 from twisted.internet.defer import Deferred
@@ -9,7 +8,7 @@ from synapse.http.server import JsonResource
 from synapse.util import Clock
 
 from tests import unittest
-from tests.server import make_request, setup_test_homeserver
+from tests.server import make_request, render, setup_test_homeserver
 
 
 class JsonResourceTests(unittest.TestCase):
@@ -17,7 +16,7 @@ class JsonResourceTests(unittest.TestCase):
         self.reactor = MemoryReactorClock()
         self.hs_clock = Clock(self.reactor)
         self.homeserver = setup_test_homeserver(
-            http_client=None, clock=self.hs_clock, reactor=self.reactor
+            self.addCleanup, http_client=None, clock=self.hs_clock, reactor=self.reactor
         )
 
     def test_handler_for_request(self):
@@ -38,7 +37,7 @@ class JsonResourceTests(unittest.TestCase):
         )
 
         request, channel = make_request(b"GET", b"/_matrix/foo/%E2%98%83?a=%E2%98%83")
-        request.render(res)
+        render(request, res, self.reactor)
 
         self.assertEqual(request.args, {b'a': [u"\N{SNOWMAN}".encode('utf8')]})
         self.assertEqual(got_kwargs, {u"room_id": u"\N{SNOWMAN}"})
@@ -56,7 +55,7 @@ class JsonResourceTests(unittest.TestCase):
         res.register_paths("GET", [re.compile("^/_matrix/foo$")], _callback)
 
         request, channel = make_request(b"GET", b"/_matrix/foo")
-        request.render(res)
+        render(request, res, self.reactor)
 
         self.assertEqual(channel.result["code"], b'500')
 
@@ -79,13 +78,8 @@ class JsonResourceTests(unittest.TestCase):
         res.register_paths("GET", [re.compile("^/_matrix/foo$")], _callback)
 
         request, channel = make_request(b"GET", b"/_matrix/foo")
-        request.render(res)
+        render(request, res, self.reactor)
 
-        # No error has been raised yet
-        self.assertTrue("code" not in channel.result)
-
-        # Advance time, now there's an error
-        self.reactor.advance(1)
         self.assertEqual(channel.result["code"], b'500')
 
     def test_callback_synapseerror(self):
@@ -101,12 +95,11 @@ class JsonResourceTests(unittest.TestCase):
         res.register_paths("GET", [re.compile("^/_matrix/foo$")], _callback)
 
         request, channel = make_request(b"GET", b"/_matrix/foo")
-        request.render(res)
+        render(request, res, self.reactor)
 
         self.assertEqual(channel.result["code"], b'403')
-        reply_body = json.loads(channel.result["body"])
-        self.assertEqual(reply_body["error"], "Forbidden!!one!")
-        self.assertEqual(reply_body["errcode"], "M_FORBIDDEN")
+        self.assertEqual(channel.json_body["error"], "Forbidden!!one!")
+        self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN")
 
     def test_no_handler(self):
         """
@@ -123,9 +116,8 @@ class JsonResourceTests(unittest.TestCase):
         res.register_paths("GET", [re.compile("^/_matrix/foo$")], _callback)
 
         request, channel = make_request(b"GET", b"/_matrix/foobar")
-        request.render(res)
+        render(request, res, self.reactor)
 
         self.assertEqual(channel.result["code"], b'400')
-        reply_body = json.loads(channel.result["body"])
-        self.assertEqual(reply_body["error"], "Unrecognized request")
-        self.assertEqual(reply_body["errcode"], "M_UNRECOGNIZED")
+        self.assertEqual(channel.json_body["error"], "Unrecognized request")
+        self.assertEqual(channel.json_body["errcode"], "M_UNRECOGNIZED")
diff --git a/tests/test_state.py b/tests/test_state.py
index 429a18cbf7..452a123c3a 100644
--- a/tests/test_state.py
+++ b/tests/test_state.py
@@ -18,7 +18,7 @@ from mock import Mock
 from twisted.internet import defer
 
 from synapse.api.auth import Auth
-from synapse.api.constants import EventTypes, Membership
+from synapse.api.constants import EventTypes, Membership, RoomVersions
 from synapse.events import FrozenEvent
 from synapse.state import StateHandler, StateResolutionHandler
 
@@ -29,8 +29,15 @@ from .utils import MockClock
 _next_event_id = 1000
 
 
-def create_event(name=None, type=None, state_key=None, depth=2, event_id=None,
-                 prev_events=[], **kwargs):
+def create_event(
+    name=None,
+    type=None,
+    state_key=None,
+    depth=2,
+    event_id=None,
+    prev_events=[],
+    **kwargs
+):
     global _next_event_id
 
     if not event_id:
@@ -39,9 +46,9 @@ def create_event(name=None, type=None, state_key=None, depth=2, event_id=None,
 
     if not name:
         if state_key is not None:
-            name = "<%s-%s, %s>" % (type, state_key, event_id,)
+            name = "<%s-%s, %s>" % (type, state_key, event_id)
         else:
-            name = "<%s, %s>" % (type, event_id,)
+            name = "<%s, %s>" % (type, event_id)
 
     d = {
         "event_id": event_id,
@@ -80,8 +87,9 @@ class StateGroupStore(object):
 
         return defer.succeed(groups)
 
-    def store_state_group(self, event_id, room_id, prev_group, delta_ids,
-                          current_state_ids):
+    def store_state_group(
+        self, event_id, room_id, prev_group, delta_ids, current_state_ids
+    ):
         state_group = self._next_group
         self._next_group += 1
 
@@ -91,7 +99,8 @@ class StateGroupStore(object):
 
     def get_events(self, event_ids, **kwargs):
         return {
-            e_id: self._event_id_to_event[e_id] for e_id in event_ids
+            e_id: self._event_id_to_event[e_id]
+            for e_id in event_ids
             if e_id in self._event_id_to_event
         }
 
@@ -108,6 +117,9 @@ class StateGroupStore(object):
     def register_event_id_state_group(self, event_id, state_group):
         self._event_to_state_group[event_id] = state_group
 
+    def get_room_version(self, room_id):
+        return RoomVersions.V1
+
 
 class DictObj(dict):
     def __init__(self, **kwargs):
@@ -129,9 +141,7 @@ class Graph(object):
                 prev_events = []
 
             events[event_id] = create_event(
-                event_id=event_id,
-                prev_events=prev_events,
-                **fields
+                event_id=event_id, prev_events=prev_events, **fields
             )
 
         self._leaves = clobbered
@@ -147,10 +157,15 @@ class Graph(object):
 class StateTestCase(unittest.TestCase):
     def setUp(self):
         self.store = StateGroupStore()
-        hs = Mock(spec_set=[
-            "get_datastore", "get_auth", "get_state_handler", "get_clock",
-            "get_state_resolution_handler",
-        ])
+        hs = Mock(
+            spec_set=[
+                "get_datastore",
+                "get_auth",
+                "get_state_handler",
+                "get_clock",
+                "get_state_resolution_handler",
+            ]
+        )
         hs.get_datastore.return_value = self.store
         hs.get_state_handler.return_value = None
         hs.get_clock.return_value = MockClock()
@@ -165,34 +180,14 @@ class StateTestCase(unittest.TestCase):
         graph = Graph(
             nodes={
                 "START": DictObj(
-                    type=EventTypes.Create,
-                    state_key="",
-                    depth=1,
-                ),
-                "A": DictObj(
-                    type=EventTypes.Message,
-                    depth=2,
-                ),
-                "B": DictObj(
-                    type=EventTypes.Message,
-                    depth=3,
-                ),
-                "C": DictObj(
-                    type=EventTypes.Name,
-                    state_key="",
-                    depth=3,
-                ),
-                "D": DictObj(
-                    type=EventTypes.Message,
-                    depth=4,
+                    type=EventTypes.Create, state_key="", content={}, depth=1,
                 ),
+                "A": DictObj(type=EventTypes.Message, depth=2),
+                "B": DictObj(type=EventTypes.Message, depth=3),
+                "C": DictObj(type=EventTypes.Name, state_key="", depth=3),
+                "D": DictObj(type=EventTypes.Message, depth=4),
             },
-            edges={
-                "A": ["START"],
-                "B": ["A"],
-                "C": ["A"],
-                "D": ["B", "C"]
-            }
+            edges={"A": ["START"], "B": ["A"], "C": ["A"], "D": ["B", "C"]},
         )
 
         self.store.register_events(graph.walk())
@@ -224,27 +219,11 @@ class StateTestCase(unittest.TestCase):
                     membership=Membership.JOIN,
                     depth=2,
                 ),
-                "B": DictObj(
-                    type=EventTypes.Name,
-                    state_key="",
-                    depth=3,
-                ),
-                "C": DictObj(
-                    type=EventTypes.Name,
-                    state_key="",
-                    depth=4,
-                ),
-                "D": DictObj(
-                    type=EventTypes.Message,
-                    depth=5,
-                ),
+                "B": DictObj(type=EventTypes.Name, state_key="", depth=3),
+                "C": DictObj(type=EventTypes.Name, state_key="", depth=4),
+                "D": DictObj(type=EventTypes.Message, depth=5),
             },
-            edges={
-                "A": ["START"],
-                "B": ["A"],
-                "C": ["A"],
-                "D": ["B", "C"]
-            }
+            edges={"A": ["START"], "B": ["A"], "C": ["A"], "D": ["B", "C"]},
         )
 
         self.store.register_events(graph.walk())
@@ -259,8 +238,7 @@ class StateTestCase(unittest.TestCase):
         prev_state_ids = yield context_store["D"].get_prev_state_ids(self.store)
 
         self.assertSetEqual(
-            {"START", "A", "C"},
-            {e_id for e_id in prev_state_ids.values()}
+            {"START", "A", "C"}, {e_id for e_id in prev_state_ids.values()}
         )
 
     @defer.inlineCallbacks
@@ -280,11 +258,7 @@ class StateTestCase(unittest.TestCase):
                     membership=Membership.JOIN,
                     depth=2,
                 ),
-                "B": DictObj(
-                    type=EventTypes.Name,
-                    state_key="",
-                    depth=3,
-                ),
+                "B": DictObj(type=EventTypes.Name, state_key="", depth=3),
                 "C": DictObj(
                     type=EventTypes.Member,
                     state_key="@user_id_2:example.com",
@@ -298,18 +272,9 @@ class StateTestCase(unittest.TestCase):
                     depth=4,
                     sender="@user_id_2:example.com",
                 ),
-                "E": DictObj(
-                    type=EventTypes.Message,
-                    depth=5,
-                ),
+                "E": DictObj(type=EventTypes.Message, depth=5),
             },
-            edges={
-                "A": ["START"],
-                "B": ["A"],
-                "C": ["B"],
-                "D": ["B"],
-                "E": ["C", "D"]
-            }
+            edges={"A": ["START"], "B": ["A"], "C": ["B"], "D": ["B"], "E": ["C", "D"]},
         )
 
         self.store.register_events(graph.walk())
@@ -324,8 +289,7 @@ class StateTestCase(unittest.TestCase):
         prev_state_ids = yield context_store["E"].get_prev_state_ids(self.store)
 
         self.assertSetEqual(
-            {"START", "A", "B", "C"},
-            {e for e in prev_state_ids.values()}
+            {"START", "A", "B", "C"}, {e for e in prev_state_ids.values()}
         )
 
     @defer.inlineCallbacks
@@ -357,30 +321,17 @@ class StateTestCase(unittest.TestCase):
                 state_key="",
                 content={
                     "events": {"m.room.name": 50},
-                    "users": {userid1: 100,
-                              userid2: 60},
+                    "users": {userid1: 100, userid2: 60},
                 },
             ),
-            "A5": DictObj(
-                type=EventTypes.Name,
-                state_key="",
-            ),
+            "A5": DictObj(type=EventTypes.Name, state_key=""),
             "B": DictObj(
                 type=EventTypes.PowerLevels,
                 state_key="",
-                content={
-                    "events": {"m.room.name": 50},
-                    "users": {userid2: 30},
-                },
-            ),
-            "C": DictObj(
-                type=EventTypes.Name,
-                state_key="",
-                sender=userid2,
-            ),
-            "D": DictObj(
-                type=EventTypes.Message,
+                content={"events": {"m.room.name": 50}, "users": {userid2: 30}},
             ),
+            "C": DictObj(type=EventTypes.Name, state_key="", sender=userid2),
+            "D": DictObj(type=EventTypes.Message),
         }
         edges = {
             "A2": ["A1"],
@@ -389,7 +340,7 @@ class StateTestCase(unittest.TestCase):
             "A5": ["A4"],
             "B": ["A5"],
             "C": ["A5"],
-            "D": ["B", "C"]
+            "D": ["B", "C"],
         }
         self._add_depths(nodes, edges)
         graph = Graph(nodes, edges)
@@ -406,8 +357,7 @@ class StateTestCase(unittest.TestCase):
         prev_state_ids = yield context_store["D"].get_prev_state_ids(self.store)
 
         self.assertSetEqual(
-            {"A1", "A2", "A3", "A5", "B"},
-            {e for e in prev_state_ids.values()}
+            {"A1", "A2", "A3", "A5", "B"}, {e for e in prev_state_ids.values()}
         )
 
     def _add_depths(self, nodes, edges):
@@ -432,9 +382,7 @@ class StateTestCase(unittest.TestCase):
             create_event(type="test2", state_key=""),
         ]
 
-        context = yield self.state.compute_event_context(
-            event, old_state=old_state
-        )
+        context = yield self.state.compute_event_context(event, old_state=old_state)
 
         current_state_ids = yield context.get_current_state_ids(self.store)
 
@@ -454,9 +402,7 @@ class StateTestCase(unittest.TestCase):
             create_event(type="test2", state_key=""),
         ]
 
-        context = yield self.state.compute_event_context(
-            event, old_state=old_state
-        )
+        context = yield self.state.compute_event_context(event, old_state=old_state)
 
         prev_state_ids = yield context.get_prev_state_ids(self.store)
 
@@ -468,8 +414,7 @@ class StateTestCase(unittest.TestCase):
     def test_trivial_annotate_message(self):
         prev_event_id = "prev_event_id"
         event = create_event(
-            type="test_message", name="event2",
-            prev_events=[(prev_event_id, {})],
+            type="test_message", name="event2", prev_events=[(prev_event_id, {})]
         )
 
         old_state = [
@@ -479,7 +424,10 @@ class StateTestCase(unittest.TestCase):
         ]
 
         group_name = self.store.store_state_group(
-            prev_event_id, event.room_id, None, None,
+            prev_event_id,
+            event.room_id,
+            None,
+            None,
             {(e.type, e.state_key): e.event_id for e in old_state},
         )
         self.store.register_event_id_state_group(prev_event_id, group_name)
@@ -489,8 +437,7 @@ class StateTestCase(unittest.TestCase):
         current_state_ids = yield context.get_current_state_ids(self.store)
 
         self.assertEqual(
-            set([e.event_id for e in old_state]),
-            set(current_state_ids.values())
+            set([e.event_id for e in old_state]), set(current_state_ids.values())
         )
 
         self.assertEqual(group_name, context.state_group)
@@ -499,8 +446,7 @@ class StateTestCase(unittest.TestCase):
     def test_trivial_annotate_state(self):
         prev_event_id = "prev_event_id"
         event = create_event(
-            type="state", state_key="", name="event2",
-            prev_events=[(prev_event_id, {})],
+            type="state", state_key="", name="event2", prev_events=[(prev_event_id, {})]
         )
 
         old_state = [
@@ -510,7 +456,10 @@ class StateTestCase(unittest.TestCase):
         ]
 
         group_name = self.store.store_state_group(
-            prev_event_id, event.room_id, None, None,
+            prev_event_id,
+            event.room_id,
+            None,
+            None,
             {(e.type, e.state_key): e.event_id for e in old_state},
         )
         self.store.register_event_id_state_group(prev_event_id, group_name)
@@ -520,8 +469,7 @@ class StateTestCase(unittest.TestCase):
         prev_state_ids = yield context.get_prev_state_ids(self.store)
 
         self.assertEqual(
-            set([e.event_id for e in old_state]),
-            set(prev_state_ids.values())
+            set([e.event_id for e in old_state]), set(prev_state_ids.values())
         )
 
         self.assertIsNotNone(context.state_group)
@@ -531,13 +479,12 @@ class StateTestCase(unittest.TestCase):
         prev_event_id1 = "event_id1"
         prev_event_id2 = "event_id2"
         event = create_event(
-            type="test_message", name="event3",
+            type="test_message",
+            name="event3",
             prev_events=[(prev_event_id1, {}), (prev_event_id2, {})],
         )
 
-        creation = create_event(
-            type=EventTypes.Create, state_key=""
-        )
+        creation = create_event(type=EventTypes.Create, state_key="")
 
         old_state_1 = [
             creation,
@@ -557,7 +504,7 @@ class StateTestCase(unittest.TestCase):
         self.store.register_events(old_state_2)
 
         context = yield self._get_context(
-            event, prev_event_id1, old_state_1, prev_event_id2, old_state_2,
+            event, prev_event_id1, old_state_1, prev_event_id2, old_state_2
         )
 
         current_state_ids = yield context.get_current_state_ids(self.store)
@@ -571,13 +518,13 @@ class StateTestCase(unittest.TestCase):
         prev_event_id1 = "event_id1"
         prev_event_id2 = "event_id2"
         event = create_event(
-            type="test4", state_key="", name="event",
+            type="test4",
+            state_key="",
+            name="event",
             prev_events=[(prev_event_id1, {}), (prev_event_id2, {})],
         )
 
-        creation = create_event(
-            type=EventTypes.Create, state_key=""
-        )
+        creation = create_event(type=EventTypes.Create, state_key="")
 
         old_state_1 = [
             creation,
@@ -599,7 +546,7 @@ class StateTestCase(unittest.TestCase):
         self.store.get_events = store.get_events
 
         context = yield self._get_context(
-            event, prev_event_id1, old_state_1, prev_event_id2, old_state_2,
+            event, prev_event_id1, old_state_1, prev_event_id2, old_state_2
         )
 
         current_state_ids = yield context.get_current_state_ids(self.store)
@@ -613,29 +560,25 @@ class StateTestCase(unittest.TestCase):
         prev_event_id1 = "event_id1"
         prev_event_id2 = "event_id2"
         event = create_event(
-            type="test4", name="event",
+            type="test4",
+            name="event",
             prev_events=[(prev_event_id1, {}), (prev_event_id2, {})],
         )
 
         member_event = create_event(
             type=EventTypes.Member,
             state_key="@user_id:example.com",
-            content={
-                "membership": Membership.JOIN,
-            }
+            content={"membership": Membership.JOIN},
         )
 
         power_levels = create_event(
-            type=EventTypes.PowerLevels, state_key="",
-            content={"users": {
-                "@foo:bar": "100",
-                "@user_id:example.com": "100",
-            }}
+            type=EventTypes.PowerLevels,
+            state_key="",
+            content={"users": {"@foo:bar": "100", "@user_id:example.com": "100"}},
         )
 
         creation = create_event(
-            type=EventTypes.Create, state_key="",
-            content={"creator": "@foo:bar"}
+            type=EventTypes.Create, state_key="", content={"creator": "@foo:bar"}
         )
 
         old_state_1 = [
@@ -658,14 +601,12 @@ class StateTestCase(unittest.TestCase):
         self.store.get_events = store.get_events
 
         context = yield self._get_context(
-            event, prev_event_id1, old_state_1, prev_event_id2, old_state_2,
+            event, prev_event_id1, old_state_1, prev_event_id2, old_state_2
         )
 
         current_state_ids = yield context.get_current_state_ids(self.store)
 
-        self.assertEqual(
-            old_state_2[3].event_id, current_state_ids[("test1", "1")]
-        )
+        self.assertEqual(old_state_2[3].event_id, current_state_ids[("test1", "1")])
 
         # Reverse the depth to make sure we are actually using the depths
         # during state resolution.
@@ -688,25 +629,30 @@ class StateTestCase(unittest.TestCase):
         store.register_events(old_state_2)
 
         context = yield self._get_context(
-            event, prev_event_id1, old_state_1, prev_event_id2, old_state_2,
+            event, prev_event_id1, old_state_1, prev_event_id2, old_state_2
         )
 
         current_state_ids = yield context.get_current_state_ids(self.store)
 
-        self.assertEqual(
-            old_state_1[3].event_id, current_state_ids[("test1", "1")]
-        )
+        self.assertEqual(old_state_1[3].event_id, current_state_ids[("test1", "1")])
 
-    def _get_context(self, event, prev_event_id_1, old_state_1, prev_event_id_2,
-                     old_state_2):
+    def _get_context(
+        self, event, prev_event_id_1, old_state_1, prev_event_id_2, old_state_2
+    ):
         sg1 = self.store.store_state_group(
-            prev_event_id_1, event.room_id, None, None,
+            prev_event_id_1,
+            event.room_id,
+            None,
+            None,
             {(e.type, e.state_key): e.event_id for e in old_state_1},
         )
         self.store.register_event_id_state_group(prev_event_id_1, sg1)
 
         sg2 = self.store.store_state_group(
-            prev_event_id_2, event.room_id, None, None,
+            prev_event_id_2,
+            event.room_id,
+            None,
+            None,
             {(e.type, e.state_key): e.event_id for e in old_state_2},
         )
         self.store.register_event_id_state_group(prev_event_id_2, sg2)
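
Beyond the Black-style reflow, the one behavioural addition to this file is the `get_room_version` stub returning `RoomVersions.V1`: state resolution now consults the room version, and the mock store must answer that query. A purely illustrative sketch of the kind of dispatch this enables (the resolver name is hypothetical; the real selection lives under synapse/state):

from synapse.api.constants import RoomVersions  # as imported at the top of this diff

def resolve_events(room_version, state_sets):
    # Hypothetical dispatch: pick a resolution algorithm by room version.
    if room_version == RoomVersions.V1:
        return _resolve_state_v1(state_sets)
    raise Exception("unknown room version %r" % (room_version,))

def _resolve_state_v1(state_sets):
    raise NotImplementedError("stand-in for the pre-existing resolver")
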
diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py
index bc97c12245..b921ac52c0 100644
--- a/tests/test_test_utils.py
+++ b/tests/test_test_utils.py
@@ -18,7 +18,6 @@ from tests.utils import MockClock
 
 
 class MockClockTestCase(unittest.TestCase):
-
     def setUp(self):
         self.clock = MockClock()
 
@@ -34,10 +33,12 @@ class MockClockTestCase(unittest.TestCase):
 
         def _cb0():
             invoked[0] = 1
+
         self.clock.call_later(10, _cb0)
 
         def _cb1():
             invoked[1] = 1
+
         self.clock.call_later(20, _cb1)
 
         self.assertFalse(invoked[0])
@@ -56,10 +57,12 @@ class MockClockTestCase(unittest.TestCase):
 
         def _cb0():
             invoked[0] = 1
+
         t0 = self.clock.call_later(10, _cb0)
 
         def _cb1():
             invoked[1] = 1
+
         self.clock.call_later(20, _cb1)
 
         self.clock.cancel_call_later(t0)
diff --git a/tests/test_types.py b/tests/test_types.py
index 729bd676c1..be072d402b 100644
--- a/tests/test_types.py
+++ b/tests/test_types.py
@@ -69,10 +69,7 @@ class GroupIDTestCase(unittest.TestCase):
         self.assertEqual("my.domain", group_id.domain)
 
     def test_validate(self):
-        bad_ids = [
-            "$badsigil:domain",
-            "+:empty",
-        ] + [
+        bad_ids = ["$badsigil:domain", "+:empty"] + [
             "+group" + c + ":domain" for c in "A%?æ£"
         ]
         for id_string in bad_ids:
diff --git a/tests/test_visibility.py b/tests/test_visibility.py
index 0dc1a924d3..8d8ce0cab9 100644
--- a/tests/test_visibility.py
+++ b/tests/test_visibility.py
@@ -21,7 +21,7 @@ from synapse.events import FrozenEvent
 from synapse.visibility import filter_events_for_server
 
 import tests.unittest
-from tests.utils import setup_test_homeserver
+from tests.utils import create_room, setup_test_homeserver
 
 logger = logging.getLogger(__name__)
 
@@ -31,11 +31,13 @@ TEST_ROOM_ID = "!TEST:ROOM"
 class FilterEventsForServerTestCase(tests.unittest.TestCase):
     @defer.inlineCallbacks
     def setUp(self):
-        self.hs = yield setup_test_homeserver()
+        self.hs = yield setup_test_homeserver(self.addCleanup)
         self.event_creation_handler = self.hs.get_event_creation_handler()
         self.event_builder_factory = self.hs.get_event_builder_factory()
         self.store = self.hs.get_datastore()
 
+        yield create_room(self.hs, TEST_ROOM_ID, "@someone:ROOM")
+
     @defer.inlineCallbacks
     def test_filtering(self):
         #
@@ -54,14 +56,12 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase):
         events_to_filter = []
 
         for i in range(0, 10):
-            user = "@user%i:%s" % (
-                i, "test_server" if i == 5 else "other_server"
-            )
+            user = "@user%i:%s" % (i, "test_server" if i == 5 else "other_server")
             evt = yield self.inject_room_member(user, extra_content={"a": "b"})
             events_to_filter.append(evt)
 
         filtered = yield filter_events_for_server(
-            self.store, "test_server", events_to_filter,
+            self.store, "test_server", events_to_filter
         )
 
         # the result should be 5 redacted events, and 5 unredacted events.
@@ -100,19 +100,21 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase):
 
         # ... and the filtering happens.
         filtered = yield filter_events_for_server(
-            self.store, "test_server", events_to_filter,
+            self.store, "test_server", events_to_filter
         )
 
         for i in range(0, len(events_to_filter)):
             self.assertEqual(
-                events_to_filter[i].event_id, filtered[i].event_id,
-                "Unexpected event at result position %i" % (i, )
+                events_to_filter[i].event_id,
+                filtered[i].event_id,
+                "Unexpected event at result position %i" % (i,),
             )
 
         for i in (0, 3):
             self.assertEqual(
-                events_to_filter[i].content["body"], filtered[i].content["body"],
-                "Unexpected event content at result position %i" % (i,)
+                events_to_filter[i].content["body"],
+                filtered[i].content["body"],
+                "Unexpected event content at result position %i" % (i,),
             )
 
         for i in (1, 4):
@@ -121,13 +123,15 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase):
     @defer.inlineCallbacks
     def inject_visibility(self, user_id, visibility):
         content = {"history_visibility": visibility}
-        builder = self.event_builder_factory.new({
-            "type": "m.room.history_visibility",
-            "sender": user_id,
-            "state_key": "",
-            "room_id": TEST_ROOM_ID,
-            "content": content,
-        })
+        builder = self.event_builder_factory.new(
+            {
+                "type": "m.room.history_visibility",
+                "sender": user_id,
+                "state_key": "",
+                "room_id": TEST_ROOM_ID,
+                "content": content,
+            }
+        )
 
         event, context = yield self.event_creation_handler.create_new_client_event(
             builder
@@ -139,13 +143,15 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase):
     def inject_room_member(self, user_id, membership="join", extra_content={}):
         content = {"membership": membership}
         content.update(extra_content)
-        builder = self.event_builder_factory.new({
-            "type": "m.room.member",
-            "sender": user_id,
-            "state_key": user_id,
-            "room_id": TEST_ROOM_ID,
-            "content": content,
-        })
+        builder = self.event_builder_factory.new(
+            {
+                "type": "m.room.member",
+                "sender": user_id,
+                "state_key": user_id,
+                "room_id": TEST_ROOM_ID,
+                "content": content,
+            }
+        )
 
         event, context = yield self.event_creation_handler.create_new_client_event(
             builder
@@ -158,12 +164,14 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase):
     def inject_message(self, user_id, content=None):
         if content is None:
             content = {"body": "testytest"}
-        builder = self.event_builder_factory.new({
-            "type": "m.room.message",
-            "sender": user_id,
-            "room_id": TEST_ROOM_ID,
-            "content": content,
-        })
+        builder = self.event_builder_factory.new(
+            {
+                "type": "m.room.message",
+                "sender": user_id,
+                "room_id": TEST_ROOM_ID,
+                "content": content,
+            }
+        )
 
         event, context = yield self.event_creation_handler.create_new_client_event(
             builder
@@ -192,56 +200,54 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase):
         # history_visibility event.
         room_state = []
 
-        history_visibility_evt = FrozenEvent({
-            "event_id": "$history_vis",
-            "type": "m.room.history_visibility",
-            "sender": "@resident_user_0:test.com",
-            "state_key": "",
-            "room_id": TEST_ROOM_ID,
-            "content": {"history_visibility": "joined"},
-        })
+        history_visibility_evt = FrozenEvent(
+            {
+                "event_id": "$history_vis",
+                "type": "m.room.history_visibility",
+                "sender": "@resident_user_0:test.com",
+                "state_key": "",
+                "room_id": TEST_ROOM_ID,
+                "content": {"history_visibility": "joined"},
+            }
+        )
         room_state.append(history_visibility_evt)
         test_store.add_event(history_visibility_evt)
 
         for i in range(0, 100000):
-            user = "@resident_user_%i:test.com" % (i, )
-            evt = FrozenEvent({
-                "event_id": "$res_event_%i" % (i, ),
-                "type": "m.room.member",
-                "state_key": user,
-                "sender": user,
-                "room_id": TEST_ROOM_ID,
-                "content": {
-                    "membership": "join",
-                    "extra": "zzz,"
-                },
-            })
+            user = "@resident_user_%i:test.com" % (i,)
+            evt = FrozenEvent(
+                {
+                    "event_id": "$res_event_%i" % (i,),
+                    "type": "m.room.member",
+                    "state_key": user,
+                    "sender": user,
+                    "room_id": TEST_ROOM_ID,
+                    "content": {"membership": "join", "extra": "zzz,"},
+                }
+            )
             room_state.append(evt)
             test_store.add_event(evt)
 
         events_to_filter = []
         for i in range(0, 10):
-            user = "@user%i:%s" % (
-                i, "test_server" if i == 5 else "other_server"
+            user = "@user%i:%s" % (i, "test_server" if i == 5 else "other_server")
+            evt = FrozenEvent(
+                {
+                    "event_id": "$evt%i" % (i,),
+                    "type": "m.room.member",
+                    "state_key": user,
+                    "sender": user,
+                    "room_id": TEST_ROOM_ID,
+                    "content": {"membership": "join", "extra": "zzz"},
+                }
             )
-            evt = FrozenEvent({
-                "event_id": "$evt%i" % (i, ),
-                "type": "m.room.member",
-                "state_key": user,
-                "sender": user,
-                "room_id": TEST_ROOM_ID,
-                "content": {
-                    "membership": "join",
-                    "extra": "zzz",
-                },
-            })
             events_to_filter.append(evt)
             room_state.append(evt)
 
             test_store.add_event(evt)
-            test_store.set_state_ids_for_event(evt, {
-                (e.type, e.state_key): e.event_id for e in room_state
-            })
+            test_store.set_state_ids_for_event(
+                evt, {(e.type, e.state_key): e.event_id for e in room_state}
+            )
 
         pr = cProfile.Profile()
         pr.enable()
@@ -249,7 +255,7 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase):
         logger.info("Starting filtering")
         start = time.time()
         filtered = yield filter_events_for_server(
-            test_store, "test_server", events_to_filter,
+            test_store, "test_server", events_to_filter
         )
         logger.info("Filtering took %f seconds", time.time() - start)
 
@@ -275,6 +281,7 @@ class _TestStore(object):
     filter_events_for_server
 
     """
+
     def __init__(self):
         # data for get_events: a map from event_id to event
         self.events = {}
@@ -298,8 +305,8 @@ class _TestStore(object):
                 continue
             if type != "m.room.member" or state_key is not None:
                 raise RuntimeError(
-                    "Unimplemented: get_state_ids with type (%s, %s)" %
-                    (type, state_key),
+                    "Unimplemented: get_state_ids with type (%s, %s)"
+                    % (type, state_key)
                 )
             include_memberships = True
 
@@ -316,9 +323,7 @@ class _TestStore(object):
         return succeed(res)
 
     def get_events(self, events):
-        return succeed({
-            event_id: self.events[event_id] for event_id in events
-        })
+        return succeed({event_id: self.events[event_id] for event_id in events})
 
     def are_users_erased(self, users):
         return succeed({u: False for u in users})
diff --git a/tests/unittest.py b/tests/unittest.py
index b15b06726b..d852e2465a 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2018 New Vector
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,12 +16,21 @@
 
 import logging
 
+from mock import Mock
+
+from canonicaljson import json
+
 import twisted
 import twisted.logger
 from twisted.trial import unittest
 
+from synapse.http.server import JsonResource
+from synapse.server import HomeServer
+from synapse.types import UserID, create_requester
 from synapse.util.logcontext import LoggingContextFilter
 
+from tests.server import get_clock, make_request, render, setup_test_homeserver
+
 # Set up putting Synapse's logs into Trial's.
 rootLogger = logging.getLogger()
 
@@ -56,6 +66,7 @@ def around(target):
     def method_name(orig, *args, **kwargs):
         return orig(*args, **kwargs)
     """
+
     def _around(code):
         name = code.__name__
         orig = getattr(target, name)
@@ -89,6 +100,7 @@ class TestCase(unittest.TestCase):
             old_level = logging.getLogger().level
 
             if old_level != level:
+
                 @around(self)
                 def tearDown(orig):
                     ret = orig()
@@ -117,8 +129,9 @@ class TestCase(unittest.TestCase):
             actual (dict): The test result. Extra keys will not be checked.
         """
         for key in required:
-            self.assertEquals(required[key], actual[key],
-                              msg="%s mismatch. %s" % (key, actual))
+            self.assertEquals(
+                required[key], actual[key], msg="%s mismatch. %s" % (key, actual)
+            )
 
 
 def DEBUG(target):
@@ -126,3 +139,143 @@ def DEBUG(target):
     Can apply to either a TestCase or an individual test method."""
     target.loglevel = logging.DEBUG
     return target
+
+
+class HomeserverTestCase(TestCase):
+    """
+    A base TestCase that reduces boilerplate for HomeServer-using test cases.
+
+    Attributes:
+        servlets (list[function]): List of servlet registration functions.
+        user_id (str): The user ID to assume if auth is hijacked.
+        hijack_auth (bool): Whether to hijack auth to return the user specified
+        in user_id.
+    """
+    servlets = []
+    hijack_auth = True
+
+    def setUp(self):
+        """
+        Set up the TestCase by calling the homeserver constructor, optionally
+        hijacking the authentication system to return a fixed user, and then
+        calling the prepare function.
+        """
+        self.reactor, self.clock = get_clock()
+        self._hs_args = {"clock": self.clock, "reactor": self.reactor}
+        self.hs = self.make_homeserver(self.reactor, self.clock)
+
+        if self.hs is None:
+            raise Exception("No homeserver returned from make_homeserver.")
+
+        if not isinstance(self.hs, HomeServer):
+            raise Exception("A homeserver wasn't returned, but %r" % (self.hs,))
+
+        # Register the resources
+        self.resource = JsonResource(self.hs)
+
+        for servlet in self.servlets:
+            servlet(self.hs, self.resource)
+
+        if hasattr(self, "user_id"):
+            from tests.rest.client.v1.utils import RestHelper
+
+            self.helper = RestHelper(self.hs, self.resource, self.user_id)
+
+            if self.hijack_auth:
+
+                def get_user_by_access_token(token=None, allow_guest=False):
+                    return {
+                        "user": UserID.from_string(self.helper.auth_user_id),
+                        "token_id": 1,
+                        "is_guest": False,
+                    }
+
+                def get_user_by_req(request, allow_guest=False, rights="access"):
+                    return create_requester(
+                        UserID.from_string(self.helper.auth_user_id), 1, False, None
+                    )
+
+                self.hs.get_auth().get_user_by_req = get_user_by_req
+                self.hs.get_auth().get_user_by_access_token = get_user_by_access_token
+                self.hs.get_auth().get_access_token_from_request = Mock(
+                    return_value="1234"
+                )
+
+        if hasattr(self, "prepare"):
+            self.prepare(self.reactor, self.clock, self.hs)
+
+    def make_homeserver(self, reactor, clock):
+        """
+        Make and return a homeserver.
+
+        Args:
+            reactor: A Twisted Reactor, or something that pretends to be one.
+            clock (synapse.util.Clock): The Clock, associated with the reactor.
+
+        Returns:
+            A homeserver (synapse.server.HomeServer) suitable for testing.
+
+        Function to be overridden in subclasses.
+        """
+        raise NotImplementedError()
+
+    def prepare(self, reactor, clock, homeserver):
+        """
+        Prepare for the test. This involves things like mocking out parts of
+        the homeserver, or building test data common across the whole test
+        suite.
+
+        Args:
+            reactor: A Twisted Reactor, or something that pretends to be one.
+            clock (synapse.util.Clock): The Clock, associated with the reactor.
+            homeserver (synapse.server.HomeServer): The HomeServer to test
+            against.
+
+        Function to optionally be overridden in subclasses.
+        """
+
+    def make_request(self, method, path, content=b""):
+        """
+        Create a SynapseRequest at the path using the method and containing the
+        given content.
+
+        Args:
+            method (bytes/unicode): The HTTP request method ("verb").
+            path (bytes/unicode): The HTTP path, suitably URL encoded (i.e.
+            with UTF-8 characters and spaces percent-escaped).
+            content (bytes or dict): The body of the request. JSON-encoded, if
+            a dict.
+
+        Returns:
+            A synapse.http.site.SynapseRequest.
+        """
+        if isinstance(content, dict):
+            content = json.dumps(content).encode('utf8')
+
+        return make_request(method, path, content)
+
+    def render(self, request):
+        """
+        Render a request against the resources registered by the test class's
+        servlets.
+
+        Args:
+            request (synapse.http.site.SynapseRequest): The request to render.
+        """
+        render(request, self.resource, self.reactor)
+
+    def setup_test_homeserver(self, *args, **kwargs):
+        """
+        Set up the test homeserver, meant to be called by the overridable
+        make_homeserver. It automatically passes through the test class's
+        clock & reactor.
+
+        Args:
+            See tests.utils.setup_test_homeserver.
+
+        Returns:
+            synapse.server.HomeServer
+        """
+        kwargs = dict(kwargs)
+        kwargs.update(self._hs_args)
+        return setup_test_homeserver(self.addCleanup, *args, **kwargs)
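A minimal sketch of how a subclass of the new HomeserverTestCase might look,
assuming only the behaviour described in the docstrings above (the class name,
server name and prepare body are illustrative, not part of this diff):

    from tests import unittest


    class ExampleTestCase(unittest.HomeserverTestCase):
        # With user_id set and hijack_auth left at its default of True,
        # setUp() patches get_user_by_req and friends so that any request
        # rendered against self.resource is treated as coming from @alice:test.
        user_id = "@alice:test"

        def make_homeserver(self, reactor, clock):
            # self.setup_test_homeserver() forwards this test's clock and
            # reactor into tests.utils.setup_test_homeserver for us.
            return self.setup_test_homeserver(name="example")

        def prepare(self, reactor, clock, hs):
            # Optional hook: mock out parts of the homeserver, or build
            # test data shared by all the test methods.
            self.store = hs.get_datastore()

Requests are then constructed with self.make_request() and driven through the
servlets registered on self.resource with self.render().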
diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py
index ca8a7c907f..463a737efa 100644
--- a/tests/util/caches/test_descriptors.py
+++ b/tests/util/caches/test_descriptors.py
@@ -67,12 +67,8 @@ class CacheTestCase(unittest.TestCase):
         self.assertIsNone(cache.get("key2", None))
 
         # both callbacks should have been callbacked
-        self.assertTrue(
-            callback_record[0], "Invalidation callback for key1 not called",
-        )
-        self.assertTrue(
-            callback_record[1], "Invalidation callback for key2 not called",
-        )
+        self.assertTrue(callback_record[0], "Invalidation callback for key1 not called")
+        self.assertTrue(callback_record[1], "Invalidation callback for key2 not called")
 
         # letting the other lookup complete should do nothing
         d1.callback("result1")
@@ -168,8 +164,7 @@ class DescriptorTestCase(unittest.TestCase):
             with logcontext.LoggingContext() as c1:
                 c1.name = "c1"
                 r = yield obj.fn(1)
-                self.assertEqual(logcontext.LoggingContext.current_context(),
-                                 c1)
+                self.assertEqual(logcontext.LoggingContext.current_context(), c1)
             defer.returnValue(r)
 
         def check_result(r):
@@ -179,14 +174,18 @@ class DescriptorTestCase(unittest.TestCase):
 
         # set off a deferred which will do a cache lookup
         d1 = do_lookup()
-        self.assertEqual(logcontext.LoggingContext.current_context(),
-                         logcontext.LoggingContext.sentinel)
+        self.assertEqual(
+            logcontext.LoggingContext.current_context(),
+            logcontext.LoggingContext.sentinel,
+        )
         d1.addCallback(check_result)
 
         # and another
         d2 = do_lookup()
-        self.assertEqual(logcontext.LoggingContext.current_context(),
-                         logcontext.LoggingContext.sentinel)
+        self.assertEqual(
+            logcontext.LoggingContext.current_context(),
+            logcontext.LoggingContext.sentinel,
+        )
         d2.addCallback(check_result)
 
         # let the lookup complete
@@ -224,15 +223,16 @@ class DescriptorTestCase(unittest.TestCase):
                 except SynapseError:
                     pass
 
-                self.assertEqual(logcontext.LoggingContext.current_context(),
-                                 c1)
+                self.assertEqual(logcontext.LoggingContext.current_context(), c1)
 
         obj = Cls()
 
         # set off a deferred which will do a cache lookup
         d1 = do_lookup()
-        self.assertEqual(logcontext.LoggingContext.current_context(),
-                         logcontext.LoggingContext.sentinel)
+        self.assertEqual(
+            logcontext.LoggingContext.current_context(),
+            logcontext.LoggingContext.sentinel,
+        )
 
         return d1
 
@@ -288,14 +288,10 @@ class CachedListDescriptorTestCase(unittest.TestCase):
 
             @descriptors.cachedList("fn", "args1", inlineCallbacks=True)
             def list_fn(self, args1, arg2):
-                assert (
-                    logcontext.LoggingContext.current_context().request == "c1"
-                )
+                assert logcontext.LoggingContext.current_context().request == "c1"
                 # we want this to behave like an asynchronous function
                 yield run_on_reactor()
-                assert (
-                    logcontext.LoggingContext.current_context().request == "c1"
-                )
+                assert logcontext.LoggingContext.current_context().request == "c1"
                 defer.returnValue(self.mock(args1, arg2))
 
         with logcontext.LoggingContext() as c1:
@@ -308,10 +304,7 @@ class CachedListDescriptorTestCase(unittest.TestCase):
                 logcontext.LoggingContext.sentinel,
             )
             r = yield d1
-            self.assertEqual(
-                logcontext.LoggingContext.current_context(),
-                c1
-            )
+            self.assertEqual(logcontext.LoggingContext.current_context(), c1)
             obj.mock.assert_called_once_with([10, 20], 2)
             self.assertEqual(r, {10: 'fish', 20: 'chips'})
             obj.mock.reset_mock()
@@ -337,6 +330,7 @@ class CachedListDescriptorTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def test_invalidate(self):
         """Make sure that invalidation callbacks are called."""
+
         class Cls(object):
             def __init__(self):
                 self.mock = mock.Mock()
diff --git a/tests/util/test_dict_cache.py b/tests/util/test_dict_cache.py
index 26f2fa5800..34fdc9a43a 100644
--- a/tests/util/test_dict_cache.py
+++ b/tests/util/test_dict_cache.py
@@ -20,7 +20,6 @@ from tests import unittest
 
 
 class DictCacheTestCase(unittest.TestCase):
-
     def setUp(self):
         self.cache = DictionaryCache("foobar")
 
@@ -41,9 +40,7 @@ class DictCacheTestCase(unittest.TestCase):
         key = "test_simple_cache_hit_partial"
 
         seq = self.cache.sequence
-        test_value = {
-            "test": "test_simple_cache_hit_partial"
-        }
+        test_value = {"test": "test_simple_cache_hit_partial"}
         self.cache.update(seq, key, test_value)
 
         c = self.cache.get(key, ["test"])
@@ -53,9 +50,7 @@ class DictCacheTestCase(unittest.TestCase):
         key = "test_simple_cache_miss_partial"
 
         seq = self.cache.sequence
-        test_value = {
-            "test": "test_simple_cache_miss_partial"
-        }
+        test_value = {"test": "test_simple_cache_miss_partial"}
         self.cache.update(seq, key, test_value)
 
         c = self.cache.get(key, ["test2"])
@@ -79,15 +74,11 @@ class DictCacheTestCase(unittest.TestCase):
         key = "test_simple_cache_hit_miss_partial"
 
         seq = self.cache.sequence
-        test_value_1 = {
-            "test": "test_simple_cache_hit_miss_partial",
-        }
+        test_value_1 = {"test": "test_simple_cache_hit_miss_partial"}
         self.cache.update(seq, key, test_value_1, fetched_keys=set("test"))
 
         seq = self.cache.sequence
-        test_value_2 = {
-            "test2": "test_simple_cache_hit_miss_partial2",
-        }
+        test_value_2 = {"test2": "test_simple_cache_hit_miss_partial2"}
         self.cache.update(seq, key, test_value_2, fetched_keys=set("test2"))
 
         c = self.cache.get(key)
@@ -96,5 +87,5 @@ class DictCacheTestCase(unittest.TestCase):
                 "test": "test_simple_cache_hit_miss_partial",
                 "test2": "test_simple_cache_hit_miss_partial2",
             },
-            c.value
+            c.value,
         )
diff --git a/tests/util/test_expiring_cache.py b/tests/util/test_expiring_cache.py
index d12b5e838b..5cbada4eda 100644
--- a/tests/util/test_expiring_cache.py
+++ b/tests/util/test_expiring_cache.py
@@ -22,7 +22,6 @@ from .. import unittest
 
 
 class ExpiringCacheTestCase(unittest.TestCase):
-
     def test_get_set(self):
         clock = MockClock()
         cache = ExpiringCache("test", clock, max_len=1)
diff --git a/tests/util/test_file_consumer.py b/tests/util/test_file_consumer.py
index 7ce5f8c258..e90e08d1c0 100644
--- a/tests/util/test_file_consumer.py
+++ b/tests/util/test_file_consumer.py
@@ -27,7 +27,6 @@ from tests import unittest
 
 
 class FileConsumerTests(unittest.TestCase):
-
     @defer.inlineCallbacks
     def test_pull_consumer(self):
         string_file = StringIO()
@@ -87,7 +86,9 @@ class FileConsumerTests(unittest.TestCase):
             producer = NonCallableMock(spec_set=["pauseProducing", "resumeProducing"])
 
             resume_deferred = defer.Deferred()
-            producer.resumeProducing.side_effect = lambda: resume_deferred.callback(None)
+            producer.resumeProducing.side_effect = lambda: resume_deferred.callback(
+                None
+            )
 
             consumer.registerProducer(producer, True)
 
diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py
index 4729bd5a0a..61a55b461b 100644
--- a/tests/util/test_linearizer.py
+++ b/tests/util/test_linearizer.py
@@ -20,13 +20,12 @@ from twisted.internet import defer, reactor
 from twisted.internet.defer import CancelledError
 
 from synapse.util import Clock, logcontext
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 
 from tests import unittest
 
 
 class LinearizerTestCase(unittest.TestCase):
-
     @defer.inlineCallbacks
     def test_linearizer(self):
         linearizer = Linearizer()
@@ -54,13 +53,11 @@ class LinearizerTestCase(unittest.TestCase):
         def func(i, sleep=False):
             with logcontext.LoggingContext("func(%s)" % i) as lc:
                 with (yield linearizer.queue("")):
-                    self.assertEqual(
-                        logcontext.LoggingContext.current_context(), lc)
+                    self.assertEqual(logcontext.LoggingContext.current_context(), lc)
                     if sleep:
                         yield Clock(reactor).sleep(0)
 
-                self.assertEqual(
-                    logcontext.LoggingContext.current_context(), lc)
+                self.assertEqual(logcontext.LoggingContext.current_context(), lc)
 
         func(0, sleep=True)
         for i in range(1, 100):
diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py
index c54001f7a4..4633db77b3 100644
--- a/tests/util/test_logcontext.py
+++ b/tests/util/test_logcontext.py
@@ -8,11 +8,8 @@ from .. import unittest
 
 
 class LoggingContextTestCase(unittest.TestCase):
-
     def _check_test_key(self, value):
-        self.assertEquals(
-            LoggingContext.current_context().request, value
-        )
+        self.assertEquals(LoggingContext.current_context().request, value)
 
     def test_with_context(self):
         with LoggingContext() as context_one:
@@ -50,6 +47,7 @@ class LoggingContextTestCase(unittest.TestCase):
                 self._check_test_key("one")
                 callback_completed[0] = True
                 return res
+
             d.addCallback(cb)
 
             return d
@@ -74,8 +72,7 @@ class LoggingContextTestCase(unittest.TestCase):
             # make sure that the context was reset before it got thrown back
             # into the reactor
             try:
-                self.assertIs(LoggingContext.current_context(),
-                              sentinel_context)
+                self.assertIs(LoggingContext.current_context(), sentinel_context)
                 d2.callback(None)
             except BaseException:
                 d2.errback(twisted.python.failure.Failure())
@@ -104,9 +101,7 @@ class LoggingContextTestCase(unittest.TestCase):
         # a function which returns a deferred which looks like it has been
         # called, but is actually paused
         def testfunc():
-            return logcontext.make_deferred_yieldable(
-                _chained_deferred_function()
-            )
+            return logcontext.make_deferred_yieldable(_chained_deferred_function())
 
         return self._test_run_in_background(testfunc)
 
@@ -175,5 +170,6 @@ def _chained_deferred_function():
         d2 = defer.Deferred()
         reactor.callLater(0, d2.callback, res)
         return d2
+
     d.addCallback(cb)
     return d
diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py
index 9b36ef4482..786947375d 100644
--- a/tests/util/test_lrucache.py
+++ b/tests/util/test_lrucache.py
@@ -23,7 +23,6 @@ from .. import unittest
 
 
 class LruCacheTestCase(unittest.TestCase):
-
     def test_get_set(self):
         cache = LruCache(1)
         cache["key"] = "value"
@@ -235,7 +234,6 @@ class LruCacheCallbacksTestCase(unittest.TestCase):
 
 
 class LruCacheSizedTestCase(unittest.TestCase):
-
     def test_evict(self):
         cache = LruCache(5, size_callback=len)
         cache["key1"] = [0]
diff --git a/tests/util/test_rwlock.py b/tests/util/test_rwlock.py
index 24194e3b25..bd32e2cee7 100644
--- a/tests/util/test_rwlock.py
+++ b/tests/util/test_rwlock.py
@@ -14,13 +14,12 @@
 # limitations under the License.
 
 
-from synapse.util.async import ReadWriteLock
+from synapse.util.async_helpers import ReadWriteLock
 
 from tests import unittest
 
 
 class ReadWriteLockTestCase(unittest.TestCase):
-
     def _assert_called_before_not_after(self, lst, first_false):
         for i, d in enumerate(lst[:first_false]):
             self.assertTrue(d.called, msg="%d was unexpectedly false" % i)
@@ -36,12 +35,12 @@ class ReadWriteLockTestCase(unittest.TestCase):
         key = object()
 
         ds = [
-            rwlock.read(key),   # 0
-            rwlock.read(key),   # 1
+            rwlock.read(key),  # 0
+            rwlock.read(key),  # 1
             rwlock.write(key),  # 2
             rwlock.write(key),  # 3
-            rwlock.read(key),   # 4
-            rwlock.read(key),   # 5
+            rwlock.read(key),  # 4
+            rwlock.read(key),  # 5
             rwlock.write(key),  # 6
         ]
 
diff --git a/tests/util/test_snapshot_cache.py b/tests/util/test_snapshot_cache.py
index 0f5b32fcc0..1a44f72425 100644
--- a/tests/util/test_snapshot_cache.py
+++ b/tests/util/test_snapshot_cache.py
@@ -22,7 +22,6 @@ from .. import unittest
 
 
 class SnapshotCacheTestCase(unittest.TestCase):
-
     def setUp(self):
         self.cache = SnapshotCache()
         self.cache.DURATION_MS = 1
diff --git a/tests/util/test_stream_change_cache.py b/tests/util/test_stream_change_cache.py
index 65b0f2e6fb..f2be63706b 100644
--- a/tests/util/test_stream_change_cache.py
+++ b/tests/util/test_stream_change_cache.py
@@ -181,17 +181,8 @@ class StreamChangeCacheTests(unittest.TestCase):
         # Query a subset of the entries mid-way through the stream. We should
         # only get back the subset.
         self.assertEqual(
-            cache.get_entities_changed(
-                [
-                    "bar@baz.net",
-                ],
-                stream_pos=2,
-            ),
-            set(
-                [
-                    "bar@baz.net",
-                ]
-            ),
+            cache.get_entities_changed(["bar@baz.net"], stream_pos=2),
+            set(["bar@baz.net"]),
         )
 
     def test_max_pos(self):
diff --git a/tests/utils.py b/tests/utils.py
index 9bff3ff3b9..9f7ff94575 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -13,7 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import atexit
 import hashlib
+import os
+import uuid
 from inspect import getcallargs
 
 from mock import Mock, patch
@@ -21,28 +24,88 @@ from six.moves.urllib import parse as urlparse
 
 from twisted.internet import defer, reactor
 
+from synapse.api.constants import EventTypes
 from synapse.api.errors import CodeMessageException, cs_error
 from synapse.federation.transport import server
 from synapse.http.server import HttpServer
 from synapse.server import HomeServer
 from synapse.storage import PostgresEngine
 from synapse.storage.engines import create_engine
-from synapse.storage.prepare_database import prepare_database
+from synapse.storage.prepare_database import (
+    _get_or_create_schema_state,
+    _setup_new_database,
+    prepare_database,
+)
 from synapse.util.logcontext import LoggingContext
 from synapse.util.ratelimitutils import FederationRateLimiter
 
-# set this to True to run the tests against postgres instead of sqlite.
+# set the SYNAPSE_POSTGRES env var to anything non-empty to run the tests
+# against postgres instead of sqlite.
-# It requires you to have a local postgres database called synapse_test, within
-# which ALL TABLES WILL BE DROPPED
-USE_POSTGRES_FOR_TESTS = False
+USE_POSTGRES_FOR_TESTS = os.environ.get("SYNAPSE_POSTGRES", False)
+POSTGRES_USER = os.environ.get("SYNAPSE_POSTGRES_USER", "postgres")
+POSTGRES_BASE_DB = "_synapse_unit_tests_base_%s" % (os.getpid(),)
+
+
+def setupdb():
+    # If we're using PostgreSQL, set up the db once
+    if USE_POSTGRES_FOR_TESTS:
+        pgconfig = {
+            "name": "psycopg2",
+            "args": {
+                "database": POSTGRES_BASE_DB,
+                "user": POSTGRES_USER,
+                "cp_min": 1,
+                "cp_max": 5,
+            },
+        }
+        config = Mock()
+        config.password_providers = []
+        config.database_config = pgconfig
+        db_engine = create_engine(pgconfig)
+        db_conn = db_engine.module.connect(user=POSTGRES_USER)
+        db_conn.autocommit = True
+        cur = db_conn.cursor()
+        cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
+        cur.execute("CREATE DATABASE %s;" % (POSTGRES_BASE_DB,))
+        cur.close()
+        db_conn.close()
+
+        # Set up in the db
+        db_conn = db_engine.module.connect(
+            database=POSTGRES_BASE_DB, user=POSTGRES_USER
+        )
+        cur = db_conn.cursor()
+        _get_or_create_schema_state(cur, db_engine)
+        _setup_new_database(cur, db_engine)
+        db_conn.commit()
+        cur.close()
+        db_conn.close()
+
+        def _cleanup():
+            db_conn = db_engine.module.connect(user=POSTGRES_USER)
+            db_conn.autocommit = True
+            cur = db_conn.cursor()
+            cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
+            cur.close()
+            db_conn.close()
+
+        atexit.register(_cleanup)
 
 
 @defer.inlineCallbacks
-def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None,
-                          **kargs):
-    """Setup a homeserver suitable for running tests against. Keyword arguments
-    are passed to the Homeserver constructor. If no datastore is supplied a
-    datastore backed by an in-memory sqlite db will be given to the HS.
+def setup_test_homeserver(
+    cleanup_func,
+    name="test",
+    datastore=None,
+    config=None,
+    reactor=None,
+    homeserverToUse=HomeServer,
+    **kargs
+):
+    """
+    Set up a homeserver suitable for running tests against. Keyword arguments
+    are passed to the HomeServer constructor.
+
+    If no datastore is supplied, one is created and given to the homeserver.
+
+    Args:
+        cleanup_func: The function used to register a cleanup routine to run
+                      after the test.
     """
     if reactor is None:
         from twisted.internet import reactor
@@ -73,6 +136,17 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None
         config.block_events_without_consent_error = None
         config.media_storage_providers = []
         config.auto_join_rooms = []
+        config.limit_usage_by_mau = False
+        config.hs_disabled = False
+        config.hs_disabled_message = ""
+        config.hs_disabled_limit_type = ""
+        config.max_mau_value = 50
+        config.mau_limits_reserved_threepids = []
+        config.admin_uri = None
+
+        # we need a sane default_room_version, otherwise attempts to create rooms will
+        # fail.
+        config.default_room_version = "1"
 
         # disable user directory updates, because they get done in the
         # background, which upsets the test runner.
@@ -85,59 +159,94 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None
         kargs["clock"] = MockClock()
 
     if USE_POSTGRES_FOR_TESTS:
+        test_db = "synapse_test_%s" % uuid.uuid4().hex
+
         config.database_config = {
             "name": "psycopg2",
-            "args": {
-                "database": "synapse_test",
-                "cp_min": 1,
-                "cp_max": 5,
-            },
+            "args": {"database": test_db, "cp_min": 1, "cp_max": 5},
         }
     else:
         config.database_config = {
             "name": "sqlite3",
-            "args": {
-                "database": ":memory:",
-                "cp_min": 1,
-                "cp_max": 1,
-            },
+            "args": {"database": ":memory:", "cp_min": 1, "cp_max": 1},
         }
 
     db_engine = create_engine(config.database_config)
 
+    # Create the database before we actually try and connect to it, based off
+    # the template database we generate in setupdb()
+    if datastore is None and isinstance(db_engine, PostgresEngine):
+        db_conn = db_engine.module.connect(
+            database=POSTGRES_BASE_DB, user=POSTGRES_USER
+        )
+        db_conn.autocommit = True
+        cur = db_conn.cursor()
+        cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
+        cur.execute(
+            "CREATE DATABASE %s WITH TEMPLATE %s;" % (test_db, POSTGRES_BASE_DB)
+        )
+        cur.close()
+        db_conn.close()
+
     # we need to configure the connection pool to run the on_new_connection
     # function, so that we can test code that uses custom sqlite functions
     # (like rank).
     config.database_config["args"]["cp_openfun"] = db_engine.on_new_connection
 
     if datastore is None:
-        hs = HomeServer(
-            name, config=config,
+        hs = homeserverToUse(
+            name,
+            config=config,
             db_config=config.database_config,
             version_string="Synapse/tests",
             database_engine=db_engine,
             room_list_handler=object(),
             tls_server_context_factory=Mock(),
+            tls_client_options_factory=Mock(),
             reactor=reactor,
             **kargs
         )
-        db_conn = hs.get_db_conn()
-        # make sure that the database is empty
-        if isinstance(db_engine, PostgresEngine):
-            cur = db_conn.cursor()
-            cur.execute("SELECT tablename FROM pg_tables where schemaname='public'")
-            rows = cur.fetchall()
-            for r in rows:
-                cur.execute("DROP TABLE %s CASCADE" % r[0])
-        yield prepare_database(db_conn, db_engine, config)
+
+        # Prepare the DB on SQLite -- PostgreSQL is a copy of an already up to
+        # date db
+        if not isinstance(db_engine, PostgresEngine):
+            db_conn = hs.get_db_conn()
+            yield prepare_database(db_conn, db_engine, config)
+            db_conn.commit()
+            db_conn.close()
+
+        else:
+            # We need to do cleanup on PostgreSQL
+            def cleanup():
+                # Close all the db pools
+                hs.get_db_pool().close()
+
+                # Drop the test database
+                db_conn = db_engine.module.connect(
+                    database=POSTGRES_BASE_DB, user=POSTGRES_USER
+                )
+                db_conn.autocommit = True
+                cur = db_conn.cursor()
+                cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
+                db_conn.commit()
+                cur.close()
+                db_conn.close()
+
+            # Register the cleanup hook
+            cleanup_func(cleanup)
+
         hs.setup()
     else:
-        hs = HomeServer(
-            name, db_pool=None, datastore=datastore, config=config,
+        hs = homeserverToUse(
+            name,
+            db_pool=None,
+            datastore=datastore,
+            config=config,
             version_string="Synapse/tests",
             database_engine=db_engine,
             room_list_handler=object(),
             tls_server_context_factory=Mock(),
+            tls_client_options_factory=Mock(),
             reactor=reactor,
             **kargs
         )
@@ -146,8 +255,10 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None
     # Need to let the HS build an auth handler and then mess with it
     # because AuthHandler's constructor requires the HS, so we can't make one
     # beforehand and pass it in to the HS's constructor (chicken / egg)
-    hs.get_auth_handler().hash = lambda p: hashlib.md5(p).hexdigest()
-    hs.get_auth_handler().validate_hash = lambda p, h: hashlib.md5(p).hexdigest() == h
+    hs.get_auth_handler().hash = lambda p: hashlib.md5(p.encode('utf8')).hexdigest()
+    hs.get_auth_handler().validate_hash = (
+        lambda p, h: hashlib.md5(p.encode('utf8')).hexdigest() == h
+    )
 
     fed = kargs.get("resource_for_federation", None)
     if fed:
@@ -161,7 +272,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None
                 sleep_limit=hs.config.federation_rc_sleep_limit,
                 sleep_msec=hs.config.federation_rc_sleep_delay,
                 reject_limit=hs.config.federation_rc_reject_limit,
-                concurrent_requests=hs.config.federation_rc_concurrent
+                concurrent_requests=hs.config.federation_rc_concurrent,
             ),
         )
 
@@ -187,7 +298,6 @@ def mock_getRawHeaders(headers=None):
 
 # This is a mock /resource/ not an entire server
 class MockHttpResource(HttpServer):
-
     def __init__(self, prefix=""):
         self.callbacks = []  # 3-tuple of method/pattern/function
         self.prefix = prefix
@@ -220,8 +330,8 @@ class MockHttpResource(HttpServer):
         mock_content.configure_mock(**config)
         mock_request.content = mock_content
 
-        mock_request.method = http_method
-        mock_request.uri = path
+        mock_request.method = http_method.encode('ascii')
+        mock_request.uri = path.encode('ascii')
 
         mock_request.getClientIP.return_value = "-"
 
@@ -251,15 +361,9 @@ class MockHttpResource(HttpServer):
             matcher = pattern.match(path)
             if matcher:
                 try:
-                    args = [
-                        urlparse.unquote(u)
-                        for u in matcher.groups()
-                    ]
-
-                    (code, response) = yield func(
-                        mock_request,
-                        *args
-                    )
+                    args = [urlparse.unquote(u) for u in matcher.groups()]
+
+                    (code, response) = yield func(mock_request, *args)
                     defer.returnValue((code, response))
                 except CodeMessageException as e:
                     defer.returnValue((e.code, cs_error(e.msg, code=e.errcode)))
@@ -360,8 +464,7 @@ class MockClock(object):
 
 def _format_call(args, kwargs):
     return ", ".join(
-        ["%r" % (a) for a in args] +
-        ["%s=%r" % (k, v) for k, v in kwargs.items()]
+        ["%r" % (a) for a in args] + ["%s=%r" % (k, v) for k, v in kwargs.items()]
     )
 
 
@@ -379,8 +482,9 @@ class DeferredMockCallable(object):
         self.calls.append((args, kwargs))
 
         if not self.expectations:
-            raise ValueError("%r has no pending calls to handle call(%s)" % (
-                self, _format_call(args, kwargs))
+            raise ValueError(
+                "%r has no pending calls to handle call(%s)"
+                % (self, _format_call(args, kwargs))
             )
 
         for (call, result, d) in self.expectations:
@@ -388,9 +492,9 @@ class DeferredMockCallable(object):
                 d.callback(None)
                 return result
 
-        failure = AssertionError("Was not expecting call(%s)" % (
-            _format_call(args, kwargs)
-        ))
+        failure = AssertionError(
+            "Was not expecting call(%s)" % (_format_call(args, kwargs))
+        )
 
         for _, _, d in self.expectations:
             try:
@@ -406,17 +510,19 @@ class DeferredMockCallable(object):
     @defer.inlineCallbacks
     def await_calls(self, timeout=1000):
         deferred = defer.DeferredList(
-            [d for _, _, d in self.expectations],
-            fireOnOneErrback=True
+            [d for _, _, d in self.expectations], fireOnOneErrback=True
         )
 
         timer = reactor.callLater(
             timeout / 1000,
             deferred.errback,
-            AssertionError("%d pending calls left: %s" % (
-                len([e for e in self.expectations if not e[2].called]),
-                [e for e in self.expectations if not e[2].called]
-            ))
+            AssertionError(
+                "%d pending calls left: %s"
+                % (
+                    len([e for e in self.expectations if not e[2].called]),
+                    [e for e in self.expectations if not e[2].called],
+                )
+            ),
         )
 
         yield deferred
@@ -431,7 +537,35 @@ class DeferredMockCallable(object):
             self.calls = []
 
             raise AssertionError(
-                "Expected not to received any calls, got:\n" + "\n".join([
-                    "call(%s)" % _format_call(c[0], c[1]) for c in calls
-                ])
+                "Expected not to received any calls, got:\n"
+                + "\n".join(["call(%s)" % _format_call(c[0], c[1]) for c in calls])
             )
+
+
+@defer.inlineCallbacks
+def create_room(hs, room_id, creator_id):
+    """Creates and persist a creation event for the given room
+
+    Args:
+        hs
+        room_id (str)
+        creator_id (str)
+    """
+
+    store = hs.get_datastore()
+    event_builder_factory = hs.get_event_builder_factory()
+    event_creation_handler = hs.get_event_creation_handler()
+
+    builder = event_builder_factory.new(
+        {
+            "type": EventTypes.Create,
+            "state_key": "",
+            "sender": creator_id,
+            "room_id": room_id,
+            "content": {},
+        }
+    )
+
+    event, context = yield event_creation_handler.create_new_client_event(builder)
+
+    yield store.persist_event(event, context)
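The net effect of the tests/utils.py changes is to move PostgreSQL testing
from one shared, always-dropped synapse_test database to a template scheme:
setupdb() builds _synapse_unit_tests_base_<pid> once per run (and registers an
atexit drop), each setup_test_homeserver() call clones it via CREATE DATABASE
... WITH TEMPLATE into a throwaway synapse_test_<uuid>, and the new
cleanup_func argument is how the per-test drop gets registered. A sketch of a
caller, assuming a trial-style test (the names here are illustrative):

    from twisted.internet import defer

    from tests import unittest, utils


    class CreateRoomTestCase(unittest.TestCase):
        @defer.inlineCallbacks
        def test_create_room(self):
            # self.addCleanup acts as cleanup_func: on PostgreSQL it drops
            # this test's synapse_test_<uuid> database once the test ends.
            hs = yield utils.setup_test_homeserver(self.addCleanup)
            yield utils.create_room(hs, "!room:test", "@creator:test")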
diff --git a/tox.ini b/tox.ini
index ed26644bd9..085f438989 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,7 +1,7 @@
 [tox]
 envlist = packaging, py27, py36, pep8, check_isort
 
-[testenv]
+[base]
 deps =
     coverage
     Twisted>=15.1
@@ -15,6 +15,15 @@ deps =
 setenv =
     PYTHONDONTWRITEBYTECODE = no_byte_code
 
+[testenv]
+deps =
+    {[base]deps}
+
+setenv =
+    {[base]setenv}
+
+passenv = *
+
 commands =
     /usr/bin/find "{toxinidir}" -name '*.pyc' -delete
     coverage run {env:COVERAGE_OPTS:} --source="{toxinidir}/synapse" \
@@ -46,6 +55,15 @@ commands =
 # )
 usedevelop=true
 
+[testenv:py27-postgres]
+usedevelop=true
+deps =
+    {[base]deps}
+    psycopg2
+setenv =
+    {[base]setenv}
+    SYNAPSE_POSTGRES = 1
+
 [testenv:py36]
 usedevelop=true
 commands =
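Taken together with the tests/utils.py changes, the tox.ini split means the
stock py27 environment behaves as before, while "tox -e py27-postgres"
inherits the [base] deps plus psycopg2 and exports SYNAPSE_POSTGRES=1, which
flips USE_POSTGRES_FOR_TESTS in tests/utils.py. Because passenv = * is now
set, SYNAPSE_POSTGRES_USER can also be passed through to choose the PostgreSQL
user the tests connect as (it defaults to "postgres").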