-rw-r--r--  changelog.d/1491.feature | 1
-rw-r--r--  changelog.d/3632.misc | 1
-rw-r--r--  changelog.d/3633.feature | 1
-rw-r--r--  changelog.d/3647.misc | 1
-rw-r--r--  changelog.d/3654.feature | 1
-rw-r--r--  changelog.d/3655.feature | 1
-rw-r--r--  changelog.d/3658.bugfix | 1
-rw-r--r--  changelog.d/3662.feature | 1
-rw-r--r--  changelog.d/3664.feature | 1
-rw-r--r--  changelog.d/3678.misc | 1
-rw-r--r--  contrib/grafana/synapse.json | 24
-rw-r--r--  synapse/api/auth.py | 18
-rw-r--r--  synapse/api/constants.py | 9
-rw-r--r--  synapse/api/errors.py | 25
-rw-r--r--  synapse/app/client_reader.py | 2
-rw-r--r--  synapse/app/event_creator.py | 2
-rw-r--r--  synapse/app/federation_reader.py | 2
-rw-r--r--  synapse/app/federation_sender.py | 4
-rw-r--r--  synapse/app/frontend_proxy.py | 2
-rwxr-xr-x  synapse/app/homeserver.py | 13
-rw-r--r--  synapse/app/media_repository.py | 2
-rw-r--r--  synapse/app/user_dir.py | 2
-rw-r--r--  synapse/config/logger.py | 3
-rw-r--r--  synapse/config/server.py | 11
-rw-r--r--  synapse/crypto/context_factory.py | 86
-rw-r--r--  synapse/crypto/keyclient.py | 4
-rw-r--r--  synapse/crypto/keyring.py | 4
-rw-r--r--  synapse/event_auth.py | 10
-rw-r--r--  synapse/federation/federation_client.py | 38
-rw-r--r--  synapse/federation/federation_server.py | 29
-rw-r--r--  synapse/federation/transaction_queue.py | 10
-rw-r--r--  synapse/federation/transport/client.py | 5
-rw-r--r--  synapse/federation/transport/server.py | 71
-rw-r--r--  synapse/handlers/appservice.py | 10
-rw-r--r--  synapse/handlers/auth.py | 17
-rw-r--r--  synapse/handlers/device.py | 2
-rw-r--r--  synapse/handlers/federation.py | 15
-rw-r--r--  synapse/handlers/initial_sync.py | 2
-rw-r--r--  synapse/handlers/message.py | 13
-rw-r--r--  synapse/handlers/pagination.py | 2
-rw-r--r--  synapse/handlers/presence.py | 2
-rw-r--r--  synapse/handlers/read_marker.py | 2
-rw-r--r--  synapse/handlers/register.py | 12
-rw-r--r--  synapse/handlers/room.py | 28
-rw-r--r--  synapse/handlers/room_list.py | 2
-rw-r--r--  synapse/handlers/room_member.py | 2
-rw-r--r--  synapse/handlers/room_member_worker.py | 41
-rw-r--r--  synapse/handlers/sync.py | 2
-rw-r--r--  synapse/http/client.py | 2
-rw-r--r--  synapse/http/endpoint.py | 12
-rw-r--r--  synapse/http/matrixfederationclient.py | 13
-rw-r--r--  synapse/metrics/__init__.py | 13
-rw-r--r--  synapse/notifier.py | 2
-rw-r--r--  synapse/push/bulk_push_rule_evaluator.py | 2
-rw-r--r--  synapse/push/mailer.py | 2
-rw-r--r--  synapse/replication/http/_base.py | 215
-rw-r--r--  synapse/replication/http/membership.py | 292
-rw-r--r--  synapse/replication/http/send_event.py | 110
-rw-r--r--  synapse/replication/slave/storage/events.py | 2
-rw-r--r--  synapse/rest/client/transactions.py | 2
-rw-r--r--  synapse/rest/media/v1/media_repository.py | 2
-rw-r--r--  synapse/rest/media/v1/preview_url_resource.py | 2
-rw-r--r--  synapse/state.py | 2
-rw-r--r--  synapse/storage/__init__.py | 28
-rw-r--r--  synapse/storage/client_ips.py | 4
-rw-r--r--  synapse/storage/events.py | 13
-rw-r--r--  synapse/storage/monthly_active_users.py | 201
-rw-r--r--  synapse/storage/prepare_database.py | 2
-rw-r--r--  synapse/storage/roommember.py | 2
-rw-r--r--  synapse/storage/schema/delta/51/monthly_active_users.sql | 27
-rw-r--r--  synapse/storage/state.py | 33
-rw-r--r--  synapse/util/async_helpers.py (renamed from synapse/util/async.py) | 0
-rw-r--r--  synapse/util/caches/descriptors.py | 2
-rw-r--r--  synapse/util/caches/response_cache.py | 2
-rw-r--r--  synapse/util/caches/snapshot_cache.py | 2
-rw-r--r--  synapse/util/logcontext.py | 2
-rw-r--r--  tests/api/test_auth.py | 36
-rw-r--r--  tests/handlers/test_auth.py | 8
-rw-r--r--  tests/handlers/test_register.py | 72
-rw-r--r--  tests/handlers/test_typing.py | 4
-rw-r--r--  tests/rest/client/test_transactions.py | 4
-rw-r--r--  tests/rest/client/v1/test_admin.py | 10
-rw-r--r--  tests/rest/client/v1/test_profile.py | 8
-rw-r--r--  tests/rest/client/v1/utils.py | 10
-rw-r--r--  tests/rest/client/v2_alpha/test_filter.py | 22
-rw-r--r--  tests/rest/client/v2_alpha/test_register.py | 14
-rw-r--r--  tests/rest/client/v2_alpha/test_sync.py | 4
-rw-r--r--  tests/server.py | 22
-rw-r--r--  tests/storage/test__base.py | 2
-rw-r--r--  tests/storage/test__init__.py | 65
-rw-r--r--  tests/storage/test_client_ips.py | 66
-rw-r--r--  tests/storage/test_event_federation.py | 2
-rw-r--r--  tests/storage/test_monthly_active_users.py | 123
-rw-r--r--  tests/storage/test_state.py | 2
-rw-r--r--  tests/test_server.py | 11
-rw-r--r--  tests/util/test_linearizer.py | 2
-rw-r--r--  tests/util/test_rwlock.py | 2
-rw-r--r--  tests/utils.py | 20
98 files changed, 1462 insertions, 576 deletions
diff --git a/changelog.d/1491.feature b/changelog.d/1491.feature
new file mode 100644
index 0000000000..77b6d6ca09
--- /dev/null
+++ b/changelog.d/1491.feature
@@ -0,0 +1 @@
+Add support for the SNI extension to federation TLS connections
\ No newline at end of file
diff --git a/changelog.d/3632.misc b/changelog.d/3632.misc
new file mode 100644
index 0000000000..9d64bbe83b
--- /dev/null
+++ b/changelog.d/3632.misc
@@ -0,0 +1 @@
+Refactor HTTP replication endpoints to reduce code duplication
diff --git a/changelog.d/3633.feature b/changelog.d/3633.feature
new file mode 100644
index 0000000000..8007a04840
--- /dev/null
+++ b/changelog.d/3633.feature
@@ -0,0 +1 @@
+Add ability to limit number of monthly active users on the server
diff --git a/changelog.d/3647.misc b/changelog.d/3647.misc
new file mode 100644
index 0000000000..dbc66dae60
--- /dev/null
+++ b/changelog.d/3647.misc
@@ -0,0 +1 @@
+Tests now correctly execute on Python 3.
diff --git a/changelog.d/3654.feature b/changelog.d/3654.feature
new file mode 100644
index 0000000000..35c95580bc
--- /dev/null
+++ b/changelog.d/3654.feature
@@ -0,0 +1 @@
+Basic support for room versioning
diff --git a/changelog.d/3655.feature b/changelog.d/3655.feature
new file mode 100644
index 0000000000..1134e549e7
--- /dev/null
+++ b/changelog.d/3655.feature
@@ -0,0 +1 @@
+Ability to disable client/server Synapse via conf toggle
diff --git a/changelog.d/3658.bugfix b/changelog.d/3658.bugfix
new file mode 100644
index 0000000000..556011a150
--- /dev/null
+++ b/changelog.d/3658.bugfix
@@ -0,0 +1 @@
+Fix occasional glitches in the synapse_event_persisted_position metric
diff --git a/changelog.d/3662.feature b/changelog.d/3662.feature
new file mode 100644
index 0000000000..daacef086d
--- /dev/null
+++ b/changelog.d/3662.feature
@@ -0,0 +1 @@
+Ability to whitelist specific threepids against monthly active user limiting
diff --git a/changelog.d/3664.feature b/changelog.d/3664.feature
new file mode 100644
index 0000000000..184dde9939
--- /dev/null
+++ b/changelog.d/3664.feature
@@ -0,0 +1 @@
+Add some metrics for the appservice and federation event sending loops
diff --git a/changelog.d/3678.misc b/changelog.d/3678.misc
new file mode 100644
index 0000000000..0d7c8da64a
--- /dev/null
+++ b/changelog.d/3678.misc
@@ -0,0 +1 @@
+Rename synapse.util.async to synapse.util.async_helpers to mitigate async becoming a keyword on Python 3.7.
diff --git a/contrib/grafana/synapse.json b/contrib/grafana/synapse.json
index 94a1de58f4..c58612594a 100644
--- a/contrib/grafana/synapse.json
+++ b/contrib/grafana/synapse.json
@@ -54,7 +54,7 @@
   "gnetId": null,
   "graphTooltip": 0,
   "id": null,
-  "iteration": 1533026624326,
+  "iteration": 1533598785368,
   "links": [
     {
       "asDropdown": true,
@@ -4629,7 +4629,7 @@
             "h": 9,
             "w": 12,
             "x": 0,
-            "y": 11
+            "y": 29
           },
           "id": 67,
           "legend": {
@@ -4655,11 +4655,11 @@
           "steppedLine": false,
           "targets": [
             {
-              "expr": " synapse_event_persisted_position{instance=\"$instance\"} - ignoring(index, job, name) group_right(instance) synapse_event_processing_positions{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}",
+              "expr": " synapse_event_persisted_position{instance=\"$instance\",job=\"synapse\"}  - ignoring(index, job, name) group_right() synapse_event_processing_positions{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}",
               "format": "time_series",
               "interval": "",
               "intervalFactor": 1,
-              "legendFormat": "{{job}}-{{index}}",
+              "legendFormat": "{{job}}-{{index}} ",
               "refId": "A"
             }
           ],
@@ -4697,7 +4697,11 @@
               "min": null,
               "show": true
             }
-          ]
+          ],
+          "yaxis": {
+            "align": false,
+            "alignLevel": null
+          }
         },
         {
           "aliasColors": {},
@@ -4710,7 +4714,7 @@
             "h": 9,
             "w": 12,
             "x": 12,
-            "y": 11
+            "y": 29
           },
           "id": 71,
           "legend": {
@@ -4778,7 +4782,11 @@
               "min": null,
               "show": true
             }
-          ]
+          ],
+          "yaxis": {
+            "align": false,
+            "alignLevel": null
+          }
         }
       ],
       "title": "Event processing loop positions",
@@ -4957,5 +4965,5 @@
   "timezone": "",
   "title": "Synapse",
   "uid": "000000012",
-  "version": 125
+  "version": 127
 }
\ No newline at end of file
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 5bbbe8e2e7..9c62ec4374 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -213,7 +213,7 @@ class Auth(object):
                 default=[b""]
             )[0]
             if user and access_token and ip_addr:
-                self.store.insert_client_ip(
+                yield self.store.insert_client_ip(
                     user_id=user.to_string(),
                     access_token=access_token,
                     ip=ip_addr,
@@ -773,3 +773,19 @@ class Auth(object):
             raise AuthError(
                 403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
             )
+
+    @defer.inlineCallbacks
+    def check_auth_blocking(self):
+        """Checks if the user should be rejected for some external reason,
+        such as monthly active user limiting or global disable flag
+        """
+        if self.hs.config.hs_disabled:
+            raise AuthError(
+                403, self.hs.config.hs_disabled_message, errcode=Codes.HS_DISABLED
+            )
+        if self.hs.config.limit_usage_by_mau is True:
+            current_mau = yield self.store.get_monthly_active_count()
+            if current_mau >= self.hs.config.max_mau_value:
+                raise AuthError(
+                    403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED
+                )
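
The new check is meant to be called from code paths that admit users (login, registration and short-term token validation, as the handlers/auth.py and handlers/register.py hunks below show). A minimal sketch of a caller, assuming a homeserver object wired up as elsewhere in this diff; the handler class name is hypothetical and not part of the patch:

# Hedged sketch only; GuardedHandler is illustrative.
from twisted.internet import defer
from synapse.api.errors import AuthError

class GuardedHandler(object):
    def __init__(self, hs):
        self.auth = hs.get_auth()

    @defer.inlineCallbacks
    def do_privileged_thing(self):
        # Raises AuthError(403) with errcode M_HS_DISABLED or
        # M_MAU_LIMIT_EXCEEDED when the server is disabled or over its
        # monthly-active-user cap; otherwise it is a no-op.
        yield self.auth.check_auth_blocking()
        # ... continue with the operation ...
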
diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index 4df930c8d1..b0da506f6d 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 # Copyright 2017 Vector Creations Ltd
+# Copyright 2018 New Vector Ltd.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -94,3 +95,11 @@ class RoomCreationPreset(object):
 class ThirdPartyEntityKind(object):
     USER = "user"
     LOCATION = "location"
+
+
+# the version we will give rooms which are created on this server
+DEFAULT_ROOM_VERSION = "1"
+
+# vdh-test-version is a placeholder to get room versioning support working and tested
+# until we have a working v2.
+KNOWN_ROOM_VERSIONS = {"1", "vdh-test-version"}
diff --git a/synapse/api/errors.py b/synapse/api/errors.py
index b41d595059..dc3bed5fcb 100644
--- a/synapse/api/errors.py
+++ b/synapse/api/errors.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2018 New Vector Ltd.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -56,6 +57,9 @@ class Codes(object):
     CONSENT_NOT_GIVEN = "M_CONSENT_NOT_GIVEN"
     CANNOT_LEAVE_SERVER_NOTICE_ROOM = "M_CANNOT_LEAVE_SERVER_NOTICE_ROOM"
     MAU_LIMIT_EXCEEDED = "M_MAU_LIMIT_EXCEEDED"
+    HS_DISABLED = "M_HS_DISABLED"
+    UNSUPPORTED_ROOM_VERSION = "M_UNSUPPORTED_ROOM_VERSION"
+    INCOMPATIBLE_ROOM_VERSION = "M_INCOMPATIBLE_ROOM_VERSION"
 
 
 class CodeMessageException(RuntimeError):
@@ -285,6 +289,27 @@ class LimitExceededError(SynapseError):
         )
 
 
+class IncompatibleRoomVersionError(SynapseError):
+    """A server is trying to join a room whose version it does not support."""
+
+    def __init__(self, room_version):
+        super(IncompatibleRoomVersionError, self).__init__(
+            code=400,
+            msg="Your homeserver does not support the features required to "
+                "join this room",
+            errcode=Codes.INCOMPATIBLE_ROOM_VERSION,
+        )
+
+        self._room_version = room_version
+
+    def error_dict(self):
+        return cs_error(
+            self.msg,
+            self.errcode,
+            room_version=self._room_version,
+        )
+
+
 def cs_error(msg, code=Codes.UNKNOWN, **kwargs):
     """ Utility method for constructing an error response for client-server
     interactions.
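
For illustration only (not part of the patch), and assuming cs_error keeps its usual {"error", "errcode"} output shape, the new error serialises the rejected room version alongside the standard fields:

from synapse.api.errors import IncompatibleRoomVersionError

err = IncompatibleRoomVersionError(room_version="vdh-test-version")
err.error_dict()
# {'error': 'Your homeserver does not support the features required to join this room',
#  'errcode': 'M_INCOMPATIBLE_ROOM_VERSION',
#  'room_version': 'vdh-test-version'}
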
diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py
index e2c91123db..6b77aec832 100644
--- a/synapse/app/client_reader.py
+++ b/synapse/app/client_reader.py
@@ -168,11 +168,13 @@ def start(config_options):
     database_engine = create_engine(config.database_config)
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     ss = ClientReaderServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py
index 374f115644..a385793dd4 100644
--- a/synapse/app/event_creator.py
+++ b/synapse/app/event_creator.py
@@ -174,11 +174,13 @@ def start(config_options):
     database_engine = create_engine(config.database_config)
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     ss = EventCreatorServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py
index 7af00b8bcf..57d96c13a2 100644
--- a/synapse/app/federation_reader.py
+++ b/synapse/app/federation_reader.py
@@ -143,11 +143,13 @@ def start(config_options):
     database_engine = create_engine(config.database_config)
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     ss = FederationReaderServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py
index 18469013fa..7bbf0ad082 100644
--- a/synapse/app/federation_sender.py
+++ b/synapse/app/federation_sender.py
@@ -40,7 +40,7 @@ from synapse.replication.slave.storage.transactions import TransactionStore
 from synapse.replication.tcp.client import ReplicationClientHandler
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.httpresourcetree import create_resource_tree
 from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
@@ -186,11 +186,13 @@ def start(config_options):
     config.send_federation = True
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     ps = FederationSenderServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py
index b5f78f4640..671fbbcb2a 100644
--- a/synapse/app/frontend_proxy.py
+++ b/synapse/app/frontend_proxy.py
@@ -208,11 +208,13 @@ def start(config_options):
     database_engine = create_engine(config.database_config)
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     ss = FrontendProxyServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index fba51c26e8..37a9b126a5 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -338,6 +338,7 @@ def setup(config_options):
     events.USE_FROZEN_DICTS = config.use_frozen_dicts
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     database_engine = create_engine(config.database_config)
     config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection
@@ -346,6 +347,7 @@ def setup(config_options):
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
@@ -519,17 +521,26 @@ def run(hs):
     # table will decrease
     clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000)
 
+    # monthly active user limiting functionality
+    clock.looping_call(
+        hs.get_datastore().reap_monthly_active_users, 1000 * 60 * 60
+    )
+
     @defer.inlineCallbacks
     def generate_monthly_active_users():
         count = 0
         if hs.config.limit_usage_by_mau:
-            count = yield hs.get_datastore().count_monthly_users()
+            count = yield hs.get_datastore().get_monthly_active_count()
         current_mau_gauge.set(float(count))
         max_mau_value_gauge.set(float(hs.config.max_mau_value))
 
+    hs.get_datastore().initialise_reserved_users(
+        hs.config.mau_limits_reserved_threepids
+    )
     generate_monthly_active_users()
     if hs.config.limit_usage_by_mau:
         clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000)
+    # End of monthly active user settings
 
     if hs.config.report_stats:
         logger.info("Scheduling stats reporting for 3 hour intervals")
diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py
index 749bbf37d0..1423056732 100644
--- a/synapse/app/media_repository.py
+++ b/synapse/app/media_repository.py
@@ -155,11 +155,13 @@ def start(config_options):
     database_engine = create_engine(config.database_config)
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     ss = MediaRepositoryServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py
index 637a89530a..cb78de8834 100644
--- a/synapse/app/user_dir.py
+++ b/synapse/app/user_dir.py
@@ -214,11 +214,13 @@ def start(config_options):
     config.update_user_directory = True
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
 
     ps = UserDirectoryServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index a87b11a1df..cfc20dcccf 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -193,9 +193,8 @@ def setup_logging(config, use_worker_options=False):
 
         def sighup(signum, stack):
             # it might be better to use a file watcher or something for this.
-            logging.info("Reloading log config from %s due to SIGHUP",
-                         log_config)
             load_log_config()
+            logging.info("Reloaded log config from %s due to SIGHUP", log_config)
 
         load_log_config()
 
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 6a471a0a5e..3b078d72ca 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -69,12 +69,19 @@ class ServerConfig(Config):
 
         # Options to control access by tracking MAU
         self.limit_usage_by_mau = config.get("limit_usage_by_mau", False)
+        self.max_mau_value = 0
         if self.limit_usage_by_mau:
             self.max_mau_value = config.get(
                 "max_mau_value", 0,
             )
-        else:
-            self.max_mau_value = 0
+        self.mau_limits_reserved_threepids = config.get(
+            "mau_limit_reserved_threepids", []
+        )
+
+        # Options to disable HS
+        self.hs_disabled = config.get("hs_disabled", False)
+        self.hs_disabled_message = config.get("hs_disabled_message", "")
+
         # FIXME: federation_domain_whitelist needs sytests
         self.federation_domain_whitelist = None
         federation_domain_whitelist = config.get(
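
A sketch of the configuration surface these lines read, shown as the parsed dict handed to ServerConfig rather than as YAML. The values are examples and the shape of the reserved-threepid entries is an assumption, not something this hunk defines:

# Illustrative parsed config; only the key names come from the hunk above.
config = {
    "limit_usage_by_mau": True,
    "max_mau_value": 50,                # only read when limit_usage_by_mau is true
    "mau_limit_reserved_threepids": [
        # entry shape assumed; the list is passed to initialise_reserved_users at startup
        {"medium": "email", "address": "admin@example.com"},
    ],
    "hs_disabled": False,
    "hs_disabled_message": "This homeserver is temporarily unavailable",
}
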
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py
index a1e1d0d33a..1a391adec1 100644
--- a/synapse/crypto/context_factory.py
+++ b/synapse/crypto/context_factory.py
@@ -11,19 +11,22 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import logging
 
+from zope.interface import implementer
+
 from OpenSSL import SSL, crypto
-from twisted.internet import ssl
 from twisted.internet._sslverify import _defaultCurveName
+from twisted.internet.interfaces import IOpenSSLClientConnectionCreator
+from twisted.internet.ssl import CertificateOptions, ContextFactory
+from twisted.python.failure import Failure
 
 logger = logging.getLogger(__name__)
 
 
-class ServerContextFactory(ssl.ContextFactory):
+class ServerContextFactory(ContextFactory):
     """Factory for PyOpenSSL SSL contexts that are used to handle incoming
-    connections and to make connections to remote servers."""
+    connections."""
 
     def __init__(self, config):
         self._context = SSL.Context(SSL.SSLv23_METHOD)
@@ -48,3 +51,78 @@ class ServerContextFactory(ssl.ContextFactory):
 
     def getContext(self):
         return self._context
+
+
+def _idnaBytes(text):
+    """
+    Convert some text typed by a human into some ASCII bytes. This is a
+    copy of twisted.internet._idna._idnaBytes. For documentation, see the
+    twisted documentation.
+    """
+    try:
+        import idna
+    except ImportError:
+        return text.encode("idna")
+    else:
+        return idna.encode(text)
+
+
+def _tolerateErrors(wrapped):
+    """
+    Wrap up an info_callback for pyOpenSSL so that if something goes wrong
+    the error is immediately logged and the connection is dropped if possible.
+    This is a copy of twisted.internet._sslverify._tolerateErrors. For
+    documentation, see the twisted documentation.
+    """
+
+    def infoCallback(connection, where, ret):
+        try:
+            return wrapped(connection, where, ret)
+        except:  # noqa: E722, taken from the twisted implementation
+            f = Failure()
+            logger.exception("Error during info_callback")
+            connection.get_app_data().failVerification(f)
+
+    return infoCallback
+
+
+@implementer(IOpenSSLClientConnectionCreator)
+class ClientTLSOptions(object):
+    """
+    Client creator for TLS without certificate identity verification. This is a
+    copy of twisted.internet._sslverify.ClientTLSOptions with the identity
+    verification left out. For documentation, see the twisted documentation.
+    """
+
+    def __init__(self, hostname, ctx):
+        self._ctx = ctx
+        self._hostname = hostname
+        self._hostnameBytes = _idnaBytes(hostname)
+        ctx.set_info_callback(
+            _tolerateErrors(self._identityVerifyingInfoCallback)
+        )
+
+    def clientConnectionForTLS(self, tlsProtocol):
+        context = self._ctx
+        connection = SSL.Connection(context, None)
+        connection.set_app_data(tlsProtocol)
+        return connection
+
+    def _identityVerifyingInfoCallback(self, connection, where, ret):
+        if where & SSL.SSL_CB_HANDSHAKE_START:
+            connection.set_tlsext_host_name(self._hostnameBytes)
+
+
+class ClientTLSOptionsFactory(object):
+    """Factory for Twisted ClientTLSOptions that are used to make connections
+    to remote servers for federation."""
+
+    def __init__(self, config):
+        # We don't use config options yet
+        pass
+
+    def get_options(self, host):
+        return ClientTLSOptions(
+            host.decode('utf-8'),
+            CertificateOptions(verify=False).getContext()
+        )
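
A sketch of how the new factory is consumed: get_options() returns an IOpenSSLClientConnectionCreator that sets the SNI hostname during the handshake while skipping certificate identity checks. The wiring suggested in the final comment is an assumption about usage, not shown in this hunk:

from synapse.crypto.context_factory import ClientTLSOptionsFactory

factory = ClientTLSOptionsFactory(config=None)        # config is currently ignored
tls_options = factory.get_options(b"matrix.example.org")
# tls_options can then be handed to something like Twisted's wrapClientTLS();
# the actual federation wiring lives in hunks not shown in this section.
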
diff --git a/synapse/crypto/keyclient.py b/synapse/crypto/keyclient.py
index 668b4f517d..c20a32096a 100644
--- a/synapse/crypto/keyclient.py
+++ b/synapse/crypto/keyclient.py
@@ -30,14 +30,14 @@ KEY_API_V1 = b"/_matrix/key/v1/"
 
 
 @defer.inlineCallbacks
-def fetch_server_key(server_name, ssl_context_factory, path=KEY_API_V1):
+def fetch_server_key(server_name, tls_client_options_factory, path=KEY_API_V1):
     """Fetch the keys for a remote server."""
 
     factory = SynapseKeyClientFactory()
     factory.path = path
     factory.host = server_name
     endpoint = matrix_federation_endpoint(
-        reactor, server_name, ssl_context_factory, timeout=30
+        reactor, server_name, tls_client_options_factory, timeout=30
     )
 
     for i in range(5):
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index e95b9fb43e..30e2742102 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -512,7 +512,7 @@ class Keyring(object):
                 continue
 
             (response, tls_certificate) = yield fetch_server_key(
-                server_name, self.hs.tls_server_context_factory,
+                server_name, self.hs.tls_client_options_factory,
                 path=(b"/_matrix/key/v2/server/%s" % (
                     urllib.quote(requested_key_id),
                 )).encode("ascii"),
@@ -655,7 +655,7 @@ class Keyring(object):
         # Try to fetch the key from the remote server.
 
         (response, tls_certificate) = yield fetch_server_key(
-            server_name, self.hs.tls_server_context_factory
+            server_name, self.hs.tls_client_options_factory
         )
 
         # Check the response.
diff --git a/synapse/event_auth.py b/synapse/event_auth.py
index b32f64e729..6baeccca38 100644
--- a/synapse/event_auth.py
+++ b/synapse/event_auth.py
@@ -20,7 +20,7 @@ from signedjson.key import decode_verify_key_bytes
 from signedjson.sign import SignatureVerifyException, verify_signed_json
 from unpaddedbase64 import decode_base64
 
-from synapse.api.constants import EventTypes, JoinRules, Membership
+from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventTypes, JoinRules, Membership
 from synapse.api.errors import AuthError, EventSizeError, SynapseError
 from synapse.types import UserID, get_domain_from_id
 
@@ -83,6 +83,14 @@ def check(event, auth_events, do_sig_check=True, do_size_check=True):
                 403,
                 "Creation event's room_id domain does not match sender's"
             )
+
+        room_version = event.content.get("room_version", "1")
+        if room_version not in KNOWN_ROOM_VERSIONS:
+            raise AuthError(
+                403,
+                "room appears to have unsupported version %s" % (
+                    room_version,
+                ))
         # FIXME
         logger.debug("Allowing! %s", event)
         return
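
Put differently, auth for an m.room.create event now rejects versions the server does not know about; a toy illustration, not part of the patch:

from synapse.api.constants import KNOWN_ROOM_VERSIONS

create_content = {"creator": "@alice:example.com", "room_version": "vdh-test-version"}
room_version = create_content.get("room_version", "1")     # missing key means v1
assert room_version in KNOWN_ROOM_VERSIONS                  # accepted

create_content["room_version"] = "2"                        # not yet a known version
assert create_content["room_version"] not in KNOWN_ROOM_VERSIONS   # check() raises a 403
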
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index 7550e11b6e..c9f3c2d352 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -25,7 +25,7 @@ from prometheus_client import Counter
 
 from twisted.internet import defer
 
-from synapse.api.constants import Membership
+from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventTypes, Membership
 from synapse.api.errors import (
     CodeMessageException,
     FederationDeniedError,
@@ -518,10 +518,10 @@ class FederationClient(FederationBase):
                     description, destination, exc_info=1,
                 )
 
-        raise RuntimeError("Failed to %s via any server", description)
+        raise RuntimeError("Failed to %s via any server" % (description, ))
 
     def make_membership_event(self, destinations, room_id, user_id, membership,
-                              content={},):
+                              content, params):
         """
         Creates an m.room.member event, with context, without participating in the room.
 
@@ -537,8 +537,10 @@ class FederationClient(FederationBase):
             user_id (str): The user whose membership is being evented.
             membership (str): The "membership" property of the event. Must be
                 one of "join" or "leave".
-            content (object): Any additional data to put into the content field
+            content (dict): Any additional data to put into the content field
                 of the event.
+            params (dict[str, str|Iterable[str]]): Query parameters to include in the
+                request.
         Return:
             Deferred: resolves to a tuple of (origin (str), event (object))
             where origin is the remote homeserver which generated the event.
@@ -558,10 +560,12 @@ class FederationClient(FederationBase):
         @defer.inlineCallbacks
         def send_request(destination):
             ret = yield self.transport_layer.make_membership_event(
-                destination, room_id, user_id, membership
+                destination, room_id, user_id, membership, params,
             )
 
-            pdu_dict = ret["event"]
+            pdu_dict = ret.get("event", None)
+            if not isinstance(pdu_dict, dict):
+                raise InvalidResponseError("Bad 'event' field in response")
 
             logger.debug("Got response to make_%s: %s", membership, pdu_dict)
 
@@ -605,6 +609,26 @@ class FederationClient(FederationBase):
             Fails with a ``RuntimeError`` if no servers were reachable.
         """
 
+        def check_authchain_validity(signed_auth_chain):
+            for e in signed_auth_chain:
+                if e.type == EventTypes.Create:
+                    create_event = e
+                    break
+            else:
+                raise InvalidResponseError(
+                    "no %s in auth chain" % (EventTypes.Create,),
+                )
+
+            # the room version should be sane.
+            room_version = create_event.content.get("room_version", "1")
+            if room_version not in KNOWN_ROOM_VERSIONS:
+                # This shouldn't be possible, because the remote server should have
+                # rejected the join attempt during make_join.
+                raise InvalidResponseError(
+                    "room appears to have unsupported version %s" % (
+                        room_version,
+                    ))
+
         @defer.inlineCallbacks
         def send_request(destination):
             time_now = self._clock.time_msec()
@@ -661,7 +685,7 @@ class FederationClient(FederationBase):
             for s in signed_state:
                 s.internal_metadata = copy.deepcopy(s.internal_metadata)
 
-            auth_chain.sort(key=lambda e: e.depth)
+            check_authchain_validity(signed_auth)
 
             defer.returnValue({
                 "state": signed_state,
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index bf89d568af..a23136784a 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -27,14 +27,20 @@ from twisted.internet.abstract import isIPAddress
 from twisted.python import failure
 
 from synapse.api.constants import EventTypes
-from synapse.api.errors import AuthError, FederationError, NotFoundError, SynapseError
+from synapse.api.errors import (
+    AuthError,
+    FederationError,
+    IncompatibleRoomVersionError,
+    NotFoundError,
+    SynapseError,
+)
 from synapse.crypto.event_signing import compute_event_signature
 from synapse.federation.federation_base import FederationBase, event_from_pdu_json
 from synapse.federation.persistence import TransactionActions
 from synapse.federation.units import Edu, Transaction
 from synapse.http.endpoint import parse_server_name
 from synapse.types import get_domain_from_id
-from synapse.util import async
+from synapse.util.async_helpers import Linearizer, concurrently_execute
 from synapse.util.caches.response_cache import ResponseCache
 from synapse.util.logutils import log_function
 
@@ -61,8 +67,8 @@ class FederationServer(FederationBase):
         self.auth = hs.get_auth()
         self.handler = hs.get_handlers().federation_handler
 
-        self._server_linearizer = async.Linearizer("fed_server")
-        self._transaction_linearizer = async.Linearizer("fed_txn_handler")
+        self._server_linearizer = Linearizer("fed_server")
+        self._transaction_linearizer = Linearizer("fed_txn_handler")
 
         self.transaction_actions = TransactionActions(self.store)
 
@@ -194,7 +200,7 @@ class FederationServer(FederationBase):
                         event_id, f.getTraceback().rstrip(),
                     )
 
-        yield async.concurrently_execute(
+        yield concurrently_execute(
             process_pdus_for_room, pdus_by_room.keys(),
             TRANSACTION_CONCURRENCY_LIMIT,
         )
@@ -323,12 +329,21 @@ class FederationServer(FederationBase):
         defer.returnValue((200, resp))
 
     @defer.inlineCallbacks
-    def on_make_join_request(self, origin, room_id, user_id):
+    def on_make_join_request(self, origin, room_id, user_id, supported_versions):
         origin_host, _ = parse_server_name(origin)
         yield self.check_server_matches_acl(origin_host, room_id)
+
+        room_version = yield self.store.get_room_version(room_id)
+        if room_version not in supported_versions:
+            logger.warn("Room version %s not in %s", room_version, supported_versions)
+            raise IncompatibleRoomVersionError(room_version=room_version)
+
         pdu = yield self.handler.on_make_join_request(room_id, user_id)
         time_now = self._clock.time_msec()
-        defer.returnValue({"event": pdu.get_pdu_json(time_now)})
+        defer.returnValue({
+            "event": pdu.get_pdu_json(time_now),
+            "room_version": room_version,
+        })
 
     @defer.inlineCallbacks
     def on_invite_request(self, origin, content):
diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py
index 78f9d40a3a..f603c8a368 100644
--- a/synapse/federation/transaction_queue.py
+++ b/synapse/federation/transaction_queue.py
@@ -26,6 +26,8 @@ from synapse.api.errors import FederationDeniedError, HttpResponseException
 from synapse.handlers.presence import format_user_presence_state, get_interested_remotes
 from synapse.metrics import (
     LaterGauge,
+    event_processing_loop_counter,
+    event_processing_loop_room_count,
     events_processed_counter,
     sent_edus_counter,
     sent_transactions_counter,
@@ -253,7 +255,13 @@ class TransactionQueue(object):
                     synapse.metrics.event_processing_last_ts.labels(
                         "federation_sender").set(ts)
 
-                events_processed_counter.inc(len(events))
+                    events_processed_counter.inc(len(events))
+
+                    event_processing_loop_room_count.labels(
+                        "federation_sender"
+                    ).inc(len(events_by_room))
+
+                event_processing_loop_counter.labels("federation_sender").inc()
 
                 synapse.metrics.event_processing_positions.labels(
                     "federation_sender").set(next_token)
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index 4529d454af..b4fbe2c9d5 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -195,7 +195,7 @@ class TransportLayerClient(object):
 
     @defer.inlineCallbacks
     @log_function
-    def make_membership_event(self, destination, room_id, user_id, membership):
+    def make_membership_event(self, destination, room_id, user_id, membership, params):
         """Asks a remote server to build and sign us a membership event
 
         Note that this does not append any events to any graphs.
@@ -205,6 +205,8 @@ class TransportLayerClient(object):
             room_id (str): room to join/leave
             user_id (str): user to be joined/left
             membership (str): one of join/leave
+            params (dict[str, str|Iterable[str]]): Query parameters to include in the
+                request.
 
         Returns:
             Deferred: Succeeds when we get a 2xx HTTP response. The result
@@ -241,6 +243,7 @@ class TransportLayerClient(object):
         content = yield self.client.get_json(
             destination=destination,
             path=path,
+            args=params,
             retry_on_dns_fail=retry_on_dns_fail,
             timeout=20000,
             ignore_backoff=ignore_backoff,
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index eae5f2b427..77969a4f38 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -190,6 +190,41 @@ def _parse_auth_header(header_bytes):
 
 
 class BaseFederationServlet(object):
+    """Abstract base class for federation servlet classes.
+
+    The servlet object should have a PATH attribute which takes the form of a regexp to
+    match against the request path (excluding the /federation/v1 prefix).
+
+    The servlet should also implement one or more of on_GET, on_POST, on_PUT, to match
+    the appropriate HTTP method. These methods have the signature:
+
+        on_<METHOD>(self, origin, content, query, **kwargs)
+
+        With arguments:
+
+            origin (unicode|None): The authenticated server_name of the calling server,
+                unless REQUIRE_AUTH is set to False and authentication failed.
+
+            content (unicode|None): decoded json body of the request. None if the
+                request was a GET.
+
+            query (dict[bytes, list[bytes]]): Query params from the request. url-decoded
+                (ie, '+' and '%xx' are decoded) but note that it is *not* utf8-decoded
+                yet.
+
+            **kwargs (dict[unicode, unicode]): the dict mapping keys to path
+                components as specified in the path match regexp.
+
+        Returns:
+            Deferred[(int, object)|None]: either (response code, response object) to
+                 return a JSON response, or None if the request has already been handled.
+
+        Raises:
+            SynapseError: to return an error code
+
+            Exception: other exceptions will be caught, logged, and a 500 will be
+                returned.
+    """
     REQUIRE_AUTH = True
 
     def __init__(self, handler, authenticator, ratelimiter, server_name):
@@ -204,6 +239,18 @@ class BaseFederationServlet(object):
         @defer.inlineCallbacks
         @functools.wraps(func)
         def new_func(request, *args, **kwargs):
+            """ A callback which can be passed to HttpServer.RegisterPaths
+
+            Args:
+                request (twisted.web.http.Request):
+                *args: unused?
+                **kwargs (dict[unicode, unicode]): the dict mapping keys to path
+                    components as specified in the path match regexp.
+
+            Returns:
+                Deferred[(int, object)|None]: (response code, response object) as returned
+                    by the callback method. None if the request has already been handled.
+            """
             content = None
             if request.method in ["PUT", "POST"]:
                 # TODO: Handle other method types? other content types?
@@ -384,9 +431,31 @@ class FederationMakeJoinServlet(BaseFederationServlet):
     PATH = "/make_join/(?P<context>[^/]*)/(?P<user_id>[^/]*)"
 
     @defer.inlineCallbacks
-    def on_GET(self, origin, content, query, context, user_id):
+    def on_GET(self, origin, _content, query, context, user_id):
+        """
+        Args:
+            origin (unicode): The authenticated server_name of the calling server
+
+            _content (None): (GETs don't have bodies)
+
+            query (dict[bytes, list[bytes]]): Query params from the request.
+
+            **kwargs (dict[unicode, unicode]): the dict mapping keys to path
+                components as specified in the path match regexp.
+
+        Returns:
+            Deferred[(int, object)|None]: either (response code, response object) to
+                 return a JSON response, or None if the request has already been handled.
+        """
+        versions = query.get(b'ver')
+        if versions is not None:
+            supported_versions = [v.decode("utf-8") for v in versions]
+        else:
+            supported_versions = ["1"]
+
         content = yield self.handler.on_make_join_request(
             origin, context, user_id,
+            supported_versions=supported_versions,
         )
         defer.returnValue((200, content))
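
To make the negotiation concrete (a sketch, not part of the patch): the joining server repeats a ver query parameter for each room version it supports, and remote servers that send none are treated as v1-only.

# Same logic as the lines above, with an example query dict of the documented
# dict[bytes, list[bytes]] shape.
query = {b"ver": [b"1", b"vdh-test-version"]}   # GET /make_join/...?ver=1&ver=vdh-test-version

versions = query.get(b"ver")
if versions is not None:
    supported_versions = [v.decode("utf-8") for v in versions]
else:
    supported_versions = ["1"]                  # pre-versioning remote server

# on_make_join_request (federation_server.py above) then raises
# IncompatibleRoomVersionError if the room's version is not in
# supported_versions, and otherwise returns {"event": ..., "room_version": ...}.
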
 
diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py
index ee41aed69e..f0f89af7dc 100644
--- a/synapse/handlers/appservice.py
+++ b/synapse/handlers/appservice.py
@@ -23,6 +23,10 @@ from twisted.internet import defer
 
 import synapse
 from synapse.api.constants import EventTypes
+from synapse.metrics import (
+    event_processing_loop_counter,
+    event_processing_loop_room_count,
+)
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.metrics import Measure
@@ -136,6 +140,12 @@ class ApplicationServicesHandler(object):
 
                     events_processed_counter.inc(len(events))
 
+                    event_processing_loop_room_count.labels(
+                        "appservice_sender"
+                    ).inc(len(events_by_room))
+
+                    event_processing_loop_counter.labels("appservice_sender").inc()
+
                     synapse.metrics.event_processing_lag.labels(
                         "appservice_sender").set(now - ts)
                     synapse.metrics.event_processing_last_ts.labels(
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 184eef09d0..7ea8ce9f94 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -520,7 +520,7 @@ class AuthHandler(BaseHandler):
         """
         logger.info("Logging in user %s on device %s", user_id, device_id)
         access_token = yield self.issue_access_token(user_id, device_id)
-        yield self._check_mau_limits()
+        yield self.auth.check_auth_blocking()
 
         # the device *should* have been registered before we got here; however,
         # it's possible we raced against a DELETE operation. The thing we
@@ -734,7 +734,7 @@ class AuthHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def validate_short_term_login_token_and_get_user_id(self, login_token):
-        yield self._check_mau_limits()
+        yield self.auth.check_auth_blocking()
         auth_api = self.hs.get_auth()
         user_id = None
         try:
@@ -907,19 +907,6 @@ class AuthHandler(BaseHandler):
         else:
             return defer.succeed(False)
 
-    @defer.inlineCallbacks
-    def _check_mau_limits(self):
-        """
-        Ensure that if mau blocking is enabled that invalid users cannot
-        log in.
-        """
-        if self.hs.config.limit_usage_by_mau is True:
-            current_mau = yield self.store.count_monthly_users()
-            if current_mau >= self.hs.config.max_mau_value:
-                raise AuthError(
-                    403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED
-                )
-
 
 @attr.s
 class MacaroonGenerator(object):
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index 2d44f15da3..9e017116a9 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -23,7 +23,7 @@ from synapse.api.constants import EventTypes
 from synapse.api.errors import FederationDeniedError
 from synapse.types import RoomStreamToken, get_domain_from_id
 from synapse.util import stringutils
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.metrics import measure_func
 from synapse.util.retryutils import NotRetryingDestination
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 533b82c783..2380d17f4e 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -30,7 +30,12 @@ from unpaddedbase64 import decode_base64
 
 from twisted.internet import defer
 
-from synapse.api.constants import EventTypes, Membership, RejectedReason
+from synapse.api.constants import (
+    KNOWN_ROOM_VERSIONS,
+    EventTypes,
+    Membership,
+    RejectedReason,
+)
 from synapse.api.errors import (
     AuthError,
     CodeMessageException,
@@ -47,7 +52,7 @@ from synapse.events.validator import EventValidator
 from synapse.state import resolve_events_with_factory
 from synapse.types import UserID, get_domain_from_id
 from synapse.util import logcontext, unwrapFirstError
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_joined_room
 from synapse.util.frozenutils import unfreeze
 from synapse.util.logutils import log_function
@@ -922,6 +927,9 @@ class FederationHandler(BaseHandler):
             joinee,
             "join",
             content,
+            params={
+                "ver": KNOWN_ROOM_VERSIONS,
+            },
         )
 
         # This shouldn't happen, because the RoomMemberHandler has a
@@ -1187,13 +1195,14 @@ class FederationHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def _make_and_verify_event(self, target_hosts, room_id, user_id, membership,
-                               content={},):
+                               content={}, params=None):
         origin, pdu = yield self.federation_client.make_membership_event(
             target_hosts,
             room_id,
             user_id,
             membership,
             content,
+            params=params,
         )
 
         logger.debug("Got response to make_%s: %s", membership, pdu)
diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py
index 40e7580a61..1fb17fd9a5 100644
--- a/synapse/handlers/initial_sync.py
+++ b/synapse/handlers/initial_sync.py
@@ -25,7 +25,7 @@ from synapse.handlers.presence import format_user_presence_state
 from synapse.streams.config import PaginationConfig
 from synapse.types import StreamToken, UserID
 from synapse.util import unwrapFirstError
-from synapse.util.async import concurrently_execute
+from synapse.util.async_helpers import concurrently_execute
 from synapse.util.caches.snapshot_cache import SnapshotCache
 from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.visibility import filter_events_for_client
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 39d7724778..01a362360e 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -30,9 +30,9 @@ from synapse.api.urls import ConsentURIBuilder
 from synapse.crypto.event_signing import add_hashes_and_signatures
 from synapse.events.utils import serialize_event
 from synapse.events.validator import EventValidator
-from synapse.replication.http.send_event import send_event_to_master
+from synapse.replication.http.send_event import ReplicationSendEventRestServlet
 from synapse.types import RoomAlias, UserID
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.frozenutils import frozendict_json_encoder
 from synapse.util.logcontext import run_in_background
 from synapse.util.metrics import measure_func
@@ -171,7 +171,7 @@ class EventCreationHandler(object):
         self.notifier = hs.get_notifier()
         self.config = hs.config
 
-        self.http_client = hs.get_simple_http_client()
+        self.send_event_to_master = ReplicationSendEventRestServlet.make_client(hs)
 
         # This is only used to get at ratelimit function, and maybe_kick_guest_users
         self.base_handler = BaseHandler(hs)
@@ -559,12 +559,9 @@ class EventCreationHandler(object):
         try:
             # If we're a worker we need to hit out to the master.
             if self.config.worker_app:
-                yield send_event_to_master(
-                    clock=self.hs.get_clock(),
+                yield self.send_event_to_master(
+                    event_id=event.event_id,
                     store=self.store,
-                    client=self.http_client,
-                    host=self.config.worker_replication_host,
-                    port=self.config.worker_replication_http_port,
                     requester=requester,
                     event=event,
                     context=context,
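
This is the make_client pattern from the replication refactor (changelog.d/3632.misc): instead of passing an HTTP client plus worker_replication_host/port around, a worker builds a callable once and invokes it with keyword arguments. A hedged sketch based only on what this diff shows; the class below is hypothetical, and the servlet internals live in synapse/replication/http/_base.py, which is not included in this section:

from synapse.replication.http.send_event import ReplicationSendEventRestServlet

class WorkerEventPersister(object):          # hypothetical, for illustration
    def __init__(self, hs):
        # Returns a callable that proxies the request to the master over the
        # replication HTTP listener.
        self.send_event_to_master = ReplicationSendEventRestServlet.make_client(hs)

    def persist(self, store, requester, event, context):
        return self.send_event_to_master(
            event_id=event.event_id,
            store=store,
            requester=requester,
            event=event,
            context=context,
        )
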
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index b2849783ed..a97d43550f 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -22,7 +22,7 @@ from synapse.api.constants import Membership
 from synapse.api.errors import SynapseError
 from synapse.events.utils import serialize_event
 from synapse.types import RoomStreamToken
-from synapse.util.async import ReadWriteLock
+from synapse.util.async_helpers import ReadWriteLock
 from synapse.util.logcontext import run_in_background
 from synapse.util.stringutils import random_string
 from synapse.visibility import filter_events_for_client
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index 3732830194..20fc3b0323 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -36,7 +36,7 @@ from synapse.api.errors import SynapseError
 from synapse.metrics import LaterGauge
 from synapse.storage.presence import UserPresenceState
 from synapse.types import UserID, get_domain_from_id
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.caches.descriptors import cachedInlineCallbacks
 from synapse.util.logcontext import run_in_background
 from synapse.util.logutils import log_function
diff --git a/synapse/handlers/read_marker.py b/synapse/handlers/read_marker.py
index 995460f82a..32108568c6 100644
--- a/synapse/handlers/read_marker.py
+++ b/synapse/handlers/read_marker.py
@@ -17,7 +17,7 @@ import logging
 
 from twisted.internet import defer
 
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 
 from ._base import BaseHandler
 
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 289704b241..3526b20d5a 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -28,7 +28,7 @@ from synapse.api.errors import (
 )
 from synapse.http.client import CaptchaServerHttpClient
 from synapse.types import RoomAlias, RoomID, UserID, create_requester
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.threepids import check_3pid_allowed
 
 from ._base import BaseHandler
@@ -540,9 +540,7 @@ class RegistrationHandler(BaseHandler):
         Do not accept registrations if monthly active user limits exceeded
          and limiting is enabled
         """
-        if self.hs.config.limit_usage_by_mau is True:
-            current_mau = yield self.store.count_monthly_users()
-            if current_mau >= self.hs.config.max_mau_value:
-                raise RegistrationError(
-                    403, "MAU Limit Exceeded", Codes.MAU_LIMIT_EXCEEDED
-                )
+        try:
+            yield self.auth.check_auth_blocking()
+        except AuthError as e:
+            raise RegistrationError(e.code, str(e), e.errcode)
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 7b7804d9b2..6a17c42238 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -21,9 +21,17 @@ import math
 import string
 from collections import OrderedDict
 
+from six import string_types
+
 from twisted.internet import defer
 
-from synapse.api.constants import EventTypes, JoinRules, RoomCreationPreset
+from synapse.api.constants import (
+    DEFAULT_ROOM_VERSION,
+    KNOWN_ROOM_VERSIONS,
+    EventTypes,
+    JoinRules,
+    RoomCreationPreset,
+)
 from synapse.api.errors import AuthError, Codes, StoreError, SynapseError
 from synapse.types import RoomAlias, RoomID, RoomStreamToken, StreamToken, UserID
 from synapse.util import stringutils
@@ -99,6 +107,21 @@ class RoomCreationHandler(BaseHandler):
         if ratelimit:
             yield self.ratelimit(requester)
 
+        room_version = config.get("room_version", DEFAULT_ROOM_VERSION)
+        if not isinstance(room_version, string_types):
+            raise SynapseError(
+                400,
+                "room_version must be a string",
+                Codes.BAD_JSON,
+            )
+
+        if room_version not in KNOWN_ROOM_VERSIONS:
+            raise SynapseError(
+                400,
+                "Your homeserver does not support this room version",
+                Codes.UNSUPPORTED_ROOM_VERSION,
+            )
+
         if "room_alias_name" in config:
             for wchar in string.whitespace:
                 if wchar in config["room_alias_name"]:
@@ -184,6 +207,9 @@ class RoomCreationHandler(BaseHandler):
 
         creation_content = config.get("creation_content", {})
 
+        # override any attempt to set room versions via the creation_content
+        creation_content["room_version"] = room_version
+
         room_member_handler = self.hs.get_room_member_handler()
 
         yield self._send_events_for_new_room(
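
Client-side, room versioning shows up as an optional room_version key in the /createRoom body; an illustrative request, not part of the patch:

# Illustrative body for POST /_matrix/client/r0/createRoom under this change.
create_room_body = {
    "preset": "private_chat",
    "room_version": "1",        # optional; defaults to DEFAULT_ROOM_VERSION ("1")
    "creation_content": {
        "room_version": "99",   # ignored: overridden with the validated value above
    },
}
# A non-string room_version is rejected with M_BAD_JSON, and an unknown one
# (e.g. "99" at the top level) with M_UNSUPPORTED_ROOM_VERSION.
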
diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py
index 828229f5c3..37e41afd61 100644
--- a/synapse/handlers/room_list.py
+++ b/synapse/handlers/room_list.py
@@ -26,7 +26,7 @@ from twisted.internet import defer
 
 from synapse.api.constants import EventTypes, JoinRules
 from synapse.types import ThirdPartyInstanceID
-from synapse.util.async import concurrently_execute
+from synapse.util.async_helpers import concurrently_execute
 from synapse.util.caches.descriptors import cachedInlineCallbacks
 from synapse.util.caches.response_cache import ResponseCache
 
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 0d4a3f4677..fb94b5d7d4 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -30,7 +30,7 @@ import synapse.types
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError, Codes, SynapseError
 from synapse.types import RoomID, UserID
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_joined_room, user_left_room
 
 logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py
index 22d8b4b0d3..acc6eb8099 100644
--- a/synapse/handlers/room_member_worker.py
+++ b/synapse/handlers/room_member_worker.py
@@ -20,16 +20,24 @@ from twisted.internet import defer
 from synapse.api.errors import SynapseError
 from synapse.handlers.room_member import RoomMemberHandler
 from synapse.replication.http.membership import (
-    get_or_register_3pid_guest,
-    notify_user_membership_change,
-    remote_join,
-    remote_reject_invite,
+    ReplicationRegister3PIDGuestRestServlet as Repl3PID,
+    ReplicationRemoteJoinRestServlet as ReplRemoteJoin,
+    ReplicationRemoteRejectInviteRestServlet as ReplRejectInvite,
+    ReplicationUserJoinedLeftRoomRestServlet as ReplJoinedLeft,
 )
 
 logger = logging.getLogger(__name__)
 
 
 class RoomMemberWorkerHandler(RoomMemberHandler):
+    def __init__(self, hs):
+        super(RoomMemberWorkerHandler, self).__init__(hs)
+
+        self._get_register_3pid_client = Repl3PID.make_client(hs)
+        self._remote_join_client = ReplRemoteJoin.make_client(hs)
+        self._remote_reject_client = ReplRejectInvite.make_client(hs)
+        self._notify_change_client = ReplJoinedLeft.make_client(hs)
+
     @defer.inlineCallbacks
     def _remote_join(self, requester, remote_room_hosts, room_id, user, content):
         """Implements RoomMemberHandler._remote_join
@@ -37,10 +45,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
         if len(remote_room_hosts) == 0:
             raise SynapseError(404, "No known servers")
 
-        ret = yield remote_join(
-            self.simple_http_client,
-            host=self.config.worker_replication_host,
-            port=self.config.worker_replication_http_port,
+        ret = yield self._remote_join_client(
             requester=requester,
             remote_room_hosts=remote_room_hosts,
             room_id=room_id,
@@ -55,10 +60,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
     def _remote_reject_invite(self, requester, remote_room_hosts, room_id, target):
         """Implements RoomMemberHandler._remote_reject_invite
         """
-        return remote_reject_invite(
-            self.simple_http_client,
-            host=self.config.worker_replication_host,
-            port=self.config.worker_replication_http_port,
+        return self._remote_reject_client(
             requester=requester,
             remote_room_hosts=remote_room_hosts,
             room_id=room_id,
@@ -68,10 +70,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
     def _user_joined_room(self, target, room_id):
         """Implements RoomMemberHandler._user_joined_room
         """
-        return notify_user_membership_change(
-            self.simple_http_client,
-            host=self.config.worker_replication_host,
-            port=self.config.worker_replication_http_port,
+        return self._notify_change_client(
             user_id=target.to_string(),
             room_id=room_id,
             change="joined",
@@ -80,10 +79,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
     def _user_left_room(self, target, room_id):
         """Implements RoomMemberHandler._user_left_room
         """
-        return notify_user_membership_change(
-            self.simple_http_client,
-            host=self.config.worker_replication_host,
-            port=self.config.worker_replication_http_port,
+        return self._notify_change_client(
             user_id=target.to_string(),
             room_id=room_id,
             change="left",
@@ -92,10 +88,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
     def get_or_register_3pid_guest(self, requester, medium, address, inviter_user_id):
         """Implements RoomMemberHandler.get_or_register_3pid_guest
         """
-        return get_or_register_3pid_guest(
-            self.simple_http_client,
-            host=self.config.worker_replication_host,
-            port=self.config.worker_replication_http_port,
+        return self._get_register_3pid_client(
             requester=requester,
             medium=medium,
             address=address,
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index dff1f67dcb..6393a9674b 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -25,7 +25,7 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.push.clientformat import format_push_rules_for_user
 from synapse.types import RoomStreamToken
-from synapse.util.async import concurrently_execute
+from synapse.util.async_helpers import concurrently_execute
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.caches.lrucache import LruCache
 from synapse.util.caches.response_cache import ResponseCache
diff --git a/synapse/http/client.py b/synapse/http/client.py
index 3771e0b3f6..ab4fbf59b2 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -42,7 +42,7 @@ from twisted.web.http_headers import Headers
 from synapse.api.errors import Codes, HttpResponseException, SynapseError
 from synapse.http import cancelled_to_request_timed_out_error, redact_uri
 from synapse.http.endpoint import SpiderEndpoint
-from synapse.util.async import add_timeout_to_deferred
+from synapse.util.async_helpers import add_timeout_to_deferred
 from synapse.util.caches import CACHE_SIZE_FACTOR
 from synapse.util.logcontext import make_deferred_yieldable
 
diff --git a/synapse/http/endpoint.py b/synapse/http/endpoint.py
index d65daa72bb..b0c9369519 100644
--- a/synapse/http/endpoint.py
+++ b/synapse/http/endpoint.py
@@ -26,7 +26,6 @@ from twisted.names.error import DNSNameError, DomainError
 
 logger = logging.getLogger(__name__)
 
-
 SERVER_CACHE = {}
 
 # our record of an individual server which can be tried to reach a destination.
@@ -103,15 +102,16 @@ def parse_and_validate_server_name(server_name):
     return host, port
 
 
-def matrix_federation_endpoint(reactor, destination, ssl_context_factory=None,
+def matrix_federation_endpoint(reactor, destination, tls_client_options_factory=None,
                                timeout=None):
     """Construct an endpoint for the given matrix destination.
 
     Args:
         reactor: Twisted reactor.
         destination (bytes): The name of the server to connect to.
-        ssl_context_factory (twisted.internet.ssl.ContextFactory): Factory
-            which generates SSL contexts to use for TLS.
+        tls_client_options_factory
+            (synapse.crypto.context_factory.ClientTLSOptionsFactory):
+            Factory which generates TLS options for client connections.
         timeout (int): connection timeout in seconds
     """
 
@@ -122,13 +122,13 @@ def matrix_federation_endpoint(reactor, destination, ssl_context_factory=None,
     if timeout is not None:
         endpoint_kw_args.update(timeout=timeout)
 
-    if ssl_context_factory is None:
+    if tls_client_options_factory is None:
         transport_endpoint = HostnameEndpoint
         default_port = 8008
     else:
         def transport_endpoint(reactor, host, port, timeout):
             return wrapClientTLS(
-                ssl_context_factory,
+                tls_client_options_factory.get_options(host),
                 HostnameEndpoint(reactor, host, port, timeout=timeout))
         default_port = 8448
 
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index bf1aa29502..44b61e70a4 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -43,7 +43,7 @@ from synapse.api.errors import (
 from synapse.http import cancelled_to_request_timed_out_error
 from synapse.http.endpoint import matrix_federation_endpoint
 from synapse.util import logcontext
-from synapse.util.async import add_timeout_to_deferred
+from synapse.util.async_helpers import add_timeout_to_deferred
 from synapse.util.logcontext import make_deferred_yieldable
 
 logger = logging.getLogger(__name__)
@@ -61,14 +61,14 @@ MAX_SHORT_RETRIES = 3
 
 class MatrixFederationEndpointFactory(object):
     def __init__(self, hs):
-        self.tls_server_context_factory = hs.tls_server_context_factory
+        self.tls_client_options_factory = hs.tls_client_options_factory
 
     def endpointForURI(self, uri):
         destination = uri.netloc
 
         return matrix_federation_endpoint(
             reactor, destination, timeout=10,
-            ssl_context_factory=self.tls_server_context_factory
+            tls_client_options_factory=self.tls_client_options_factory
         )
 
 
@@ -439,7 +439,7 @@ class MatrixFederationHttpClient(object):
         defer.returnValue(json.loads(body))
 
     @defer.inlineCallbacks
-    def get_json(self, destination, path, args={}, retry_on_dns_fail=True,
+    def get_json(self, destination, path, args=None, retry_on_dns_fail=True,
                  timeout=None, ignore_backoff=False):
         """ GETs some json from the given host homeserver and path
 
@@ -447,7 +447,7 @@ class MatrixFederationHttpClient(object):
             destination (str): The remote server to send the HTTP request
                 to.
             path (str): The HTTP path.
-            args (dict): A dictionary used to create query strings, defaults to
+            args (dict|None): A dictionary used to create query strings, defaults to
                 None.
             timeout (int): How long to try (in ms) the destination for before
                 giving up. None indicates no timeout and that the request will
@@ -702,6 +702,9 @@ def check_content_type_is_json(headers):
 
 
 def encode_query_args(args):
+    if args is None:
+        return b""
+
     encoded_args = {}
     for k, vs in args.items():
         if isinstance(vs, string_types):
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index a9158fc066..550f8443f7 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -174,6 +174,19 @@ sent_transactions_counter = Counter("synapse_federation_client_sent_transactions
 
 events_processed_counter = Counter("synapse_federation_client_events_processed", "")
 
+event_processing_loop_counter = Counter(
+    "synapse_event_processing_loop_count",
+    "Event processing loop iterations",
+    ["name"],
+)
+
+event_processing_loop_room_count = Counter(
+    "synapse_event_processing_loop_room_count",
+    "Rooms seen per event processing loop iteration",
+    ["name"],
+)
+
+
 # Used to track where various components have processed in the event stream,
 # e.g. federation sending, appservice sending, etc.
 event_processing_positions = Gauge("synapse_event_processing_positions", "", ["name"])
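Note: both new metrics are plain prometheus_client Counters with a single `name` label, so each event-processing loop reports under its own label value. A short usage sketch (the label value and helper function are illustrative only):

    from synapse.metrics import (
        event_processing_loop_counter,
        event_processing_loop_room_count,
    )


    def record_loop_iteration(events_by_room):
        # one tick per pass through the processing loop ...
        event_processing_loop_counter.labels("appservice_sender").inc()
        # ... plus how many rooms that pass touched
        event_processing_loop_room_count.labels("appservice_sender").inc(
            len(events_by_room)
        )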
diff --git a/synapse/notifier.py b/synapse/notifier.py
index e650c3e494..82f391481c 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -25,7 +25,7 @@ from synapse.api.errors import AuthError
 from synapse.handlers.presence import format_user_presence_state
 from synapse.metrics import LaterGauge
 from synapse.types import StreamToken
-from synapse.util.async import (
+from synapse.util.async_helpers import (
     DeferredTimeoutError,
     ObservableDeferred,
     add_timeout_to_deferred,
diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py
index 1d14d3639c..8f9a76147f 100644
--- a/synapse/push/bulk_push_rule_evaluator.py
+++ b/synapse/push/bulk_push_rule_evaluator.py
@@ -26,7 +26,7 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.event_auth import get_user_power_level
 from synapse.state import POWER_KEY
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.caches import register_cache
 from synapse.util.caches.descriptors import cached
 
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index 9d601208fd..bfa6df7b68 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -35,7 +35,7 @@ from synapse.push.presentable_names import (
     name_from_member_event,
 )
 from synapse.types import UserID
-from synapse.util.async import concurrently_execute
+from synapse.util.async_helpers import concurrently_execute
 from synapse.visibility import filter_events_for_client
 
 logger = logging.getLogger(__name__)
diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py
new file mode 100644
index 0000000000..5e5376cf58
--- /dev/null
+++ b/synapse/replication/http/_base.py
@@ -0,0 +1,215 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import abc
+import logging
+import re
+
+from six.moves import urllib
+
+from twisted.internet import defer
+
+from synapse.api.errors import CodeMessageException, HttpResponseException
+from synapse.util.caches.response_cache import ResponseCache
+from synapse.util.stringutils import random_string
+
+logger = logging.getLogger(__name__)
+
+
+class ReplicationEndpoint(object):
+    """Helper base class for defining new replication HTTP endpoints.
+
+    This creates an endpoint under `/_synapse/replication/:NAME/:PATH_ARGS..`
+    (with a `/:txn_id` suffix for cached requests), where NAME is the endpoint
+    name and PATH_ARGS is a tuple of parameters to be encoded in the URL.
+
+    For example, if `NAME` is "send_event" and `PATH_ARGS` is `("event_id",)`,
+    with `CACHE` set to true, this generates the endpoint:
+
+        /_synapse/replication/send_event/:event_id/:txn_id
+
+    For POST/PUT requests the payload is serialized to json and sent as the
+    body, while for GET requests the payload is added as query parameters. See
+    `_serialize_payload` for details.
+
+    Incoming requests are handled by overriding `_handle_request`. Servers
+    must call `register` to register the path with the HTTP server.
+
+    Requests can be sent by calling the client returned by `make_client`.
+
+    Attributes:
+        NAME (str): A name for the endpoint, added to the path as well as used
+            in logging and metrics.
+        PATH_ARGS (tuple[str]): A list of parameters to be added to the path.
+            Adding parameters to the path (rather than payload) can make it
+            easier to follow along in the log files.
+        METHOD (str): The method of the HTTP request, defaults to POST. Can be
+            one of POST, PUT or GET. If GET then the payload is sent as query
+            parameters rather than a JSON body.
+        CACHE (bool): Whether the server should cache the result of the request.
+            If true, a txn_id is transparently added to all requests, and
+            `_handle_request` must return a Deferred.
+        RETRY_ON_TIMEOUT (bool): Whether or not to retry the request when a 504
+            is received.
+    """
+
+    __metaclass__ = abc.ABCMeta
+
+    NAME = abc.abstractproperty()
+    PATH_ARGS = abc.abstractproperty()
+
+    METHOD = "POST"
+    CACHE = True
+    RETRY_ON_TIMEOUT = True
+
+    def __init__(self, hs):
+        if self.CACHE:
+            self.response_cache = ResponseCache(
+                hs, "repl." + self.NAME,
+                timeout_ms=30 * 60 * 1000,
+            )
+
+        assert self.METHOD in ("PUT", "POST", "GET")
+
+    @abc.abstractmethod
+    def _serialize_payload(**kwargs):
+        """Static method that is called when creating a request.
+
+        Concrete implementations should have explicit parameters (rather than
+        kwargs) so that an appropriate exception is raised if the client is
+        called with unexpected parameters. All PATH_ARGS must appear in
+        the argument list.
+
+        Returns:
+            Deferred[dict]|dict: If POST/PUT request then dictionary must be
+            JSON serialisable, otherwise must be appropriate for adding as
+            query args.
+        """
+        return {}
+
+    @abc.abstractmethod
+    def _handle_request(self, request, **kwargs):
+        """Handle incoming request.
+
+        This is called with the request object and PATH_ARGS.
+
+        Returns:
+            Deferred[dict]: A JSON serialisable dict to be used as response
+            body of request.
+        """
+        pass
+
+    @classmethod
+    def make_client(cls, hs):
+        """Create a client that makes requests.
+
+        Returns a callable that accepts the same parameters as `_serialize_payload`.
+        """
+        clock = hs.get_clock()
+        host = hs.config.worker_replication_host
+        port = hs.config.worker_replication_http_port
+
+        client = hs.get_simple_http_client()
+
+        @defer.inlineCallbacks
+        def send_request(**kwargs):
+            data = yield cls._serialize_payload(**kwargs)
+
+            url_args = [urllib.parse.quote(kwargs[name]) for name in cls.PATH_ARGS]
+
+            if cls.CACHE:
+                txn_id = random_string(10)
+                url_args.append(txn_id)
+
+            if cls.METHOD == "POST":
+                request_func = client.post_json_get_json
+            elif cls.METHOD == "PUT":
+                request_func = client.put_json
+            elif cls.METHOD == "GET":
+                request_func = client.get_json
+            else:
+                # We have already asserted in the constructor that a
+                # compatible method was picked, but let's be paranoid.
+                raise Exception(
+                    "Unknown METHOD on %s replication endpoint" % (cls.NAME,)
+                )
+
+            uri = "http://%s:%s/_synapse/replication/%s/%s" % (
+                host, port, cls.NAME, "/".join(url_args)
+            )
+
+            try:
+                # We keep retrying the same request for timeouts. This is so that we
+                # have a good idea that the request has either succeeded or failed on
+                # the master, and so whether we should clean up or not.
+                while True:
+                    try:
+                        result = yield request_func(uri, data)
+                        break
+                    except CodeMessageException as e:
+                        if e.code != 504 or not cls.RETRY_ON_TIMEOUT:
+                            raise
+
+                    logger.warn("%s request timed out", cls.NAME)
+
+                    # If we timed out we probably don't need to worry about backing
+                    # off too much, but let's just wait a little anyway.
+                    yield clock.sleep(1)
+            except HttpResponseException as e:
+                # We convert to SynapseError as we know that it was a SynapseError
+                # on the master process that we should send to the client. (And
+                # importantly, not stack traces everywhere)
+                raise e.to_synapse_error()
+
+            defer.returnValue(result)
+
+        return send_request
+
+    def register(self, http_server):
+        """Called by the server to register this as a handler to the
+        appropriate path.
+        """
+
+        url_args = list(self.PATH_ARGS)
+        handler = self._handle_request
+        method = self.METHOD
+
+        if self.CACHE:
+            handler = self._cached_handler
+            url_args.append("txn_id")
+
+        args = "/".join("(?P<%s>[^/]+)" % (arg,) for arg in url_args)
+        pattern = re.compile("^/_synapse/replication/%s/%s$" % (
+            self.NAME,
+            args
+        ))
+
+        http_server.register_paths(method, [pattern], handler)
+
+    def _cached_handler(self, request, txn_id, **kwargs):
+        """Called on new incoming requests when caching is enabled. Checks
+        if there is a cached response for the request and returns that,
+        otherwise calls `_handle_request` and caches its response.
+        """
+        # We just use the txn_id here, but we probably also want to use the
+        # other PATH_ARGS as well.
+
+        assert self.CACHE
+
+        return self.response_cache.wrap(
+            txn_id,
+            self._handle_request,
+            request, **kwargs
+        )
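Note: a concrete endpoint only needs to declare NAME and PATH_ARGS, serialize its payload and handle the incoming request; `make_client` then hands workers a plain callable. A condensed sketch of a subclass (the "do_frobnicate" endpoint is invented purely for illustration):

    from twisted.internet import defer

    from synapse.http.servlet import parse_json_object_from_request
    from synapse.replication.http._base import ReplicationEndpoint


    class ReplicationFrobnicateRestServlet(ReplicationEndpoint):
        """Hypothetical endpoint, registered at
        /_synapse/replication/do_frobnicate/:thing_id/:txn_id
        """

        NAME = "do_frobnicate"
        PATH_ARGS = ("thing_id",)

        def __init__(self, hs):
            super(ReplicationFrobnicateRestServlet, self).__init__(hs)
            self.store = hs.get_datastore()

        @staticmethod
        def _serialize_payload(thing_id, reason):
            # thing_id travels in the URL path; everything else in the body
            return {"reason": reason}

        @defer.inlineCallbacks
        def _handle_request(self, request, thing_id):
            content = parse_json_object_from_request(request)
            reason = content["reason"]
            # real master-side work would happen here
            yield defer.succeed(None)
            defer.returnValue((200, {"thing_id": thing_id, "reason": reason}))

    # Worker side: build the client once, then call it like a function:
    #     frobnicate = ReplicationFrobnicateRestServlet.make_client(hs)
    #     yield frobnicate(thing_id="abc", reason="because")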
diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py
index 7a3cfb159c..e58bebf12a 100644
--- a/synapse/replication/http/membership.py
+++ b/synapse/replication/http/membership.py
@@ -14,182 +14,63 @@
 # limitations under the License.
 
 import logging
-import re
 
 from twisted.internet import defer
 
-from synapse.api.errors import HttpResponseException
-from synapse.http.servlet import RestServlet, parse_json_object_from_request
+from synapse.http.servlet import parse_json_object_from_request
+from synapse.replication.http._base import ReplicationEndpoint
 from synapse.types import Requester, UserID
 from synapse.util.distributor import user_joined_room, user_left_room
 
 logger = logging.getLogger(__name__)
 
 
-@defer.inlineCallbacks
-def remote_join(client, host, port, requester, remote_room_hosts,
-                room_id, user_id, content):
-    """Ask the master to do a remote join for the given user to the given room
+class ReplicationRemoteJoinRestServlet(ReplicationEndpoint):
+    """Does a remote join for the given user to the given room
 
-    Args:
-        client (SimpleHttpClient)
-        host (str): host of master
-        port (int): port on master listening for HTTP replication
-        requester (Requester)
-        remote_room_hosts (list[str]): Servers to try and join via
-        room_id (str)
-        user_id (str)
-        content (dict): The event content to use for the join event
+    Request format:
 
-    Returns:
-        Deferred
-    """
-    uri = "http://%s:%s/_synapse/replication/remote_join" % (host, port)
-
-    payload = {
-        "requester": requester.serialize(),
-        "remote_room_hosts": remote_room_hosts,
-        "room_id": room_id,
-        "user_id": user_id,
-        "content": content,
-    }
-
-    try:
-        result = yield client.post_json_get_json(uri, payload)
-    except HttpResponseException as e:
-        # We convert to SynapseError as we know that it was a SynapseError
-        # on the master process that we should send to the client. (And
-        # importantly, not stack traces everywhere)
-        raise e.to_synapse_error()
-    defer.returnValue(result)
-
-
-@defer.inlineCallbacks
-def remote_reject_invite(client, host, port, requester, remote_room_hosts,
-                         room_id, user_id):
-    """Ask master to reject the invite for the user and room.
-
-    Args:
-        client (SimpleHttpClient)
-        host (str): host of master
-        port (int): port on master listening for HTTP replication
-        requester (Requester)
-        remote_room_hosts (list[str]): Servers to try and reject via
-        room_id (str)
-        user_id (str)
-
-    Returns:
-        Deferred
-    """
-    uri = "http://%s:%s/_synapse/replication/remote_reject_invite" % (host, port)
-
-    payload = {
-        "requester": requester.serialize(),
-        "remote_room_hosts": remote_room_hosts,
-        "room_id": room_id,
-        "user_id": user_id,
-    }
-
-    try:
-        result = yield client.post_json_get_json(uri, payload)
-    except HttpResponseException as e:
-        # We convert to SynapseError as we know that it was a SynapseError
-        # on the master process that we should send to the client. (And
-        # importantly, not stack traces everywhere)
-        raise e.to_synapse_error()
-    defer.returnValue(result)
-
-
-@defer.inlineCallbacks
-def get_or_register_3pid_guest(client, host, port, requester,
-                               medium, address, inviter_user_id):
-    """Ask the master to get/create a guest account for given 3PID.
-
-    Args:
-        client (SimpleHttpClient)
-        host (str): host of master
-        port (int): port on master listening for HTTP replication
-        requester (Requester)
-        medium (str)
-        address (str)
-        inviter_user_id (str): The user ID who is trying to invite the
-            3PID
-
-    Returns:
-        Deferred[(str, str)]: A 2-tuple of `(user_id, access_token)` of the
-        3PID guest account.
-    """
+        POST /_synapse/replication/remote_join/:room_id/:user_id
 
-    uri = "http://%s:%s/_synapse/replication/get_or_register_3pid_guest" % (host, port)
-
-    payload = {
-        "requester": requester.serialize(),
-        "medium": medium,
-        "address": address,
-        "inviter_user_id": inviter_user_id,
-    }
-
-    try:
-        result = yield client.post_json_get_json(uri, payload)
-    except HttpResponseException as e:
-        # We convert to SynapseError as we know that it was a SynapseError
-        # on the master process that we should send to the client. (And
-        # importantly, not stack traces everywhere)
-        raise e.to_synapse_error()
-    defer.returnValue(result)
-
-
-@defer.inlineCallbacks
-def notify_user_membership_change(client, host, port, user_id, room_id, change):
-    """Notify master that a user has joined or left the room
-
-    Args:
-        client (SimpleHttpClient)
-        host (str): host of master
-        port (int): port on master listening for HTTP replication.
-        user_id (str)
-        room_id (str)
-        change (str): Either "join" or "left"
-
-    Returns:
-        Deferred
+        {
+            "requester": ...,
+            "remote_room_hosts": [...],
+            "content": { ... }
+        }
     """
-    assert change in ("joined", "left")
-
-    uri = "http://%s:%s/_synapse/replication/user_%s_room" % (host, port, change)
-
-    payload = {
-        "user_id": user_id,
-        "room_id": room_id,
-    }
-
-    try:
-        result = yield client.post_json_get_json(uri, payload)
-    except HttpResponseException as e:
-        # We convert to SynapseError as we know that it was a SynapseError
-        # on the master process that we should send to the client. (And
-        # importantly, not stack traces everywhere)
-        raise e.to_synapse_error()
-    defer.returnValue(result)
-
 
-class ReplicationRemoteJoinRestServlet(RestServlet):
-    PATTERNS = [re.compile("^/_synapse/replication/remote_join$")]
+    NAME = "remote_join"
+    PATH_ARGS = ("room_id", "user_id",)
 
     def __init__(self, hs):
-        super(ReplicationRemoteJoinRestServlet, self).__init__()
+        super(ReplicationRemoteJoinRestServlet, self).__init__(hs)
 
         self.federation_handler = hs.get_handlers().federation_handler
         self.store = hs.get_datastore()
         self.clock = hs.get_clock()
 
+    @staticmethod
+    def _serialize_payload(requester, room_id, user_id, remote_room_hosts,
+                           content):
+        """
+        Args:
+            requester (Requester)
+            room_id (str)
+            user_id (str)
+            remote_room_hosts (list[str]): Servers to try and join via
+            content (dict): The event content to use for the join event
+        """
+        return {
+            "requester": requester.serialize(),
+            "remote_room_hosts": remote_room_hosts,
+            "content": content,
+        }
+
     @defer.inlineCallbacks
-    def on_POST(self, request):
+    def _handle_request(self, request, room_id, user_id):
         content = parse_json_object_from_request(request)
 
         remote_room_hosts = content["remote_room_hosts"]
-        room_id = content["room_id"]
-        user_id = content["user_id"]
         event_content = content["content"]
 
         requester = Requester.deserialize(self.store, content["requester"])
@@ -212,23 +93,48 @@ class ReplicationRemoteJoinRestServlet(RestServlet):
         defer.returnValue((200, {}))
 
 
-class ReplicationRemoteRejectInviteRestServlet(RestServlet):
-    PATTERNS = [re.compile("^/_synapse/replication/remote_reject_invite$")]
+class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint):
+    """Rejects the invite for the user and room.
+
+    Request format:
+
+        POST /_synapse/replication/remote_reject_invite/:room_id/:user_id
+
+        {
+            "requester": ...,
+            "remote_room_hosts": [...],
+        }
+    """
+
+    NAME = "remote_reject_invite"
+    PATH_ARGS = ("room_id", "user_id",)
 
     def __init__(self, hs):
-        super(ReplicationRemoteRejectInviteRestServlet, self).__init__()
+        super(ReplicationRemoteRejectInviteRestServlet, self).__init__(hs)
 
         self.federation_handler = hs.get_handlers().federation_handler
         self.store = hs.get_datastore()
         self.clock = hs.get_clock()
 
+    @staticmethod
+    def _serialize_payload(requester, room_id, user_id, remote_room_hosts):
+        """
+        Args:
+            requester (Requester)
+            room_id (str)
+            user_id (str)
+            remote_room_hosts (list[str]): Servers to try and reject via
+        """
+        return {
+            "requester": requester.serialize(),
+            "remote_room_hosts": remote_room_hosts,
+        }
+
     @defer.inlineCallbacks
-    def on_POST(self, request):
+    def _handle_request(self, request, room_id, user_id):
         content = parse_json_object_from_request(request)
 
         remote_room_hosts = content["remote_room_hosts"]
-        room_id = content["room_id"]
-        user_id = content["user_id"]
 
         requester = Requester.deserialize(self.store, content["requester"])
 
@@ -264,18 +170,50 @@ class ReplicationRemoteRejectInviteRestServlet(RestServlet):
         defer.returnValue((200, ret))
 
 
-class ReplicationRegister3PIDGuestRestServlet(RestServlet):
-    PATTERNS = [re.compile("^/_synapse/replication/get_or_register_3pid_guest$")]
+class ReplicationRegister3PIDGuestRestServlet(ReplicationEndpoint):
+    """Gets/creates a guest account for given 3PID.
+
+    Request format:
+
+        POST /_synapse/replication/get_or_register_3pid_guest/
+
+        {
+            "requester": ...,
+            "medium": ...,
+            "address": ...,
+            "inviter_user_id": ...
+        }
+    """
+
+    NAME = "get_or_register_3pid_guest"
+    PATH_ARGS = ()
 
     def __init__(self, hs):
-        super(ReplicationRegister3PIDGuestRestServlet, self).__init__()
+        super(ReplicationRegister3PIDGuestRestServlet, self).__init__(hs)
 
         self.registeration_handler = hs.get_handlers().registration_handler
         self.store = hs.get_datastore()
         self.clock = hs.get_clock()
 
+    @staticmethod
+    def _serialize_payload(requester, medium, address, inviter_user_id):
+        """
+        Args:
+            requester (Requester)
+            medium (str)
+            address (str)
+            inviter_user_id (str): The user ID who is trying to invite the
+                3PID
+        """
+        return {
+            "requester": requester.serialize(),
+            "medium": medium,
+            "address": address,
+            "inviter_user_id": inviter_user_id,
+        }
+
     @defer.inlineCallbacks
-    def on_POST(self, request):
+    def _handle_request(self, request):
         content = parse_json_object_from_request(request)
 
         medium = content["medium"]
@@ -296,23 +234,41 @@ class ReplicationRegister3PIDGuestRestServlet(RestServlet):
         defer.returnValue((200, ret))
 
 
-class ReplicationUserJoinedLeftRoomRestServlet(RestServlet):
-    PATTERNS = [re.compile("^/_synapse/replication/user_(?P<change>joined|left)_room$")]
+class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint):
+    """Notifies that a user has joined or left the room
+
+    Request format:
+
+        POST /_synapse/replication/membership_change/:room_id/:user_id/:change
+
+        {}
+    """
+
+    NAME = "membership_change"
+    PATH_ARGS = ("room_id", "user_id", "change")
+    CACHE = False  # No point caching, as this should return instantly.
 
     def __init__(self, hs):
-        super(ReplicationUserJoinedLeftRoomRestServlet, self).__init__()
+        super(ReplicationUserJoinedLeftRoomRestServlet, self).__init__(hs)
 
         self.registeration_handler = hs.get_handlers().registration_handler
         self.store = hs.get_datastore()
         self.clock = hs.get_clock()
         self.distributor = hs.get_distributor()
 
-    def on_POST(self, request, change):
-        content = parse_json_object_from_request(request)
+    @staticmethod
+    def _serialize_payload(room_id, user_id, change):
+        """
+        Args:
+            room_id (str)
+            user_id (str)
+            change (str): Either "joined" or "left"
+        """
+        assert change in ("joined", "left",)
 
-        user_id = content["user_id"]
-        room_id = content["room_id"]
+        return {}
 
+    def _handle_request(self, request, room_id, user_id, change):
         logger.info("user membership change: %s in %s", user_id, room_id)
 
         user = UserID.from_string(user_id)
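Note: callers no longer pass host/port/client around; the servlet class itself generates the client, and PATH_ARGS (plus a txn_id for cached endpoints) become the URL. A small worker-side sketch for the 3PID guest endpoint, mirroring the wiring shown in room_member_worker.py above (the 3PID values are placeholders):

    from twisted.internet import defer

    from synapse.replication.http.membership import (
        ReplicationRegister3PIDGuestRestServlet as Repl3PID,
    )


    @defer.inlineCallbacks
    def lookup_3pid_guest(hs, requester):
        get_or_register = Repl3PID.make_client(hs)
        # POSTs to /_synapse/replication/get_or_register_3pid_guest/:txn_id
        # with the serialized requester and 3PID details in the JSON body.
        result = yield get_or_register(
            requester=requester,
            medium="email",
            address="guest@example.com",
            inviter_user_id="@inviter:example.com",
        )
        defer.returnValue(result)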
diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py
index d3509dc288..5b52c91650 100644
--- a/synapse/replication/http/send_event.py
+++ b/synapse/replication/http/send_event.py
@@ -14,86 +14,26 @@
 # limitations under the License.
 
 import logging
-import re
 
 from twisted.internet import defer
 
-from synapse.api.errors import CodeMessageException, HttpResponseException
 from synapse.events import FrozenEvent
 from synapse.events.snapshot import EventContext
-from synapse.http.servlet import RestServlet, parse_json_object_from_request
+from synapse.http.servlet import parse_json_object_from_request
+from synapse.replication.http._base import ReplicationEndpoint
 from synapse.types import Requester, UserID
-from synapse.util.caches.response_cache import ResponseCache
 from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)
 
 
-@defer.inlineCallbacks
-def send_event_to_master(clock, store, client, host, port, requester, event, context,
-                         ratelimit, extra_users):
-    """Send event to be handled on the master
-
-    Args:
-        clock (synapse.util.Clock)
-        store (DataStore)
-        client (SimpleHttpClient)
-        host (str): host of master
-        port (int): port on master listening for HTTP replication
-        requester (Requester)
-        event (FrozenEvent)
-        context (EventContext)
-        ratelimit (bool)
-        extra_users (list(UserID)): Any extra users to notify about event
-    """
-    uri = "http://%s:%s/_synapse/replication/send_event/%s" % (
-        host, port, event.event_id,
-    )
-
-    serialized_context = yield context.serialize(event, store)
-
-    payload = {
-        "event": event.get_pdu_json(),
-        "internal_metadata": event.internal_metadata.get_dict(),
-        "rejected_reason": event.rejected_reason,
-        "context": serialized_context,
-        "requester": requester.serialize(),
-        "ratelimit": ratelimit,
-        "extra_users": [u.to_string() for u in extra_users],
-    }
-
-    try:
-        # We keep retrying the same request for timeouts. This is so that we
-        # have a good idea that the request has either succeeded or failed on
-        # the master, and so whether we should clean up or not.
-        while True:
-            try:
-                result = yield client.put_json(uri, payload)
-                break
-            except CodeMessageException as e:
-                if e.code != 504:
-                    raise
-
-            logger.warn("send_event request timed out")
-
-            # If we timed out we probably don't need to worry about backing
-            # off too much, but lets just wait a little anyway.
-            yield clock.sleep(1)
-    except HttpResponseException as e:
-        # We convert to SynapseError as we know that it was a SynapseError
-        # on the master process that we should send to the client. (And
-        # importantly, not stack traces everywhere)
-        raise e.to_synapse_error()
-    defer.returnValue(result)
-
-
-class ReplicationSendEventRestServlet(RestServlet):
+class ReplicationSendEventRestServlet(ReplicationEndpoint):
     """Handles events newly created on workers, including persisting and
     notifying.
 
     The API looks like:
 
-        POST /_synapse/replication/send_event/:event_id
+        POST /_synapse/replication/send_event/:event_id/:txn_id
 
         {
             "event": { .. serialized event .. },
@@ -105,27 +45,47 @@ class ReplicationSendEventRestServlet(RestServlet):
             "extra_users": [],
         }
     """
-    PATTERNS = [re.compile("^/_synapse/replication/send_event/(?P<event_id>[^/]+)$")]
+    NAME = "send_event"
+    PATH_ARGS = ("event_id",)
 
     def __init__(self, hs):
-        super(ReplicationSendEventRestServlet, self).__init__()
+        super(ReplicationSendEventRestServlet, self).__init__(hs)
 
         self.event_creation_handler = hs.get_event_creation_handler()
         self.store = hs.get_datastore()
         self.clock = hs.get_clock()
 
-        # The responses are tiny, so we may as well cache them for a while
-        self.response_cache = ResponseCache(hs, "send_event", timeout_ms=30 * 60 * 1000)
+    @staticmethod
+    @defer.inlineCallbacks
+    def _serialize_payload(event_id, store, event, context, requester,
+                           ratelimit, extra_users):
+        """
+        Args:
+            event_id (str)
+            store (DataStore)
+            requester (Requester)
+            event (FrozenEvent)
+            context (EventContext)
+            ratelimit (bool)
+            extra_users (list(UserID)): Any extra users to notify about event
+        """
+
+        serialized_context = yield context.serialize(event, store)
+
+        payload = {
+            "event": event.get_pdu_json(),
+            "internal_metadata": event.internal_metadata.get_dict(),
+            "rejected_reason": event.rejected_reason,
+            "context": serialized_context,
+            "requester": requester.serialize(),
+            "ratelimit": ratelimit,
+            "extra_users": [u.to_string() for u in extra_users],
+        }
 
-    def on_PUT(self, request, event_id):
-        return self.response_cache.wrap(
-            event_id,
-            self._handle_request,
-            request
-        )
+        defer.returnValue(payload)
 
     @defer.inlineCallbacks
-    def _handle_request(self, request):
+    def _handle_request(self, request, event_id):
         with Measure(self.clock, "repl_send_event_parse"):
             content = parse_json_object_from_request(request)
 
diff --git a/synapse/replication/slave/storage/events.py b/synapse/replication/slave/storage/events.py
index bdb5eee4af..4830c68f35 100644
--- a/synapse/replication/slave/storage/events.py
+++ b/synapse/replication/slave/storage/events.py
@@ -44,8 +44,8 @@ class SlavedEventStore(EventFederationWorkerStore,
                        RoomMemberWorkerStore,
                        EventPushActionsWorkerStore,
                        StreamWorkerStore,
-                       EventsWorkerStore,
                        StateGroupWorkerStore,
+                       EventsWorkerStore,
                        SignatureWorkerStore,
                        UserErasureWorkerStore,
                        BaseSlavedStore):
diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py
index 00b1b3066e..511e96ab00 100644
--- a/synapse/rest/client/transactions.py
+++ b/synapse/rest/client/transactions.py
@@ -17,7 +17,7 @@
 to ensure idempotency when performing PUTs using the REST API."""
 import logging
 
-from synapse.util.async import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
 logger = logging.getLogger(__name__)
diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py
index 8fb413d825..4c589e05e0 100644
--- a/synapse/rest/media/v1/media_repository.py
+++ b/synapse/rest/media/v1/media_repository.py
@@ -36,7 +36,7 @@ from synapse.api.errors import (
 )
 from synapse.http.matrixfederationclient import MatrixFederationHttpClient
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.logcontext import make_deferred_yieldable
 from synapse.util.retryutils import NotRetryingDestination
 from synapse.util.stringutils import is_ascii, random_string
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index 27aa0def2f..778ef97337 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -42,7 +42,7 @@ from synapse.http.server import (
 )
 from synapse.http.servlet import parse_integer, parse_string
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util.async import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.stringutils import is_ascii, random_string
diff --git a/synapse/state.py b/synapse/state.py
index e1092b97a9..8b92d4057a 100644
--- a/synapse/state.py
+++ b/synapse/state.py
@@ -28,7 +28,7 @@ from synapse import event_auth
 from synapse.api.constants import EventTypes
 from synapse.api.errors import AuthError
 from synapse.events.snapshot import EventContext
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.caches import CACHE_SIZE_FACTOR
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.logutils import log_function
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index 134e4a80f1..23b4a8d76d 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -39,6 +39,7 @@ from .filtering import FilteringStore
 from .group_server import GroupServerStore
 from .keys import KeyStore
 from .media_repository import MediaRepositoryStore
+from .monthly_active_users import MonthlyActiveUsersStore
 from .openid import OpenIdStore
 from .presence import PresenceStore, UserPresenceState
 from .profile import ProfileStore
@@ -87,6 +88,7 @@ class DataStore(RoomMemberStore, RoomStore,
                 UserDirectoryStore,
                 GroupServerStore,
                 UserErasureStore,
+                MonthlyActiveUsersStore,
                 ):
 
     def __init__(self, db_conn, hs):
@@ -94,7 +96,6 @@ class DataStore(RoomMemberStore, RoomStore,
         self._clock = hs.get_clock()
         self.database_engine = hs.database_engine
 
-        self.db_conn = db_conn
         self._stream_id_gen = StreamIdGenerator(
             db_conn, "events", "stream_ordering",
             extra_tables=[("local_invites", "stream_id")]
@@ -267,31 +268,6 @@ class DataStore(RoomMemberStore, RoomStore,
 
         return self.runInteraction("count_users", _count_users)
 
-    def count_monthly_users(self):
-        """Counts the number of users who used this homeserver in the last 30 days
-
-        This method should be refactored with count_daily_users - the only
-        reason not to is waiting on definition of mau
-
-        Returns:
-            Defered[int]
-        """
-        def _count_monthly_users(txn):
-            thirty_days_ago = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30)
-            sql = """
-                SELECT COALESCE(count(*), 0) FROM (
-                    SELECT user_id FROM user_ips
-                    WHERE last_seen > ?
-                    GROUP BY user_id
-                ) u
-            """
-
-            txn.execute(sql, (thirty_days_ago,))
-            count, = txn.fetchone()
-            return count
-
-        return self.runInteraction("count_monthly_users", _count_monthly_users)
-
     def count_r30_users(self):
         """
         Counts the number of 30 day retained users, defined as:-
diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py
index b8cefd43d6..2489527f2c 100644
--- a/synapse/storage/client_ips.py
+++ b/synapse/storage/client_ips.py
@@ -35,6 +35,7 @@ LAST_SEEN_GRANULARITY = 120 * 1000
 
 class ClientIpStore(background_updates.BackgroundUpdateStore):
     def __init__(self, db_conn, hs):
+
         self.client_ip_last_seen = Cache(
             name="client_ip_last_seen",
             keylen=4,
@@ -74,6 +75,7 @@ class ClientIpStore(background_updates.BackgroundUpdateStore):
             "before", "shutdown", self._update_client_ips_batch
         )
 
+    @defer.inlineCallbacks
     def insert_client_ip(self, user_id, access_token, ip, user_agent, device_id,
                          now=None):
         if not now:
@@ -84,7 +86,7 @@ class ClientIpStore(background_updates.BackgroundUpdateStore):
             last_seen = self.client_ip_last_seen.get(key)
         except KeyError:
             last_seen = None
-
+        yield self.populate_monthly_active_users(user_id)
         # Rate-limited inserts
         if last_seen is not None and (now - last_seen) < LAST_SEEN_GRANULARITY:
             return
diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index e8e5a0fe44..d4aa192a0a 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -38,7 +38,7 @@ from synapse.storage.background_updates import BackgroundUpdateStore
 from synapse.storage.event_federation import EventFederationStore
 from synapse.storage.events_worker import EventsWorkerStore
 from synapse.types import RoomStreamToken, get_domain_from_id
-from synapse.util.async import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 from synapse.util.frozenutils import frozendict_json_encoder
 from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable
@@ -485,9 +485,14 @@ class EventsStore(EventFederationStore, EventsWorkerStore, BackgroundUpdateStore
                     new_forward_extremeties=new_forward_extremeties,
                 )
                 persist_event_counter.inc(len(chunk))
-                synapse.metrics.event_persisted_position.set(
-                    chunk[-1][0].internal_metadata.stream_ordering,
-                )
+
+                if not backfilled:
+                    # backfilled events have negative stream orderings, so we don't
+                    # want to set the event_persisted_position to that.
+                    synapse.metrics.event_persisted_position.set(
+                        chunk[-1][0].internal_metadata.stream_ordering,
+                    )
+
                 for event, context in chunk:
                     if context.app_service:
                         origin_type = "local"
diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py
new file mode 100644
index 0000000000..d47dcef3a0
--- /dev/null
+++ b/synapse/storage/monthly_active_users.py
@@ -0,0 +1,201 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+from twisted.internet import defer
+
+from synapse.util.caches.descriptors import cached
+
+from ._base import SQLBaseStore
+
+logger = logging.getLogger(__name__)
+
+# Number of msec of granularity to store the monthly_active_user timestamp
+# This means it is not necessary to update the table on every request
+LAST_SEEN_GRANULARITY = 60 * 60 * 1000
+
+
+class MonthlyActiveUsersStore(SQLBaseStore):
+    def __init__(self, dbconn, hs):
+        super(MonthlyActiveUsersStore, self).__init__(None, hs)
+        self._clock = hs.get_clock()
+        self.hs = hs
+        self.reserved_users = ()
+
+    @defer.inlineCallbacks
+    def initialise_reserved_users(self, threepids):
+        # TODO Why can't I do this in init?
+        store = self.hs.get_datastore()
+        reserved_user_list = []
+
+        # Do not add more reserved users than the total allowable number
+        for tp in threepids[:self.hs.config.max_mau_value]:
+            user_id = yield store.get_user_id_by_threepid(
+                tp["medium"], tp["address"]
+            )
+            if user_id:
+                self.upsert_monthly_active_user(user_id)
+                reserved_user_list.append(user_id)
+            else:
+                logger.warning(
+                    "mau limit reserved threepid %s not found in db" % tp
+                )
+        self.reserved_users = tuple(reserved_user_list)
+
+    @defer.inlineCallbacks
+    def reap_monthly_active_users(self):
+        """
+        Cleans out monthly active user table to ensure that no stale
+        entries exist.
+
+        Returns:
+            Deferred[]
+        """
+        def _reap_users(txn):
+
+            thirty_days_ago = (
+                int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30)
+            )
+            # Purge stale users
+
+            # questionmarks is a hack to overcome sqlite not supporting
+            # tuples in 'WHERE IN %s'
+            questionmarks = '?' * len(self.reserved_users)
+            query_args = [thirty_days_ago]
+            query_args.extend(self.reserved_users)
+
+            sql = """
+                DELETE FROM monthly_active_users
+                WHERE timestamp < ?
+                AND user_id NOT IN ({})
+                """.format(','.join(questionmarks))
+
+            txn.execute(sql, query_args)
+
+            # If the MAU user count still exceeds the MAU threshold, then
+            # delete on a least-recently-active basis.
+            # Note it is not possible to write this query using OFFSET due to
+            # incompatibilities in how sqlite and postgres support the feature.
+            # Sqlite requires 'LIMIT -1 OFFSET ?' (the LIMIT must be present),
+            # while Postgres does not require 'LIMIT' but does not support
+            # negative LIMIT values. So there is no way to write it that both
+            # can support.
+            query_args = [self.hs.config.max_mau_value]
+            query_args.extend(self.reserved_users)
+            sql = """
+                DELETE FROM monthly_active_users
+                WHERE user_id NOT IN (
+                    SELECT user_id FROM monthly_active_users
+                    ORDER BY timestamp DESC
+                    LIMIT ?
+                    )
+                AND user_id NOT IN ({})
+                """.format(','.join(questionmarks))
+            txn.execute(sql, query_args)
+
+        yield self.runInteraction("reap_monthly_active_users", _reap_users)
+        # It seems poor to invalidate the whole cache. Postgres supports
+        # 'RETURNING', which would allow us to invalidate only the
+        # specific users, but sqlite has no way to do this; instead we
+        # would need to SELECT and then DELETE, which without locking
+        # is racy.
+        # We have resolved to invalidate the whole cache for now, and will
+        # do something about it if and when the perf becomes significant.
+        self._user_last_seen_monthly_active.invalidate_all()
+        self.get_monthly_active_count.invalidate_all()
+
+    @cached(num_args=0)
+    def get_monthly_active_count(self):
+        """Generates current count of monthly active users
+
+        Returns:
+            Deferred[int]: Number of current monthly active users
+        """
+
+        def _count_users(txn):
+            sql = "SELECT COALESCE(count(*), 0) FROM monthly_active_users"
+
+            txn.execute(sql)
+            count, = txn.fetchone()
+            return count
+        return self.runInteraction("count_users", _count_users)
+
+    def upsert_monthly_active_user(self, user_id):
+        """
+            Updates or inserts monthly active user member
+            Arguments:
+                user_id (str): user to add/update
+            Deferred[bool]: True if a new entry was created, False if an
+                existing one was updated.
+        """
+        is_insert = self._simple_upsert(
+            desc="upsert_monthly_active_user",
+            table="monthly_active_users",
+            keyvalues={
+                "user_id": user_id,
+            },
+            values={
+                "timestamp": int(self._clock.time_msec()),
+            },
+            lock=False,
+        )
+        if is_insert:
+            self._user_last_seen_monthly_active.invalidate((user_id,))
+            self.get_monthly_active_count.invalidate(())
+
+    @cached(num_args=1)
+    def _user_last_seen_monthly_active(self, user_id):
+        """
+            Checks if a given user is part of the monthly active user group
+            Arguments:
+                user_id (str): user to add/update
+            Return:
+                Deferred[int] : timestamp since last seen, None if never seen
+
+        """
+
+        return self._simple_select_one_onecol(
+            table="monthly_active_users",
+            keyvalues={
+                "user_id": user_id,
+            },
+            retcol="timestamp",
+            allow_none=True,
+            desc="_user_last_seen_monthly_active",
+        )
+
+    @defer.inlineCallbacks
+    def populate_monthly_active_users(self, user_id):
+        """Checks on the state of monthly active user limits and optionally
+        adds the user to the monthly active tables
+
+        Args:
+            user_id(str): the user_id to query
+        """
+        if self.hs.config.limit_usage_by_mau:
+            last_seen_timestamp = yield self._user_last_seen_monthly_active(user_id)
+            now = self.hs.get_clock().time_msec()
+
+            # We want to reduce the total number of db writes, and are happy
+            # to trade timestamp accuracy in order to lighten the load. This
+            # means we always insert new users (where the MAU threshold has
+            # not been reached), but only update a user we have not previously
+            # seen for LAST_SEEN_GRANULARITY ms.
+            if last_seen_timestamp is None:
+                count = yield self.get_monthly_active_count()
+                if count < self.hs.config.max_mau_value:
+                    yield self.upsert_monthly_active_user(user_id)
+            elif now - last_seen_timestamp > LAST_SEEN_GRANULARITY:
+                yield self.upsert_monthly_active_user(user_id)
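Note: the store is fed from request paths (see the client_ips change above) and relies on something periodically calling reap_monthly_active_users so the table never grows past max_mau_value plus the reserved users. A sketch of how a homeserver might seed and prune it at startup (the config attribute for reserved threepids and the one-hour interval are assumptions; the real hookup lives in the homeserver startup code, not in this file):

    from twisted.internet import defer


    @defer.inlineCallbacks
    def setup_mau_tracking(hs):
        store = hs.get_datastore()

        if hs.config.limit_usage_by_mau:
            # Seed the table from any reserved threepids in the config
            # (attribute name assumed for illustration).
            yield store.initialise_reserved_users(
                getattr(hs.config, "mau_limits_reserved_threepids", [])
            )
            # Prune stale entries now, then roughly once an hour.
            yield store.reap_monthly_active_users()
            hs.get_clock().looping_call(
                store.reap_monthly_active_users, 1000 * 60 * 60
            )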
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index b290f834b3..b364719312 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
 
 # Remember to update this number every time a change is made to database
 # schema files, so the users will be informed on server restarts.
-SCHEMA_VERSION = 50
+SCHEMA_VERSION = 51
 
 dir_path = os.path.abspath(os.path.dirname(__file__))
 
diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py
index 10dce21cea..9b4e6d6aa8 100644
--- a/synapse/storage/roommember.py
+++ b/synapse/storage/roommember.py
@@ -26,7 +26,7 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.storage.events_worker import EventsWorkerStore
 from synapse.types import get_domain_from_id
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.caches import intern_string
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 from synapse.util.stringutils import to_ascii
diff --git a/synapse/storage/schema/delta/51/monthly_active_users.sql b/synapse/storage/schema/delta/51/monthly_active_users.sql
new file mode 100644
index 0000000000..c9d537d5a3
--- /dev/null
+++ b/synapse/storage/schema/delta/51/monthly_active_users.sql
@@ -0,0 +1,27 @@
+/* Copyright 2018 New Vector Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- A table of monthly active users, used when blocking access based on MAU limits.
+CREATE TABLE monthly_active_users (
+    user_id TEXT NOT NULL,
+    -- Last time we saw the user. Not guaranteed to be accurate due to rate limiting
+    -- on updates; the granularity of updates is governed by
+    -- synapse.storage.monthly_active_users.LAST_SEEN_GRANULARITY.
+    -- Measured in ms since epoch.
+    timestamp BIGINT NOT NULL
+);
+
+CREATE UNIQUE INDEX monthly_active_users_users ON monthly_active_users(user_id);
+CREATE INDEX monthly_active_users_time_stamp ON monthly_active_users(timestamp);
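
The unique index on user_id is what allows the single-row upsert above, and the timestamp index supports queries that window on recent activity. Purely as an illustration (the real counting and reaping queries live in synapse/storage/monthly_active_users.py and are not shown in this hunk), a 30-day active count over this schema could be computed as follows:

import sqlite3
import time

THIRTY_DAYS_MS = 30 * 24 * 60 * 60 * 1000

def monthly_active_count(conn, now_ms=None):
    # Hypothetical helper: count users seen within the last 30 days.
    if now_ms is None:
        now_ms = int(time.time() * 1000)
    row = conn.execute(
        "SELECT COUNT(*) FROM monthly_active_users WHERE timestamp > ?",
        (now_ms - THIRTY_DAYS_MS,),
    ).fetchone()
    return row[0]

# Example against an in-memory SQLite database using the schema above.
conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE monthly_active_users (user_id TEXT NOT NULL, timestamp BIGINT NOT NULL)"
)
conn.execute(
    "INSERT INTO monthly_active_users VALUES (?, ?)",
    ("@user:server", int(time.time() * 1000)),
)
print(monthly_active_count(conn))  # -> 1
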
diff --git a/synapse/storage/state.py b/synapse/storage/state.py
index b27b3ae144..17b14d464b 100644
--- a/synapse/storage/state.py
+++ b/synapse/storage/state.py
@@ -21,15 +21,17 @@ from six.moves import range
 
 from twisted.internet import defer
 
+from synapse.api.constants import EventTypes
+from synapse.api.errors import NotFoundError
+from synapse.storage._base import SQLBaseStore
 from synapse.storage.background_updates import BackgroundUpdateStore
 from synapse.storage.engines import PostgresEngine
+from synapse.storage.events_worker import EventsWorkerStore
 from synapse.util.caches import get_cache_factor_for, intern_string
 from synapse.util.caches.descriptors import cached, cachedList
 from synapse.util.caches.dictionary_cache import DictionaryCache
 from synapse.util.stringutils import to_ascii
 
-from ._base import SQLBaseStore
-
 logger = logging.getLogger(__name__)
 
 
@@ -46,7 +48,8 @@ class _GetStateGroupDelta(namedtuple("_GetStateGroupDelta", ("prev_group", "delt
         return len(self.delta_ids) if self.delta_ids else 0
 
 
-class StateGroupWorkerStore(SQLBaseStore):
+# this inherits from EventsWorkerStore because it calls self.get_events
+class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
     """The parts of StateGroupStore that can be called from workers.
     """
 
@@ -61,6 +64,30 @@ class StateGroupWorkerStore(SQLBaseStore):
             "*stateGroupCache*", 500000 * get_cache_factor_for("stateGroupCache")
         )
 
+    @defer.inlineCallbacks
+    def get_room_version(self, room_id):
+        """Get the room_version of a given room
+
+        Args:
+            room_id (str)
+
+        Returns:
+            Deferred[str]
+
+        Raises:
+            NotFoundError if the room is unknown
+        """
+        # for now we do this by looking at the create event. We may want to cache this
+        # more intelligently in future.
+        state_ids = yield self.get_current_state_ids(room_id)
+        create_id = state_ids.get((EventTypes.Create, ""))
+
+        if not create_id:
+            raise NotFoundError("Unknown room")
+
+        create_event = yield self.get_event(create_id)
+        defer.returnValue(create_event.content.get("room_version", "1"))
+
     @cached(max_entries=100000, iterable=True)
     def get_current_state_ids(self, room_id):
         """Get the current state event ids for a room based on the
diff --git a/synapse/util/async.py b/synapse/util/async_helpers.py
index 9b3f2f4b96..9b3f2f4b96 100644
--- a/synapse/util/async.py
+++ b/synapse/util/async_helpers.py
diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py
index 861c24809c..187510576a 100644
--- a/synapse/util/caches/descriptors.py
+++ b/synapse/util/caches/descriptors.py
@@ -25,7 +25,7 @@ from six import itervalues, string_types
 from twisted.internet import defer
 
 from synapse.util import logcontext, unwrapFirstError
-from synapse.util.async import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches import get_cache_factor_for
 from synapse.util.caches.lrucache import LruCache
 from synapse.util.caches.treecache import TreeCache, iterate_tree_cache_entry
diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py
index a8491b42d5..afb03b2e1b 100644
--- a/synapse/util/caches/response_cache.py
+++ b/synapse/util/caches/response_cache.py
@@ -16,7 +16,7 @@ import logging
 
 from twisted.internet import defer
 
-from synapse.util.async import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches import register_cache
 from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
diff --git a/synapse/util/caches/snapshot_cache.py b/synapse/util/caches/snapshot_cache.py
index d03678b8c8..8318db8d2c 100644
--- a/synapse/util/caches/snapshot_cache.py
+++ b/synapse/util/caches/snapshot_cache.py
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from synapse.util.async import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred
 
 
 class SnapshotCache(object):
diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py
index 8dcae50b39..07e83fadda 100644
--- a/synapse/util/logcontext.py
+++ b/synapse/util/logcontext.py
@@ -526,7 +526,7 @@ _to_ignore = [
     "synapse.util.logcontext",
     "synapse.http.server",
     "synapse.storage._base",
-    "synapse.util.async",
+    "synapse.util.async_helpers",
 ]
 
 
diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py
index a82d737e71..fbb96361a8 100644
--- a/tests/api/test_auth.py
+++ b/tests/api/test_auth.py
@@ -21,7 +21,7 @@ from twisted.internet import defer
 
 import synapse.handlers.auth
 from synapse.api.auth import Auth
-from synapse.api.errors import AuthError
+from synapse.api.errors import AuthError, Codes
 from synapse.types import UserID
 
 from tests import unittest
@@ -444,3 +444,37 @@ class AuthTestCase(unittest.TestCase):
         self.assertEqual("Guest access token used for regular user", cm.exception.msg)
 
         self.store.get_user_by_id.assert_called_with(USER_ID)
+
+    @defer.inlineCallbacks
+    def test_blocking_mau(self):
+        self.hs.config.limit_usage_by_mau = False
+        self.hs.config.max_mau_value = 50
+        lots_of_users = 100
+        small_number_of_users = 1
+
+        # Ensure no error thrown
+        yield self.auth.check_auth_blocking()
+
+        self.hs.config.limit_usage_by_mau = True
+
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(lots_of_users)
+        )
+
+        with self.assertRaises(AuthError):
+            yield self.auth.check_auth_blocking()
+
+        # Ensure does not throw an error
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(small_number_of_users)
+        )
+        yield self.auth.check_auth_blocking()
+
+    @defer.inlineCallbacks
+    def test_hs_disabled(self):
+        self.hs.config.hs_disabled = True
+        self.hs.config.hs_disabled_message = "Reason for being disabled"
+        with self.assertRaises(AuthError) as e:
+            yield self.auth.check_auth_blocking()
+        self.assertEquals(e.exception.errcode, Codes.HS_DISABLED)
+        self.assertEquals(e.exception.code, 403)
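
The two new tests above pin down the expected behaviour of Auth.check_auth_blocking, which is added elsewhere in this changeset and not shown in this hunk. A rough reconstruction of that behaviour, inferred from the assertions only (a sketch, not the actual implementation; the exact error message and errcode for the MAU case are not visible here):

from twisted.internet import defer

from synapse.api.errors import AuthError, Codes


@defer.inlineCallbacks
def check_auth_blocking_sketch(hs, store):
    # Sketch inferred from the tests: block everything when the homeserver
    # is disabled, otherwise block once the MAU cap has been reached.
    if hs.config.hs_disabled:
        raise AuthError(
            403, hs.config.hs_disabled_message, errcode=Codes.HS_DISABLED,
        )
    if hs.config.limit_usage_by_mau:
        current_mau = yield store.get_monthly_active_count()
        if current_mau >= hs.config.max_mau_value:
            raise AuthError(403, "Monthly Active User limit exceeded")
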
diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py
index 55eab9e9cf..8a9bf2d5fd 100644
--- a/tests/handlers/test_auth.py
+++ b/tests/handlers/test_auth.py
@@ -132,14 +132,14 @@ class AuthTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def test_mau_limits_exceeded(self):
         self.hs.config.limit_usage_by_mau = True
-        self.hs.get_datastore().count_monthly_users = Mock(
+        self.hs.get_datastore().get_monthly_active_count = Mock(
             return_value=defer.succeed(self.large_number_of_users)
         )
 
         with self.assertRaises(AuthError):
             yield self.auth_handler.get_access_token_for_user_id('user_a')
 
-        self.hs.get_datastore().count_monthly_users = Mock(
+        self.hs.get_datastore().get_monthly_active_count = Mock(
             return_value=defer.succeed(self.large_number_of_users)
         )
         with self.assertRaises(AuthError):
@@ -151,13 +151,13 @@ class AuthTestCase(unittest.TestCase):
     def test_mau_limits_not_exceeded(self):
         self.hs.config.limit_usage_by_mau = True
 
-        self.hs.get_datastore().count_monthly_users = Mock(
+        self.hs.get_datastore().get_monthly_active_count = Mock(
             return_value=defer.succeed(self.small_number_of_users)
         )
         # Ensure does not raise exception
         yield self.auth_handler.get_access_token_for_user_id('user_a')
 
-        self.hs.get_datastore().count_monthly_users = Mock(
+        self.hs.get_datastore().get_monthly_active_count = Mock(
             return_value=defer.succeed(self.small_number_of_users)
         )
         yield self.auth_handler.validate_short_term_login_token_and_get_user_id(
diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py
index 0937d71cf6..4ea59a58de 100644
--- a/tests/handlers/test_register.py
+++ b/tests/handlers/test_register.py
@@ -50,6 +50,10 @@ class RegistrationTestCase(unittest.TestCase):
         self.hs.get_macaroon_generator = Mock(return_value=self.macaroon_generator)
         self.hs.handlers = RegistrationHandlers(self.hs)
         self.handler = self.hs.get_handlers().registration_handler
+        self.store = self.hs.get_datastore()
+        self.hs.config.max_mau_value = 50
+        self.lots_of_users = 100
+        self.small_number_of_users = 1
 
     @defer.inlineCallbacks
     def test_user_is_created_and_logged_in_if_doesnt_exist(self):
@@ -80,51 +84,43 @@ class RegistrationTestCase(unittest.TestCase):
         self.assertEquals(result_token, 'secret')
 
     @defer.inlineCallbacks
-    def test_cannot_register_when_mau_limits_exceeded(self):
-        local_part = "someone"
-        display_name = "someone"
-        requester = create_requester("@as:test")
-        store = self.hs.get_datastore()
+    def test_mau_limits_when_disabled(self):
         self.hs.config.limit_usage_by_mau = False
-        self.hs.config.max_mau_value = 50
-        lots_of_users = 100
-        small_number_users = 1
-
-        store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users))
-
         # Ensure does not throw exception
-        yield self.handler.get_or_create_user(requester, 'a', display_name)
+        yield self.handler.get_or_create_user("requester", 'a', "display_name")
 
+    @defer.inlineCallbacks
+    def test_get_or_create_user_mau_not_blocked(self):
         self.hs.config.limit_usage_by_mau = True
-
-        with self.assertRaises(RegistrationError):
-            yield self.handler.get_or_create_user(requester, 'b', display_name)
-
-        store.count_monthly_users = Mock(return_value=defer.succeed(small_number_users))
-
-        self._macaroon_mock_generator("another_secret")
-
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(self.small_number_of_users)
+        )
         # Ensure does not throw exception
-        yield self.handler.get_or_create_user("@neil:matrix.org", 'c', "Neil")
-
-        self._macaroon_mock_generator("another another secret")
-        store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users))
+        yield self.handler.get_or_create_user("@user:server", 'c', "User")
 
+    @defer.inlineCallbacks
+    def test_get_or_create_user_mau_blocked(self):
+        self.hs.config.limit_usage_by_mau = True
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(self.lots_of_users)
+        )
         with self.assertRaises(RegistrationError):
-            yield self.handler.register(localpart=local_part)
+            yield self.handler.get_or_create_user("requester", 'b', "display_name")
 
-        self._macaroon_mock_generator("another another secret")
-        store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users))
+    @defer.inlineCallbacks
+    def test_register_mau_blocked(self):
+        self.hs.config.limit_usage_by_mau = True
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(self.lots_of_users)
+        )
+        with self.assertRaises(RegistrationError):
+            yield self.handler.register(localpart="local_part")
 
+    @defer.inlineCallbacks
+    def test_register_saml2_mau_blocked(self):
+        self.hs.config.limit_usage_by_mau = True
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(self.lots_of_users)
+        )
         with self.assertRaises(RegistrationError):
-            yield self.handler.register_saml2(local_part)
-
-    def _macaroon_mock_generator(self, secret):
-        """
-        Reset macaroon generator in the case where the test creates multiple users
-        """
-        macaroon_generator = Mock(
-            generate_access_token=Mock(return_value=secret))
-        self.hs.get_macaroon_generator = Mock(return_value=macaroon_generator)
-        self.hs.handlers = RegistrationHandlers(self.hs)
-        self.handler = self.hs.get_handlers().registration_handler
+            yield self.handler.register_saml2(localpart="local_part")
diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py
index 2c263af1a3..f422cf3c5a 100644
--- a/tests/handlers/test_typing.py
+++ b/tests/handlers/test_typing.py
@@ -48,7 +48,9 @@ def _expect_edu(destination, edu_type, content, origin="test"):
 
 
 def _make_edu_json(origin, edu_type, content):
-    return json.dumps(_expect_edu("test", edu_type, content, origin=origin))
+    return json.dumps(
+        _expect_edu("test", edu_type, content, origin=origin)
+    ).encode('utf8')
 
 
 class TypingNotificationsTestCase(unittest.TestCase):
diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py
index 34e68ae82f..d46c27e7e9 100644
--- a/tests/rest/client/test_transactions.py
+++ b/tests/rest/client/test_transactions.py
@@ -85,7 +85,7 @@ class HttpTransactionCacheTestCase(unittest.TestCase):
             try:
                 yield self.cache.fetch_or_execute(self.mock_key, cb)
             except Exception as e:
-                self.assertEqual(e.message, "boo")
+                self.assertEqual(e.args[0], "boo")
             self.assertIs(LoggingContext.current_context(), test_context)
 
             res = yield self.cache.fetch_or_execute(self.mock_key, cb)
@@ -111,7 +111,7 @@ class HttpTransactionCacheTestCase(unittest.TestCase):
             try:
                 yield self.cache.fetch_or_execute(self.mock_key, cb)
             except Exception as e:
-                self.assertEqual(e.message, "boo")
+                self.assertEqual(e.args[0], "boo")
             self.assertIs(LoggingContext.current_context(), test_context)
 
             res = yield self.cache.fetch_or_execute(self.mock_key, cb)
diff --git a/tests/rest/client/v1/test_admin.py b/tests/rest/client/v1/test_admin.py
index 8c90145601..fb28883d30 100644
--- a/tests/rest/client/v1/test_admin.py
+++ b/tests/rest/client/v1/test_admin.py
@@ -140,7 +140,7 @@ class UserRegisterTestCase(unittest.TestCase):
                 "admin": True,
                 "mac": want_mac,
             }
-        ).encode('utf8')
+        )
         request, channel = make_request("POST", self.url, body.encode('utf8'))
         render(request, self.resource, self.clock)
 
@@ -168,7 +168,7 @@ class UserRegisterTestCase(unittest.TestCase):
                 "admin": True,
                 "mac": want_mac,
             }
-        ).encode('utf8')
+        )
         request, channel = make_request("POST", self.url, body.encode('utf8'))
         render(request, self.resource, self.clock)
 
@@ -195,7 +195,7 @@ class UserRegisterTestCase(unittest.TestCase):
                 "admin": True,
                 "mac": want_mac,
             }
-        ).encode('utf8')
+        )
         request, channel = make_request("POST", self.url, body.encode('utf8'))
         render(request, self.resource, self.clock)
 
@@ -253,7 +253,7 @@ class UserRegisterTestCase(unittest.TestCase):
         self.assertEqual('Invalid username', channel.json_body["error"])
 
         # Must not have null bytes
-        body = json.dumps({"nonce": nonce(), "username": b"abcd\x00"})
+        body = json.dumps({"nonce": nonce(), "username": u"abcd\u0000"})
         request, channel = make_request("POST", self.url, body.encode('utf8'))
         render(request, self.resource, self.clock)
 
@@ -289,7 +289,7 @@ class UserRegisterTestCase(unittest.TestCase):
         self.assertEqual('Invalid password', channel.json_body["error"])
 
         # Must not have null bytes
-        body = json.dumps({"nonce": nonce(), "username": "a", "password": b"abcd\x00"})
+        body = json.dumps({"nonce": nonce(), "username": "a", "password": u"abcd\u0000"})
         request, channel = make_request("POST", self.url, body.encode('utf8'))
         render(request, self.resource, self.clock)
 
diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py
index d71cc8e0db..0516ce3cfb 100644
--- a/tests/rest/client/v1/test_profile.py
+++ b/tests/rest/client/v1/test_profile.py
@@ -80,7 +80,7 @@ class ProfileTestCase(unittest.TestCase):
         (code, response) = yield self.mock_resource.trigger(
             "PUT",
             "/profile/%s/displayname" % (myid),
-            '{"displayname": "Frank Jr."}'
+            b'{"displayname": "Frank Jr."}'
         )
 
         self.assertEquals(200, code)
@@ -95,7 +95,7 @@ class ProfileTestCase(unittest.TestCase):
 
         (code, response) = yield self.mock_resource.trigger(
             "PUT", "/profile/%s/displayname" % ("@4567:test"),
-            '{"displayname": "Frank Jr."}'
+            b'{"displayname": "Frank Jr."}'
         )
 
         self.assertTrue(
@@ -122,7 +122,7 @@ class ProfileTestCase(unittest.TestCase):
 
         (code, response) = yield self.mock_resource.trigger(
             "PUT", "/profile/%s/displayname" % ("@opaque:elsewhere"),
-            '{"displayname":"bob"}'
+            b'{"displayname":"bob"}'
         )
 
         self.assertTrue(
@@ -151,7 +151,7 @@ class ProfileTestCase(unittest.TestCase):
         (code, response) = yield self.mock_resource.trigger(
             "PUT",
             "/profile/%s/avatar_url" % (myid),
-            '{"avatar_url": "http://my.server/pic.gif"}'
+            b'{"avatar_url": "http://my.server/pic.gif"}'
         )
 
         self.assertEquals(200, code)
diff --git a/tests/rest/client/v1/utils.py b/tests/rest/client/v1/utils.py
index 41de8e0762..e3bc5f378d 100644
--- a/tests/rest/client/v1/utils.py
+++ b/tests/rest/client/v1/utils.py
@@ -105,7 +105,7 @@ class RestTestCase(unittest.TestCase):
                 "password": "test",
                 "type": "m.login.password"
             }))
-        self.assertEquals(200, code)
+        self.assertEquals(200, code, msg=response)
         defer.returnValue(response)
 
     @defer.inlineCallbacks
@@ -149,14 +149,14 @@ class RestHelper(object):
     def create_room_as(self, room_creator, is_public=True, tok=None):
         temp_id = self.auth_user_id
         self.auth_user_id = room_creator
-        path = b"/_matrix/client/r0/createRoom"
+        path = "/_matrix/client/r0/createRoom"
         content = {}
         if not is_public:
             content["visibility"] = "private"
         if tok:
-            path = path + b"?access_token=%s" % tok.encode('ascii')
+            path = path + "?access_token=%s" % tok
 
-        request, channel = make_request(b"POST", path, json.dumps(content).encode('utf8'))
+        request, channel = make_request("POST", path, json.dumps(content).encode('utf8'))
         request.render(self.resource)
         wait_until_result(self.hs.get_reactor(), channel)
 
@@ -205,7 +205,7 @@ class RestHelper(object):
         data = {"membership": membership}
 
         request, channel = make_request(
-            b"PUT", path.encode('ascii'), json.dumps(data).encode('utf8')
+            "PUT", path, json.dumps(data).encode('utf8')
         )
 
         request.render(self.resource)
diff --git a/tests/rest/client/v2_alpha/test_filter.py b/tests/rest/client/v2_alpha/test_filter.py
index e890f0feac..de33b10a5f 100644
--- a/tests/rest/client/v2_alpha/test_filter.py
+++ b/tests/rest/client/v2_alpha/test_filter.py
@@ -33,7 +33,7 @@ PATH_PREFIX = "/_matrix/client/v2_alpha"
 
 class FilterTestCase(unittest.TestCase):
 
-    USER_ID = b"@apple:test"
+    USER_ID = "@apple:test"
     EXAMPLE_FILTER = {"room": {"timeline": {"types": ["m.room.message"]}}}
     EXAMPLE_FILTER_JSON = b'{"room": {"timeline": {"types": ["m.room.message"]}}}'
     TO_REGISTER = [filter]
@@ -72,8 +72,8 @@ class FilterTestCase(unittest.TestCase):
 
     def test_add_filter(self):
         request, channel = make_request(
-            b"POST",
-            b"/_matrix/client/r0/user/%s/filter" % (self.USER_ID),
+            "POST",
+            "/_matrix/client/r0/user/%s/filter" % (self.USER_ID),
             self.EXAMPLE_FILTER_JSON,
         )
         request.render(self.resource)
@@ -87,8 +87,8 @@ class FilterTestCase(unittest.TestCase):
 
     def test_add_filter_for_other_user(self):
         request, channel = make_request(
-            b"POST",
-            b"/_matrix/client/r0/user/%s/filter" % (b"@watermelon:test"),
+            "POST",
+            "/_matrix/client/r0/user/%s/filter" % ("@watermelon:test"),
             self.EXAMPLE_FILTER_JSON,
         )
         request.render(self.resource)
@@ -101,8 +101,8 @@ class FilterTestCase(unittest.TestCase):
         _is_mine = self.hs.is_mine
         self.hs.is_mine = lambda target_user: False
         request, channel = make_request(
-            b"POST",
-            b"/_matrix/client/r0/user/%s/filter" % (self.USER_ID),
+            "POST",
+            "/_matrix/client/r0/user/%s/filter" % (self.USER_ID),
             self.EXAMPLE_FILTER_JSON,
         )
         request.render(self.resource)
@@ -119,7 +119,7 @@ class FilterTestCase(unittest.TestCase):
         self.clock.advance(1)
         filter_id = filter_id.result
         request, channel = make_request(
-            b"GET", b"/_matrix/client/r0/user/%s/filter/%s" % (self.USER_ID, filter_id)
+            "GET", "/_matrix/client/r0/user/%s/filter/%s" % (self.USER_ID, filter_id)
         )
         request.render(self.resource)
         wait_until_result(self.clock, channel)
@@ -129,7 +129,7 @@ class FilterTestCase(unittest.TestCase):
 
     def test_get_filter_non_existant(self):
         request, channel = make_request(
-            b"GET", "/_matrix/client/r0/user/%s/filter/12382148321" % (self.USER_ID)
+            "GET", "/_matrix/client/r0/user/%s/filter/12382148321" % (self.USER_ID)
         )
         request.render(self.resource)
         wait_until_result(self.clock, channel)
@@ -141,7 +141,7 @@ class FilterTestCase(unittest.TestCase):
     # in errors.py
     def test_get_filter_invalid_id(self):
         request, channel = make_request(
-            b"GET", "/_matrix/client/r0/user/%s/filter/foobar" % (self.USER_ID)
+            "GET", "/_matrix/client/r0/user/%s/filter/foobar" % (self.USER_ID)
         )
         request.render(self.resource)
         wait_until_result(self.clock, channel)
@@ -151,7 +151,7 @@ class FilterTestCase(unittest.TestCase):
     # No ID also returns an invalid_id error
     def test_get_filter_no_id(self):
         request, channel = make_request(
-            b"GET", "/_matrix/client/r0/user/%s/filter/" % (self.USER_ID)
+            "GET", "/_matrix/client/r0/user/%s/filter/" % (self.USER_ID)
         )
         request.render(self.resource)
         wait_until_result(self.clock, channel)
diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py
index e004d8fc73..f6293f11a8 100644
--- a/tests/rest/client/v2_alpha/test_register.py
+++ b/tests/rest/client/v2_alpha/test_register.py
@@ -81,7 +81,7 @@ class RegisterRestServletTestCase(unittest.TestCase):
             "access_token": token,
             "home_server": self.hs.hostname,
         }
-        self.assertDictContainsSubset(det_data, json.loads(channel.result["body"]))
+        self.assertDictContainsSubset(det_data, channel.json_body)
 
     def test_POST_appservice_registration_invalid(self):
         self.appservice = None  # no application service exists
@@ -102,7 +102,7 @@ class RegisterRestServletTestCase(unittest.TestCase):
 
         self.assertEquals(channel.result["code"], b"400", channel.result)
         self.assertEquals(
-            json.loads(channel.result["body"])["error"], "Invalid password"
+            channel.json_body["error"], "Invalid password"
         )
 
     def test_POST_bad_username(self):
@@ -113,7 +113,7 @@ class RegisterRestServletTestCase(unittest.TestCase):
 
         self.assertEquals(channel.result["code"], b"400", channel.result)
         self.assertEquals(
-            json.loads(channel.result["body"])["error"], "Invalid username"
+            channel.json_body["error"], "Invalid username"
         )
 
     def test_POST_user_valid(self):
@@ -140,7 +140,7 @@ class RegisterRestServletTestCase(unittest.TestCase):
             "device_id": device_id,
         }
         self.assertEquals(channel.result["code"], b"200", channel.result)
-        self.assertDictContainsSubset(det_data, json.loads(channel.result["body"]))
+        self.assertDictContainsSubset(det_data, channel.json_body)
         self.auth_handler.get_login_tuple_for_user_id(
             user_id, device_id=device_id, initial_device_display_name=None
         )
@@ -158,7 +158,7 @@ class RegisterRestServletTestCase(unittest.TestCase):
 
         self.assertEquals(channel.result["code"], b"403", channel.result)
         self.assertEquals(
-            json.loads(channel.result["body"])["error"],
+            channel.json_body["error"],
             "Registration has been disabled",
         )
 
@@ -178,7 +178,7 @@ class RegisterRestServletTestCase(unittest.TestCase):
             "device_id": "guest_device",
         }
         self.assertEquals(channel.result["code"], b"200", channel.result)
-        self.assertDictContainsSubset(det_data, json.loads(channel.result["body"]))
+        self.assertDictContainsSubset(det_data, channel.json_body)
 
     def test_POST_disabled_guest_registration(self):
         self.hs.config.allow_guest_access = False
@@ -189,5 +189,5 @@ class RegisterRestServletTestCase(unittest.TestCase):
 
         self.assertEquals(channel.result["code"], b"403", channel.result)
         self.assertEquals(
-            json.loads(channel.result["body"])["error"], "Guest access is disabled"
+            channel.json_body["error"], "Guest access is disabled"
         )
diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py
index 03ec3993b2..bafc0d1df0 100644
--- a/tests/rest/client/v2_alpha/test_sync.py
+++ b/tests/rest/client/v2_alpha/test_sync.py
@@ -32,7 +32,7 @@ PATH_PREFIX = "/_matrix/client/v2_alpha"
 
 class FilterTestCase(unittest.TestCase):
 
-    USER_ID = b"@apple:test"
+    USER_ID = "@apple:test"
     TO_REGISTER = [sync]
 
     def setUp(self):
@@ -68,7 +68,7 @@ class FilterTestCase(unittest.TestCase):
             r.register_servlets(self.hs, self.resource)
 
     def test_sync_argless(self):
-        request, channel = make_request(b"GET", b"/_matrix/client/r0/sync")
+        request, channel = make_request("GET", "/_matrix/client/r0/sync")
         request.render(self.resource)
         wait_until_result(self.clock, channel)
 
diff --git a/tests/server.py b/tests/server.py
index c611dd6059..e249668d21 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -11,6 +11,7 @@ from twisted.python.failure import Failure
 from twisted.test.proto_helpers import MemoryReactorClock
 
 from synapse.http.site import SynapseRequest
+from synapse.util import Clock
 
 from tests.utils import setup_test_homeserver as _sth
 
@@ -28,7 +29,13 @@ class FakeChannel(object):
     def json_body(self):
         if not self.result:
             raise Exception("No result yet.")
-        return json.loads(self.result["body"])
+        return json.loads(self.result["body"].decode('utf8'))
+
+    @property
+    def code(self):
+        if not self.result:
+            raise Exception("No result yet.")
+        return int(self.result["code"])
 
     def writeHeaders(self, version, code, reason, headers):
         self.result["version"] = version
@@ -79,11 +86,16 @@ def make_request(method, path, content=b""):
     Make a web request using the given method and path, feed it the
     content, and return the Request and the Channel underneath.
     """
+    if not isinstance(method, bytes):
+        method = method.encode('ascii')
+
+    if not isinstance(path, bytes):
+        path = path.encode('ascii')
 
     # Decorate it to be the full path
     if not path.startswith(b"/_matrix"):
         path = b"/_matrix/client/r0/" + path
-        path = path.replace("//", "/")
+        path = path.replace(b"//", b"/")
 
     if isinstance(content, text_type):
         content = content.encode('utf8')
@@ -191,3 +203,9 @@ def setup_test_homeserver(*args, **kwargs):
     clock.threadpool = ThreadPool()
     pool.threadpool = ThreadPool()
     return d
+
+
+def get_clock():
+    clock = ThreadedMemoryReactorClock()
+    hs_clock = Clock(clock)
+    return (clock, hs_clock)
diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py
index 6d6f00c5c5..52eff2a104 100644
--- a/tests/storage/test__base.py
+++ b/tests/storage/test__base.py
@@ -18,7 +18,7 @@ from mock import Mock
 
 from twisted.internet import defer
 
-from synapse.util.async import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches.descriptors import Cache, cached
 
 from tests import unittest
diff --git a/tests/storage/test__init__.py b/tests/storage/test__init__.py
deleted file mode 100644
index f19cb1265c..0000000000
--- a/tests/storage/test__init__.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2018 New Vector Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from twisted.internet import defer
-
-import tests.utils
-
-
-class InitTestCase(tests.unittest.TestCase):
-    def __init__(self, *args, **kwargs):
-        super(InitTestCase, self).__init__(*args, **kwargs)
-        self.store = None  # type: synapse.storage.DataStore
-
-    @defer.inlineCallbacks
-    def setUp(self):
-        hs = yield tests.utils.setup_test_homeserver()
-
-        hs.config.max_mau_value = 50
-        hs.config.limit_usage_by_mau = True
-        self.store = hs.get_datastore()
-        self.clock = hs.get_clock()
-
-    @defer.inlineCallbacks
-    def test_count_monthly_users(self):
-        count = yield self.store.count_monthly_users()
-        self.assertEqual(0, count)
-
-        yield self._insert_user_ips("@user:server1")
-        yield self._insert_user_ips("@user:server2")
-
-        count = yield self.store.count_monthly_users()
-        self.assertEqual(2, count)
-
-    @defer.inlineCallbacks
-    def _insert_user_ips(self, user):
-        """
-        Helper function to populate user_ips without using batch insertion infra
-        args:
-            user (str):  specify username i.e. @user:server.com
-        """
-        yield self.store._simple_upsert(
-            table="user_ips",
-            keyvalues={
-                "user_id": user,
-                "access_token": "access_token",
-                "ip": "ip",
-                "user_agent": "user_agent",
-                "device_id": "device_id",
-            },
-            values={
-                "last_seen": self.clock.time_msec(),
-            }
-        )
diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py
index bd6fda6cb1..7a58c6eb24 100644
--- a/tests/storage/test_client_ips.py
+++ b/tests/storage/test_client_ips.py
@@ -12,6 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from mock import Mock
 
 from twisted.internet import defer
 
@@ -27,9 +28,9 @@ class ClientIpStoreTestCase(tests.unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        hs = yield tests.utils.setup_test_homeserver()
-        self.store = hs.get_datastore()
-        self.clock = hs.get_clock()
+        self.hs = yield tests.utils.setup_test_homeserver()
+        self.store = self.hs.get_datastore()
+        self.clock = self.hs.get_clock()
 
     @defer.inlineCallbacks
     def test_insert_new_client_ip(self):
@@ -54,3 +55,62 @@ class ClientIpStoreTestCase(tests.unittest.TestCase):
             },
             r
         )
+
+    @defer.inlineCallbacks
+    def test_disabled_monthly_active_user(self):
+        self.hs.config.limit_usage_by_mau = False
+        self.hs.config.max_mau_value = 50
+        user_id = "@user:server"
+        yield self.store.insert_client_ip(
+            user_id, "access_token", "ip", "user_agent", "device_id",
+        )
+        active = yield self.store._user_last_seen_monthly_active(user_id)
+        self.assertFalse(active)
+
+    @defer.inlineCallbacks
+    def test_adding_monthly_active_user_when_full(self):
+        self.hs.config.limit_usage_by_mau = True
+        self.hs.config.max_mau_value = 50
+        lots_of_users = 100
+        user_id = "@user:server"
+
+        self.store.get_monthly_active_count = Mock(
+            return_value=defer.succeed(lots_of_users)
+        )
+        yield self.store.insert_client_ip(
+            user_id, "access_token", "ip", "user_agent", "device_id",
+        )
+        active = yield self.store._user_last_seen_monthly_active(user_id)
+        self.assertFalse(active)
+
+    @defer.inlineCallbacks
+    def test_adding_monthly_active_user_when_space(self):
+        self.hs.config.limit_usage_by_mau = True
+        self.hs.config.max_mau_value = 50
+        user_id = "@user:server"
+        active = yield self.store._user_last_seen_monthly_active(user_id)
+        self.assertFalse(active)
+
+        yield self.store.insert_client_ip(
+            user_id, "access_token", "ip", "user_agent", "device_id",
+        )
+        active = yield self.store._user_last_seen_monthly_active(user_id)
+        self.assertTrue(active)
+
+    @defer.inlineCallbacks
+    def test_updating_monthly_active_user_when_space(self):
+        self.hs.config.limit_usage_by_mau = True
+        self.hs.config.max_mau_value = 50
+        user_id = "@user:server"
+
+        active = yield self.store._user_last_seen_monthly_active(user_id)
+        self.assertFalse(active)
+
+        yield self.store.insert_client_ip(
+            user_id, "access_token", "ip", "user_agent", "device_id",
+        )
+        yield self.store.insert_client_ip(
+            user_id, "access_token", "ip", "user_agent", "device_id",
+        )
+        active = yield self.store._user_last_seen_monthly_active(user_id)
+        self.assertTrue(active)
diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py
index 30683e7888..69412c5aad 100644
--- a/tests/storage/test_event_federation.py
+++ b/tests/storage/test_event_federation.py
@@ -49,7 +49,7 @@ class EventFederationWorkerStoreTestCase(tests.unittest.TestCase):
                 'INSERT INTO event_reference_hashes '
                 '(event_id, algorithm, hash) '
                 "VALUES (?, 'sha256', ?)"
-            ), (event_id, 'ffff'))
+            ), (event_id, b'ffff'))
 
         for i in range(0, 11):
             yield self.store.runInteraction("insert", insert_event, i)
diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py
new file mode 100644
index 0000000000..cbd480cd42
--- /dev/null
+++ b/tests/storage/test_monthly_active_users.py
@@ -0,0 +1,123 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+import tests.unittest
+import tests.utils
+from tests.utils import setup_test_homeserver
+
+FORTY_DAYS = 40 * 24 * 60 * 60
+
+
+class MonthlyActiveUsersTestCase(tests.unittest.TestCase):
+    def __init__(self, *args, **kwargs):
+        super(MonthlyActiveUsersTestCase, self).__init__(*args, **kwargs)
+
+    @defer.inlineCallbacks
+    def setUp(self):
+        self.hs = yield setup_test_homeserver()
+        self.store = self.hs.get_datastore()
+
+    @defer.inlineCallbacks
+    def test_initialise_reserved_users(self):
+
+        user1 = "@user1:server"
+        user1_email = "user1@matrix.org"
+        user2 = "@user2:server"
+        user2_email = "user2@matrix.org"
+        threepids = [
+            {'medium': 'email', 'address': user1_email},
+            {'medium': 'email', 'address': user2_email}
+        ]
+        user_num = len(threepids)
+
+        yield self.store.register(
+            user_id=user1,
+            token="123",
+            password_hash=None)
+
+        yield self.store.register(
+            user_id=user2,
+            token="456",
+            password_hash=None)
+
+        now = int(self.hs.get_clock().time_msec())
+        yield self.store.user_add_threepid(user1, "email", user1_email, now, now)
+        yield self.store.user_add_threepid(user2, "email", user2_email, now, now)
+        yield self.store.initialise_reserved_users(threepids)
+
+        active_count = yield self.store.get_monthly_active_count()
+
+        # Test total counts
+        self.assertEquals(active_count, user_num)
+
+        # Test user is marked as active
+
+        timestamp = yield self.store._user_last_seen_monthly_active(user1)
+        self.assertTrue(timestamp)
+        timestamp = yield self.store._user_last_seen_monthly_active(user2)
+        self.assertTrue(timestamp)
+
+        # Test that users are never removed from the db.
+        self.hs.config.max_mau_value = 0
+
+        self.hs.get_clock().advance_time(FORTY_DAYS)
+
+        yield self.store.reap_monthly_active_users()
+
+        active_count = yield self.store.get_monthly_active_count()
+        self.assertEquals(active_count, user_num)
+
+    @defer.inlineCallbacks
+    def test_can_insert_and_count_mau(self):
+        count = yield self.store.get_monthly_active_count()
+        self.assertEqual(0, count)
+
+        yield self.store.upsert_monthly_active_user("@user:server")
+        count = yield self.store.get_monthly_active_count()
+
+        self.assertEqual(1, count)
+
+    @defer.inlineCallbacks
+    def test__user_last_seen_monthly_active(self):
+        user_id1 = "@user1:server"
+        user_id2 = "@user2:server"
+        user_id3 = "@user3:server"
+        result = yield self.store._user_last_seen_monthly_active(user_id1)
+        self.assertIsNone(result)
+        yield self.store.upsert_monthly_active_user(user_id1)
+        yield self.store.upsert_monthly_active_user(user_id2)
+        result = yield self.store._user_last_seen_monthly_active(user_id1)
+        self.assertTrue(result > 0)
+        result = yield self.store._user_last_seen_monthly_active(user_id3)
+        self.assertIsNone(result)
+
+    @defer.inlineCallbacks
+    def test_reap_monthly_active_users(self):
+        self.hs.config.max_mau_value = 5
+        initial_users = 10
+        for i in range(initial_users):
+            yield self.store.upsert_monthly_active_user("@user%d:server" % i)
+        count = yield self.store.get_monthly_active_count()
+        self.assertEquals(count, initial_users)
+        yield self.store.reap_monthly_active_users()
+        count = yield self.store.get_monthly_active_count()
+        self.assertEquals(count, initial_users - self.hs.config.max_mau_value)
+
+        self.hs.get_clock().advance_time(FORTY_DAYS)
+        yield self.store.reap_monthly_active_users()
+        count = yield self.store.get_monthly_active_count()
+        self.assertEquals(count, 0)
diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py
index 7a76d67b8c..f7871cd426 100644
--- a/tests/storage/test_state.py
+++ b/tests/storage/test_state.py
@@ -176,7 +176,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
 
         room_id = self.room.to_string()
         group_ids = yield self.store.get_state_groups_ids(room_id, [e5.event_id])
-        group = group_ids.keys()[0]
+        group = list(group_ids.keys())[0]
 
         # test _get_some_state_from_cache correctly filters out members with types=[]
         (state_dict, is_all) = yield self.store._get_some_state_from_cache(
diff --git a/tests/test_server.py b/tests/test_server.py
index 7e063c0290..fc396226ea 100644
--- a/tests/test_server.py
+++ b/tests/test_server.py
@@ -1,4 +1,3 @@
-import json
 import re
 
 from twisted.internet.defer import Deferred
@@ -104,9 +103,8 @@ class JsonResourceTests(unittest.TestCase):
         request.render(res)
 
         self.assertEqual(channel.result["code"], b'403')
-        reply_body = json.loads(channel.result["body"])
-        self.assertEqual(reply_body["error"], "Forbidden!!one!")
-        self.assertEqual(reply_body["errcode"], "M_FORBIDDEN")
+        self.assertEqual(channel.json_body["error"], "Forbidden!!one!")
+        self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN")
 
     def test_no_handler(self):
         """
@@ -126,6 +124,5 @@ class JsonResourceTests(unittest.TestCase):
         request.render(res)
 
         self.assertEqual(channel.result["code"], b'400')
-        reply_body = json.loads(channel.result["body"])
-        self.assertEqual(reply_body["error"], "Unrecognized request")
-        self.assertEqual(reply_body["errcode"], "M_UNRECOGNIZED")
+        self.assertEqual(channel.json_body["error"], "Unrecognized request")
+        self.assertEqual(channel.json_body["errcode"], "M_UNRECOGNIZED")
diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py
index 4729bd5a0a..7f48a72de8 100644
--- a/tests/util/test_linearizer.py
+++ b/tests/util/test_linearizer.py
@@ -20,7 +20,7 @@ from twisted.internet import defer, reactor
 from twisted.internet.defer import CancelledError
 
 from synapse.util import Clock, logcontext
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 
 from tests import unittest
 
diff --git a/tests/util/test_rwlock.py b/tests/util/test_rwlock.py
index 24194e3b25..7cd470be67 100644
--- a/tests/util/test_rwlock.py
+++ b/tests/util/test_rwlock.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 
 
-from synapse.util.async import ReadWriteLock
+from synapse.util.async_helpers import ReadWriteLock
 
 from tests import unittest
 
diff --git a/tests/utils.py b/tests/utils.py
index 9bff3ff3b9..8d73797971 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -73,6 +73,15 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None
         config.block_events_without_consent_error = None
         config.media_storage_providers = []
         config.auto_join_rooms = []
+        config.limit_usage_by_mau = False
+        config.hs_disabled = False
+        config.hs_disabled_message = ""
+        config.max_mau_value = 50
+        config.mau_limits_reserved_threepids = []
+
+        # we need a sane default_room_version, otherwise attempts to create rooms will
+        # fail.
+        config.default_room_version = "1"
 
         # disable user directory updates, because they get done in the
         # background, which upsets the test runner.
@@ -118,6 +127,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None
             database_engine=db_engine,
             room_list_handler=object(),
             tls_server_context_factory=Mock(),
+            tls_client_options_factory=Mock(),
             reactor=reactor,
             **kargs
         )
@@ -138,6 +148,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None
             database_engine=db_engine,
             room_list_handler=object(),
             tls_server_context_factory=Mock(),
+            tls_client_options_factory=Mock(),
             reactor=reactor,
             **kargs
         )
@@ -146,8 +157,9 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None
     # Need to let the HS build an auth handler and then mess with it
     # because AuthHandler's constructor requires the HS, so we can't make one
     # beforehand and pass it in to the HS's constructor (chicken / egg)
-    hs.get_auth_handler().hash = lambda p: hashlib.md5(p).hexdigest()
-    hs.get_auth_handler().validate_hash = lambda p, h: hashlib.md5(p).hexdigest() == h
+    hs.get_auth_handler().hash = lambda p: hashlib.md5(p.encode('utf8')).hexdigest()
+    hs.get_auth_handler().validate_hash = lambda p, h: hashlib.md5(
+        p.encode('utf8')).hexdigest() == h
 
     fed = kargs.get("resource_for_federation", None)
     if fed:
@@ -220,8 +232,8 @@ class MockHttpResource(HttpServer):
         mock_content.configure_mock(**config)
         mock_request.content = mock_content
 
-        mock_request.method = http_method
-        mock_request.uri = path
+        mock_request.method = http_method.encode('ascii')
+        mock_request.uri = path.encode('ascii')
 
         mock_request.getClientIP.return_value = "-"