author    Matthew Hodgson <matthew@arasphere.net>  2018-07-23 10:03:28 +0100
committer GitHub <noreply@github.com>  2018-07-23 10:03:28 +0100
commit    9b34f3ea3af233d70412565290468092cf3c5984 (patch)
tree      dcfc00333a843cdc3a011249ef78f8438804dbd4 /synapse
parent    add trailing comma
parent    Make the rest of the .iterwhatever go away (#3562)
download  synapse-9b34f3ea3af233d70412565290468092cf3c5984.tar.xz

Merge branch 'develop' into matthew/sync_deleted_devices
Diffstat (limited to 'synapse')
-rw-r--r--  synapse/__init__.py | 2
-rw-r--r--  synapse/api/auth.py | 124
-rwxr-xr-x  synapse/app/homeserver.py | 6
-rwxr-xr-x  synapse/app/synctl.py | 4
-rw-r--r--  synapse/config/voip.py | 4
-rw-r--r--  synapse/events/snapshot.py | 4
-rw-r--r--  synapse/federation/federation_base.py | 4
-rw-r--r--  synapse/federation/transaction_queue.py | 25
-rw-r--r--  synapse/handlers/federation.py | 205
-rw-r--r--  synapse/handlers/message.py | 4
-rw-r--r--  synapse/handlers/room_list.py | 6
-rw-r--r--  synapse/http/client.py | 6
-rw-r--r--  synapse/http/request_metrics.py | 9
-rw-r--r--  synapse/http/servlet.py | 2
-rw-r--r--  synapse/http/site.py | 7
-rw-r--r--  synapse/metrics/background_process_metrics.py | 179
-rw-r--r--  synapse/replication/slave/storage/receipts.py | 2
-rw-r--r--  synapse/rest/__init__.py | 43
-rw-r--r--  synapse/rest/client/transactions.py | 45
-rw-r--r--  synapse/rest/client/v1/admin.py | 169
-rw-r--r--  synapse/rest/client/v1/base.py | 2
-rw-r--r--  synapse/rest/client/v1/directory.py | 7
-rw-r--r--  synapse/rest/client/v1/initial_sync.py | 3
-rw-r--r--  synapse/rest/client/v1/logout.py | 3
-rw-r--r--  synapse/rest/client/v1/push_rule.py | 10
-rw-r--r--  synapse/rest/client/v1/pusher.py | 17
-rw-r--r--  synapse/rest/client/v1/room.py | 17
-rw-r--r--  synapse/rest/client/v1_only/__init__.py | 3
-rw-r--r--  synapse/rest/client/v1_only/base.py | 39
-rw-r--r--  synapse/rest/client/v1_only/register.py (renamed from synapse/rest/client/v1/register.py) | 37
-rw-r--r--  synapse/rest/client/v2_alpha/account.py | 49
-rw-r--r--  synapse/rest/client/v2_alpha/devices.py | 26
-rw-r--r--  synapse/rest/client/v2_alpha/register.py | 30
-rw-r--r--  synapse/rest/client/v2_alpha/report_event.py | 26
-rw-r--r--  synapse/rest/client/v2_alpha/sendtodevice.py | 2
-rw-r--r--  synapse/rest/media/v1/identicon_resource.py | 6
-rw-r--r--  synapse/rest/media/v1/preview_url_resource.py | 5
-rw-r--r--  synapse/rest/media/v1/upload_resource.py | 5
-rw-r--r--  synapse/secrets.py | 42
-rw-r--r--  synapse/server.py | 5
-rw-r--r--  synapse/state.py | 6
-rw-r--r--  synapse/storage/_base.py | 2
-rw-r--r--  synapse/storage/background_updates.py | 10
-rw-r--r--  synapse/storage/client_ips.py | 15
-rw-r--r--  synapse/storage/events.py | 10
-rw-r--r--  synapse/storage/events_worker.py | 10
-rw-r--r--  synapse/storage/receipts.py | 25
-rw-r--r--  synapse/streams/config.py | 30
-rw-r--r--  synapse/util/async.py | 145
-rw-r--r--  synapse/util/caches/expiringcache.py | 6
-rw-r--r--  synapse/util/caches/stream_change_cache.py | 15
-rw-r--r--  synapse/util/distributor.py | 48
-rw-r--r--  synapse/util/logcontext.py | 11
-rw-r--r--  synapse/util/metrics.py | 19
-rw-r--r--  synapse/util/ratelimitutils.py | 33
-rw-r--r--  synapse/visibility.py | 159
56 files changed, 1056 insertions(+), 672 deletions(-)
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 3cde33c0d7..5c0f2f83aa 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -17,4 +17,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
 
-__version__ = "0.32.2"
+__version__ = "0.33.0"
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 6dec862fec..bc629832d9 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -193,7 +193,7 @@ class Auth(object):
                     synapse.types.create_requester(user_id, app_service=app_service)
                 )
 
-            access_token = get_access_token_from_request(
+            access_token = self.get_access_token_from_request(
                 request, self.TOKEN_NOT_FOUND_HTTP_STATUS
             )
 
@@ -239,7 +239,7 @@ class Auth(object):
     @defer.inlineCallbacks
     def _get_appservice_user_id(self, request):
         app_service = self.store.get_app_service_by_token(
-            get_access_token_from_request(
+            self.get_access_token_from_request(
                 request, self.TOKEN_NOT_FOUND_HTTP_STATUS
             )
         )
@@ -513,7 +513,7 @@ class Auth(object):
 
     def get_appservice_by_req(self, request):
         try:
-            token = get_access_token_from_request(
+            token = self.get_access_token_from_request(
                 request, self.TOKEN_NOT_FOUND_HTTP_STATUS
             )
             service = self.store.get_app_service_by_token(token)
@@ -673,67 +673,67 @@ class Auth(object):
                 " edit its room list entry"
             )
 
+    @staticmethod
+    def has_access_token(request):
+        """Checks if the request has an access_token.
 
-def has_access_token(request):
-    """Checks if the request has an access_token.
+        Returns:
+            bool: False if no access_token was given, True otherwise.
+        """
+        query_params = request.args.get("access_token")
+        auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
+        return bool(query_params) or bool(auth_headers)
 
-    Returns:
-        bool: False if no access_token was given, True otherwise.
-    """
-    query_params = request.args.get("access_token")
-    auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
-    return bool(query_params) or bool(auth_headers)
-
-
-def get_access_token_from_request(request, token_not_found_http_status=401):
-    """Extracts the access_token from the request.
-
-    Args:
-        request: The http request.
-        token_not_found_http_status(int): The HTTP status code to set in the
-            AuthError if the token isn't found. This is used in some of the
-            legacy APIs to change the status code to 403 from the default of
-            401 since some of the old clients depended on auth errors returning
-            403.
-    Returns:
-        str: The access_token
-    Raises:
-        AuthError: If there isn't an access_token in the request.
-    """
+    @staticmethod
+    def get_access_token_from_request(request, token_not_found_http_status=401):
+        """Extracts the access_token from the request.
 
-    auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
-    query_params = request.args.get(b"access_token")
-    if auth_headers:
-        # Try the get the access_token from a "Authorization: Bearer"
-        # header
-        if query_params is not None:
-            raise AuthError(
-                token_not_found_http_status,
-                "Mixing Authorization headers and access_token query parameters.",
-                errcode=Codes.MISSING_TOKEN,
-            )
-        if len(auth_headers) > 1:
-            raise AuthError(
-                token_not_found_http_status,
-                "Too many Authorization headers.",
-                errcode=Codes.MISSING_TOKEN,
-            )
-        parts = auth_headers[0].split(" ")
-        if parts[0] == "Bearer" and len(parts) == 2:
-            return parts[1]
+        Args:
+            request: The http request.
+            token_not_found_http_status(int): The HTTP status code to set in the
+                AuthError if the token isn't found. This is used in some of the
+                legacy APIs to change the status code to 403 from the default of
+                401 since some of the old clients depended on auth errors returning
+                403.
+        Returns:
+            str: The access_token
+        Raises:
+            AuthError: If there isn't an access_token in the request.
+        """
+
+        auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
+        query_params = request.args.get(b"access_token")
+        if auth_headers:
+            # Try to get the access_token from an "Authorization: Bearer"
+            # header
+            if query_params is not None:
+                raise AuthError(
+                    token_not_found_http_status,
+                    "Mixing Authorization headers and access_token query parameters.",
+                    errcode=Codes.MISSING_TOKEN,
+                )
+            if len(auth_headers) > 1:
+                raise AuthError(
+                    token_not_found_http_status,
+                    "Too many Authorization headers.",
+                    errcode=Codes.MISSING_TOKEN,
+                )
+            parts = auth_headers[0].split(" ")
+            if parts[0] == "Bearer" and len(parts) == 2:
+                return parts[1]
+            else:
+                raise AuthError(
+                    token_not_found_http_status,
+                    "Invalid Authorization header.",
+                    errcode=Codes.MISSING_TOKEN,
+                )
         else:
-            raise AuthError(
-                token_not_found_http_status,
-                "Invalid Authorization header.",
-                errcode=Codes.MISSING_TOKEN,
-            )
-    else:
-        # Try to get the access_token from the query params.
-        if not query_params:
-            raise AuthError(
-                token_not_found_http_status,
-                "Missing access token.",
-                errcode=Codes.MISSING_TOKEN
-            )
+            # Try to get the access_token from the query params.
+            if not query_params:
+                raise AuthError(
+                    token_not_found_http_status,
+                    "Missing access token.",
+                    errcode=Codes.MISSING_TOKEN
+                )
 
-        return query_params[0]
+            return query_params[0]
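
With both helpers folded into `Auth` as static methods, call sites reach them through the auth object rather than via a module-level import. A minimal sketch of the new calling convention (the servlet below is hypothetical, not part of this diff):

```python
class ExampleServlet(object):
    """Hypothetical servlet illustrating the new calling convention."""

    def __init__(self, hs):
        self.auth = hs.get_auth()

    def on_GET(self, request):
        # Both helpers are @staticmethod on Auth, so they are reached via
        # the instance (or the class) instead of a free function.
        if not self.auth.has_access_token(request):
            return (401, {"error": "Missing access token"})
        token = self.auth.get_access_token_from_request(request)
        return (200, {"token_length": len(token)})
```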
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 14e6dca522..2ad1beb8d8 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -18,6 +18,8 @@ import logging
 import os
 import sys
 
+from six import iteritems
+
 from twisted.application import service
 from twisted.internet import defer, reactor
 from twisted.web.resource import EncodingResourceWrapper, NoResource
@@ -442,7 +444,7 @@ def run(hs):
         stats["total_nonbridged_users"] = total_nonbridged_users
 
         daily_user_type_results = yield hs.get_datastore().count_daily_user_type()
-        for name, count in daily_user_type_results.iteritems():
+        for name, count in iteritems(daily_user_type_results):
             stats["daily_user_type_" + name] = count
 
         room_count = yield hs.get_datastore().get_room_count()
@@ -453,7 +455,7 @@ def run(hs):
         stats["daily_messages"] = yield hs.get_datastore().count_daily_messages()
 
         r30_results = yield hs.get_datastore().count_r30_users()
-        for name, count in r30_results.iteritems():
+        for name, count in iteritems(r30_results):
             stats["r30_users_" + name] = count
 
         daily_sent_messages = yield hs.get_datastore().count_daily_sent_messages()
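
For context, `six.iteritems(d)` dispatches to `d.iteritems()` on Python 2 and `d.items()` on Python 3, so these hunks keep lazy iteration without the Python-2-only method name. A quick self-contained sketch with illustrative data:

```python
from six import iteritems

stats = {}
daily_user_type_results = {"native": 3, "bridged": 1}  # illustrative data

# Identical behaviour on Python 2 and 3: no intermediate list on
# Python 2, and no AttributeError on Python 3.
for name, count in iteritems(daily_user_type_results):
    stats["daily_user_type_" + name] = count
```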
diff --git a/synapse/app/synctl.py b/synapse/app/synctl.py
index 68acc15a9a..d658f967ba 100755
--- a/synapse/app/synctl.py
+++ b/synapse/app/synctl.py
@@ -25,6 +25,8 @@ import subprocess
 import sys
 import time
 
+from six import iteritems
+
 import yaml
 
 SYNAPSE = [sys.executable, "-B", "-m", "synapse.app.homeserver"]
@@ -173,7 +175,7 @@ def main():
         os.environ["SYNAPSE_CACHE_FACTOR"] = str(cache_factor)
 
     cache_factors = config.get("synctl_cache_factors", {})
-    for cache_name, factor in cache_factors.iteritems():
+    for cache_name, factor in iteritems(cache_factors):
         os.environ["SYNAPSE_CACHE_FACTOR_" + cache_name.upper()] = str(factor)
 
     worker_configfiles = []
diff --git a/synapse/config/voip.py b/synapse/config/voip.py
index 3a4e16fa96..d07bd24ffd 100644
--- a/synapse/config/voip.py
+++ b/synapse/config/voip.py
@@ -30,10 +30,10 @@ class VoipConfig(Config):
         ## Turn ##
 
         # The public URIs of the TURN server to give to clients
-        turn_uris: []
+        #turn_uris: []
 
         # The shared secret used to compute passwords for the TURN server
-        turn_shared_secret: "YOUR_SHARED_SECRET"
+        #turn_shared_secret: "YOUR_SHARED_SECRET"
 
         # The Username and password if the TURN server needs them and
         # does not use a token
diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py
index bcd9bb5946..f83a1581a6 100644
--- a/synapse/events/snapshot.py
+++ b/synapse/events/snapshot.py
@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from six import iteritems
+
 from frozendict import frozendict
 
 from twisted.internet import defer
@@ -159,7 +161,7 @@ def _encode_state_dict(state_dict):
 
     return [
         (etype, state_key, v)
-        for (etype, state_key), v in state_dict.iteritems()
+        for (etype, state_key), v in iteritems(state_dict)
     ]
 
 
diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py
index f0c7a06718..c11798093d 100644
--- a/synapse/federation/federation_base.py
+++ b/synapse/federation/federation_base.py
@@ -23,7 +23,7 @@ from synapse.api.errors import Codes, SynapseError
 from synapse.crypto.event_signing import check_event_content_hash
 from synapse.events import FrozenEvent
 from synapse.events.utils import prune_event
-from synapse.http.servlet import assert_params_in_request
+from synapse.http.servlet import assert_params_in_dict
 from synapse.util import logcontext, unwrapFirstError
 
 logger = logging.getLogger(__name__)
@@ -199,7 +199,7 @@ def event_from_pdu_json(pdu_json, outlier=False):
     """
     # we could probably enforce a bunch of other fields here (room_id, sender,
     # origin, etc etc)
-    assert_params_in_request(pdu_json, ('event_id', 'type', 'depth'))
+    assert_params_in_dict(pdu_json, ('event_id', 'type', 'depth'))
 
     depth = pdu_json['depth']
     if not isinstance(depth, six.integer_types):
diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py
index 5a956ecfb3..6996d6b695 100644
--- a/synapse/federation/transaction_queue.py
+++ b/synapse/federation/transaction_queue.py
@@ -30,7 +30,8 @@ from synapse.metrics import (
     sent_edus_counter,
     sent_transactions_counter,
 )
-from synapse.util import PreserveLoggingContext, logcontext
+from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.util import logcontext
 from synapse.util.metrics import measure_func
 from synapse.util.retryutils import NotRetryingDestination, get_retry_limiter
 
@@ -165,10 +166,11 @@ class TransactionQueue(object):
         if self._is_processing:
             return
 
-        # fire off a processing loop in the background. It's likely it will
-        # outlast the current request, so run it in the sentinel logcontext.
-        with PreserveLoggingContext():
-            self._process_event_queue_loop()
+        # fire off a processing loop in the background
+        run_as_background_process(
+            "process_event_queue_for_federation",
+            self._process_event_queue_loop,
+        )
 
     @defer.inlineCallbacks
     def _process_event_queue_loop(self):
@@ -432,14 +434,11 @@ class TransactionQueue(object):
 
         logger.debug("TX [%s] Starting transaction loop", destination)
 
-        # Drop the logcontext before starting the transaction. It doesn't
-        # really make sense to log all the outbound transactions against
-        # whatever path led us to this point: that's pretty arbitrary really.
-        #
-        # (this also means we can fire off _perform_transaction without
-        # yielding)
-        with logcontext.PreserveLoggingContext():
-            self._transaction_transmission_loop(destination)
+        run_as_background_process(
+            "federation_transaction_transmission_loop",
+            self._transaction_transmission_loop,
+            destination,
+        )
 
     @defer.inlineCallbacks
     def _transaction_transmission_loop(self, destination):
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index d3ecebd29f..a6d391c4e8 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -21,8 +21,8 @@ import logging
 import sys
 
 import six
-from six import iteritems
-from six.moves import http_client
+from six import iteritems, itervalues
+from six.moves import http_client, zip
 
 from signedjson.key import decode_verify_key_bytes
 from signedjson.sign import verify_signed_json
@@ -43,7 +43,6 @@ from synapse.crypto.event_signing import (
     add_hashes_and_signatures,
     compute_event_signature,
 )
-from synapse.events.utils import prune_event
 from synapse.events.validator import EventValidator
 from synapse.state import resolve_events_with_factory
 from synapse.types import UserID, get_domain_from_id
@@ -52,8 +51,8 @@ from synapse.util.async import Linearizer
 from synapse.util.distributor import user_joined_room
 from synapse.util.frozenutils import unfreeze
 from synapse.util.logutils import log_function
-from synapse.util.metrics import measure_func
 from synapse.util.retryutils import NotRetryingDestination
+from synapse.visibility import filter_events_for_server
 
 from ._base import BaseHandler
 
@@ -501,137 +500,6 @@ class FederationHandler(BaseHandler):
                     user = UserID.from_string(event.state_key)
                     yield user_joined_room(self.distributor, user, event.room_id)
 
-    @measure_func("_filter_events_for_server")
-    @defer.inlineCallbacks
-    def _filter_events_for_server(self, server_name, room_id, events):
-        """Filter the given events for the given server, redacting those the
-        server can't see.
-
-        Assumes the server is currently in the room.
-
-        Returns
-            list[FrozenEvent]
-        """
-        # First lets check to see if all the events have a history visibility
-        # of "shared" or "world_readable". If thats the case then we don't
-        # need to check membership (as we know the server is in the room).
-        event_to_state_ids = yield self.store.get_state_ids_for_events(
-            frozenset(e.event_id for e in events),
-            types=(
-                (EventTypes.RoomHistoryVisibility, ""),
-            )
-        )
-
-        visibility_ids = set()
-        for sids in event_to_state_ids.itervalues():
-            hist = sids.get((EventTypes.RoomHistoryVisibility, ""))
-            if hist:
-                visibility_ids.add(hist)
-
-        # If we failed to find any history visibility events then the default
-        # is "shared" visiblity.
-        if not visibility_ids:
-            defer.returnValue(events)
-
-        event_map = yield self.store.get_events(visibility_ids)
-        all_open = all(
-            e.content.get("history_visibility") in (None, "shared", "world_readable")
-            for e in event_map.itervalues()
-        )
-
-        if all_open:
-            defer.returnValue(events)
-
-        # Ok, so we're dealing with events that have non-trivial visibility
-        # rules, so we need to also get the memberships of the room.
-
-        event_to_state_ids = yield self.store.get_state_ids_for_events(
-            frozenset(e.event_id for e in events),
-            types=(
-                (EventTypes.RoomHistoryVisibility, ""),
-                (EventTypes.Member, None),
-            )
-        )
-
-        # We only want to pull out member events that correspond to the
-        # server's domain.
-
-        def check_match(id):
-            try:
-                return server_name == get_domain_from_id(id)
-            except Exception:
-                return False
-
-        # Parses mapping `event_id -> (type, state_key) -> state event_id`
-        # to get all state ids that we're interested in.
-        event_map = yield self.store.get_events([
-            e_id
-            for key_to_eid in list(event_to_state_ids.values())
-            for key, e_id in key_to_eid.items()
-            if key[0] != EventTypes.Member or check_match(key[1])
-        ])
-
-        event_to_state = {
-            e_id: {
-                key: event_map[inner_e_id]
-                for key, inner_e_id in key_to_eid.iteritems()
-                if inner_e_id in event_map
-            }
-            for e_id, key_to_eid in event_to_state_ids.iteritems()
-        }
-
-        erased_senders = yield self.store.are_users_erased(
-            e.sender for e in events,
-        )
-
-        def redact_disallowed(event, state):
-            # if the sender has been gdpr17ed, always return a redacted
-            # copy of the event.
-            if erased_senders[event.sender]:
-                logger.info(
-                    "Sender of %s has been erased, redacting",
-                    event.event_id,
-                )
-                return prune_event(event)
-
-            if not state:
-                return event
-
-            history = state.get((EventTypes.RoomHistoryVisibility, ''), None)
-            if history:
-                visibility = history.content.get("history_visibility", "shared")
-                if visibility in ["invited", "joined"]:
-                    # We now loop through all state events looking for
-                    # membership states for the requesting server to determine
-                    # if the server is either in the room or has been invited
-                    # into the room.
-                    for ev in state.itervalues():
-                        if ev.type != EventTypes.Member:
-                            continue
-                        try:
-                            domain = get_domain_from_id(ev.state_key)
-                        except Exception:
-                            continue
-
-                        if domain != server_name:
-                            continue
-
-                        memtype = ev.membership
-                        if memtype == Membership.JOIN:
-                            return event
-                        elif memtype == Membership.INVITE:
-                            if visibility == "invited":
-                                return event
-                    else:
-                        return prune_event(event)
-
-            return event
-
-        defer.returnValue([
-            redact_disallowed(e, event_to_state[e.event_id])
-            for e in events
-        ])
-
     @log_function
     @defer.inlineCallbacks
     def backfill(self, dest, room_id, limit, extremities):
@@ -863,7 +731,7 @@ class FederationHandler(BaseHandler):
             """
             joined_users = [
                 (state_key, int(event.depth))
-                for (e_type, state_key), event in state.iteritems()
+                for (e_type, state_key), event in iteritems(state)
                 if e_type == EventTypes.Member
                 and event.membership == Membership.JOIN
             ]
@@ -880,7 +748,7 @@ class FederationHandler(BaseHandler):
                 except Exception:
                     pass
 
-            return sorted(joined_domains.iteritems(), key=lambda d: d[1])
+            return sorted(joined_domains.items(), key=lambda d: d[1])
 
         curr_domains = get_domains_from_state(curr_state)
 
@@ -943,7 +811,7 @@ class FederationHandler(BaseHandler):
         tried_domains = set(likely_domains)
         tried_domains.add(self.server_name)
 
-        event_ids = list(extremities.iterkeys())
+        event_ids = list(extremities.keys())
 
         logger.debug("calling resolve_state_groups in _maybe_backfill")
         resolve = logcontext.preserve_fn(
@@ -959,15 +827,15 @@ class FederationHandler(BaseHandler):
         states = dict(zip(event_ids, [s.state for s in states]))
 
         state_map = yield self.store.get_events(
-            [e_id for ids in states.itervalues() for e_id in ids.itervalues()],
+            [e_id for ids in itervalues(states) for e_id in itervalues(ids)],
             get_prev_content=False
         )
         states = {
             key: {
                 k: state_map[e_id]
-                for k, e_id in state_dict.iteritems()
+                for k, e_id in iteritems(state_dict)
                 if e_id in state_map
-            } for key, state_dict in states.iteritems()
+            } for key, state_dict in iteritems(states)
         }
 
         for e_id, _ in sorted_extremeties_tuple:
@@ -1038,16 +906,6 @@ class FederationHandler(BaseHandler):
             [auth_id for auth_id, _ in event.auth_events],
             include_given=True
         )
-
-        for event in auth:
-            event.signatures.update(
-                compute_event_signature(
-                    event,
-                    self.hs.hostname,
-                    self.hs.config.signing_key[0]
-                )
-            )
-
         defer.returnValue([e for e in auth])
 
     @log_function
@@ -1503,18 +1361,6 @@ class FederationHandler(BaseHandler):
                     del results[(event.type, event.state_key)]
 
             res = list(results.values())
-            for event in res:
-                # We sign these again because there was a bug where we
-                # incorrectly signed things the first time round
-                if self.is_mine_id(event.event_id):
-                    event.signatures.update(
-                        compute_event_signature(
-                            event,
-                            self.hs.hostname,
-                            self.hs.config.signing_key[0]
-                        )
-                    )
-
             defer.returnValue(res)
         else:
             defer.returnValue([])
@@ -1558,7 +1404,7 @@ class FederationHandler(BaseHandler):
             limit
         )
 
-        events = yield self._filter_events_for_server(origin, room_id, events)
+        events = yield filter_events_for_server(self.store, origin, events)
 
         defer.returnValue(events)
 
@@ -1586,18 +1432,6 @@ class FederationHandler(BaseHandler):
         )
 
         if event:
-            if self.is_mine_id(event.event_id):
-                # FIXME: This is a temporary work around where we occasionally
-                # return events slightly differently than when they were
-                # originally signed
-                event.signatures.update(
-                    compute_event_signature(
-                        event,
-                        self.hs.hostname,
-                        self.hs.config.signing_key[0]
-                    )
-                )
-
             in_room = yield self.auth.check_host_in_room(
                 event.room_id,
                 origin
@@ -1605,8 +1439,8 @@ class FederationHandler(BaseHandler):
             if not in_room:
                 raise AuthError(403, "Host not in room.")
 
-            events = yield self._filter_events_for_server(
-                origin, event.room_id, [event]
+            events = yield filter_events_for_server(
+                self.store, origin, [event],
             )
             event = events[0]
             defer.returnValue(event)
@@ -1681,7 +1515,7 @@ class FederationHandler(BaseHandler):
         yield self.store.persist_events(
             [
                 (ev_info["event"], context)
-                for ev_info, context in itertools.izip(event_infos, contexts)
+                for ev_info, context in zip(event_infos, contexts)
             ],
             backfilled=backfilled,
         )
@@ -1862,15 +1696,6 @@ class FederationHandler(BaseHandler):
             local_auth_chain, remote_auth_chain
         )
 
-        for event in ret["auth_chain"]:
-            event.signatures.update(
-                compute_event_signature(
-                    event,
-                    self.hs.hostname,
-                    self.hs.config.signing_key[0]
-                )
-            )
-
         logger.debug("on_query_auth returning: %s", ret)
 
         defer.returnValue(ret)
@@ -1896,8 +1721,8 @@ class FederationHandler(BaseHandler):
             min_depth=min_depth,
         )
 
-        missing_events = yield self._filter_events_for_server(
-            origin, room_id, missing_events,
+        missing_events = yield filter_events_for_server(
+            self.store, origin, missing_events,
         )
 
         defer.returnValue(missing_events)
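
The per-handler `_filter_events_for_server` is replaced throughout by the shared `synapse.visibility.filter_events_for_server`, which takes the datastore and the requesting server's name explicitly instead of relying on handler state. A sketch of the new signature (the wrapper function itself is hypothetical):

```python
from twisted.internet import defer

from synapse.visibility import filter_events_for_server

@defer.inlineCallbacks
def redact_for_destination(store, destination, events):
    # Returns the events with anything `destination` is not allowed to
    # see replaced by a redacted (pruned) copy, per the room's
    # history_visibility and the destination's memberships.
    filtered = yield filter_events_for_server(store, destination, events)
    defer.returnValue(filtered)
```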
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index a39b852ceb..abc07ea87c 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -33,7 +33,7 @@ from synapse.events.utils import serialize_event
 from synapse.events.validator import EventValidator
 from synapse.replication.http.send_event import send_event_to_master
 from synapse.types import RoomAlias, RoomStreamToken, UserID
-from synapse.util.async import Limiter, ReadWriteLock
+from synapse.util.async import Linearizer, ReadWriteLock
 from synapse.util.frozenutils import frozendict_json_encoder
 from synapse.util.logcontext import run_in_background
 from synapse.util.metrics import measure_func
@@ -427,7 +427,7 @@ class EventCreationHandler(object):
 
         # We arbitrarily limit concurrent event creation for a room to 5.
         # This is to stop us from diverging history *too* much.
-        self.limiter = Limiter(max_count=5)
+        self.limiter = Linearizer(max_count=5, name="room_event_creation_limit")
 
         self.action_generator = hs.get_action_generator()
 
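
`Linearizer(max_count=5)` keeps the old `Limiter` semantics: up to five callers may hold the lock for a given key at once, and later callers queue behind them. Call sites are unchanged by the rename; a rough sketch, assuming a handler method and a hypothetical persistence helper:

```python
from twisted.internet import defer

@defer.inlineCallbacks
def create_room_event(self, room_id, event):
    # At most five concurrent entrants per room_id; a sixth caller
    # waits here until one of the five exits the block.
    with (yield self.limiter.queue(room_id)):
        yield self._persist(event)  # hypothetical helper
```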
diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py
index 1abd45297b..828229f5c3 100644
--- a/synapse/handlers/room_list.py
+++ b/synapse/handlers/room_list.py
@@ -38,7 +38,7 @@ REMOTE_ROOM_LIST_POLL_INTERVAL = 60 * 1000
 
 
 # This is used to indicate we should only return rooms published to the main list.
-EMTPY_THIRD_PARTY_ID = ThirdPartyInstanceID(None, None)
+EMPTY_THIRD_PARTY_ID = ThirdPartyInstanceID(None, None)
 
 
 class RoomListHandler(BaseHandler):
@@ -50,7 +50,7 @@ class RoomListHandler(BaseHandler):
 
     def get_local_public_room_list(self, limit=None, since_token=None,
                                    search_filter=None,
-                                   network_tuple=EMTPY_THIRD_PARTY_ID,):
+                                   network_tuple=EMPTY_THIRD_PARTY_ID,):
         """Generate a local public room list.
 
         There are multiple different lists: the main one plus one per third
@@ -87,7 +87,7 @@ class RoomListHandler(BaseHandler):
     @defer.inlineCallbacks
     def _get_public_room_list(self, limit=None, since_token=None,
                               search_filter=None,
-                              network_tuple=EMTPY_THIRD_PARTY_ID,):
+                              network_tuple=EMPTY_THIRD_PARTY_ID,):
         if since_token and since_token != "END":
             since_token = RoomListNextBatch.from_token(since_token)
         else:
diff --git a/synapse/http/client.py b/synapse/http/client.py
index d6a0d75b2b..25b6307884 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -26,9 +26,11 @@ from OpenSSL.SSL import VERIFY_NONE
 from twisted.internet import defer, protocol, reactor, ssl, task
 from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS
 from twisted.web._newclient import ResponseDone
-from twisted.web.client import Agent, BrowserLikeRedirectAgent, ContentDecoderAgent
-from twisted.web.client import FileBodyProducer as TwistedFileBodyProducer
 from twisted.web.client import (
+    Agent,
+    BrowserLikeRedirectAgent,
+    ContentDecoderAgent,
+    FileBodyProducer as TwistedFileBodyProducer,
     GzipDecoder,
     HTTPConnectionPool,
     PartialDownloadError,
diff --git a/synapse/http/request_metrics.py b/synapse/http/request_metrics.py
index f24b4b949c..588e280571 100644
--- a/synapse/http/request_metrics.py
+++ b/synapse/http/request_metrics.py
@@ -38,7 +38,8 @@ outgoing_responses_counter = Counter(
 )
 
 response_timer = Histogram(
-    "synapse_http_server_response_time_seconds", "sec", ["method", "servlet", "tag"]
+    "synapse_http_server_response_time_seconds", "sec",
+    ["method", "servlet", "tag", "code"],
 )
 
 response_ru_utime = Counter(
@@ -171,11 +172,13 @@ class RequestMetrics(object):
                 )
                 return
 
-        outgoing_responses_counter.labels(request.method, str(request.code)).inc()
+        response_code = str(request.code)
+
+        outgoing_responses_counter.labels(request.method, response_code).inc()
 
         response_count.labels(request.method, self.name, tag).inc()
 
-        response_timer.labels(request.method, self.name, tag).observe(
+        response_timer.labels(request.method, self.name, tag, response_code).observe(
             time_sec - self.start
         )
 
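
Because the histogram gains a `code` label, every observation must now supply four label values. A standalone prometheus_client sketch (the metric name is changed here to avoid clashing with the real one):

```python
from prometheus_client import Histogram

example_timer = Histogram(
    "example_response_time_seconds", "sec",
    ["method", "servlet", "tag", "code"],
)

# All four label values, including the response code, must be passed to
# .labels() before observing; prometheus_client raises ValueError otherwise.
example_timer.labels("GET", "ExampleServlet", "", "200").observe(0.032)
```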
diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py
index cf6723563a..882816dc8f 100644
--- a/synapse/http/servlet.py
+++ b/synapse/http/servlet.py
@@ -206,7 +206,7 @@ def parse_json_object_from_request(request, allow_empty_body=False):
     return content
 
 
-def assert_params_in_request(body, required):
+def assert_params_in_dict(body, required):
     absent = []
     for k in required:
         if k not in body:
diff --git a/synapse/http/site.py b/synapse/http/site.py
index 21e26f9c5e..5fd30a4c2c 100644
--- a/synapse/http/site.py
+++ b/synapse/http/site.py
@@ -20,7 +20,7 @@ from twisted.web.server import Request, Site
 
 from synapse.http import redact_uri
 from synapse.http.request_metrics import RequestMetrics
-from synapse.util.logcontext import LoggingContext, ContextResourceUsage
+from synapse.util.logcontext import ContextResourceUsage, LoggingContext
 
 logger = logging.getLogger(__name__)
 
@@ -42,9 +42,10 @@ class SynapseRequest(Request):
     which is handling the request, and returns a context manager.
 
     """
-    def __init__(self, site, *args, **kw):
-        Request.__init__(self, *args, **kw)
+    def __init__(self, site, channel, *args, **kw):
+        Request.__init__(self, channel, *args, **kw)
         self.site = site
+        self._channel = channel
         self.authenticated_entity = None
         self.start_time = 0
 
diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py
new file mode 100644
index 0000000000..9d820e44a6
--- /dev/null
+++ b/synapse/metrics/background_process_metrics.py
@@ -0,0 +1,179 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import six
+
+from prometheus_client.core import REGISTRY, Counter, GaugeMetricFamily
+
+from twisted.internet import defer
+
+from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
+
+_background_process_start_count = Counter(
+    "synapse_background_process_start_count",
+    "Number of background processes started",
+    ["name"],
+)
+
+# we set registry=None in all of these to stop them getting registered with
+# the default registry. Instead we collect them all via the CustomCollector,
+# which ensures that we can update them before they are collected.
+#
+_background_process_ru_utime = Counter(
+    "synapse_background_process_ru_utime_seconds",
+    "User CPU time used by background processes, in seconds",
+    ["name"],
+    registry=None,
+)
+
+_background_process_ru_stime = Counter(
+    "synapse_background_process_ru_stime_seconds",
+    "System CPU time used by background processes, in seconds",
+    ["name"],
+    registry=None,
+)
+
+_background_process_db_txn_count = Counter(
+    "synapse_background_process_db_txn_count",
+    "Number of database transactions done by background processes",
+    ["name"],
+    registry=None,
+)
+
+_background_process_db_txn_duration = Counter(
+    "synapse_background_process_db_txn_duration_seconds",
+    ("Seconds spent by background processes waiting for database "
+     "transactions, excluding scheduling time"),
+    ["name"],
+    registry=None,
+)
+
+_background_process_db_sched_duration = Counter(
+    "synapse_background_process_db_sched_duration_seconds",
+    "Seconds spent by background processes waiting for database connections",
+    ["name"],
+    registry=None,
+)
+
+# map from description to a counter, so that we can name our logcontexts
+# incrementally. (It actually duplicates _background_process_start_count, but
+# it's much simpler to do so than to try to combine them.)
+_background_process_counts = dict()  # type: dict[str, int]
+
+# map from description to the currently running background processes.
+#
+# it's kept as a dict of sets rather than a big set so that we can keep track
+# of process descriptions that no longer have any active processes.
+_background_processes = dict()  # type: dict[str, set[_BackgroundProcess]]
+
+
+class _Collector(object):
+    """A custom metrics collector for the background process metrics.
+
+    Ensures that all of the metrics are up-to-date with any in-flight processes
+    before they are returned.
+    """
+    def collect(self):
+        background_process_in_flight_count = GaugeMetricFamily(
+            "synapse_background_process_in_flight_count",
+            "Number of background processes in flight",
+            labels=["name"],
+        )
+
+        for desc, processes in six.iteritems(_background_processes):
+            background_process_in_flight_count.add_metric(
+                (desc,), len(processes),
+            )
+            for process in processes:
+                process.update_metrics()
+
+        yield background_process_in_flight_count
+
+        # now we need to run collect() over each of the static Counters, and
+        # yield each metric they return.
+        for m in (
+                _background_process_ru_utime,
+                _background_process_ru_stime,
+                _background_process_db_txn_count,
+                _background_process_db_txn_duration,
+                _background_process_db_sched_duration,
+        ):
+            for r in m.collect():
+                yield r
+
+
+REGISTRY.register(_Collector())
+
+
+class _BackgroundProcess(object):
+    def __init__(self, desc, ctx):
+        self.desc = desc
+        self._context = ctx
+        self._reported_stats = None
+
+    def update_metrics(self):
+        """Updates the metrics with values from this process."""
+        new_stats = self._context.get_resource_usage()
+        if self._reported_stats is None:
+            diff = new_stats
+        else:
+            diff = new_stats - self._reported_stats
+        self._reported_stats = new_stats
+
+        _background_process_ru_utime.labels(self.desc).inc(diff.ru_utime)
+        _background_process_ru_stime.labels(self.desc).inc(diff.ru_stime)
+        _background_process_db_txn_count.labels(self.desc).inc(
+            diff.db_txn_count,
+        )
+        _background_process_db_txn_duration.labels(self.desc).inc(
+            diff.db_txn_duration_sec,
+        )
+        _background_process_db_sched_duration.labels(self.desc).inc(
+            diff.db_sched_duration_sec,
+        )
+
+
+def run_as_background_process(desc, func, *args, **kwargs):
+    """Run the given function in its own logcontext, with resource metrics
+
+    This should be used to wrap processes which are fired off to run in the
+    background, instead of being associated with a particular request.
+
+    Args:
+        desc (str): a description for this background process type
+        func: a function, which may return a Deferred
+        args: positional args for func
+        kwargs: keyword args for func
+
+    Returns: None
+    """
+    @defer.inlineCallbacks
+    def run():
+        count = _background_process_counts.get(desc, 0)
+        _background_process_counts[desc] = count + 1
+        _background_process_start_count.labels(desc).inc()
+
+        with LoggingContext(desc) as context:
+            context.request = "%s-%i" % (desc, count)
+            proc = _BackgroundProcess(desc, context)
+            _background_processes.setdefault(desc, set()).add(proc)
+            try:
+                yield func(*args, **kwargs)
+            finally:
+                proc.update_metrics()
+                _background_processes[desc].remove(proc)
+
+    with PreserveLoggingContext():
+        run()
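
Per the docstring above, a caller fires off a background task and forgets about it; the task gets its own logcontext and its resource usage is attributed to the given name rather than to whichever request happened to trigger it. A minimal usage sketch (the class and method names here are hypothetical):

```python
from synapse.metrics.background_process_metrics import run_as_background_process

class ExampleQueue(object):
    """Hypothetical owner of a long-running background loop."""

    def start_processing(self):
        # Fire-and-forget: the loop runs in a logcontext named
        # "example_process_loop-<n>", and its CPU and DB usage are
        # reported under the "example_process_loop" label.
        run_as_background_process("example_process_loop", self._process_loop)

    def _process_loop(self):
        pass  # the real loop would typically be an inlineCallbacks function
```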
diff --git a/synapse/replication/slave/storage/receipts.py b/synapse/replication/slave/storage/receipts.py
index 7ab12b850f..ed12342f40 100644
--- a/synapse/replication/slave/storage/receipts.py
+++ b/synapse/replication/slave/storage/receipts.py
@@ -49,7 +49,7 @@ class SlavedReceiptsStore(ReceiptsWorkerStore, BaseSlavedStore):
 
     def invalidate_caches_for_receipt(self, room_id, receipt_type, user_id):
         self.get_receipts_for_user.invalidate((user_id, receipt_type))
-        self.get_linearized_receipts_for_room.invalidate_many((room_id,))
+        self._get_linearized_receipts_for_room.invalidate_many((room_id,))
         self.get_last_receipt_event_id_for_user.invalidate(
             (user_id, room_id, receipt_type)
         )
diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py
index 75c2a4ec8e..3418f06fd6 100644
--- a/synapse/rest/__init__.py
+++ b/synapse/rest/__init__.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2018 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,13 +14,24 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from six import PY3
+
 from synapse.http.server import JsonResource
 from synapse.rest.client import versions
-from synapse.rest.client.v1 import admin, directory, events, initial_sync
-from synapse.rest.client.v1 import login as v1_login
-from synapse.rest.client.v1 import logout, presence, profile, push_rule, pusher
-from synapse.rest.client.v1 import register as v1_register
-from synapse.rest.client.v1 import room, voip
+from synapse.rest.client.v1 import (
+    admin,
+    directory,
+    events,
+    initial_sync,
+    login as v1_login,
+    logout,
+    presence,
+    profile,
+    push_rule,
+    pusher,
+    room,
+    voip,
+)
 from synapse.rest.client.v2_alpha import (
     account,
     account_data,
@@ -42,6 +54,11 @@ from synapse.rest.client.v2_alpha import (
     user_directory,
 )
 
+if not PY3:
+    from synapse.rest.client.v1_only import (
+        register as v1_register,
+    )
+
 
 class ClientRestResource(JsonResource):
     """A resource for version 1 of the matrix client API."""
@@ -54,14 +71,22 @@ class ClientRestResource(JsonResource):
     def register_servlets(client_resource, hs):
         versions.register_servlets(client_resource)
 
-        # "v1"
-        room.register_servlets(hs, client_resource)
+        if not PY3:
+            # "v1" (Python 2 only)
+            v1_register.register_servlets(hs, client_resource)
+
+        # Deprecated in r0
+        initial_sync.register_servlets(hs, client_resource)
+        room.register_deprecated_servlets(hs, client_resource)
+
+        # Partially deprecated in r0
         events.register_servlets(hs, client_resource)
-        v1_register.register_servlets(hs, client_resource)
+
+        # "v1" + "r0"
+        room.register_servlets(hs, client_resource)
         v1_login.register_servlets(hs, client_resource)
         profile.register_servlets(hs, client_resource)
         presence.register_servlets(hs, client_resource)
-        initial_sync.register_servlets(hs, client_resource)
         directory.register_servlets(hs, client_resource)
         voip.register_servlets(hs, client_resource)
         admin.register_servlets(hs, client_resource)
diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py
index 7c01b438cb..00b1b3066e 100644
--- a/synapse/rest/client/transactions.py
+++ b/synapse/rest/client/transactions.py
@@ -17,38 +17,20 @@
 to ensure idempotency when performing PUTs using the REST API."""
 import logging
 
-from synapse.api.auth import get_access_token_from_request
 from synapse.util.async import ObservableDeferred
 from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
 logger = logging.getLogger(__name__)
 
-
-def get_transaction_key(request):
-    """A helper function which returns a transaction key that can be used
-    with TransactionCache for idempotent requests.
-
-    Idempotency is based on the returned key being the same for separate
-    requests to the same endpoint. The key is formed from the HTTP request
-    path and the access_token for the requesting user.
-
-    Args:
-        request (twisted.web.http.Request): The incoming request. Must
-        contain an access_token.
-    Returns:
-        str: A transaction key
-    """
-    token = get_access_token_from_request(request)
-    return request.path + "/" + token
-
-
 CLEANUP_PERIOD_MS = 1000 * 60 * 30  # 30 mins
 
 
 class HttpTransactionCache(object):
 
-    def __init__(self, clock):
-        self.clock = clock
+    def __init__(self, hs):
+        self.hs = hs
+        self.auth = self.hs.get_auth()
+        self.clock = self.hs.get_clock()
         self.transactions = {
             # $txn_key: (ObservableDeferred<(res_code, res_json_body)>, timestamp)
         }
@@ -56,6 +38,23 @@ class HttpTransactionCache(object):
         # for at *LEAST* 30 mins, and at *MOST* 60 mins.
         self.cleaner = self.clock.looping_call(self._cleanup, CLEANUP_PERIOD_MS)
 
+    def _get_transaction_key(self, request):
+        """A helper function which returns a transaction key that can be used
+        with TransactionCache for idempotent requests.
+
+        Idempotency is based on the returned key being the same for separate
+        requests to the same endpoint. The key is formed from the HTTP request
+        path and the access_token for the requesting user.
+
+        Args:
+            request (twisted.web.http.Request): The incoming request. Must
+            contain an access_token.
+        Returns:
+            str: A transaction key
+        """
+        token = self.auth.get_access_token_from_request(request)
+        return request.path + "/" + token
+
     def fetch_or_execute_request(self, request, fn, *args, **kwargs):
         """A helper function for fetch_or_execute which extracts
         a transaction key from the given request.
@@ -64,7 +63,7 @@ class HttpTransactionCache(object):
             fetch_or_execute
         """
         return self.fetch_or_execute(
-            get_transaction_key(request), fn, *args, **kwargs
+            self._get_transaction_key(request), fn, *args, **kwargs
         )
 
     def fetch_or_execute(self, txn_key, fn, *args, **kwargs):
diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py
index 2b091d61a5..9e9c175970 100644
--- a/synapse/rest/client/v1/admin.py
+++ b/synapse/rest/client/v1/admin.py
@@ -14,6 +14,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import hashlib
+import hmac
 import logging
 
 from six.moves import http_client
@@ -22,7 +24,12 @@ from twisted.internet import defer
 
 from synapse.api.constants import Membership
 from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError
-from synapse.http.servlet import parse_json_object_from_request
+from synapse.http.servlet import (
+    assert_params_in_dict,
+    parse_integer,
+    parse_json_object_from_request,
+    parse_string,
+)
 from synapse.types import UserID, create_requester
 
 from .base import ClientV1RestServlet, client_path_patterns
@@ -58,6 +65,125 @@ class UsersRestServlet(ClientV1RestServlet):
         defer.returnValue((200, ret))
 
 
+class UserRegisterServlet(ClientV1RestServlet):
+    """
+    Attributes:
+         NONCE_TIMEOUT (int): Seconds until a generated nonce won't be accepted
+         nonces (dict[str, int]): The nonces that we will accept. A dict of
+             nonce to the time it was generated, in int seconds.
+    """
+    PATTERNS = client_path_patterns("/admin/register")
+    NONCE_TIMEOUT = 60
+
+    def __init__(self, hs):
+        super(UserRegisterServlet, self).__init__(hs)
+        self.handlers = hs.get_handlers()
+        self.reactor = hs.get_reactor()
+        self.nonces = {}
+        self.hs = hs
+
+    def _clear_old_nonces(self):
+        """
+        Clear out old nonces that are older than NONCE_TIMEOUT.
+        """
+        now = int(self.reactor.seconds())
+
+        for k, v in list(self.nonces.items()):
+            if now - v > self.NONCE_TIMEOUT:
+                del self.nonces[k]
+
+    def on_GET(self, request):
+        """
+        Generate a new nonce.
+        """
+        self._clear_old_nonces()
+
+        nonce = self.hs.get_secrets().token_hex(64)
+        self.nonces[nonce] = int(self.reactor.seconds())
+        return (200, {"nonce": nonce.encode('ascii')})
+
+    @defer.inlineCallbacks
+    def on_POST(self, request):
+        self._clear_old_nonces()
+
+        if not self.hs.config.registration_shared_secret:
+            raise SynapseError(400, "Shared secret registration is not enabled")
+
+        body = parse_json_object_from_request(request)
+
+        if "nonce" not in body:
+            raise SynapseError(
+                400, "nonce must be specified", errcode=Codes.BAD_JSON,
+            )
+
+        nonce = body["nonce"]
+
+        if nonce not in self.nonces:
+            raise SynapseError(
+                400, "unrecognised nonce",
+            )
+
+        # Delete the nonce, so it can't be reused, even if it's invalid
+        del self.nonces[nonce]
+
+        if "username" not in body:
+            raise SynapseError(
+                400, "username must be specified", errcode=Codes.BAD_JSON,
+            )
+        else:
+            if (not isinstance(body['username'], str) or len(body['username']) > 512):
+                raise SynapseError(400, "Invalid username")
+
+            username = body["username"].encode("utf-8")
+            if b"\x00" in username:
+                raise SynapseError(400, "Invalid username")
+
+        if "password" not in body:
+            raise SynapseError(
+                400, "password must be specified", errcode=Codes.BAD_JSON,
+            )
+        else:
+            if (not isinstance(body['password'], str) or len(body['password']) > 512):
+                raise SynapseError(400, "Invalid password")
+
+            password = body["password"].encode("utf-8")
+            if b"\x00" in password:
+                raise SynapseError(400, "Invalid password")
+
+        admin = body.get("admin", None)
+        got_mac = body["mac"]
+
+        want_mac = hmac.new(
+            key=self.hs.config.registration_shared_secret.encode(),
+            digestmod=hashlib.sha1,
+        )
+        want_mac.update(nonce)
+        want_mac.update(b"\x00")
+        want_mac.update(username)
+        want_mac.update(b"\x00")
+        want_mac.update(password)
+        want_mac.update(b"\x00")
+        want_mac.update(b"admin" if admin else b"notadmin")
+        want_mac = want_mac.hexdigest()
+
+        if not hmac.compare_digest(want_mac, got_mac):
+            raise SynapseError(
+                403, "HMAC incorrect",
+            )
+
+        # Reuse the parts of RegisterRestServlet to reduce code duplication
+        from synapse.rest.client.v2_alpha.register import RegisterRestServlet
+        register = RegisterRestServlet(self.hs)
+
+        (user_id, _) = yield register.registration_handler.register(
+            localpart=username.lower(), password=password, admin=bool(admin),
+            generate_token=False,
+        )
+
+        result = yield register._create_registration_details(user_id, body)
+        defer.returnValue((200, result))
+
+
 class WhoisRestServlet(ClientV1RestServlet):
     PATTERNS = client_path_patterns("/admin/whois/(?P<user_id>[^/]*)")
 
@@ -98,16 +224,8 @@ class PurgeMediaCacheRestServlet(ClientV1RestServlet):
         if not is_admin:
             raise AuthError(403, "You are not a server admin")
 
-        before_ts = request.args.get("before_ts", None)
-        if not before_ts:
-            raise SynapseError(400, "Missing 'before_ts' arg")
-
-        logger.info("before_ts: %r", before_ts[0])
-
-        try:
-            before_ts = int(before_ts[0])
-        except Exception:
-            raise SynapseError(400, "Invalid 'before_ts' arg")
+        before_ts = parse_integer(request, "before_ts", required=True)
+        logger.info("before_ts: %r", before_ts)
 
         ret = yield self.media_repository.delete_old_remote_media(before_ts)
 
@@ -300,10 +418,8 @@ class ShutdownRoomRestServlet(ClientV1RestServlet):
             raise AuthError(403, "You are not a server admin")
 
         content = parse_json_object_from_request(request)
-
-        new_room_user_id = content.get("new_room_user_id")
-        if not new_room_user_id:
-            raise SynapseError(400, "Please provide field `new_room_user_id`")
+        assert_params_in_dict(content, ["new_room_user_id"])
+        new_room_user_id = content["new_room_user_id"]
 
         room_creator_requester = create_requester(new_room_user_id)
 
@@ -464,9 +580,8 @@ class ResetPasswordRestServlet(ClientV1RestServlet):
             raise AuthError(403, "You are not a server admin")
 
         params = parse_json_object_from_request(request)
+        assert_params_in_dict(params, ["new_password"])
         new_password = params['new_password']
-        if not new_password:
-            raise SynapseError(400, "Missing 'new_password' arg")
 
         logger.info("new_password: %r", new_password)
 
@@ -514,12 +629,9 @@ class GetUsersPaginatedRestServlet(ClientV1RestServlet):
             raise SynapseError(400, "Can only users a local user")
 
         order = "name"  # order by name in user table
-        start = request.args.get("start")[0]
-        limit = request.args.get("limit")[0]
-        if not limit:
-            raise SynapseError(400, "Missing 'limit' arg")
-        if not start:
-            raise SynapseError(400, "Missing 'start' arg")
+        start = parse_integer(request, "start", required=True)
+        limit = parse_integer(request, "limit", required=True)
+
         logger.info("limit: %s, start: %s", limit, start)
 
         ret = yield self.handlers.admin_handler.get_users_paginate(
@@ -551,12 +663,9 @@ class GetUsersPaginatedRestServlet(ClientV1RestServlet):
 
         order = "name"  # order by name in user table
         params = parse_json_object_from_request(request)
+        assert_params_in_dict(params, ["limit", "start"])
         limit = params['limit']
         start = params['start']
-        if not limit:
-            raise SynapseError(400, "Missing 'limit' arg")
-        if not start:
-            raise SynapseError(400, "Missing 'start' arg")
         logger.info("limit: %s, start: %s", limit, start)
 
         ret = yield self.handlers.admin_handler.get_users_paginate(
@@ -604,10 +713,7 @@ class SearchUsersRestServlet(ClientV1RestServlet):
         if not self.hs.is_mine(target_user):
             raise SynapseError(400, "Can only users a local user")
 
-        term = request.args.get("term")[0]
-        if not term:
-            raise SynapseError(400, "Missing 'term' arg")
-
+        term = parse_string(request, "term", required=True)
         logger.info("term: %s ", term)
 
         ret = yield self.handlers.admin_handler.search_users(
@@ -629,3 +735,4 @@ def register_servlets(hs, http_server):
     ShutdownRoomRestServlet(hs).register(http_server)
     QuarantineMediaInRoom(hs).register(http_server)
     ListMediaInRoom(hs).register(http_server)
+    UserRegisterServlet(hs).register(http_server)
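
For reference, a client drives the new registration endpoint by fetching a nonce and reproducing the servlet's HMAC: SHA-1 keyed with the registration shared secret, over the nonce, username, password and admin flag, NUL-separated. A hedged sketch using `requests` (the base URL and secret are placeholders, and the `/_matrix/client/api/v1` prefix is an assumption based on `client_path_patterns`):

```python
import hashlib
import hmac

import requests  # any HTTP client would do

BASE = "https://example.com/_matrix/client/api/v1"  # assumed prefix
SHARED_SECRET = b"the registration_shared_secret from the config"

def register(username, password, admin=False):
    # Each GET mints a fresh single-use nonce, valid for 60 seconds.
    nonce = requests.get(BASE + "/admin/register").json()["nonce"]

    # Mirror the servlet: NUL-separated fields, hex SHA-1 HMAC.
    mac = hmac.new(key=SHARED_SECRET, digestmod=hashlib.sha1)
    mac.update(nonce.encode("utf8"))
    mac.update(b"\x00")
    mac.update(username.encode("utf8"))
    mac.update(b"\x00")
    mac.update(password.encode("utf8"))
    mac.update(b"\x00")
    mac.update(b"admin" if admin else b"notadmin")

    resp = requests.post(BASE + "/admin/register", json={
        "nonce": nonce,
        "username": username,
        "password": password,
        "admin": admin,
        "mac": mac.hexdigest(),
    })
    return resp.json()
```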
diff --git a/synapse/rest/client/v1/base.py b/synapse/rest/client/v1/base.py
index dde02328c3..c77d7aba68 100644
--- a/synapse/rest/client/v1/base.py
+++ b/synapse/rest/client/v1/base.py
@@ -62,4 +62,4 @@ class ClientV1RestServlet(RestServlet):
         self.hs = hs
         self.builder_factory = hs.get_event_builder_factory()
         self.auth = hs.get_auth()
-        self.txns = HttpTransactionCache(hs.get_clock())
+        self.txns = HttpTransactionCache(hs)
diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py
index 4fdbb83815..69dcd618cb 100644
--- a/synapse/rest/client/v1/directory.py
+++ b/synapse/rest/client/v1/directory.py
@@ -52,15 +52,14 @@ class ClientDirectoryServer(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_PUT(self, request, room_alias):
+        room_alias = RoomAlias.from_string(room_alias)
+
         content = parse_json_object_from_request(request)
         if "room_id" not in content:
-            raise SynapseError(400, "Missing room_id key",
+            raise SynapseError(400, 'Missing params: ["room_id"]',
                                errcode=Codes.BAD_JSON)
 
         logger.debug("Got content: %s", content)
-
-        room_alias = RoomAlias.from_string(room_alias)
-
         logger.debug("Got room name: %s", room_alias.to_string())
 
         room_id = content["room_id"]
diff --git a/synapse/rest/client/v1/initial_sync.py b/synapse/rest/client/v1/initial_sync.py
index fbe8cb2023..fd5f85b53e 100644
--- a/synapse/rest/client/v1/initial_sync.py
+++ b/synapse/rest/client/v1/initial_sync.py
@@ -15,6 +15,7 @@
 
 from twisted.internet import defer
 
+from synapse.http.servlet import parse_boolean
 from synapse.streams.config import PaginationConfig
 
 from .base import ClientV1RestServlet, client_path_patterns
@@ -33,7 +34,7 @@ class InitialSyncRestServlet(ClientV1RestServlet):
         requester = yield self.auth.get_user_by_req(request)
         as_client_event = "raw" not in request.args
         pagination_config = PaginationConfig.from_request(request)
-        include_archived = request.args.get("archived", None) == ["true"]
+        include_archived = parse_boolean(request, "archived", default=False)
         content = yield self.initial_sync_handler.snapshot_all_rooms(
             user_id=requester.user.to_string(),
             pagin_config=pagination_config,
diff --git a/synapse/rest/client/v1/logout.py b/synapse/rest/client/v1/logout.py
index 05a8ecfcd8..430c692336 100644
--- a/synapse/rest/client/v1/logout.py
+++ b/synapse/rest/client/v1/logout.py
@@ -17,7 +17,6 @@ import logging
 
 from twisted.internet import defer
 
-from synapse.api.auth import get_access_token_from_request
 from synapse.api.errors import AuthError
 
 from .base import ClientV1RestServlet, client_path_patterns
@@ -51,7 +50,7 @@ class LogoutRestServlet(ClientV1RestServlet):
             if requester.device_id is None:
                 # the access token wasn't associated with a device.
                 # Just delete the access token
-                access_token = get_access_token_from_request(request)
+                access_token = self._auth.get_access_token_from_request(request)
                 yield self._auth_handler.delete_access_token(access_token)
             else:
                 yield self._device_handler.delete_device(
diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py
index 0df7ce570f..6e95d9bec2 100644
--- a/synapse/rest/client/v1/push_rule.py
+++ b/synapse/rest/client/v1/push_rule.py
@@ -21,7 +21,7 @@ from synapse.api.errors import (
     SynapseError,
     UnrecognizedRequestError,
 )
-from synapse.http.servlet import parse_json_value_from_request
+from synapse.http.servlet import parse_json_value_from_request, parse_string
 from synapse.push.baserules import BASE_RULE_IDS
 from synapse.push.clientformat import format_push_rules_for_user
 from synapse.push.rulekinds import PRIORITY_CLASS_MAP
@@ -75,13 +75,13 @@ class PushRuleRestServlet(ClientV1RestServlet):
         except InvalidRuleException as e:
             raise SynapseError(400, e.message)
 
-        before = request.args.get("before", None)
+        before = parse_string(request, "before")
         if before:
-            before = _namespaced_rule_id(spec, before[0])
+            before = _namespaced_rule_id(spec, before)
 
-        after = request.args.get("after", None)
+        after = parse_string(request, "after")
         if after:
-            after = _namespaced_rule_id(spec, after[0])
+            after = _namespaced_rule_id(spec, after)
 
         try:
             yield self.store.add_push_rule(
diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py
index 1581f88db5..182a68b1e2 100644
--- a/synapse/rest/client/v1/pusher.py
+++ b/synapse/rest/client/v1/pusher.py
@@ -21,6 +21,7 @@ from synapse.api.errors import Codes, StoreError, SynapseError
 from synapse.http.server import finish_request
 from synapse.http.servlet import (
     RestServlet,
+    assert_params_in_dict,
     parse_json_object_from_request,
     parse_string,
 )
@@ -91,15 +92,11 @@ class PushersSetRestServlet(ClientV1RestServlet):
             )
             defer.returnValue((200, {}))
 
-        reqd = ['kind', 'app_id', 'app_display_name',
-                'device_display_name', 'pushkey', 'lang', 'data']
-        missing = []
-        for i in reqd:
-            if i not in content:
-                missing.append(i)
-        if len(missing):
-            raise SynapseError(400, "Missing parameters: " + ','.join(missing),
-                               errcode=Codes.MISSING_PARAM)
+        assert_params_in_dict(
+            content,
+            ['kind', 'app_id', 'app_display_name',
+             'device_display_name', 'pushkey', 'lang', 'data']
+        )
 
         logger.debug("set pushkey %s to kind %s", content['pushkey'], content['kind'])
         logger.debug("Got pushers request with body: %r", content)
@@ -148,7 +145,7 @@ class PushersRemoveRestServlet(RestServlet):
     SUCCESS_HTML = "<html><body>You have been unsubscribed</body><html>"
 
     def __init__(self, hs):
-        super(RestServlet, self).__init__()
+        super(PushersRemoveRestServlet, self).__init__()
         self.hs = hs
         self.notifier = hs.get_notifier()
         self.auth = hs.get_auth()
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index 2470db52ba..b9512a2b61 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -28,6 +28,7 @@ from synapse.api.errors import AuthError, Codes, SynapseError
 from synapse.api.filtering import Filter
 from synapse.events.utils import format_event_for_client_v2, serialize_event
 from synapse.http.servlet import (
+    assert_params_in_dict,
     parse_integer,
     parse_json_object_from_request,
     parse_string,
@@ -435,9 +436,9 @@ class RoomMessageListRestServlet(ClientV1RestServlet):
             request, default_limit=10,
         )
         as_client_event = "raw" not in request.args
-        filter_bytes = request.args.get("filter", None)
+        filter_bytes = parse_string(request, "filter")
         if filter_bytes:
-            filter_json = urlparse.unquote(filter_bytes[-1]).decode("UTF-8")
+            filter_json = urlparse.unquote(filter_bytes).decode("UTF-8")
             event_filter = Filter(json.loads(filter_json))
         else:
             event_filter = None
@@ -530,7 +531,7 @@ class RoomEventContextServlet(ClientV1RestServlet):
     def on_GET(self, request, room_id, event_id):
         requester = yield self.auth.get_user_by_req(request, allow_guest=True)
 
-        limit = int(request.args.get("limit", [10])[0])
+        limit = parse_integer(request, "limit", default=10)
 
         results = yield self.handlers.room_context_handler.get_event_context(
             requester.user,
@@ -636,8 +637,7 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
 
         target = requester.user
         if membership_action in ["invite", "ban", "unban", "kick"]:
-            if "user_id" not in content:
-                raise SynapseError(400, "Missing user_id key.")
+            assert_params_in_dict(content, ["user_id"])
             target = UserID.from_string(content["user_id"])
 
         event_content = None
@@ -764,7 +764,7 @@ class SearchRestServlet(ClientV1RestServlet):
 
         content = parse_json_object_from_request(request)
 
-        batch = request.args.get("next_batch", [None])[0]
+        batch = parse_string(request, "next_batch")
         results = yield self.handlers.search_handler.search(
             requester.user,
             content,
@@ -832,10 +832,13 @@ def register_servlets(hs, http_server):
     RoomSendEventRestServlet(hs).register(http_server)
     PublicRoomListRestServlet(hs).register(http_server)
     RoomStateRestServlet(hs).register(http_server)
-    RoomInitialSyncRestServlet(hs).register(http_server)
     RoomRedactEventRestServlet(hs).register(http_server)
     RoomTypingRestServlet(hs).register(http_server)
     SearchRestServlet(hs).register(http_server)
     JoinedRoomsRestServlet(hs).register(http_server)
     RoomEventServlet(hs).register(http_server)
     RoomEventContextServlet(hs).register(http_server)
+
+
+def register_deprecated_servlets(hs, http_server):
+    RoomInitialSyncRestServlet(hs).register(http_server)
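
Splitting out register_deprecated_servlets lets callers opt in to the legacy
per-room /initialSync endpoint explicitly. A hypothetical sketch of the
wiring (the real call sites live in synapse/rest/__init__.py, which is not
shown in this hunk):

    from synapse.rest.client.v1 import room

    room.register_servlets(hs, http_server)             # current endpoints
    room.register_deprecated_servlets(hs, http_server)  # legacy initialSync
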
diff --git a/synapse/rest/client/v1_only/__init__.py b/synapse/rest/client/v1_only/__init__.py
new file mode 100644
index 0000000000..936f902ace
--- /dev/null
+++ b/synapse/rest/client/v1_only/__init__.py
@@ -0,0 +1,3 @@
+"""
+REST APIs that are only used in v1 (the legacy API).
+"""
diff --git a/synapse/rest/client/v1_only/base.py b/synapse/rest/client/v1_only/base.py
new file mode 100644
index 0000000000..9d4db7437c
--- /dev/null
+++ b/synapse/rest/client/v1_only/base.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module contains base REST classes for constructing client v1 servlets.
+"""
+
+import re
+
+from synapse.api.urls import CLIENT_PREFIX
+
+
+def v1_only_client_path_patterns(path_regex, include_in_unstable=True):
+    """Creates a regex compiled client path with the correct client path
+    prefix.
+
+    Args:
+        path_regex (str): The regex string to match. This should NOT have a ^
+        as this will be prefixed.
+    Returns:
+        list of SRE_Pattern
+    """
+    patterns = [re.compile("^" + CLIENT_PREFIX + path_regex)]
+    if include_in_unstable:
+        unstable_prefix = CLIENT_PREFIX.replace("/api/v1", "/unstable")
+        patterns.append(re.compile("^" + unstable_prefix + path_regex))
+    return patterns
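
Assuming CLIENT_PREFIX is "/_matrix/client/api/v1" (its value in
synapse.api.urls at the time of writing), the helper expands a path regex
into the v1 and unstable forms:

    PATTERNS = v1_only_client_path_patterns("/register$")
    # -> [re.compile("^/_matrix/client/api/v1/register$"),
    #     re.compile("^/_matrix/client/unstable/register$")]
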
diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1_only/register.py
index 3ce5f8b726..3439c3c6d4 100644
--- a/synapse/rest/client/v1/register.py
+++ b/synapse/rest/client/v1_only/register.py
@@ -18,18 +18,16 @@ import hmac
 import logging
 from hashlib import sha1
 
-from six import string_types
-
 from twisted.internet import defer
 
 import synapse.util.stringutils as stringutils
-from synapse.api.auth import get_access_token_from_request
 from synapse.api.constants import LoginType
 from synapse.api.errors import Codes, SynapseError
-from synapse.http.servlet import parse_json_object_from_request
+from synapse.http.servlet import assert_params_in_dict, parse_json_object_from_request
+from synapse.rest.client.v1.base import ClientV1RestServlet
 from synapse.types import create_requester
 
-from .base import ClientV1RestServlet, client_path_patterns
+from .base import v1_only_client_path_patterns
 
 logger = logging.getLogger(__name__)
 
@@ -52,7 +50,7 @@ class RegisterRestServlet(ClientV1RestServlet):
     handler doesn't have a concept of multi-stages or sessions.
     """
 
-    PATTERNS = client_path_patterns("/register$", releases=(), include_in_unstable=False)
+    PATTERNS = v1_only_client_path_patterns("/register$", include_in_unstable=False)
 
     def __init__(self, hs):
         """
@@ -67,6 +65,7 @@ class RegisterRestServlet(ClientV1RestServlet):
         # TODO: persistent storage
         self.sessions = {}
         self.enable_registration = hs.config.enable_registration
+        self.auth = hs.get_auth()
         self.auth_handler = hs.get_auth_handler()
         self.handlers = hs.get_handlers()
 
@@ -124,8 +123,7 @@ class RegisterRestServlet(ClientV1RestServlet):
         session = (register_json["session"]
                    if "session" in register_json else None)
         login_type = None
-        if "type" not in register_json:
-            raise SynapseError(400, "Missing 'type' key.")
+        assert_params_in_dict(register_json, ["type"])
 
         try:
             login_type = register_json["type"]
@@ -310,11 +308,9 @@ class RegisterRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def _do_app_service(self, request, register_json, session):
-        as_token = get_access_token_from_request(request)
-
-        if "user" not in register_json:
-            raise SynapseError(400, "Expected 'user' key.")
+        as_token = self.auth.get_access_token_from_request(request)
 
+        assert_params_in_dict(register_json, ["user"])
         user_localpart = register_json["user"].encode("utf-8")
 
         handler = self.handlers.registration_handler
@@ -331,12 +327,7 @@ class RegisterRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def _do_shared_secret(self, request, register_json, session):
-        if not isinstance(register_json.get("mac", None), string_types):
-            raise SynapseError(400, "Expected mac.")
-        if not isinstance(register_json.get("user", None), string_types):
-            raise SynapseError(400, "Expected 'user' key.")
-        if not isinstance(register_json.get("password", None), string_types):
-            raise SynapseError(400, "Expected 'password' key.")
+        assert_params_in_dict(register_json, ["mac", "user", "password"])
 
         if not self.hs.config.registration_shared_secret:
             raise SynapseError(400, "Shared secret registration is not enabled")
@@ -389,7 +380,7 @@ class CreateUserRestServlet(ClientV1RestServlet):
     """Handles user creation via a server-to-server interface
     """
 
-    PATTERNS = client_path_patterns("/createUser$", releases=())
+    PATTERNS = v1_only_client_path_patterns("/createUser$")
 
     def __init__(self, hs):
         super(CreateUserRestServlet, self).__init__(hs)
@@ -400,7 +391,7 @@ class CreateUserRestServlet(ClientV1RestServlet):
     def on_POST(self, request):
         user_json = parse_json_object_from_request(request)
 
-        access_token = get_access_token_from_request(request)
+        access_token = self.auth.get_access_token_from_request(request)
         app_service = self.store.get_app_service_by_token(
             access_token
         )
@@ -419,11 +410,7 @@ class CreateUserRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def _do_create(self, requester, user_json):
-        if "localpart" not in user_json:
-            raise SynapseError(400, "Expected 'localpart' key.")
-
-        if "displayname" not in user_json:
-            raise SynapseError(400, "Expected 'displayname' key.")
+        assert_params_in_dict(user_json, ["localpart", "displayname"])
 
         localpart = user_json["localpart"].encode("utf-8")
         displayname = user_json["displayname"].encode("utf-8")
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index 528c1f43f9..eeae466d82 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -20,12 +20,11 @@ from six.moves import http_client
 
 from twisted.internet import defer
 
-from synapse.api.auth import has_access_token
 from synapse.api.constants import LoginType
 from synapse.api.errors import Codes, SynapseError
 from synapse.http.servlet import (
     RestServlet,
-    assert_params_in_request,
+    assert_params_in_dict,
     parse_json_object_from_request,
 )
 from synapse.util.msisdn import phone_number_to_msisdn
@@ -48,7 +47,7 @@ class EmailPasswordRequestTokenRestServlet(RestServlet):
     def on_POST(self, request):
         body = parse_json_object_from_request(request)
 
-        assert_params_in_request(body, [
+        assert_params_in_dict(body, [
             'id_server', 'client_secret', 'email', 'send_attempt'
         ])
 
@@ -81,7 +80,7 @@ class MsisdnPasswordRequestTokenRestServlet(RestServlet):
     def on_POST(self, request):
         body = parse_json_object_from_request(request)
 
-        assert_params_in_request(body, [
+        assert_params_in_dict(body, [
             'id_server', 'client_secret',
             'country', 'phone_number', 'send_attempt',
         ])
@@ -130,7 +129,7 @@ class PasswordRestServlet(RestServlet):
         #
         # In the second case, we require a password to confirm their identity.
 
-        if has_access_token(request):
+        if self.auth.has_access_token(request):
             requester = yield self.auth.get_user_by_req(request)
             params = yield self.auth_handler.validate_user_via_ui_auth(
                 requester, body, self.hs.get_ip_from_request(request),
@@ -160,11 +159,10 @@ class PasswordRestServlet(RestServlet):
                     raise SynapseError(404, "Email address not found", Codes.NOT_FOUND)
                 user_id = threepid_user_id
             else:
-                logger.error("Auth succeeded but no known type!", result.keys())
+                logger.error("Auth succeeded but no known type! %r", result.keys())
                 raise SynapseError(500, "", Codes.UNKNOWN)
 
-        if 'new_password' not in params:
-            raise SynapseError(400, "", Codes.MISSING_PARAM)
+        assert_params_in_dict(params, ["new_password"])
         new_password = params['new_password']
 
         yield self._set_password_handler.set_password(
@@ -229,15 +227,10 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
     @defer.inlineCallbacks
     def on_POST(self, request):
         body = parse_json_object_from_request(request)
-
-        required = ['id_server', 'client_secret', 'email', 'send_attempt']
-        absent = []
-        for k in required:
-            if k not in body:
-                absent.append(k)
-
-        if absent:
-            raise SynapseError(400, "Missing params: %r" % absent, Codes.MISSING_PARAM)
+        assert_params_in_dict(
+            body,
+            ['id_server', 'client_secret', 'email', 'send_attempt'],
+        )
 
         if not check_3pid_allowed(self.hs, "email", body['email']):
             raise SynapseError(
@@ -267,18 +260,10 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
     @defer.inlineCallbacks
     def on_POST(self, request):
         body = parse_json_object_from_request(request)
-
-        required = [
+        assert_params_in_dict(body, [
             'id_server', 'client_secret',
             'country', 'phone_number', 'send_attempt',
-        ]
-        absent = []
-        for k in required:
-            if k not in body:
-                absent.append(k)
-
-        if absent:
-            raise SynapseError(400, "Missing params: %r" % absent, Codes.MISSING_PARAM)
+        ])
 
         msisdn = phone_number_to_msisdn(body['country'], body['phone_number'])
 
@@ -373,15 +358,7 @@ class ThreepidDeleteRestServlet(RestServlet):
     @defer.inlineCallbacks
     def on_POST(self, request):
         body = parse_json_object_from_request(request)
-
-        required = ['medium', 'address']
-        absent = []
-        for k in required:
-            if k not in body:
-                absent.append(k)
-
-        if absent:
-            raise SynapseError(400, "Missing params: %r" % absent, Codes.MISSING_PARAM)
+        assert_params_in_dict(body, ['medium', 'address'])
 
         requester = yield self.auth.get_user_by_req(request)
         user_id = requester.user.to_string()
diff --git a/synapse/rest/client/v2_alpha/devices.py b/synapse/rest/client/v2_alpha/devices.py
index 09f6a8efe3..9b75bb1377 100644
--- a/synapse/rest/client/v2_alpha/devices.py
+++ b/synapse/rest/client/v2_alpha/devices.py
@@ -18,14 +18,18 @@ import logging
 from twisted.internet import defer
 
 from synapse.api import errors
-from synapse.http import servlet
+from synapse.http.servlet import (
+    RestServlet,
+    assert_params_in_dict,
+    parse_json_object_from_request,
+)
 
 from ._base import client_v2_patterns, interactive_auth_handler
 
 logger = logging.getLogger(__name__)
 
 
-class DevicesRestServlet(servlet.RestServlet):
+class DevicesRestServlet(RestServlet):
     PATTERNS = client_v2_patterns("/devices$", v2_alpha=False)
 
     def __init__(self, hs):
@@ -47,7 +51,7 @@ class DevicesRestServlet(servlet.RestServlet):
         defer.returnValue((200, {"devices": devices}))
 
 
-class DeleteDevicesRestServlet(servlet.RestServlet):
+class DeleteDevicesRestServlet(RestServlet):
     """
     API for bulk deletion of devices. Accepts a JSON object with a devices
     key which lists the device_ids to delete. Requires user interactive auth.
@@ -67,19 +71,17 @@ class DeleteDevicesRestServlet(servlet.RestServlet):
         requester = yield self.auth.get_user_by_req(request)
 
         try:
-            body = servlet.parse_json_object_from_request(request)
+            body = parse_json_object_from_request(request)
         except errors.SynapseError as e:
             if e.errcode == errors.Codes.NOT_JSON:
-            # deal with older clients which didn't pass a JSON dict
+                # DELETE
+                # deal with older clients which didn't pass a JSON dict
                 # the same as those that pass an empty dict
                 body = {}
             else:
                 raise e
 
-        if 'devices' not in body:
-            raise errors.SynapseError(
-                400, "No devices supplied", errcode=errors.Codes.MISSING_PARAM
-            )
+        assert_params_in_dict(body, ["devices"])
 
         yield self.auth_handler.validate_user_via_ui_auth(
             requester, body, self.hs.get_ip_from_request(request),
@@ -92,7 +94,7 @@ class DeleteDevicesRestServlet(servlet.RestServlet):
         defer.returnValue((200, {}))
 
 
-class DeviceRestServlet(servlet.RestServlet):
+class DeviceRestServlet(RestServlet):
     PATTERNS = client_v2_patterns("/devices/(?P<device_id>[^/]*)$", v2_alpha=False)
 
     def __init__(self, hs):
@@ -121,7 +123,7 @@ class DeviceRestServlet(servlet.RestServlet):
         requester = yield self.auth.get_user_by_req(request)
 
         try:
-            body = servlet.parse_json_object_from_request(request)
+            body = parse_json_object_from_request(request)
 
         except errors.SynapseError as e:
             if e.errcode == errors.Codes.NOT_JSON:
@@ -144,7 +146,7 @@ class DeviceRestServlet(servlet.RestServlet):
     def on_PUT(self, request, device_id):
         requester = yield self.auth.get_user_by_req(request, allow_guest=True)
 
-        body = servlet.parse_json_object_from_request(request)
+        body = parse_json_object_from_request(request)
         yield self.device_handler.update_device(
             requester.user.to_string(),
             device_id,
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py
index 896650d5a5..d6cf915d86 100644
--- a/synapse/rest/client/v2_alpha/register.py
+++ b/synapse/rest/client/v2_alpha/register.py
@@ -24,12 +24,11 @@ from twisted.internet import defer
 
 import synapse
 import synapse.types
-from synapse.api.auth import get_access_token_from_request, has_access_token
 from synapse.api.constants import LoginType
 from synapse.api.errors import Codes, SynapseError, UnrecognizedRequestError
 from synapse.http.servlet import (
     RestServlet,
-    assert_params_in_request,
+    assert_params_in_dict,
     parse_json_object_from_request,
     parse_string,
 )
@@ -69,7 +68,7 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
     def on_POST(self, request):
         body = parse_json_object_from_request(request)
 
-        assert_params_in_request(body, [
+        assert_params_in_dict(body, [
             'id_server', 'client_secret', 'email', 'send_attempt'
         ])
 
@@ -105,7 +104,7 @@ class MsisdnRegisterRequestTokenRestServlet(RestServlet):
     def on_POST(self, request):
         body = parse_json_object_from_request(request)
 
-        assert_params_in_request(body, [
+        assert_params_in_dict(body, [
             'id_server', 'client_secret',
             'country', 'phone_number',
             'send_attempt',
@@ -224,7 +223,7 @@ class RegisterRestServlet(RestServlet):
             desired_username = body['username']
 
         appservice = None
-        if has_access_token(request):
+        if self.auth.has_access_token(request):
             appservice = yield self.auth.get_appservice_by_req(request)
 
         # fork off as soon as possible for ASes and shared secret auth which
@@ -242,7 +241,7 @@ class RegisterRestServlet(RestServlet):
             # because the IRC bridges rely on being able to register stupid
             # IDs.
 
-            access_token = get_access_token_from_request(request)
+            access_token = self.auth.get_access_token_from_request(request)
 
             if isinstance(desired_username, string_types):
                 result = yield self._do_appservice_registration(
@@ -387,9 +386,7 @@ class RegisterRestServlet(RestServlet):
             add_msisdn = False
         else:
             # NB: This may be from the auth handler and NOT from the POST
-            if 'password' not in params:
-                raise SynapseError(400, "Missing password.",
-                                   Codes.MISSING_PARAM)
+            assert_params_in_dict(params, ["password"])
 
             desired_username = params.get("username", None)
             new_password = params.get("password", None)
@@ -566,11 +563,14 @@ class RegisterRestServlet(RestServlet):
         Returns:
             defer.Deferred:
         """
-        reqd = ('medium', 'address', 'validated_at')
-        if any(x not in threepid for x in reqd):
-            # This will only happen if the ID server returns a malformed response
-            logger.info("Can't add incomplete 3pid")
-            defer.returnValue()
+        try:
+            assert_params_in_dict(threepid, ['medium', 'address', 'validated_at'])
+        except SynapseError as ex:
+            if ex.errcode == Codes.MISSING_PARAM:
+                # This will only happen if the ID server returns a malformed response
+                logger.info("Can't add incomplete 3pid")
+                defer.returnValue(None)
+            raise
 
         yield self.auth_handler.add_threepid(
             user_id,
@@ -643,7 +643,7 @@ class RegisterRestServlet(RestServlet):
     @defer.inlineCallbacks
     def _do_guest_registration(self, params):
         if not self.hs.config.allow_guest_access:
-            defer.returnValue((403, "Guest access is disabled"))
+            raise SynapseError(403, "Guest access is disabled")
         user_id, _ = yield self.registration_handler.register(
             generate_token=False,
             make_guest=True
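
Raising SynapseError here, instead of returning a (403, ...) tuple as the
response, means a guest registration attempt on a server with guest access
disabled now yields a well-formed Matrix error object. A sketch of the
result, assuming the default errcode (Codes.UNKNOWN) applies when none is
passed:

    # the client now sees, e.g.:
    #     403 {"errcode": "M_UNKNOWN", "error": "Guest access is disabled"}
    raise SynapseError(403, "Guest access is disabled")
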
diff --git a/synapse/rest/client/v2_alpha/report_event.py b/synapse/rest/client/v2_alpha/report_event.py
index 08bb8e04fd..95d2a71ec2 100644
--- a/synapse/rest/client/v2_alpha/report_event.py
+++ b/synapse/rest/client/v2_alpha/report_event.py
@@ -15,9 +15,17 @@
 
 import logging
 
+from six import string_types
+from six.moves import http_client
+
 from twisted.internet import defer
 
-from synapse.http.servlet import RestServlet, parse_json_object_from_request
+from synapse.api.errors import Codes, SynapseError
+from synapse.http.servlet import (
+    RestServlet,
+    assert_params_in_dict,
+    parse_json_object_from_request,
+)
 
 from ._base import client_v2_patterns
 
@@ -42,12 +50,26 @@ class ReportEventRestServlet(RestServlet):
         user_id = requester.user.to_string()
 
         body = parse_json_object_from_request(request)
+        assert_params_in_dict(body, ("reason", "score"))
+
+        if not isinstance(body["reason"], string_types):
+            raise SynapseError(
+                http_client.BAD_REQUEST,
+                "Param 'reason' must be a string",
+                Codes.BAD_JSON,
+            )
+        if not isinstance(body["score"], int):
+            raise SynapseError(
+                http_client.BAD_REQUEST,
+                "Param 'score' must be an integer",
+                Codes.BAD_JSON,
+            )
 
         yield self.store.add_event_report(
             room_id=room_id,
             event_id=event_id,
             user_id=user_id,
-            reason=body.get("reason"),
+            reason=body["reason"],
             content=body,
             received_ts=self.clock.time_msec(),
         )
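
Beyond presence, the new report_event checks also validate types. A
generalised sketch of the same idea, using a hypothetical helper that is
not part of synapse (it assumes presence has already been checked with
assert_params_in_dict):

    from six import string_types

    from synapse.api.errors import Codes, SynapseError

    def assert_param_types(body, spec):
        for key, types in spec.items():
            if not isinstance(body[key], types):
                raise SynapseError(400, "Param %r has the wrong type" % (key,),
                                   Codes.BAD_JSON)

    # e.g. for the report endpoint:
    # assert_param_types(body, {"reason": string_types, "score": (int,)})
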
diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py b/synapse/rest/client/v2_alpha/sendtodevice.py
index 90bdb1db15..a9e9a47a0b 100644
--- a/synapse/rest/client/v2_alpha/sendtodevice.py
+++ b/synapse/rest/client/v2_alpha/sendtodevice.py
@@ -40,7 +40,7 @@ class SendToDeviceRestServlet(servlet.RestServlet):
         super(SendToDeviceRestServlet, self).__init__()
         self.hs = hs
         self.auth = hs.get_auth()
-        self.txns = HttpTransactionCache(hs.get_clock())
+        self.txns = HttpTransactionCache(hs)
         self.device_message_handler = hs.get_device_message_handler()
 
     def on_PUT(self, request, message_type, txn_id):
diff --git a/synapse/rest/media/v1/identicon_resource.py b/synapse/rest/media/v1/identicon_resource.py
index a2e391415f..bdbd8d50dd 100644
--- a/synapse/rest/media/v1/identicon_resource.py
+++ b/synapse/rest/media/v1/identicon_resource.py
@@ -16,6 +16,8 @@ from pydenticon import Generator
 
 from twisted.web.resource import Resource
 
+from synapse.http.servlet import parse_integer
+
 FOREGROUND = [
     "rgb(45,79,255)",
     "rgb(254,180,44)",
@@ -56,8 +58,8 @@ class IdenticonResource(Resource):
 
     def render_GET(self, request):
         name = "/".join(request.postpath)
-        width = int(request.args.get("width", [96])[0])
-        height = int(request.args.get("height", [96])[0])
+        width = parse_integer(request, "width", default=96)
+        height = parse_integer(request, "height", default=96)
         identicon_bytes = self.generate_identicon(name, width, height)
         request.setHeader(b"Content-Type", b"image/png")
         request.setHeader(
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index 4e3a18ce08..b70b15c4c2 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -40,6 +40,7 @@ from synapse.http.server import (
     respond_with_json_bytes,
     wrap_json_request_handler,
 )
+from synapse.http.servlet import parse_integer, parse_string
 from synapse.util.async import ObservableDeferred
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.logcontext import make_deferred_yieldable, run_in_background
@@ -96,9 +97,9 @@ class PreviewUrlResource(Resource):
 
         # XXX: if get_user_by_req fails, what should we do in an async render?
         requester = yield self.auth.get_user_by_req(request)
-        url = request.args.get("url")[0]
+        url = parse_string(request, "url")
         if "ts" in request.args:
-            ts = int(request.args.get("ts")[0])
+            ts = parse_integer(request, "ts")
         else:
             ts = self.clock.time_msec()
 
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index 1a98120e1d..9b22d204a6 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -21,6 +21,7 @@ from twisted.web.server import NOT_DONE_YET
 
 from synapse.api.errors import SynapseError
 from synapse.http.server import respond_with_json, wrap_json_request_handler
+from synapse.http.servlet import parse_string
 
 logger = logging.getLogger(__name__)
 
@@ -65,10 +66,10 @@ class UploadResource(Resource):
                 code=413,
             )
 
-        upload_name = request.args.get("filename", None)
+        upload_name = parse_string(request, "filename")
         if upload_name:
             try:
-                upload_name = upload_name[0].decode('UTF-8')
+                upload_name = upload_name.decode('UTF-8')
             except UnicodeDecodeError:
                 raise SynapseError(
                     msg="Invalid UTF-8 filename parameter: %r" % (upload_name),
diff --git a/synapse/secrets.py b/synapse/secrets.py
new file mode 100644
index 0000000000..f397daaa5e
--- /dev/null
+++ b/synapse/secrets.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Injectable secrets module for Synapse.
+
+See https://docs.python.org/3/library/secrets.html#module-secrets for the API
+used in Python 3.6, and the API emulated in Python 2.7.
+"""
+
+import six
+
+if six.PY3:
+    import secrets
+
+    def Secrets():
+        return secrets
+
+
+else:
+
+    import os
+    import binascii
+
+    class Secrets(object):
+        def token_bytes(self, nbytes=32):
+            return os.urandom(nbytes)
+
+        def token_hex(self, nbytes=32):
+            return binascii.hexlify(self.token_bytes(nbytes))
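
With 'secrets' added to HomeServer's dependency list (see the server.py
hunk below), the homeserver machinery exposes this module as
hs.get_secrets(). A usage sketch:

    # callers take randomness from the homeserver rather than os.urandom
    # directly, so it can be stubbed out in tests
    secrets = hs.get_secrets()
    nonce = secrets.token_hex(64)    # hex string (bytes on the py2 shim)
    raw = secrets.token_bytes(32)    # raw random bytes
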
diff --git a/synapse/server.py b/synapse/server.py
index 92bea96c5c..fd4f992258 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -74,6 +74,7 @@ from synapse.rest.media.v1.media_repository import (
     MediaRepository,
     MediaRepositoryResource,
 )
+from synapse.secrets import Secrets
 from synapse.server_notices.server_notices_manager import ServerNoticesManager
 from synapse.server_notices.server_notices_sender import ServerNoticesSender
 from synapse.server_notices.worker_server_notices_sender import WorkerServerNoticesSender
@@ -158,6 +159,7 @@ class HomeServer(object):
         'groups_server_handler',
         'groups_attestation_signing',
         'groups_attestation_renewer',
+        'secrets',
         'spam_checker',
         'room_member_handler',
         'federation_registry',
@@ -405,6 +407,9 @@ class HomeServer(object):
     def build_groups_attestation_renewer(self):
         return GroupAttestionRenewer(self)
 
+    def build_secrets(self):
+        return Secrets()
+
     def build_spam_checker(self):
         return SpamChecker(self)
 
diff --git a/synapse/state.py b/synapse/state.py
index 15a593d41c..504caae2f7 100644
--- a/synapse/state.py
+++ b/synapse/state.py
@@ -18,7 +18,7 @@ import hashlib
 import logging
 from collections import namedtuple
 
-from six import iteritems, itervalues
+from six import iteritems, iterkeys, itervalues
 
 from frozendict import frozendict
 
@@ -647,7 +647,7 @@ def resolve_events_with_factory(state_sets, event_map, state_map_factory):
         for event_id in event_ids
     )
     if event_map is not None:
-        needed_events -= set(event_map.iterkeys())
+        needed_events -= set(iterkeys(event_map))
 
     logger.info("Asking for %d conflicted events", len(needed_events))
 
@@ -668,7 +668,7 @@ def resolve_events_with_factory(state_sets, event_map, state_map_factory):
     new_needed_events = set(itervalues(auth_events))
     new_needed_events -= needed_events
     if event_map is not None:
-        new_needed_events -= set(event_map.iterkeys())
+        new_needed_events -= set(iterkeys(event_map))
 
     logger.info("Asking for %d auth events", len(new_needed_events))
 
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 98dde77431..1d41d8d445 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -344,7 +344,7 @@ class SQLBaseStore(object):
         parent_context = LoggingContext.current_context()
         if parent_context == LoggingContext.sentinel:
             logger.warn(
-                "Running db txn from sentinel context: metrics will be lost",
+                "Starting db connection from sentinel context: metrics will be lost",
             )
             parent_context = None
 
diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py
index dc9eca7d15..5fe1ca2de7 100644
--- a/synapse/storage/background_updates.py
+++ b/synapse/storage/background_updates.py
@@ -19,6 +19,8 @@ from canonicaljson import json
 
 from twisted.internet import defer
 
+from synapse.metrics.background_process_metrics import run_as_background_process
+
 from . import engines
 from ._base import SQLBaseStore
 
@@ -87,10 +89,14 @@ class BackgroundUpdateStore(SQLBaseStore):
         self._background_update_handlers = {}
         self._all_done = False
 
-    @defer.inlineCallbacks
     def start_doing_background_updates(self):
-        logger.info("Starting background schema updates")
+        run_as_background_process(
+            "background_updates", self._run_background_updates,
+        )
 
+    @defer.inlineCallbacks
+    def _run_background_updates(self):
+        logger.info("Starting background schema updates")
         while True:
             yield self.hs.get_clock().sleep(
                 self.BACKGROUND_UPDATE_INTERVAL_MS / 1000.)
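
This is the pattern applied throughout this commit: fire-and-forget work is
wrapped in run_as_background_process so that its resource usage is
attributed to a named background process instead of being dropped on the
sentinel context (cf. the "metrics will be lost" warning in _base.py
above). A sketch of the general shape, with hypothetical names:

    from twisted.internet import defer

    from synapse.metrics.background_process_metrics import (
        run_as_background_process,
    )

    class MyStore(SQLBaseStore):
        def start_doing_my_updates(self):
            # the entry point no longer returns a deferred
            run_as_background_process("my_updates", self._run_my_updates)

        @defer.inlineCallbacks
        def _run_my_updates(self):
            while True:
                yield self.hs.get_clock().sleep(60)
                yield self._do_one_update()  # hypothetical work method
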
diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py
index b78eda3413..77ae10da3d 100644
--- a/synapse/storage/client_ips.py
+++ b/synapse/storage/client_ips.py
@@ -19,6 +19,7 @@ from six import iteritems
 
 from twisted.internet import defer
 
+from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.util.caches import CACHE_SIZE_FACTOR
 
 from . import background_updates
@@ -93,10 +94,16 @@ class ClientIpStore(background_updates.BackgroundUpdateStore):
         self._batch_row_update[key] = (user_agent, device_id, now)
 
     def _update_client_ips_batch(self):
-        to_update = self._batch_row_update
-        self._batch_row_update = {}
-        return self.runInteraction(
-            "_update_client_ips_batch", self._update_client_ips_batch_txn, to_update
+        def update():
+            to_update = self._batch_row_update
+            self._batch_row_update = {}
+            return self.runInteraction(
+                "_update_client_ips_batch", self._update_client_ips_batch_txn,
+                to_update,
+            )
+
+        run_as_background_process(
+            "update_client_ips", update,
         )
 
     def _update_client_ips_batch_txn(self, txn, to_update):
diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index 2aaab0d02c..4ff0fdc4ab 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -33,12 +33,13 @@ from synapse.api.errors import SynapseError
 # these are only included to make the type annotations work
 from synapse.events import EventBase  # noqa: F401
 from synapse.events.snapshot import EventContext  # noqa: F401
+from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage.events_worker import EventsWorkerStore
 from synapse.types import RoomStreamToken, get_domain_from_id
 from synapse.util.async import ObservableDeferred
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 from synapse.util.frozenutils import frozendict_json_encoder
-from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable
+from synapse.util.logcontext import make_deferred_yieldable
 from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
 
@@ -155,11 +156,8 @@ class _EventPeristenceQueue(object):
                     self._event_persist_queues[room_id] = queue
                 self._currently_persisting_rooms.discard(room_id)
 
-        # set handle_queue_loop off on the background. We don't want to
-        # attribute work done in it to the current request, so we drop the
-        # logcontext altogether.
-        with PreserveLoggingContext():
-            handle_queue_loop()
+        # set handle_queue_loop off in the background
+        run_as_background_process("persist_events", handle_queue_loop)
 
     def _get_drainining_queue(self, room_id):
         queue = self._event_persist_queues.setdefault(room_id, deque())
diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py
index 67433606c6..f28239a808 100644
--- a/synapse/storage/events_worker.py
+++ b/synapse/storage/events_worker.py
@@ -25,6 +25,7 @@ from synapse.events import EventBase  # noqa: F401
 from synapse.events import FrozenEvent
 from synapse.events.snapshot import EventContext  # noqa: F401
 from synapse.events.utils import prune_event
+from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.util.logcontext import (
     LoggingContext,
     PreserveLoggingContext,
@@ -322,10 +323,11 @@ class EventsWorkerStore(SQLBaseStore):
                 should_start = False
 
         if should_start:
-            with PreserveLoggingContext():
-                self.runWithConnection(
-                    self._do_fetch
-                )
+            run_as_background_process(
+                "fetch_events",
+                self.runWithConnection,
+                self._do_fetch,
+            )
 
         logger.debug("Loading %d events", len(events))
         with PreserveLoggingContext():
diff --git a/synapse/storage/receipts.py b/synapse/storage/receipts.py
index 3738901ea4..0ac665e967 100644
--- a/synapse/storage/receipts.py
+++ b/synapse/storage/receipts.py
@@ -140,7 +140,9 @@ class ReceiptsWorkerStore(SQLBaseStore):
         """
         room_ids = set(room_ids)
 
-        if from_key:
+        if from_key is not None:
+            # Only ask the database about rooms where there have been new
+            # receipts added since `from_key`
             room_ids = yield self._receipts_stream_cache.get_entities_changed(
                 room_ids, from_key
             )
@@ -151,7 +153,6 @@ class ReceiptsWorkerStore(SQLBaseStore):
 
         defer.returnValue([ev for res in results.values() for ev in res])
 
-    @cachedInlineCallbacks(num_args=3, tree=True)
     def get_linearized_receipts_for_room(self, room_id, to_key, from_key=None):
         """Get receipts for a single room for sending to clients.
 
@@ -162,7 +163,19 @@ class ReceiptsWorkerStore(SQLBaseStore):
                 from the start.
 
         Returns:
-            list: A list of receipts.
+            Deferred[list]: A list of receipts.
+        """
+        if from_key is not None:
+            # Check the cache first to see if any new receipts have been added
+            # since `from_key`. If not, we can no-op.
+            if not self._receipts_stream_cache.has_entity_changed(room_id, from_key):
+                return defer.succeed([])
+
+        return self._get_linearized_receipts_for_room(room_id, to_key, from_key)
+
+    @cachedInlineCallbacks(num_args=3, tree=True)
+    def _get_linearized_receipts_for_room(self, room_id, to_key, from_key=None):
+        """See get_linearized_receipts_for_room
         """
         def f(txn):
             if from_key:
@@ -211,7 +224,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
             "content": content,
         }])
 
-    @cachedList(cached_method_name="get_linearized_receipts_for_room",
+    @cachedList(cached_method_name="_get_linearized_receipts_for_room",
                 list_name="room_ids", num_args=3, inlineCallbacks=True)
     def _get_linearized_receipts_for_rooms(self, room_ids, to_key, from_key=None):
         if not room_ids:
@@ -373,7 +386,7 @@ class ReceiptsStore(ReceiptsWorkerStore):
             self.get_receipts_for_user.invalidate, (user_id, receipt_type)
         )
         # FIXME: This shouldn't invalidate the whole cache
-        txn.call_after(self.get_linearized_receipts_for_room.invalidate_many, (room_id,))
+        txn.call_after(self._get_linearized_receipts_for_room.invalidate_many, (room_id,))
 
         txn.call_after(
             self._receipts_stream_cache.entity_has_changed,
@@ -493,7 +506,7 @@ class ReceiptsStore(ReceiptsWorkerStore):
             self.get_receipts_for_user.invalidate, (user_id, receipt_type)
         )
         # FIXME: This shouldn't invalidate the whole cache
-        txn.call_after(self.get_linearized_receipts_for_room.invalidate_many, (room_id,))
+        txn.call_after(self._get_linearized_receipts_for_room.invalidate_many, (room_id,))
 
         self._simple_delete_txn(
             txn,
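
The split above follows a general pattern: consult the in-memory
stream-change cache before falling through to the db-backed, @cached inner
method. Moving the @cached decorator to the inner method keeps the fast
no-op path from populating cache entries, and gives the @cachedList
batching and the invalidate_many calls a single cached method to target. A
sketch with hypothetical names:

    def get_things_for_room(self, room_id, to_key, from_key=None):
        if from_key is not None:
            if not self._stream_cache.has_entity_changed(room_id, from_key):
                # nothing new since from_key: skip the db entirely
                return defer.succeed([])
        return self._get_things_for_room(room_id, to_key, from_key)
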
diff --git a/synapse/streams/config.py b/synapse/streams/config.py
index 46ccbbda7d..451e4fa441 100644
--- a/synapse/streams/config.py
+++ b/synapse/streams/config.py
@@ -16,6 +16,7 @@
 import logging
 
 from synapse.api.errors import SynapseError
+from synapse.http.servlet import parse_integer, parse_string
 from synapse.types import StreamToken
 
 logger = logging.getLogger(__name__)
@@ -56,23 +57,10 @@ class PaginationConfig(object):
     @classmethod
     def from_request(cls, request, raise_invalid_params=True,
                      default_limit=None):
-        def get_param(name, default=None):
-            lst = request.args.get(name, [])
-            if len(lst) > 1:
-                raise SynapseError(
-                    400, "%s must be specified only once" % (name,)
-                )
-            elif len(lst) == 1:
-                return lst[0]
-            else:
-                return default
-
-        direction = get_param("dir", 'f')
-        if direction not in ['f', 'b']:
-            raise SynapseError(400, "'dir' parameter is invalid.")
-
-        from_tok = get_param("from")
-        to_tok = get_param("to")
+        direction = parse_string(request, "dir", default='f', allowed_values=['f', 'b'])
+
+        from_tok = parse_string(request, "from")
+        to_tok = parse_string(request, "to")
 
         try:
             if from_tok == "END":
@@ -88,12 +76,10 @@ class PaginationConfig(object):
         except Exception:
             raise SynapseError(400, "'to' paramater is invalid")
 
-        limit = get_param("limit", None)
-        if limit is not None and not limit.isdigit():
-            raise SynapseError(400, "'limit' parameter must be an integer.")
+        limit = parse_integer(request, "limit", default=default_limit)
 
-        if limit is None:
-            limit = default_limit
+        if limit and limit < 0:
+            raise SynapseError(400, "Limit must be 0 or above")
 
         try:
             return PaginationConfig(from_tok, to_tok, direction, limit)
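
One behavioural nuance: the removed get_param rejected a query parameter
supplied more than once, whereas the shared parsers take a single value. A
hedged sketch of what parse_string does (the signature matches the call
sites in this commit; the body is an approximation, not the synapse
implementation):

    def parse_string(request, name, default=None, required=False,
                     allowed_values=None):
        args = request.args.get(name, [])
        if not args:
            if required:
                raise SynapseError(400, "Missing %s query parameter" % (name,))
            return default
        value = args[0]  # repeated parameters: the first value wins
        if allowed_values is not None and value not in allowed_values:
            raise SynapseError(400, "Invalid %s query parameter" % (name,))
        return value
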
diff --git a/synapse/util/async.py b/synapse/util/async.py
index 5d0fb39130..a7094e2fb4 100644
--- a/synapse/util/async.py
+++ b/synapse/util/async.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2018 New Vector Ltd.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,7 +13,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
+import collections
 import logging
 from contextlib import contextmanager
 
@@ -156,54 +157,72 @@ def concurrently_execute(func, args, limit):
 
 
 class Linearizer(object):
-    """Linearizes access to resources based on a key. Useful to ensure only one
-    thing is happening at a time on a given resource.
+    """Limits concurrent access to resources based on a key. Useful to ensure
+    only a few things happen at a time on a given resource.
 
     Example:
 
-        with (yield linearizer.queue("test_key")):
+        with (yield limiter.queue("test_key")):
             # do some work.
 
     """
-    def __init__(self, name=None, clock=None):
+    def __init__(self, name=None, max_count=1, clock=None):
+        """
+        Args:
+            max_count(int): The maximum number of concurrent accesses
+        """
         if name is None:
             self.name = id(self)
         else:
             self.name = name
-        self.key_to_defer = {}
 
         if not clock:
             from twisted.internet import reactor
             clock = Clock(reactor)
         self._clock = clock
+        self.max_count = max_count
+
+        # key_to_defer is a map from the key to a 2 element list where
+        # the first element is the number of things executing, and
+        # the second element is an OrderedDict, where the keys are deferreds for the
+        # things blocked from executing.
+        self.key_to_defer = {}
 
     @defer.inlineCallbacks
     def queue(self, key):
-        # If there is already a deferred in the queue, we pull it out so that
-        # we can wait on it later.
-        # Then we replace it with a deferred that we resolve *after* the
-        # context manager has exited.
-        # We only return the context manager after the previous deferred has
-        # resolved.
-        # This all has the net effect of creating a chain of deferreds that
-        # wait for the previous deferred before starting their work.
-        current_defer = self.key_to_defer.get(key)
+        entry = self.key_to_defer.setdefault(key, [0, collections.OrderedDict()])
 
-        new_defer = defer.Deferred()
-        self.key_to_defer[key] = new_defer
+        # If the number of things executing has reached the maximum, add
+        # a deferred to the list of blocked items.
+        # When one of the things currently executing finishes it will
+        # callback this item so that it can continue executing.
+        if entry[0] >= self.max_count:
+            new_defer = defer.Deferred()
+            entry[1][new_defer] = 1
 
-        if current_defer:
             logger.info(
-                "Waiting to acquire linearizer lock %r for key %r", self.name, key
+                "Waiting to acquire linearizer lock %r for key %r", self.name, key,
             )
             try:
-                with PreserveLoggingContext():
-                    yield current_defer
-            except Exception:
-                logger.exception("Unexpected exception in Linearizer")
-
-            logger.info("Acquired linearizer lock %r for key %r", self.name,
-                        key)
+                yield make_deferred_yieldable(new_defer)
+            except Exception as e:
+                if isinstance(e, CancelledError):
+                    logger.info(
+                        "Cancelling wait for linearizer lock %r for key %r",
+                        self.name, key,
+                    )
+                else:
+                    logger.warn(
+                        "Unexpected exception waiting for linearizer lock %r for key %r",
+                        self.name, key,
+                    )
+
+                # we just have to take ourselves back out of the queue.
+                del entry[1][new_defer]
+                raise
+
+            logger.info("Acquired linearizer lock %r for key %r", self.name, key)
+            entry[0] += 1
 
             # if the code holding the lock completes synchronously, then it
             # will recursively run the next claimant on the list. That can
@@ -213,15 +232,15 @@ class Linearizer(object):
             # In order to break the cycle, we add a cheeky sleep(0) here to
             # ensure that we fall back to the reactor between each iteration.
             #
-            # (There's no particular need for it to happen before we return
-            # the context manager, but it needs to happen while we hold the
-            # lock, and the context manager's exit code must be synchronous,
-            # so actually this is the only sensible place.
+            # (This needs to happen while we hold the lock, and the context manager's exit
+            # code must be synchronous, so this is the only sensible place.)
             yield self._clock.sleep(0)
 
         else:
-            logger.info("Acquired uncontended linearizer lock %r for key %r",
-                        self.name, key)
+            logger.info(
+                "Acquired uncontended linearizer lock %r for key %r", self.name, key,
+            )
+            entry[0] += 1
 
         @contextmanager
         def _ctx_manager():
@@ -229,73 +248,15 @@ class Linearizer(object):
                 yield
             finally:
                 logger.info("Releasing linearizer lock %r for key %r", self.name, key)
-                with PreserveLoggingContext():
-                    new_defer.callback(None)
-                current_d = self.key_to_defer.get(key)
-                if current_d is new_defer:
-                    self.key_to_defer.pop(key, None)
-
-        defer.returnValue(_ctx_manager())
-
-
-class Limiter(object):
-    """Limits concurrent access to resources based on a key. Useful to ensure
-    only a few thing happen at a time on a given resource.
-
-    Example:
-
-        with (yield limiter.queue("test_key")):
-            # do some work.
-
-    """
-    def __init__(self, max_count):
-        """
-        Args:
-            max_count(int): The maximum number of concurrent access
-        """
-        self.max_count = max_count
-
-        # key_to_defer is a map from the key to a 2 element list where
-        # the first element is the number of things executing
-        # the second element is a list of deferreds for the things blocked from
-        # executing.
-        self.key_to_defer = {}
-
-    @defer.inlineCallbacks
-    def queue(self, key):
-        entry = self.key_to_defer.setdefault(key, [0, []])
-
-        # If the number of things executing is greater than the maximum
-        # then add a deferred to the list of blocked items
-        # When on of the things currently executing finishes it will callback
-        # this item so that it can continue executing.
-        if entry[0] >= self.max_count:
-            new_defer = defer.Deferred()
-            entry[1].append(new_defer)
-
-            logger.info("Waiting to acquire limiter lock for key %r", key)
-            with PreserveLoggingContext():
-                yield new_defer
-            logger.info("Acquired limiter lock for key %r", key)
-        else:
-            logger.info("Acquired uncontended limiter lock for key %r", key)
-
-        entry[0] += 1
-
-        @contextmanager
-        def _ctx_manager():
-            try:
-                yield
-            finally:
-                logger.info("Releasing limiter lock for key %r", key)
 
                 # We've finished executing so check if there are any things
                 # blocked waiting to execute and start one of them
                 entry[0] -= 1
 
                 if entry[1]:
-                    next_def = entry[1].pop(0)
+                    (next_def, _) = entry[1].popitem(last=False)
 
+                    # we need to run the next thing in the sentinel context.
                     with PreserveLoggingContext():
                         next_def.callback(None)
                 elif entry[0] == 0:
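
The merged Linearizer now subsumes the old Limiter: with max_count > 1 it
behaves as the Limiter did, allowing up to max_count concurrent holders per
key and queueing the rest in FIFO order. A usage sketch:

    from twisted.internet import defer

    from synapse.util.async import Linearizer

    limiter = Linearizer(name="remote_fetch", max_count=3)

    @defer.inlineCallbacks
    def fetch(server_name):
        with (yield limiter.queue(server_name)):
            # at most three of these run at once for any given server_name;
            # a fourth caller waits until one of the three releases the lock
            yield do_fetch(server_name)  # hypothetical work function
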
diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py
index 4abca91f6d..465adc54a8 100644
--- a/synapse/util/caches/expiringcache.py
+++ b/synapse/util/caches/expiringcache.py
@@ -16,6 +16,7 @@
 import logging
 from collections import OrderedDict
 
+from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.util.caches import register_cache
 
 logger = logging.getLogger(__name__)
@@ -63,7 +64,10 @@ class ExpiringCache(object):
             return
 
         def f():
-            self._prune_cache()
+            run_as_background_process(
+                "prune_cache_%s" % self._cache_name,
+                self._prune_cache,
+            )
 
         self._clock.looping_call(f, self._expiry_ms / 2)
 
diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py
index 8637867c6d..f2bde74dc5 100644
--- a/synapse/util/caches/stream_change_cache.py
+++ b/synapse/util/caches/stream_change_cache.py
@@ -74,14 +74,13 @@ class StreamChangeCache(object):
         assert type(stream_pos) is int
 
         if stream_pos >= self._earliest_known_stream_pos:
-            not_known_entities = set(entities) - set(self._entity_to_key)
-
-            result = (
-                {self._cache[k] for k in self._cache.islice(
-                    start=self._cache.bisect_right(stream_pos))}
-                .intersection(entities)
-                .union(not_known_entities)
-            )
+            changed_entities = {
+                self._cache[k] for k in self._cache.islice(
+                    start=self._cache.bisect_right(stream_pos),
+                )
+            }
+
+            result = changed_entities.intersection(entities)
 
             self.metrics.inc_hits()
         else:
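
Behaviour note on the hunk above: the old code also unioned in every entity the cache had never seen (not_known_entities), whereas the new code reports only entities with a change recorded after stream_pos. The cache is a sortedcontainers mapping from stream position to entity, so the lookup is a bisect plus a slice. A small illustration (the user IDs are made up):

from sortedcontainers import SortedDict

# stream position -> entity that changed at that position
cache = SortedDict({10: "@alice:example.com", 12: "@bob:example.com"})
stream_pos = 10

# everything that changed strictly after stream_pos
changed_entities = {
    cache[k]
    for k in cache.islice(start=cache.bisect_right(stream_pos))
}
assert changed_entities == {"@bob:example.com"}

# unknown entities are no longer assumed to have changed
entities = ["@bob:example.com", "@carol:example.com"]
assert changed_entities.intersection(entities) == {"@bob:example.com"}
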
diff --git a/synapse/util/distributor.py b/synapse/util/distributor.py
index 734331caaa..194da87639 100644
--- a/synapse/util/distributor.py
+++ b/synapse/util/distributor.py
@@ -17,20 +17,18 @@ import logging
 
 from twisted.internet import defer
 
-from synapse.util import unwrapFirstError
-from synapse.util.logcontext import PreserveLoggingContext
+from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
 logger = logging.getLogger(__name__)
 
 
 def user_left_room(distributor, user, room_id):
-    with PreserveLoggingContext():
-        distributor.fire("user_left_room", user=user, room_id=room_id)
+    distributor.fire("user_left_room", user=user, room_id=room_id)
 
 
 def user_joined_room(distributor, user, room_id):
-    with PreserveLoggingContext():
-        distributor.fire("user_joined_room", user=user, room_id=room_id)
+    distributor.fire("user_joined_room", user=user, room_id=room_id)
 
 
 class Distributor(object):
@@ -44,9 +42,7 @@ class Distributor(object):
       model will do for today.
     """
 
-    def __init__(self, suppress_failures=True):
-        self.suppress_failures = suppress_failures
-
+    def __init__(self):
         self.signals = {}
         self.pre_registration = {}
 
@@ -56,7 +52,6 @@ class Distributor(object):
 
         self.signals[name] = Signal(
             name,
-            suppress_failures=self.suppress_failures,
         )
 
         if name in self.pre_registration:
@@ -75,10 +70,18 @@ class Distributor(object):
             self.pre_registration[name].append(observer)
 
     def fire(self, name, *args, **kwargs):
+        """Dispatches the given signal to the registered observers.
+
+        Runs the observers as a background process. Does not return a deferred.
+        """
         if name not in self.signals:
             raise KeyError("%r does not have a signal named %s" % (self, name))
 
-        return self.signals[name].fire(*args, **kwargs)
+        run_as_background_process(
+            name,
+            self.signals[name].fire,
+            *args, **kwargs
+        )
 
 
 class Signal(object):
@@ -91,9 +94,8 @@ class Signal(object):
     method into all of the observers.
     """
 
-    def __init__(self, name, suppress_failures):
+    def __init__(self, name):
         self.name = name
-        self.suppress_failures = suppress_failures
         self.observers = []
 
     def observe(self, observer):
@@ -103,7 +105,6 @@ class Signal(object):
         Each observer callable may return a Deferred."""
         self.observers.append(observer)
 
-    @defer.inlineCallbacks
     def fire(self, *args, **kwargs):
         """Invokes every callable in the observer list, passing in the args and
         kwargs. Exceptions thrown by observers are logged but ignored. It is
@@ -121,22 +122,17 @@ class Signal(object):
                         failure.type,
                         failure.value,
                         failure.getTracebackObject()))
-                if not self.suppress_failures:
-                    return failure
 
             return defer.maybeDeferred(observer, *args, **kwargs).addErrback(eb)
 
-        with PreserveLoggingContext():
-            deferreds = [
-                do(observer)
-                for observer in self.observers
-            ]
-
-            res = yield defer.gatherResults(
-                deferreds, consumeErrors=True
-            ).addErrback(unwrapFirstError)
+        deferreds = [
+            run_in_background(do, o)
+            for o in self.observers
+        ]
 
-        defer.returnValue(res)
+        return make_deferred_yieldable(defer.gatherResults(
+            deferreds, consumeErrors=True,
+        ))
 
     def __repr__(self):
         return "<Signal name=%r>" % (self.name,)
diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py
index f6c7175f74..8dcae50b39 100644
--- a/synapse/util/logcontext.py
+++ b/synapse/util/logcontext.py
@@ -99,6 +99,17 @@ class ContextResourceUsage(object):
         self.db_sched_duration_sec = 0
         self.evt_db_fetch_count = 0
 
+    def __repr__(self):
+        return ("<ContextResourceUsage ru_stime='%r', ru_utime='%r', "
+                "db_txn_count='%r', db_txn_duration_sec='%r', "
+                "db_sched_duration_sec='%r', evt_db_fetch_count='%r'>") % (
+                    self.ru_stime,
+                    self.ru_utime,
+                    self.db_txn_count,
+                    self.db_txn_duration_sec,
+                    self.db_sched_duration_sec,
+                    self.evt_db_fetch_count,)
+
     def __iadd__(self, other):
         """Add another ContextResourceUsage's stats to this one's.
 
diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py
index 6ba7107896..97f1267380 100644
--- a/synapse/util/metrics.py
+++ b/synapse/util/metrics.py
@@ -104,12 +104,19 @@ class Measure(object):
             logger.warn("Expected context. (%r)", self.name)
             return
 
-        usage = context.get_resource_usage() - self.start_usage
-        block_ru_utime.labels(self.name).inc(usage.ru_utime)
-        block_ru_stime.labels(self.name).inc(usage.ru_stime)
-        block_db_txn_count.labels(self.name).inc(usage.db_txn_count)
-        block_db_txn_duration.labels(self.name).inc(usage.db_txn_duration_sec)
-        block_db_sched_duration.labels(self.name).inc(usage.db_sched_duration_sec)
+        current = context.get_resource_usage()
+        usage = current - self.start_usage
+        try:
+            block_ru_utime.labels(self.name).inc(usage.ru_utime)
+            block_ru_stime.labels(self.name).inc(usage.ru_stime)
+            block_db_txn_count.labels(self.name).inc(usage.db_txn_count)
+            block_db_txn_duration.labels(self.name).inc(usage.db_txn_duration_sec)
+            block_db_sched_duration.labels(self.name).inc(usage.db_sched_duration_sec)
+        except ValueError:
+            logger.warn(
+                "Failed to save metrics! OLD: %r, NEW: %r",
+                self.start_usage, current
+            )
 
         if self.created_context:
             self.start_context.__exit__(exc_type, exc_val, exc_tb)
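
The new try/except exists because prometheus_client counters reject negative increments with a ValueError, and a usage delta can come out negative if the resource accounting ever goes backwards. A minimal reproduction of the failure mode being guarded against (the counter name is illustrative):

from prometheus_client import Counter

block_ru_utime = Counter(
    "example_block_ru_utime", "illustrative counter", ["block_name"],
)

try:
    # a negative delta, as might happen if usage stats went backwards
    block_ru_utime.labels("some_block").inc(-0.5)
except ValueError:
    print("increment refused; log the old and new usage instead")
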
diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py
index 5ac33b2132..7deb38f2a7 100644
--- a/synapse/util/ratelimitutils.py
+++ b/synapse/util/ratelimitutils.py
@@ -92,13 +92,22 @@ class _PerHostRatelimiter(object):
 
         self.window_size = window_size
         self.sleep_limit = sleep_limit
-        self.sleep_msec = sleep_msec
+        self.sleep_sec = sleep_msec / 1000.0
         self.reject_limit = reject_limit
         self.concurrent_requests = concurrent_requests
 
+        # request_id objects for requests which have been put to sleep
         self.sleeping_requests = set()
+
+        # map from request_id object to Deferred for requests which are ready
+        # for processing but have been queued
         self.ready_request_queue = collections.OrderedDict()
+
+        # request id objects for requests which are in progress
         self.current_processing = set()
+
+        # times at which we have recently (within the last window_size ms)
+        # received requests.
         self.request_times = []
 
     @contextlib.contextmanager
@@ -117,11 +126,15 @@ class _PerHostRatelimiter(object):
 
     def _on_enter(self, request_id):
         time_now = self.clock.time_msec()
+
+        # remove any entries from request_times which aren't within the window
         self.request_times[:] = [
             r for r in self.request_times
             if time_now - r < self.window_size
         ]
 
+        # reject the request if we already have too many queued up (either
+        # sleeping or in the ready queue).
         queue_size = len(self.ready_request_queue) + len(self.sleeping_requests)
         if queue_size > self.reject_limit:
             raise LimitExceededError(
@@ -134,9 +147,13 @@ class _PerHostRatelimiter(object):
 
         def queue_request():
             if len(self.current_processing) > self.concurrent_requests:
-                logger.debug("Ratelimit [%s]: Queue req", id(request_id))
                 queue_defer = defer.Deferred()
                 self.ready_request_queue[request_id] = queue_defer
+                logger.info(
+                    "Ratelimiter: queueing request (queue now %i items)",
+                    len(self.ready_request_queue),
+                )
+
                 return queue_defer
             else:
                 return defer.succeed(None)
@@ -148,10 +165,9 @@ class _PerHostRatelimiter(object):
 
         if len(self.request_times) > self.sleep_limit:
             logger.debug(
-                "Ratelimit [%s]: sleeping req",
-                id(request_id),
+                "Ratelimiter: sleeping request for %f sec", self.sleep_sec,
             )
-            ret_defer = run_in_background(self.clock.sleep, self.sleep_msec / 1000.0)
+            ret_defer = run_in_background(self.clock.sleep, self.sleep_sec)
 
             self.sleeping_requests.add(request_id)
 
@@ -200,11 +216,8 @@ class _PerHostRatelimiter(object):
         )
         self.current_processing.discard(request_id)
         try:
-            request_id, deferred = self.ready_request_queue.popitem()
-
-            # XXX: why do we do the following? the on_start callback above will
-            # do it for us.
-            self.current_processing.add(request_id)
+            # start processing the next item on the queue.
+            _, deferred = self.ready_request_queue.popitem(last=False)
 
             with PreserveLoggingContext():
                 deferred.callback(None)
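
Pulling the _on_enter control flow together: a request is rejected outright if too many are already parked, slept first if the host has been too busy within the window, and queued if too many are in flight (in the real code the concurrency check runs after any sleep completes). A condensed, deferred-free sketch of that ladder (the function and argument names are illustrative):

def on_enter_steps(queue_size, recent_requests, in_flight,
                   reject_limit, sleep_limit, concurrent_requests):
    # returns the sequence of things that happen to a new request
    if queue_size > reject_limit:
        return ["reject"]          # LimitExceededError in the real code
    steps = []
    if recent_requests > sleep_limit:
        steps.append("sleep")      # clock.sleep(sleep_sec) first
    if in_flight > concurrent_requests:
        steps.append("queue")      # parked on ready_request_queue, FIFO
    steps.append("process")
    return steps


assert on_enter_steps(51, 0, 0, 50, 10, 10) == ["reject"]
assert on_enter_steps(0, 11, 11, 50, 10, 10) == ["sleep", "queue", "process"]
assert on_enter_steps(0, 0, 0, 50, 10, 10) == ["process"]
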
diff --git a/synapse/visibility.py b/synapse/visibility.py
index 015c2bab37..ba0499a022 100644
--- a/synapse/visibility.py
+++ b/synapse/visibility.py
@@ -12,14 +12,18 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import itertools
+
 import logging
 import operator
 
+from six import iteritems, itervalues
+from six.moves import map
+
 from twisted.internet import defer
 
 from synapse.api.constants import EventTypes, Membership
 from synapse.events.utils import prune_event
+from synapse.types import get_domain_from_id
 from synapse.util.logcontext import make_deferred_yieldable, preserve_fn
 
 logger = logging.getLogger(__name__)
@@ -218,10 +222,161 @@ def filter_events_for_client(store, user_id, events, is_peeking=False,
         return event
 
     # check each event: gives an iterable[None|EventBase]
-    filtered_events = itertools.imap(allowed, events)
+    filtered_events = map(allowed, events)
 
     # remove the None entries
     filtered_events = filter(operator.truth, filtered_events)
 
     # we turn it into a list before returning it.
     defer.returnValue(list(filtered_events))
+
+
+@defer.inlineCallbacks
+def filter_events_for_server(store, server_name, events):
+    # Whatever else we do, we need to check for senders which have requested
+    # erasure of their data.
+    erased_senders = yield store.are_users_erased(
+        (e.sender for e in events),
+    )
+
+    def redact_disallowed(event, state):
+        # if the sender has been gdpr17ed, always return a redacted
+        # copy of the event.
+        if erased_senders[event.sender]:
+            logger.info(
+                "Sender of %s has been erased, redacting",
+                event.event_id,
+            )
+            return prune_event(event)
+
+        # state will be None if we decided we didn't need to filter by
+        # room membership.
+        if not state:
+            return event
+
+        history = state.get((EventTypes.RoomHistoryVisibility, ''), None)
+        if history:
+            visibility = history.content.get("history_visibility", "shared")
+            if visibility in ["invited", "joined"]:
+                # We now loop through all state events looking for
+                # membership states for the requesting server to determine
+                # if the server is either in the room or has been invited
+                # into the room.
+                for ev in itervalues(state):
+                    if ev.type != EventTypes.Member:
+                        continue
+                    try:
+                        domain = get_domain_from_id(ev.state_key)
+                    except Exception:
+                        continue
+
+                    if domain != server_name:
+                        continue
+
+                    memtype = ev.membership
+                    if memtype == Membership.JOIN:
+                        return event
+                    elif memtype == Membership.INVITE:
+                        if visibility == "invited":
+                            return event
+                else:
+                    # server has no users in the room: redact
+                    return prune_event(event)
+
+        return event
+
+    # Next, let's check to see if all the events have a history visibility
+    # of "shared" or "world_readable". If that's the case then we don't
+    # need to check membership (as we know the server is in the room).
+    event_to_state_ids = yield store.get_state_ids_for_events(
+        frozenset(e.event_id for e in events),
+        types=(
+            (EventTypes.RoomHistoryVisibility, ""),
+        )
+    )
+
+    visibility_ids = set()
+    for sids in itervalues(event_to_state_ids):
+        hist = sids.get((EventTypes.RoomHistoryVisibility, ""))
+        if hist:
+            visibility_ids.add(hist)
+
+    # If we failed to find any history visibility events then the default
+    # is "shared" visiblity.
+    if not visibility_ids:
+        all_open = True
+    else:
+        event_map = yield store.get_events(visibility_ids)
+        all_open = all(
+            e.content.get("history_visibility") in (None, "shared", "world_readable")
+            for e in itervalues(event_map)
+        )
+
+    if all_open:
+        # all the history_visibility state affecting these events is open, so
+        # we don't need to filter by membership state. We *do* need to check
+        # for user erasure, though.
+        if erased_senders:
+            events = [
+                redact_disallowed(e, None)
+                for e in events
+            ]
+
+        defer.returnValue(events)
+
+    # Ok, so we're dealing with events that have non-trivial visibility
+    # rules, so we need to also get the memberships of the room.
+
+    # first, for each event we're wanting to return, get the event_ids
+    # of the history vis and membership state at those events.
+    event_to_state_ids = yield store.get_state_ids_for_events(
+        frozenset(e.event_id for e in events),
+        types=(
+            (EventTypes.RoomHistoryVisibility, ""),
+            (EventTypes.Member, None),
+        )
+    )
+
+    # We only want to pull out member events that correspond to the
+    # server's domain.
+    #
+    # event_to_state_ids contains lots of duplicates, so it turns out to be
+    # cheaper to build a complete set of unique
+    # ((type, state_key), event_id) tuples, and then filter out the ones we
+    # don't want.
+    #
+    state_key_to_event_id_set = {
+        e
+        for key_to_eid in itervalues(event_to_state_ids)
+        for e in key_to_eid.items()
+    }
+
+    def include(typ, state_key):
+        if typ != EventTypes.Member:
+            return True
+
+        # we avoid using get_domain_from_id here for efficiency.
+        idx = state_key.find(":")
+        if idx == -1:
+            return False
+        return state_key[idx + 1:] == server_name
+
+    event_map = yield store.get_events([
+        e_id
+        for key, e_id in state_key_to_event_id_set
+        if include(key[0], key[1])
+    ])
+
+    event_to_state = {
+        e_id: {
+            key: event_map[inner_e_id]
+            for key, inner_e_id in iteritems(key_to_eid)
+            if inner_e_id in event_map
+        }
+        for e_id, key_to_eid in iteritems(event_to_state_ids)
+    }
+
+    defer.returnValue([
+        redact_disallowed(e, event_to_state[e.event_id])
+        for e in events
+    ])
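
One detail worth calling out in the new filter_events_for_server: include() skips get_domain_from_id purely for speed, splitting on the first colon by hand. A quick sketch of the same check in isolation (the IDs are made up):

def state_key_matches_server(state_key, server_name):
    # same logic as include() above, applied to Member state keys
    idx = state_key.find(":")
    if idx == -1:
        return False               # no domain part: cannot match
    return state_key[idx + 1:] == server_name


assert state_key_matches_server("@alice:example.com", "example.com")
assert not state_key_matches_server("@alice:elsewhere.org", "example.com")
assert not state_key_matches_server("no-colon-here", "example.com")
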