Diffstat (limited to 'synapse')
-rw-r--r--  synapse/__init__.py                       |   4
-rw-r--r--  synapse/app/_base.py                      |  28
-rw-r--r--  synapse/app/client_reader.py              |   2
-rw-r--r--  synapse/app/federation_reader.py          |   2
-rwxr-xr-x  synapse/app/homeserver.py                 |  41
-rw-r--r--  synapse/config/key.py                     |   2
-rw-r--r--  synapse/config/logger.py                  |   2
-rw-r--r--  synapse/config/room_directory.py          | 170
-rw-r--r--  synapse/config/server.py                  |   6
-rw-r--r--  synapse/crypto/keyring.py                 |   4
-rw-r--r--  synapse/groups/attestations.py            |   7
-rw-r--r--  synapse/handlers/_base.py                 |   2
-rw-r--r--  synapse/handlers/device.py                |  14
-rw-r--r--  synapse/handlers/directory.py             |  29
-rw-r--r--  synapse/handlers/groups_local.py          |  12
-rw-r--r--  synapse/handlers/register.py              | 114
-rw-r--r--  synapse/handlers/room.py                  |   1
-rw-r--r--  synapse/handlers/user_directory.py        |  94
-rw-r--r--  synapse/http/server.py                    |   6
-rw-r--r--  synapse/python_dependencies.py            |   7
-rw-r--r--  synapse/replication/http/__init__.py      |   4
-rw-r--r--  synapse/replication/http/login.py         |  74
-rw-r--r--  synapse/replication/http/register.py      |  91
-rw-r--r--  synapse/rest/client/v1/login.py           |  59
-rw-r--r--  synapse/rest/client/v2_alpha/register.py  |  38
-rw-r--r--  synapse/rest/well_known.py                |   3
-rw-r--r--  synapse/storage/registration.py           | 230
-rw-r--r--  synapse/storage/state.py                  |  25
-rw-r--r--  synapse/storage/user_directory.py         | 153
29 files changed, 821 insertions, 403 deletions
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 048d6e572f..2004375f98 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
-# Copyright 2018 New Vector Ltd
+# Copyright 2018-9 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -27,4 +27,4 @@ try:
 except ImportError:
     pass
 
-__version__ = "0.99.0"
+__version__ = "0.99.1.1"
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 0284151d0f..32e8b8a3f5 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -155,9 +155,8 @@ def listen_metrics(bind_addresses, port):
     from prometheus_client import start_http_server
 
     for host in bind_addresses:
-        reactor.callInThread(start_http_server, int(port),
-                             addr=host, registry=RegistryProxy)
-        logger.info("Metrics now reporting on %s:%d", host, port)
+        logger.info("Starting metrics listener on %s:%d", host, port)
+        start_http_server(port, addr=host, registry=RegistryProxy)
 
 
 def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50):
@@ -165,21 +164,23 @@ def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50):
     Create a TCP socket for a port and several addresses
 
     Returns:
-        list (empty)
+        list[twisted.internet.tcp.Port]: listening for TCP connections
     """
+    r = []
     for address in bind_addresses:
         try:
-            reactor.listenTCP(
-                port,
-                factory,
-                backlog,
-                address
+            r.append(
+                reactor.listenTCP(
+                    port,
+                    factory,
+                    backlog,
+                    address
+                )
             )
         except error.CannotListenError as e:
             check_bind_error(e, address, bind_addresses)
 
-    logger.info("Synapse now listening on TCP port %d", port)
-    return []
+    return r
 
 
 def listen_ssl(
@@ -206,7 +207,6 @@ def listen_ssl(
         except error.CannotListenError as e:
             check_bind_error(e, address, bind_addresses)
 
-    logger.info("Synapse now listening on port %d (TLS)", port)
     return r
 
 
@@ -232,6 +232,10 @@ def refresh_certificate(hs):
             # requests. This factory attribute is public but missing from
             # Twisted's documentation.
             if isinstance(i.factory, TLSMemoryBIOFactory):
+                addr = i.getHost()
+                logger.info(
+                    "Replacing TLS context factory on [%s]:%i", addr.host, addr.port,
+                )
                 # We want to replace TLS factories with a new one, with the new
                 # TLS configuration. We do this by reaching in and pulling out
                 # the wrappedFactory, and then re-wrapping it.
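
A minimal standalone sketch (not part of this changeset; the factory and port
number are illustrative) of why listen_tcp now returns the twisted Port
objects: a caller can keep them and close the listeners cleanly at shutdown,
as homeserver.py does for the replication listener below.

    from twisted.internet import protocol, reactor

    class DummyFactory(protocol.Factory):
        # stand-in for SynapseSite / ReplicationStreamProtocolFactory
        protocol = protocol.Protocol

    ports = []
    for address in ["127.0.0.1"]:
        ports.append(reactor.listenTCP(9099, DummyFactory(), 50, address))

    for p in ports:
        # register each Port's stopListening as a pre-shutdown trigger
        reactor.addSystemEventTrigger("before", "shutdown", p.stopListening)
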
diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py
index a9d2147022..9250b6c239 100644
--- a/synapse/app/client_reader.py
+++ b/synapse/app/client_reader.py
@@ -47,6 +47,7 @@ from synapse.rest.client.v1.room import (
     RoomMemberListRestServlet,
     RoomStateRestServlet,
 )
+from synapse.rest.client.v2_alpha.register import RegisterRestServlet
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
@@ -92,6 +93,7 @@ class ClientReaderServer(HomeServer):
                     JoinedRoomMemberListRestServlet(self).register(resource)
                     RoomStateRestServlet(self).register(resource)
                     RoomEventContextServlet(self).register(resource)
+                    RegisterRestServlet(self).register(resource)
 
                     resources.update({
                         "/_matrix/client/r0": resource,
diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py
index 6ee2b76dcd..b116c17669 100644
--- a/synapse/app/federation_reader.py
+++ b/synapse/app/federation_reader.py
@@ -40,6 +40,7 @@ from synapse.replication.slave.storage.profile import SlavedProfileStore
 from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore
 from synapse.replication.slave.storage.pushers import SlavedPusherStore
 from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
+from synapse.replication.slave.storage.registration import SlavedRegistrationStore
 from synapse.replication.slave.storage.room import RoomStore
 from synapse.replication.slave.storage.transactions import SlavedTransactionStore
 from synapse.replication.tcp.client import ReplicationClientHandler
@@ -62,6 +63,7 @@ class FederationReaderSlavedStore(
     SlavedReceiptsStore,
     SlavedEventStore,
     SlavedKeyStore,
+    SlavedRegistrationStore,
     RoomStore,
     DirectoryStore,
     SlavedTransactionStore,
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index dbd98d394f..05a97979ec 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2019 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,11 +15,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
+
 import gc
 import logging
 import os
 import sys
-import traceback
 
 from six import iteritems
 
@@ -27,6 +29,7 @@ from prometheus_client import Gauge
 
 from twisted.application import service
 from twisted.internet import defer, reactor
+from twisted.python.failure import Failure
 from twisted.web.resource import EncodingResourceWrapper, NoResource
 from twisted.web.server import GzipEncoderFactory
 from twisted.web.static import File
@@ -121,7 +124,7 @@ class SynapseHomeServer(HomeServer):
         root_resource = create_resource_tree(resources, root_resource)
 
         if tls:
-            return listen_ssl(
+            ports = listen_ssl(
                 bind_addresses,
                 port,
                 SynapseSite(
@@ -134,9 +137,10 @@ class SynapseHomeServer(HomeServer):
                 self.tls_server_context_factory,
                 reactor=self.get_reactor(),
             )
+            logger.info("Synapse now listening on TCP port %d (TLS)", port)
 
         else:
-            return listen_tcp(
+            ports = listen_tcp(
                 bind_addresses,
                 port,
                 SynapseSite(
@@ -148,6 +152,9 @@ class SynapseHomeServer(HomeServer):
                 ),
                 reactor=self.get_reactor(),
             )
+            logger.info("Synapse now listening on TCP port %d", port)
+
+        return ports
 
     def _configure_named_resource(self, name, compress=False):
         """Build a resource map for a named resource
@@ -262,14 +269,14 @@ class SynapseHomeServer(HomeServer):
                     )
                 )
             elif listener["type"] == "replication":
-                bind_addresses = listener["bind_addresses"]
-                for address in bind_addresses:
-                    factory = ReplicationStreamProtocolFactory(self)
-                    server_listener = reactor.listenTCP(
-                        listener["port"], factory, interface=address
-                    )
+                services = listen_tcp(
+                    listener["bind_addresses"],
+                    listener["port"],
+                    ReplicationStreamProtocolFactory(self),
+                )
+                for s in services:
                     reactor.addSystemEventTrigger(
-                        "before", "shutdown", server_listener.stopListening,
+                        "before", "shutdown", s.stopListening,
                     )
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
@@ -390,10 +397,10 @@ def setup(config_options):
         # is less than our re-registration threshold.
         provision = False
 
-        if (cert_days_remaining is None):
-            provision = True
-
-        if cert_days_remaining > hs.config.acme_reprovision_threshold:
+        if (
+            cert_days_remaining is None or
+            cert_days_remaining < hs.config.acme_reprovision_threshold
+        ):
             provision = True
 
         if provision:
@@ -434,7 +441,11 @@ def setup(config_options):
             hs.get_datastore().start_doing_background_updates()
         except Exception:
             # Print the exception and bail out.
-            traceback.print_exc(file=sys.stderr)
+            print("Error during startup:", file=sys.stderr)
+
+            # this gives better tracebacks than traceback.print_exc()
+            Failure().printTraceback(file=sys.stderr)
+
             if reactor.running:
                 reactor.stop()
             sys.exit(1)
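
The ACME hunk above folds two checks into one predicate: the old code compared
cert_days_remaining against the threshold even when it was None, and
re-provisioned when the certificate still had more days left than the
threshold. A standalone sketch of the corrected logic (the 30-day default is
an assumption; the real value comes from hs.config.acme_reprovision_threshold):

    def should_provision(cert_days_remaining, reprovision_threshold=30):
        # provision if there is no certificate yet, or it expires soon
        return (
            cert_days_remaining is None
            or cert_days_remaining < reprovision_threshold
        )

    assert should_provision(None)       # no cert yet
    assert should_provision(5)          # expiring soon
    assert not should_provision(60)     # still comfortably valid
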
diff --git a/synapse/config/key.py b/synapse/config/key.py
index dce4b19a2d..499ffd4e06 100644
--- a/synapse/config/key.py
+++ b/synapse/config/key.py
@@ -56,7 +56,7 @@ class KeyConfig(Config):
         if not self.macaroon_secret_key:
             # Unfortunately, there are people out there that don't have this
             # set. Lets just be "nice" and derive one from their secret key.
-            logger.warn("Config is missing missing macaroon_secret_key")
+            logger.warn("Config is missing macaroon_secret_key")
             seed = bytes(self.signing_key[0])
             self.macaroon_secret_key = hashlib.sha256(seed).digest()
 
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index 4b938053fb..9b5994d55e 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -242,3 +242,5 @@ def setup_logging(config, use_worker_options=False):
         [_log],
         redirectStandardIO=not config.no_redirect_stdio,
     )
+    if not config.no_redirect_stdio:
+        print("Redirected stdout/stderr to logs")
diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py
index 9da13ab11b..c8e0abbae7 100644
--- a/synapse/config/room_directory.py
+++ b/synapse/config/room_directory.py
@@ -20,12 +20,37 @@ from ._base import Config, ConfigError
 
 class RoomDirectoryConfig(Config):
     def read_config(self, config):
-        alias_creation_rules = config["alias_creation_rules"]
+        alias_creation_rules = config.get("alias_creation_rules")
 
-        self._alias_creation_rules = [
-            _AliasRule(rule)
-            for rule in alias_creation_rules
-        ]
+        if alias_creation_rules is not None:
+            self._alias_creation_rules = [
+                _RoomDirectoryRule("alias_creation_rules", rule)
+                for rule in alias_creation_rules
+            ]
+        else:
+            self._alias_creation_rules = [
+                _RoomDirectoryRule(
+                    "alias_creation_rules", {
+                        "action": "allow",
+                    }
+                )
+            ]
+
+        room_list_publication_rules = config.get("room_list_publication_rules")
+
+        if room_list_publication_rules is not None:
+            self._room_list_publication_rules = [
+                _RoomDirectoryRule("room_list_publication_rules", rule)
+                for rule in room_list_publication_rules
+            ]
+        else:
+            self._room_list_publication_rules = [
+                _RoomDirectoryRule(
+                    "room_list_publication_rules", {
+                        "action": "allow",
+                    }
+                )
+            ]
 
     def default_config(self, config_dir_path, server_name, **kwargs):
         return """
@@ -33,60 +58,138 @@ class RoomDirectoryConfig(Config):
         # on this server.
         #
         # The format of this option is a list of rules that contain globs that
-        # match against user_id and the new alias (fully qualified with server
-        # name). The action in the first rule that matches is taken, which can
-        # currently either be "allow" or "deny".
+        # match against user_id, room_id and the new alias (fully qualified with
+        # server name). The action in the first rule that matches is taken,
+        # which can currently either be "allow" or "deny".
+        #
+        # Missing user_id/room_id/alias fields default to "*".
+        #
+        # If no rules match the request is denied. An empty list means no one
+        # can create aliases.
+        #
+        # Options for the rules include:
+        #
+        #   user_id: Matches against the creator of the alias
+        #   alias: Matches against the alias being created
+        #   room_id: Matches against the room ID the alias is being pointed at
+        #   action: Whether to "allow" or "deny" the request if the rule matches
+        #
+        # The default is:
+        #
+        # alias_creation_rules:
+        #     - user_id: "*"
+        #       alias: "*"
+        #       room_id: "*"
+        #       action: allow
+
+        # The `room_list_publication_rules` option controls who can publish and
+        # which rooms can be published in the public room list.
+        #
+        # The format of this option is the same as that for
+        # `alias_creation_rules`.
         #
-        # If no rules match the request is denied.
-        alias_creation_rules:
-            - user_id: "*"
-              alias: "*"
-              action: allow
+        # If the room has one or more aliases associated with it, only one of
+        # the aliases needs to match the alias rule. If there are no aliases
+        # then only rules with `alias: *` match.
+        #
+        # If no rules match the request is denied. An empty list means no one
+        # can publish rooms.
+        #
+        # Options for the rules include:
+        #
+        #   user_id: Matches against the creator of the alias
+        #   room_id: Matches against the room ID being published
+        #   alias: Matches against any current local or canonical aliases
+        #            associated with the room
+        #   action: Whether to "allow" or "deny" the request if the rule matches
+        #
+        # The default is:
+        #
+        # room_list_publication_rules:
+        #     - user_id: "*"
+        #       alias: "*"
+        #       room_id: "*"
+        #       action: allow
         """
 
-    def is_alias_creation_allowed(self, user_id, alias):
+    def is_alias_creation_allowed(self, user_id, room_id, alias):
         """Checks if the given user is allowed to create the given alias
 
         Args:
             user_id (str)
+            room_id (str)
             alias (str)
 
         Returns:
             boolean: True if user is allowed to create the alias
         """
         for rule in self._alias_creation_rules:
-            if rule.matches(user_id, alias):
+            if rule.matches(user_id, room_id, [alias]):
+                return rule.action == "allow"
+
+        return False
+
+    def is_publishing_room_allowed(self, user_id, room_id, aliases):
+        """Checks if the given user is allowed to publish the room
+
+        Args:
+            user_id (str)
+            room_id (str)
+            aliases (list[str]): any local aliases associated with the room
+
+        Returns:
+            boolean: True if user can publish room
+        """
+        for rule in self._room_list_publication_rules:
+            if rule.matches(user_id, room_id, aliases):
                 return rule.action == "allow"
 
         return False
 
 
-class _AliasRule(object):
-    def __init__(self, rule):
+class _RoomDirectoryRule(object):
+    """Helper class to test whether a room directory action is allowed, like
+    creating an alias or publishing a room.
+    """
+
+    def __init__(self, option_name, rule):
+        """
+        Args:
+            option_name (str): Name of the config option this rule belongs to
+            rule (dict): The rule as specified in the config
+        """
+
         action = rule["action"]
-        user_id = rule["user_id"]
-        alias = rule["alias"]
+        user_id = rule.get("user_id", "*")
+        room_id = rule.get("room_id", "*")
+        alias = rule.get("alias", "*")
 
         if action in ("allow", "deny"):
             self.action = action
         else:
             raise ConfigError(
-                "alias_creation_rules rules can only have action of 'allow'"
-                " or 'deny'"
+                "%s rules can only have action of 'allow'"
+                " or 'deny'" % (option_name,)
             )
 
+        self._alias_matches_all = alias == "*"
+
         try:
             self._user_id_regex = glob_to_regex(user_id)
             self._alias_regex = glob_to_regex(alias)
+            self._room_id_regex = glob_to_regex(room_id)
         except Exception as e:
             raise ConfigError("Failed to parse glob into regex: %s", e)
 
-    def matches(self, user_id, alias):
-        """Tests if this rule matches the given user_id and alias.
+    def matches(self, user_id, room_id, aliases):
+        """Tests if this rule matches the given user_id, room_id and aliases.
 
         Args:
             user_id (str)
-            alias (str)
+            room_id (str)
+            aliases (list[str]): The associated aliases to the room. Will be a
+                single element for testing alias creation, and can be empty for
+                testing room publishing.
 
         Returns:
             boolean
@@ -96,7 +199,22 @@ class _AliasRule(object):
         if not self._user_id_regex.match(user_id):
             return False
 
-        if not self._alias_regex.match(alias):
+        if not self._room_id_regex.match(room_id):
             return False
 
-        return True
+        # We only have alias checks left, so we can short circuit if the alias
+        # rule matches everything.
+        if self._alias_matches_all:
+            return True
+
+        # If we are not given any aliases then this rule only matches if the
+        # alias glob matches all aliases, which we checked above.
+        if not aliases:
+            return False
+
+        # Otherwise, we just need one alias to match
+        for alias in aliases:
+            if self._alias_regex.match(alias):
+                return True
+
+        return False
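
A standalone sketch (using fnmatch globs as a stand-in for synapse's
glob_to_regex) of how the new rule lists are evaluated: the first matching
rule wins, missing fields default to "*", and a room with no aliases only
matches rules whose alias glob is "*".

    from fnmatch import fnmatch

    rules = [
        {"user_id": "@admin:example.com", "action": "allow"},
        {"action": "deny"},  # catch-all: user_id/room_id/alias default to "*"
    ]

    def is_publishing_allowed(user_id, room_id, aliases):
        for rule in rules:
            if not fnmatch(user_id, rule.get("user_id", "*")):
                continue
            if not fnmatch(room_id, rule.get("room_id", "*")):
                continue
            alias_glob = rule.get("alias", "*")
            if alias_glob != "*" and not any(
                fnmatch(a, alias_glob) for a in aliases
            ):
                continue
            return rule["action"] == "allow"
        return False  # no rule matched: deny

    print(is_publishing_allowed("@admin:example.com", "!a:example.com", []))  # True
    print(is_publishing_allowed("@bob:example.com", "!a:example.com", []))    # False
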
diff --git a/synapse/config/server.py b/synapse/config/server.py
index c5c3aac8ed..93a30e4cfa 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -151,7 +151,11 @@ class ServerConfig(Config):
 
             # if we still have an empty list of addresses, use the default list
             if not bind_addresses:
-                bind_addresses.extend(DEFAULT_BIND_ADDRESSES)
+                if listener['type'] == 'metrics':
+                    # the metrics listener doesn't support IPv6
+                    bind_addresses.append('0.0.0.0')
+                else:
+                    bind_addresses.extend(DEFAULT_BIND_ADDRESSES)
 
             self.listeners.append(listener)
 
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index 3a96980bed..cce40fdd2d 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -35,7 +35,7 @@ from unpaddedbase64 import decode_base64
 
 from twisted.internet import defer
 
-from synapse.api.errors import Codes, SynapseError
+from synapse.api.errors import Codes, RequestSendFailed, SynapseError
 from synapse.util import logcontext, unwrapFirstError
 from synapse.util.logcontext import (
     LoggingContext,
@@ -656,7 +656,7 @@ def _handle_key_deferred(verify_request):
     try:
         with PreserveLoggingContext():
             _, key_id, verify_key = yield verify_request.deferred
-    except IOError as e:
+    except (IOError, RequestSendFailed) as e:
         logger.warn(
             "Got IOError when downloading keys for %s: %s %s",
             server_name, type(e).__name__, str(e),
diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py
index b04f4234ca..786149be65 100644
--- a/synapse/groups/attestations.py
+++ b/synapse/groups/attestations.py
@@ -42,7 +42,7 @@ from signedjson.sign import sign_json
 
 from twisted.internet import defer
 
-from synapse.api.errors import SynapseError
+from synapse.api.errors import RequestSendFailed, SynapseError
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.types import get_domain_from_id
 from synapse.util.logcontext import run_in_background
@@ -191,6 +191,11 @@ class GroupAttestionRenewer(object):
                 yield self.store.update_attestation_renewal(
                     group_id, user_id, attestation
                 )
+            except RequestSendFailed as e:
+                logger.warning(
+                    "Failed to renew attestation of %r in %r: %s",
+                    user_id, group_id, e,
+                )
             except Exception:
                 logger.exception("Error renewing attestation of %r in %r",
                                  user_id, group_id)
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index 704181d2d3..594754cfd8 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -167,4 +167,4 @@ class BaseHandler(object):
                     ratelimit=False,
                 )
             except Exception as e:
-                logger.warn("Error kicking guest user: %s" % (e,))
+                logger.exception("Error kicking guest user: %s" % (e,))
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index 8955cde4ed..c708c35d4d 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -20,7 +20,11 @@ from twisted.internet import defer
 
 from synapse.api import errors
 from synapse.api.constants import EventTypes
-from synapse.api.errors import FederationDeniedError
+from synapse.api.errors import (
+    FederationDeniedError,
+    HttpResponseException,
+    RequestSendFailed,
+)
 from synapse.types import RoomStreamToken, get_domain_from_id
 from synapse.util import stringutils
 from synapse.util.async_helpers import Linearizer
@@ -504,13 +508,13 @@ class DeviceListEduUpdater(object):
                 origin = get_domain_from_id(user_id)
                 try:
                     result = yield self.federation.query_user_devices(origin, user_id)
-                except NotRetryingDestination:
+                except (
+                    NotRetryingDestination, RequestSendFailed, HttpResponseException,
+                ):
                     # TODO: Remember that we are now out of sync and try again
                     # later
                     logger.warn(
-                        "Failed to handle device list update for %s,"
-                        " we're not retrying the remote",
-                        user_id,
+                        "Failed to handle device list update for %s", user_id,
                     )
                     # We abort on exceptions rather than accepting the update
                     # as otherwise synapse will 'forget' that its device list
diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py
index 6bb254f899..8b113307d2 100644
--- a/synapse/handlers/directory.py
+++ b/synapse/handlers/directory.py
@@ -112,7 +112,9 @@ class DirectoryHandler(BaseHandler):
                     403, "This user is not permitted to create this alias",
                 )
 
-            if not self.config.is_alias_creation_allowed(user_id, room_alias.to_string()):
+            if not self.config.is_alias_creation_allowed(
+                user_id, room_id, room_alias.to_string(),
+            ):
                 # Lets just return a generic message, as there may be all sorts of
                 # reasons why we said no. TODO: Allow configurable error messages
                 # per alias creation rule?
@@ -395,9 +397,9 @@ class DirectoryHandler(BaseHandler):
         room_id (str)
         visibility (str): "public" or "private"
         """
-        if not self.spam_checker.user_may_publish_room(
-            requester.user.to_string(), room_id
-        ):
+        user_id = requester.user.to_string()
+
+        if not self.spam_checker.user_may_publish_room(user_id, room_id):
             raise AuthError(
                 403,
                 "This user is not permitted to publish rooms to the room list"
@@ -415,7 +417,24 @@ class DirectoryHandler(BaseHandler):
 
         yield self.auth.check_can_change_room_list(room_id, requester.user)
 
-        yield self.store.set_room_is_public(room_id, visibility == "public")
+        making_public = visibility == "public"
+        if making_public:
+            room_aliases = yield self.store.get_aliases_for_room(room_id)
+            canonical_alias = yield self.store.get_canonical_alias_for_room(room_id)
+            if canonical_alias:
+                room_aliases.append(canonical_alias)
+
+            if not self.config.is_publishing_room_allowed(
+                user_id, room_id, room_aliases,
+            ):
+                # Lets just return a generic message, as there may be all sorts of
+                # reasons why we said no. TODO: Allow configurable error messages
+                # per alias creation rule?
+                raise SynapseError(
+                    403, "Not allowed to publish room",
+                )
+
+        yield self.store.set_room_is_public(room_id, making_public)
 
     @defer.inlineCallbacks
     def edit_published_appservice_room_list(self, appservice_id, network_id,
diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py
index 173315af6c..02c508acec 100644
--- a/synapse/handlers/groups_local.py
+++ b/synapse/handlers/groups_local.py
@@ -20,7 +20,7 @@ from six import iteritems
 
 from twisted.internet import defer
 
-from synapse.api.errors import HttpResponseException, SynapseError
+from synapse.api.errors import HttpResponseException, RequestSendFailed, SynapseError
 from synapse.types import get_domain_from_id
 
 logger = logging.getLogger(__name__)
@@ -46,13 +46,19 @@ def _create_rerouter(func_name):
             # when the remote end responds with things like 403 Not
             # In Group, we can communicate that to the client instead
             # of a 500.
-            def h(failure):
+            def http_response_errback(failure):
                 failure.trap(HttpResponseException)
                 e = failure.value
                 if e.code == 403:
                     raise e.to_synapse_error()
                 return failure
-            d.addErrback(h)
+
+            def request_failed_errback(failure):
+                failure.trap(RequestSendFailed)
+                raise SynapseError(502, "Failed to contact group server")
+
+            d.addErrback(http_response_errback)
+            d.addErrback(request_failed_errback)
             return d
     return f
 
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 21c17c59a0..f92ab4d525 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -27,6 +27,8 @@ from synapse.api.errors import (
     SynapseError,
 )
 from synapse.http.client import CaptchaServerHttpClient
+from synapse.replication.http.login import RegisterDeviceReplicationServlet
+from synapse.replication.http.register import ReplicationRegisterServlet
 from synapse.types import RoomAlias, RoomID, UserID, create_requester
 from synapse.util.async_helpers import Linearizer
 from synapse.util.threepids import check_3pid_allowed
@@ -61,6 +63,14 @@ class RegistrationHandler(BaseHandler):
         )
         self._server_notices_mxid = hs.config.server_notices_mxid
 
+        if hs.config.worker_app:
+            self._register_client = ReplicationRegisterServlet.make_client(hs)
+            self._register_device_client = (
+                RegisterDeviceReplicationServlet.make_client(hs)
+            )
+        else:
+            self.device_handler = hs.get_device_handler()
+
     @defer.inlineCallbacks
     def check_username(self, localpart, guest_access_token=None,
                        assigned_user_id=None):
@@ -155,7 +165,7 @@ class RegistrationHandler(BaseHandler):
         yield self.auth.check_auth_blocking(threepid=threepid)
         password_hash = None
         if password:
-            password_hash = yield self.auth_handler().hash(password)
+            password_hash = yield self._auth_handler.hash(password)
 
         if localpart:
             yield self.check_username(localpart, guest_access_token=guest_access_token)
@@ -185,7 +195,7 @@ class RegistrationHandler(BaseHandler):
             token = None
             if generate_token:
                 token = self.macaroon_gen.generate_access_token(user_id)
-            yield self.store.register(
+            yield self._register_with_store(
                 user_id=user_id,
                 token=token,
                 password_hash=password_hash,
@@ -217,7 +227,7 @@ class RegistrationHandler(BaseHandler):
                 if default_display_name is None:
                     default_display_name = localpart
                 try:
-                    yield self.store.register(
+                    yield self._register_with_store(
                         user_id=user_id,
                         token=token,
                         password_hash=password_hash,
@@ -316,7 +326,7 @@ class RegistrationHandler(BaseHandler):
             user_id, allowed_appservice=service
         )
 
-        yield self.store.register(
+        yield self._register_with_store(
             user_id=user_id,
             password_hash="",
             appservice_id=service_id,
@@ -494,7 +504,7 @@ class RegistrationHandler(BaseHandler):
         token = self.macaroon_gen.generate_access_token(user_id)
 
         if need_register:
-            yield self.store.register(
+            yield self._register_with_store(
                 user_id=user_id,
                 token=token,
                 password_hash=password_hash,
@@ -512,9 +522,6 @@ class RegistrationHandler(BaseHandler):
 
         defer.returnValue((user_id, token))
 
-    def auth_handler(self):
-        return self.hs.get_auth_handler()
-
     @defer.inlineCallbacks
     def get_or_register_3pid_guest(self, medium, address, inviter_user_id):
         """Get a guest access token for a 3PID, creating a guest account if
@@ -573,3 +580,94 @@ class RegistrationHandler(BaseHandler):
             action="join",
             ratelimit=False,
         )
+
+    def _register_with_store(self, user_id, token=None, password_hash=None,
+                             was_guest=False, make_guest=False, appservice_id=None,
+                             create_profile_with_displayname=None, admin=False,
+                             user_type=None):
+        """Register user in the datastore.
+
+        Args:
+            user_id (str): The desired user ID to register.
+            token (str): The desired access token to use for this user. If this
+                is not None, the given access token is associated with the user
+                id.
+            password_hash (str|None): Optional. The password hash for this user.
+            was_guest (bool): Optional. Whether this is a guest account being
+                upgraded to a non-guest account.
+            make_guest (boolean): True if the new user should be guest,
+                false to add a regular user account.
+            appservice_id (str|None): The ID of the appservice registering the user.
+            create_profile_with_displayname (unicode|None): Optionally create a
+                profile for the user, setting their displayname to the given value
+            admin (boolean): is an admin user?
+            user_type (str|None): type of user. One of the values from
+                api.constants.UserTypes, or None for a normal user.
+
+        Returns:
+            Deferred
+        """
+        if self.hs.config.worker_app:
+            return self._register_client(
+                user_id=user_id,
+                token=token,
+                password_hash=password_hash,
+                was_guest=was_guest,
+                make_guest=make_guest,
+                appservice_id=appservice_id,
+                create_profile_with_displayname=create_profile_with_displayname,
+                admin=admin,
+                user_type=user_type,
+            )
+        else:
+            return self.store.register(
+                user_id=user_id,
+                token=token,
+                password_hash=password_hash,
+                was_guest=was_guest,
+                make_guest=make_guest,
+                appservice_id=appservice_id,
+                create_profile_with_displayname=create_profile_with_displayname,
+                admin=admin,
+                user_type=user_type,
+            )
+
+    @defer.inlineCallbacks
+    def register_device(self, user_id, device_id, initial_display_name,
+                        is_guest=False):
+        """Register a device for a user and generate an access token.
+
+        Args:
+            user_id (str): full canonical @user:id
+            device_id (str|None): The device ID to check, or None to generate
+                a new one.
+            initial_display_name (str|None): An optional display name for the
+                device.
+            is_guest (bool): Whether this is a guest account
+
+        Returns:
+            defer.Deferred[tuple[str, str]]: Tuple of device ID and access token
+        """
+
+        if self.hs.config.worker_app:
+            r = yield self._register_device_client(
+                user_id=user_id,
+                device_id=device_id,
+                initial_display_name=initial_display_name,
+                is_guest=is_guest,
+            )
+            defer.returnValue((r["device_id"], r["access_token"]))
+        else:
+            device_id = yield self.device_handler.check_device_registered(
+                user_id, device_id, initial_display_name
+            )
+            if is_guest:
+                access_token = self.macaroon_gen.generate_access_token(
+                    user_id, ["guest = true"]
+                )
+            else:
+                access_token = yield self._auth_handler.get_access_token_for_user_id(
+                    user_id, device_id=device_id,
+                )
+
+            defer.returnValue((device_id, access_token))
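
A hedged sketch of how callers use the new register_device() helper (the
login servlet further down follows exactly this pattern); hs is assumed to be
an already-built HomeServer and login_submission a parsed request body.

    from twisted.internet import defer

    @defer.inlineCallbacks
    def complete_login(hs, canonical_user_id, login_submission):
        registration_handler = hs.get_handlers().registration_handler
        device_id, access_token = yield registration_handler.register_device(
            canonical_user_id,
            login_submission.get("device_id"),
            login_submission.get("initial_device_display_name"),
        )
        defer.returnValue({
            "user_id": canonical_user_id,
            "access_token": access_token,
            "device_id": device_id,
        })
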
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 5e40e9ea46..f9af1f0046 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -284,6 +284,7 @@ class RoomCreationHandler(BaseHandler):
             (EventTypes.GuestAccess, ""),
             (EventTypes.RoomAvatar, ""),
             (EventTypes.Encryption, ""),
+            (EventTypes.ServerACL, ""),
         )
 
         old_room_state_ids = yield self.store.get_filtered_current_state_ids(
diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py
index 120815b09b..283c6c1b81 100644
--- a/synapse/handlers/user_directory.py
+++ b/synapse/handlers/user_directory.py
@@ -130,7 +130,7 @@ class UserDirectoryHandler(object):
         # Support users are for diagnostics and should not appear in the user directory.
         if not is_support:
             yield self.store.update_profile_in_user_dir(
-                user_id, profile.display_name, profile.avatar_url, None,
+                user_id, profile.display_name, profile.avatar_url, None
             )
 
     @defer.inlineCallbacks
@@ -166,8 +166,9 @@ class UserDirectoryHandler(object):
                 self.pos = deltas[-1]["stream_id"]
 
                 # Expose current event processing position to prometheus
-                synapse.metrics.event_processing_positions.labels(
-                    "user_dir").set(self.pos)
+                synapse.metrics.event_processing_positions.labels("user_dir").set(
+                    self.pos
+                )
 
                 yield self.store.update_user_directory_stream_pos(self.pos)
 
@@ -191,21 +192,25 @@ class UserDirectoryHandler(object):
             logger.info("Handling room %d/%d", num_processed_rooms + 1, len(room_ids))
             yield self._handle_initial_room(room_id)
             num_processed_rooms += 1
-            yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.)
+            yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.0)
 
         logger.info("Processed all rooms.")
 
         if self.search_all_users:
             num_processed_users = 0
             user_ids = yield self.store.get_all_local_users()
-            logger.info("Doing initial update of user directory. %d users", len(user_ids))
+            logger.info(
+                "Doing initial update of user directory. %d users", len(user_ids)
+            )
             for user_id in user_ids:
                 # We add profiles for all users even if they don't match the
                 # include pattern, just in case we want to change it in future
-                logger.info("Handling user %d/%d", num_processed_users + 1, len(user_ids))
+                logger.info(
+                    "Handling user %d/%d", num_processed_users + 1, len(user_ids)
+                )
                 yield self._handle_local_user(user_id)
                 num_processed_users += 1
-                yield self.clock.sleep(self.INITIAL_USER_SLEEP_MS / 1000.)
+                yield self.clock.sleep(self.INITIAL_USER_SLEEP_MS / 1000.0)
 
             logger.info("Processed all users")
 
@@ -224,24 +229,24 @@ class UserDirectoryHandler(object):
         if not is_in_room:
             return
 
-        is_public = yield self.store.is_room_world_readable_or_publicly_joinable(room_id)
+        is_public = yield self.store.is_room_world_readable_or_publicly_joinable(
+            room_id
+        )
 
         users_with_profile = yield self.state.get_current_user_in_room(room_id)
         user_ids = set(users_with_profile)
         unhandled_users = user_ids - self.initially_handled_users
 
         yield self.store.add_profiles_to_user_dir(
-            room_id, {
-                user_id: users_with_profile[user_id] for user_id in unhandled_users
-            }
+            room_id,
+            {user_id: users_with_profile[user_id] for user_id in unhandled_users},
         )
 
         self.initially_handled_users |= unhandled_users
 
         if is_public:
             yield self.store.add_users_to_public_room(
-                room_id,
-                user_ids=user_ids - self.initially_handled_users_in_public
+                room_id, user_ids=user_ids - self.initially_handled_users_in_public
             )
             self.initially_handled_users_in_public |= user_ids
 
@@ -253,7 +258,7 @@ class UserDirectoryHandler(object):
         count = 0
         for user_id in user_ids:
             if count % self.INITIAL_ROOM_SLEEP_COUNT == 0:
-                yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.)
+                yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.0)
 
             if not self.is_mine_id(user_id):
                 count += 1
@@ -268,7 +273,7 @@ class UserDirectoryHandler(object):
                     continue
 
                 if count % self.INITIAL_ROOM_SLEEP_COUNT == 0:
-                    yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.)
+                    yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.0)
                 count += 1
 
                 user_set = (user_id, other_user_id)
@@ -290,25 +295,23 @@ class UserDirectoryHandler(object):
 
                 if len(to_insert) > self.INITIAL_ROOM_BATCH_SIZE:
                     yield self.store.add_users_who_share_room(
-                        room_id, not is_public, to_insert,
+                        room_id, not is_public, to_insert
                     )
                     to_insert.clear()
 
                 if len(to_update) > self.INITIAL_ROOM_BATCH_SIZE:
                     yield self.store.update_users_who_share_room(
-                        room_id, not is_public, to_update,
+                        room_id, not is_public, to_update
                     )
                     to_update.clear()
 
         if to_insert:
-            yield self.store.add_users_who_share_room(
-                room_id, not is_public, to_insert,
-            )
+            yield self.store.add_users_who_share_room(room_id, not is_public, to_insert)
             to_insert.clear()
 
         if to_update:
             yield self.store.update_users_who_share_room(
-                room_id, not is_public, to_update,
+                room_id, not is_public, to_update
             )
             to_update.clear()
 
@@ -329,11 +332,12 @@ class UserDirectoryHandler(object):
             # may have become public or not and add/remove the users in said room
             if typ in (EventTypes.RoomHistoryVisibility, EventTypes.JoinRules):
                 yield self._handle_room_publicity_change(
-                    room_id, prev_event_id, event_id, typ,
+                    room_id, prev_event_id, event_id, typ
                 )
             elif typ == EventTypes.Member:
                 change = yield self._get_key_change(
-                    prev_event_id, event_id,
+                    prev_event_id,
+                    event_id,
                     key_name="membership",
                     public_value=Membership.JOIN,
                 )
@@ -342,14 +346,16 @@ class UserDirectoryHandler(object):
                     # Need to check if the server left the room entirely, if so
                     # we might need to remove all the users in that room
                     is_in_room = yield self.store.is_host_joined(
-                        room_id, self.server_name,
+                        room_id, self.server_name
                     )
                     if not is_in_room:
                         logger.info("Server left room: %r", room_id)
                         # Fetch all the users that we marked as being in user
                         # directory due to being in the room and then check if
                         # need to remove those users or not
-                        user_ids = yield self.store.get_users_in_dir_due_to_room(room_id)
+                        user_ids = yield self.store.get_users_in_dir_due_to_room(
+                            room_id
+                        )
                         for user_id in user_ids:
                             yield self._handle_remove_user(room_id, user_id)
                         return
@@ -361,7 +367,7 @@ class UserDirectoryHandler(object):
                     if change is None:
                         # Handle any profile changes
                         yield self._handle_profile_change(
-                            state_key, room_id, prev_event_id, event_id,
+                            state_key, room_id, prev_event_id, event_id
                         )
                         continue
 
@@ -393,13 +399,15 @@ class UserDirectoryHandler(object):
 
         if typ == EventTypes.RoomHistoryVisibility:
             change = yield self._get_key_change(
-                prev_event_id, event_id,
+                prev_event_id,
+                event_id,
                 key_name="history_visibility",
                 public_value="world_readable",
             )
         elif typ == EventTypes.JoinRules:
             change = yield self._get_key_change(
-                prev_event_id, event_id,
+                prev_event_id,
+                event_id,
                 key_name="join_rule",
                 public_value=JoinRules.PUBLIC,
             )
@@ -524,7 +532,7 @@ class UserDirectoryHandler(object):
             )
             if self.is_mine_id(other_user_id) and not is_appservice:
                 shared_is_private = yield self.store.get_if_users_share_a_room(
-                    other_user_id, user_id,
+                    other_user_id, user_id
                 )
                 if shared_is_private is True:
                     # We've already marked in the database they share a private room
@@ -539,13 +547,11 @@ class UserDirectoryHandler(object):
                     to_insert.add((other_user_id, user_id))
 
         if to_insert:
-            yield self.store.add_users_who_share_room(
-                room_id, not is_public, to_insert,
-            )
+            yield self.store.add_users_who_share_room(room_id, not is_public, to_insert)
 
         if to_update:
             yield self.store.update_users_who_share_room(
-                room_id, not is_public, to_update,
+                room_id, not is_public, to_update
             )
 
     @defer.inlineCallbacks
@@ -564,15 +570,15 @@ class UserDirectoryHandler(object):
         row = yield self.store.get_user_in_public_room(user_id)
         update_user_in_public = row and row["room_id"] == room_id
 
-        if (update_user_in_public or update_user_dir):
+        if update_user_in_public or update_user_dir:
             # XXX: Make this faster?
             rooms = yield self.store.get_rooms_for_user(user_id)
             for j_room_id in rooms:
-                if (not update_user_in_public and not update_user_dir):
+                if not update_user_in_public and not update_user_dir:
                     break
 
                 is_in_room = yield self.store.is_host_joined(
-                    j_room_id, self.server_name,
+                    j_room_id, self.server_name
                 )
 
                 if not is_in_room:
@@ -600,19 +606,19 @@ class UserDirectoryHandler(object):
         # Get a list of user tuples that were in the DB due to this room and
         # users (this includes tuples where the other user matches `user_id`)
         user_tuples = yield self.store.get_users_in_share_dir_with_room_id(
-            user_id, room_id,
+            user_id, room_id
         )
 
         for user_id, other_user_id in user_tuples:
             # For each user tuple get a list of rooms that they still share,
             # trying to find a private room, and update the entry in the DB
-            rooms = yield self.store.get_rooms_in_common_for_users(user_id, other_user_id)
+            rooms = yield self.store.get_rooms_in_common_for_users(
+                user_id, other_user_id
+            )
 
             # If they dont share a room anymore, remove the mapping
             if not rooms:
-                yield self.store.remove_user_who_share_room(
-                    user_id, other_user_id,
-                )
+                yield self.store.remove_user_who_share_room(user_id, other_user_id)
                 continue
 
             found_public_share = None
@@ -626,13 +632,13 @@ class UserDirectoryHandler(object):
                 else:
                     found_public_share = None
                     yield self.store.update_users_who_share_room(
-                        room_id, not is_public, [(user_id, other_user_id)],
+                        room_id, not is_public, [(user_id, other_user_id)]
                     )
                     break
 
             if found_public_share:
                 yield self.store.update_users_who_share_room(
-                    room_id, not is_public, [(user_id, other_user_id)],
+                    room_id, not is_public, [(user_id, other_user_id)]
                 )
 
     @defer.inlineCallbacks
@@ -660,7 +666,7 @@ class UserDirectoryHandler(object):
 
         if prev_name != new_name or prev_avatar != new_avatar:
             yield self.store.update_profile_in_user_dir(
-                user_id, new_name, new_avatar, room_id,
+                user_id, new_name, new_avatar, room_id
             )
 
     @defer.inlineCallbacks
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 6a427d96a6..6c67a25a11 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -106,10 +106,10 @@ def wrap_json_request_handler(h):
             # trace.
             f = failure.Failure()
             logger.error(
-                "Failed handle request via %r: %r: %s",
-                h,
+                "Failed handle request via %r: %r",
+                request.request_metrics.name,
                 request,
-                f.getTraceback().rstrip(),
+                exc_info=(f.type, f.value, f.getTracebackObject()),
             )
             # Only respond with an error response if we haven't already started
             # writing, otherwise lets just kill the connection
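
The logging change above hands the traceback to the logging framework via
exc_info instead of embedding a pre-rendered string in the message. A plain
Python equivalent of the pattern (sys.exc_info() standing in for the twisted
Failure's (type, value, traceback) triple; the argument values are
hypothetical):

    import logging
    import sys

    logging.basicConfig(level=logging.ERROR)
    logger = logging.getLogger(__name__)

    try:
        1 / 0
    except ZeroDivisionError:
        logger.error(
            "Failed to handle request via %r: %r",
            "RequestMetricsName",  # hypothetical metrics name
            "<request>",           # hypothetical request object
            exc_info=sys.exc_info(),
        )
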
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index 444fc2a979..f71e21ff4d 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -144,9 +144,12 @@ def check_requirements(for_feature=None, _get_distribution=get_distribution):
         for dependency in OPTS:
             try:
                 _get_distribution(dependency)
-            except VersionConflict:
+            except VersionConflict as e:
                 deps_needed.append(dependency)
-                errors.append("Needed %s but it was not installed" % (dependency,))
+                errors.append(
+                    "Needed optional %s, got %s==%s"
+                    % (dependency, e.dist.project_name, e.dist.version)
+                )
             except DistributionNotFound:
                 # If it's not found, we don't care
                 pass
diff --git a/synapse/replication/http/__init__.py b/synapse/replication/http/__init__.py
index 19f214281e..81b85352b1 100644
--- a/synapse/replication/http/__init__.py
+++ b/synapse/replication/http/__init__.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 
 from synapse.http.server import JsonResource
-from synapse.replication.http import federation, membership, send_event
+from synapse.replication.http import federation, login, membership, register, send_event
 
 REPLICATION_PREFIX = "/_synapse/replication"
 
@@ -28,3 +28,5 @@ class ReplicationRestResource(JsonResource):
         send_event.register_servlets(hs, self)
         membership.register_servlets(hs, self)
         federation.register_servlets(hs, self)
+        login.register_servlets(hs, self)
+        register.register_servlets(hs, self)
diff --git a/synapse/replication/http/login.py b/synapse/replication/http/login.py
new file mode 100644
index 0000000000..1590eca317
--- /dev/null
+++ b/synapse/replication/http/login.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from twisted.internet import defer
+
+from synapse.http.servlet import parse_json_object_from_request
+from synapse.replication.http._base import ReplicationEndpoint
+
+logger = logging.getLogger(__name__)
+
+
+class RegisterDeviceReplicationServlet(ReplicationEndpoint):
+    """Ensure a device is registered, generating a new access token for the
+    device.
+
+    Used during registration and login.
+    """
+
+    NAME = "device_check_registered"
+    PATH_ARGS = ("user_id",)
+
+    def __init__(self, hs):
+        super(RegisterDeviceReplicationServlet, self).__init__(hs)
+        self.registration_handler = hs.get_handlers().registration_handler
+
+    @staticmethod
+    def _serialize_payload(user_id, device_id, initial_display_name, is_guest):
+        """
+        Args:
+            device_id (str|None): Device ID to use, if None a new one is
+                generated.
+            initial_display_name (str|None)
+            is_guest (bool)
+        """
+        return {
+            "device_id": device_id,
+            "initial_display_name": initial_display_name,
+            "is_guest": is_guest,
+        }
+
+    @defer.inlineCallbacks
+    def _handle_request(self, request, user_id):
+        content = parse_json_object_from_request(request)
+
+        device_id = content["device_id"]
+        initial_display_name = content["initial_display_name"]
+        is_guest = content["is_guest"]
+
+        device_id, access_token = yield self.registration_handler.register_device(
+            user_id, device_id, initial_display_name, is_guest,
+        )
+
+        defer.returnValue((200, {
+            "device_id": device_id,
+            "access_token": access_token,
+        }))
+
+
+def register_servlets(hs, http_server):
+    RegisterDeviceReplicationServlet(hs).register(http_server)
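
A hedged sketch of how a worker invokes this endpoint: make_client() (defined
on ReplicationEndpoint) returns a callable that serialises the payload and
POSTs it to the main process, exactly as the registration handler above does.
hs is assumed to be a worker's HomeServer; the user and display name are
illustrative.

    from twisted.internet import defer

    from synapse.replication.http.login import RegisterDeviceReplicationServlet

    @defer.inlineCallbacks
    def register_device_via_replication(hs, user_id):
        client = RegisterDeviceReplicationServlet.make_client(hs)
        result = yield client(
            user_id=user_id,
            device_id=None,  # None asks the main process to generate one
            initial_display_name="Alice's phone",
            is_guest=False,
        )
        defer.returnValue((result["device_id"], result["access_token"]))
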
diff --git a/synapse/replication/http/register.py b/synapse/replication/http/register.py
new file mode 100644
index 0000000000..bdaf37396c
--- /dev/null
+++ b/synapse/replication/http/register.py
@@ -0,0 +1,91 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from twisted.internet import defer
+
+from synapse.http.servlet import parse_json_object_from_request
+from synapse.replication.http._base import ReplicationEndpoint
+
+logger = logging.getLogger(__name__)
+
+
+class ReplicationRegisterServlet(ReplicationEndpoint):
+    """Register a new user
+    """
+
+    NAME = "register_user"
+    PATH_ARGS = ("user_id",)
+
+    def __init__(self, hs):
+        super(ReplicationRegisterServlet, self).__init__(hs)
+        self.store = hs.get_datastore()
+
+    @staticmethod
+    def _serialize_payload(
+        user_id, token, password_hash, was_guest, make_guest, appservice_id,
+        create_profile_with_displayname, admin, user_type,
+    ):
+        """
+        Args:
+            user_id (str): The desired user ID to register.
+            token (str): The desired access token to use for this user. If this
+                is not None, the given access token is associated with the user
+                id.
+            password_hash (str|None): Optional. The password hash for this user.
+            was_guest (bool): Optional. Whether this is a guest account being
+                upgraded to a non-guest account.
+            make_guest (boolean): True if the new user should be guest,
+                false to add a regular user account.
+            appservice_id (str|None): The ID of the appservice registering the user.
+            create_profile_with_displayname (unicode|None): Optionally create a
+                profile for the user, setting their displayname to the given value
+            admin (boolean): is an admin user?
+            user_type (str|None): type of user. One of the values from
+                api.constants.UserTypes, or None for a normal user.
+        """
+        return {
+            "token": token,
+            "password_hash": password_hash,
+            "was_guest": was_guest,
+            "make_guest": make_guest,
+            "appservice_id": appservice_id,
+            "create_profile_with_displayname": create_profile_with_displayname,
+            "admin": admin,
+            "user_type": user_type,
+        }
+
+    @defer.inlineCallbacks
+    def _handle_request(self, request, user_id):
+        content = parse_json_object_from_request(request)
+
+        yield self.store.register(
+            user_id=user_id,
+            token=content["token"],
+            password_hash=content["password_hash"],
+            was_guest=content["was_guest"],
+            make_guest=content["make_guest"],
+            appservice_id=content["appservice_id"],
+            create_profile_with_displayname=content["create_profile_with_displayname"],
+            admin=content["admin"],
+            user_type=content["user_type"],
+        )
+
+        defer.returnValue((200, {}))
+
+
+def register_servlets(hs, http_server):
+    ReplicationRegisterServlet(hs).register(http_server)
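
As a quick illustration of what actually travels over the replication request: the JSON body is exactly the dict _serialize_payload returns, while user_id is carried in the URL path (it is a PATH_ARG above). The values below are placeholders.

    # Placeholder values; note that user_id is deliberately absent from the body.
    payload = ReplicationRegisterServlet._serialize_payload(
        user_id="@alice:example.com",
        token="hypothetical_access_token",
        password_hash=None,
        was_guest=False,
        make_guest=False,
        appservice_id=None,
        create_profile_with_displayname="Alice",
        admin=False,
        user_type=None,
    )
    assert "user_id" not in payload
    assert payload["create_profile_with_displayname"] == "Alice"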
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index 942e4d3816..4a5775083f 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -94,7 +94,7 @@ class LoginRestServlet(ClientV1RestServlet):
         self.jwt_algorithm = hs.config.jwt_algorithm
         self.cas_enabled = hs.config.cas_enabled
         self.auth_handler = self.hs.get_auth_handler()
-        self.device_handler = self.hs.get_device_handler()
+        self.registration_handler = hs.get_handlers().registration_handler
         self.handlers = hs.get_handlers()
         self._well_known_builder = WellKnownBuilder(hs)
 
@@ -220,11 +220,10 @@ class LoginRestServlet(ClientV1RestServlet):
             login_submission,
         )
 
-        device_id = yield self._register_device(
-            canonical_user_id, login_submission,
-        )
-        access_token = yield auth_handler.get_access_token_for_user_id(
-            canonical_user_id, device_id,
+        device_id = login_submission.get("device_id")
+        initial_display_name = login_submission.get("initial_device_display_name")
+        device_id, access_token = yield self.registration_handler.register_device(
+            canonical_user_id, device_id, initial_display_name,
         )
 
         result = {
@@ -246,10 +245,13 @@ class LoginRestServlet(ClientV1RestServlet):
         user_id = (
             yield auth_handler.validate_short_term_login_token_and_get_user_id(token)
         )
-        device_id = yield self._register_device(user_id, login_submission)
-        access_token = yield auth_handler.get_access_token_for_user_id(
-            user_id, device_id,
+
+        device_id = login_submission.get("device_id")
+        initial_display_name = login_submission.get("initial_device_display_name")
+        device_id, access_token = yield self.registration_handler.register_device(
+            user_id, device_id, initial_display_name,
         )
+
         result = {
             "user_id": user_id,  # may have changed
             "access_token": access_token,
@@ -286,11 +288,10 @@ class LoginRestServlet(ClientV1RestServlet):
         auth_handler = self.auth_handler
         registered_user_id = yield auth_handler.check_user_exists(user_id)
         if registered_user_id:
-            device_id = yield self._register_device(
-                registered_user_id, login_submission
-            )
-            access_token = yield auth_handler.get_access_token_for_user_id(
-                registered_user_id, device_id,
+            device_id = login_submission.get("device_id")
+            initial_display_name = login_submission.get("initial_device_display_name")
+            device_id, access_token = yield self.registration_handler.register_device(
+                registered_user_id, device_id, initial_display_name,
             )
 
             result = {
@@ -299,12 +300,16 @@ class LoginRestServlet(ClientV1RestServlet):
                 "home_server": self.hs.hostname,
             }
         else:
-            # TODO: we should probably check that the register isn't going
-            # to fonx/change our user_id before registering the device
-            device_id = yield self._register_device(user_id, login_submission)
             user_id, access_token = (
                 yield self.handlers.registration_handler.register(localpart=user)
             )
+
+            device_id = login_submission.get("device_id")
+            initial_display_name = login_submission.get("initial_device_display_name")
+            device_id, access_token = yield self.registration_handler.register_device(
+                user_id, device_id, initial_display_name,
+            )
+
             result = {
                 "user_id": user_id,  # may have changed
                 "access_token": access_token,
@@ -313,26 +318,6 @@ class LoginRestServlet(ClientV1RestServlet):
 
         defer.returnValue(result)
 
-    def _register_device(self, user_id, login_submission):
-        """Register a device for a user.
-
-        This is called after the user's credentials have been validated, but
-        before the access token has been issued.
-
-        Args:
-            (str) user_id: full canonical @user:id
-            (object) login_submission: dictionary supplied to /login call, from
-               which we pull device_id and initial_device_name
-        Returns:
-            defer.Deferred: (str) device_id
-        """
-        device_id = login_submission.get("device_id")
-        initial_display_name = login_submission.get(
-            "initial_device_display_name")
-        return self.device_handler.check_device_registered(
-            user_id, device_id, initial_display_name
-        )
-
 
 class CasRedirectServlet(RestServlet):
     PATTERNS = client_path_patterns("/login/(cas|sso)/redirect")
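
The upshot of the login changes above is that each login path now makes a single register_device call returning both the device ID and the access token, where previously the device was registered and the token minted in two separate steps. The registration handler itself is not shown in this excerpt; the following is only a rough sketch, under the assumption that it wraps the old check_device_registered / get_access_token_for_user_id pair (guest tokens were previously minted separately via the macaroon generator, as the removed hunk further down shows).

    # Rough sketch only; an assumption about the handler's shape, not code
    # taken from this commit.
    from twisted.internet import defer

    @defer.inlineCallbacks
    def register_device(device_handler, auth_handler, user_id, device_id,
                        initial_display_name):
        # Ensure the device exists (creating an ID if none was supplied)...
        device_id = yield device_handler.check_device_registered(
            user_id, device_id, initial_display_name,
        )
        # ...then issue an access token bound to that device.
        access_token = yield auth_handler.get_access_token_for_user_id(
            user_id, device_id=device_id,
        )
        defer.returnValue((device_id, access_token))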
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py
index 7f812b8209..c1cdb8f9c8 100644
--- a/synapse/rest/client/v2_alpha/register.py
+++ b/synapse/rest/client/v2_alpha/register.py
@@ -190,7 +190,6 @@ class RegisterRestServlet(RestServlet):
         self.registration_handler = hs.get_handlers().registration_handler
         self.identity_handler = hs.get_handlers().identity_handler
         self.room_member_handler = hs.get_room_member_handler()
-        self.device_handler = hs.get_device_handler()
         self.macaroon_gen = hs.get_macaroon_generator()
 
     @interactive_auth_handler
@@ -633,12 +632,10 @@ class RegisterRestServlet(RestServlet):
             "home_server": self.hs.hostname,
         }
         if not params.get("inhibit_login", False):
-            device_id = yield self._register_device(user_id, params)
-
-            access_token = (
-                yield self.auth_handler.get_access_token_for_user_id(
-                    user_id, device_id=device_id,
-                )
+            device_id = params.get("device_id")
+            initial_display_name = params.get("initial_device_display_name")
+            device_id, access_token = yield self.registration_handler.register_device(
+                user_id, device_id, initial_display_name, is_guest=False,
             )
 
             result.update({
@@ -647,26 +644,6 @@ class RegisterRestServlet(RestServlet):
             })
         defer.returnValue(result)
 
-    def _register_device(self, user_id, params):
-        """Register a device for a user.
-
-        This is called after the user's credentials have been validated, but
-        before the access token has been issued.
-
-        Args:
-            (str) user_id: full canonical @user:id
-            (object) params: registration parameters, from which we pull
-                device_id and initial_device_name
-        Returns:
-            defer.Deferred: (str) device_id
-        """
-        # register the user's device
-        device_id = params.get("device_id")
-        initial_display_name = params.get("initial_device_display_name")
-        return self.device_handler.check_device_registered(
-            user_id, device_id, initial_display_name
-        )
-
     @defer.inlineCallbacks
     def _do_guest_registration(self, params):
         if not self.hs.config.allow_guest_access:
@@ -680,13 +657,10 @@ class RegisterRestServlet(RestServlet):
         # we have nowhere to store it.
         device_id = synapse.api.auth.GUEST_DEVICE_ID
         initial_display_name = params.get("initial_device_display_name")
-        yield self.device_handler.check_device_registered(
-            user_id, device_id, initial_display_name
+        device_id, access_token = yield self.registration_handler.register_device(
+            user_id, device_id, initial_display_name, is_guest=True,
         )
 
-        access_token = self.macaroon_gen.generate_access_token(
-            user_id, ["guest = true"]
-        )
         defer.returnValue((200, {
             "user_id": user_id,
             "device_id": device_id,
diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py
index 6e043d6162..c0a4ae93e5 100644
--- a/synapse/rest/well_known.py
+++ b/synapse/rest/well_known.py
@@ -18,6 +18,8 @@ import logging
 
 from twisted.web.resource import Resource
 
+from synapse.http.server import set_cors_headers
+
 logger = logging.getLogger(__name__)
 
 
@@ -59,6 +61,7 @@ class WellKnownResource(Resource):
         self._well_known_builder = WellKnownBuilder(hs)
 
     def render_GET(self, request):
+        set_cors_headers(request)
         r = self._well_known_builder.get_well_known()
         if not r:
             request.setResponseCode(404)
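
The practical effect of set_cors_headers here is that browser-based clients served from other origins can read the discovery document. A quick way to check against a running homeserver (the hostname is a placeholder, and the exact header value is assumed to be the usual wildcard):

    # Illustrative check only; matrix.example.com is a placeholder.
    import requests

    resp = requests.get("https://matrix.example.com/.well-known/matrix/client")
    print(resp.status_code)                                 # 200, or 404 if unconfigured
    print(resp.headers.get("Access-Control-Allow-Origin"))  # typically "*"
    if resp.ok:
        print(resp.json().get("m.homeserver"))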
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index c9e11c3135..3bc5def48e 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -139,6 +139,121 @@ class RegistrationWorkerStore(SQLBaseStore):
         )
         return True if res == UserTypes.SUPPORT else False
 
+    def get_users_by_id_case_insensitive(self, user_id):
+        """Gets users that match user_id case insensitively.
+        Returns a mapping of user_id -> password_hash.
+        """
+        def f(txn):
+            sql = (
+                "SELECT name, password_hash FROM users"
+                " WHERE lower(name) = lower(?)"
+            )
+            txn.execute(sql, (user_id,))
+            return dict(txn)
+
+        return self.runInteraction("get_users_by_id_case_insensitive", f)
+
+    @defer.inlineCallbacks
+    def count_all_users(self):
+        """Counts all users registered on the homeserver."""
+        def _count_users(txn):
+            txn.execute("SELECT COUNT(*) AS users FROM users")
+            rows = self.cursor_to_dict(txn)
+            if rows:
+                return rows[0]["users"]
+            return 0
+
+        ret = yield self.runInteraction("count_users", _count_users)
+        defer.returnValue(ret)
+
+    def count_daily_user_type(self):
+        """
+        Counts 1) native non-guest users
+               2) native guest users
+               3) bridged users
+        who registered on the homeserver in the past 24 hours
+        """
+        def _count_daily_user_type(txn):
+            yesterday = int(self._clock.time()) - (60 * 60 * 24)
+
+            sql = """
+                SELECT user_type, COALESCE(count(*), 0) AS count FROM (
+                    SELECT
+                    CASE
+                        WHEN is_guest=0 AND appservice_id IS NULL THEN 'native'
+                        WHEN is_guest=1 AND appservice_id IS NULL THEN 'guest'
+                        WHEN is_guest=0 AND appservice_id IS NOT NULL THEN 'bridged'
+                    END AS user_type
+                    FROM users
+                    WHERE creation_ts > ?
+                ) AS t GROUP BY user_type
+            """
+            results = {'native': 0, 'guest': 0, 'bridged': 0}
+            txn.execute(sql, (yesterday,))
+            for row in txn:
+                results[row[0]] = row[1]
+            return results
+        return self.runInteraction("count_daily_user_type", _count_daily_user_type)
+
+    @defer.inlineCallbacks
+    def count_nonbridged_users(self):
+        def _count_users(txn):
+            txn.execute("""
+                SELECT COALESCE(COUNT(*), 0) FROM users
+                WHERE appservice_id IS NULL
+            """)
+            count, = txn.fetchone()
+            return count
+
+        ret = yield self.runInteraction("count_users", _count_users)
+        defer.returnValue(ret)
+
+    @defer.inlineCallbacks
+    def find_next_generated_user_id_localpart(self):
+        """
+        Gets the localpart of the next generated user ID.
+
+        Generated user IDs are integers, and we aim for them to be as small as
+        we can. Unfortunately, it's possible some of them are already taken by
+        existing users, and there may be gaps in the already taken range. This
+        function returns the start of the first allocatable gap. This avoids,
+        for example, a pre-allocated ID such as 10000000 forcing us to skip
+        the first (and shortest) generated user IDs.
+        """
+        def _find_next_generated_user_id(txn):
+            txn.execute("SELECT name FROM users")
+
+            regex = re.compile(r"^@(\d+):")
+
+            found = set()
+
+            for user_id, in txn:
+                match = regex.search(user_id)
+                if match:
+                    found.add(int(match.group(1)))
+            for i in range(len(found) + 1):
+                if i not in found:
+                    return i
+
+        defer.returnValue((yield self.runInteraction(
+            "find_next_generated_user_id",
+            _find_next_generated_user_id
+        )))
+
+    @defer.inlineCallbacks
+    def get_3pid_guest_access_token(self, medium, address):
+        ret = yield self._simple_select_one(
+            "threepid_guest_access_tokens",
+            {
+                "medium": medium,
+                "address": address
+            },
+            ["guest_access_token"], True, 'get_3pid_guest_access_token'
+        )
+        if ret:
+            defer.returnValue(ret["guest_access_token"])
+        defer.returnValue(None)
+
 
 class RegistrationStore(RegistrationWorkerStore,
                         background_updates.BackgroundUpdateStore):
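
To make find_next_generated_user_id_localpart's gap-finding concrete: it effectively picks the smallest non-negative integer not already in use as a numeric localpart. A tiny standalone illustration of the same logic (not Synapse code):

    # Same first-gap logic as the loop above, in isolation.
    def first_free_localpart(taken):
        taken = set(taken)
        return next(i for i in range(len(taken) + 1) if i not in taken)

    assert first_free_localpart([0, 1, 3, 10000000]) == 2
    assert first_free_localpart([]) == 0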
@@ -326,20 +441,6 @@ class RegistrationStore(RegistrationWorkerStore,
         )
         txn.call_after(self.is_guest.invalidate, (user_id,))
 
-    def get_users_by_id_case_insensitive(self, user_id):
-        """Gets users that match user_id case insensitively.
-        Returns a mapping of user_id -> password_hash.
-        """
-        def f(txn):
-            sql = (
-                "SELECT name, password_hash FROM users"
-                " WHERE lower(name) = lower(?)"
-            )
-            txn.execute(sql, (user_id,))
-            return dict(txn)
-
-        return self.runInteraction("get_users_by_id_case_insensitive", f)
-
     def user_set_password_hash(self, user_id, password_hash):
         """
         NB. This does *not* evict any cache because the one use for this
@@ -565,107 +666,6 @@ class RegistrationStore(RegistrationWorkerStore,
         )
 
     @defer.inlineCallbacks
-    def count_all_users(self):
-        """Counts all users registered on the homeserver."""
-        def _count_users(txn):
-            txn.execute("SELECT COUNT(*) AS users FROM users")
-            rows = self.cursor_to_dict(txn)
-            if rows:
-                return rows[0]["users"]
-            return 0
-
-        ret = yield self.runInteraction("count_users", _count_users)
-        defer.returnValue(ret)
-
-    def count_daily_user_type(self):
-        """
-        Counts 1) native non guest users
-               2) native guests users
-               3) bridged users
-        who registered on the homeserver in the past 24 hours
-        """
-        def _count_daily_user_type(txn):
-            yesterday = int(self._clock.time()) - (60 * 60 * 24)
-
-            sql = """
-                SELECT user_type, COALESCE(count(*), 0) AS count FROM (
-                    SELECT
-                    CASE
-                        WHEN is_guest=0 AND appservice_id IS NULL THEN 'native'
-                        WHEN is_guest=1 AND appservice_id IS NULL THEN 'guest'
-                        WHEN is_guest=0 AND appservice_id IS NOT NULL THEN 'bridged'
-                    END AS user_type
-                    FROM users
-                    WHERE creation_ts > ?
-                ) AS t GROUP BY user_type
-            """
-            results = {'native': 0, 'guest': 0, 'bridged': 0}
-            txn.execute(sql, (yesterday,))
-            for row in txn:
-                results[row[0]] = row[1]
-            return results
-        return self.runInteraction("count_daily_user_type", _count_daily_user_type)
-
-    @defer.inlineCallbacks
-    def count_nonbridged_users(self):
-        def _count_users(txn):
-            txn.execute("""
-                SELECT COALESCE(COUNT(*), 0) FROM users
-                WHERE appservice_id IS NULL
-            """)
-            count, = txn.fetchone()
-            return count
-
-        ret = yield self.runInteraction("count_users", _count_users)
-        defer.returnValue(ret)
-
-    @defer.inlineCallbacks
-    def find_next_generated_user_id_localpart(self):
-        """
-        Gets the localpart of the next generated user ID.
-
-        Generated user IDs are integers, and we aim for them to be as small as
-        we can. Unfortunately, it's possible some of them are already taken by
-        existing users, and there may be gaps in the already taken range. This
-        function returns the start of the first allocatable gap. This is to
-        avoid the case of ID 10000000 being pre-allocated, so us wasting the
-        first (and shortest) many generated user IDs.
-        """
-        def _find_next_generated_user_id(txn):
-            txn.execute("SELECT name FROM users")
-
-            regex = re.compile(r"^@(\d+):")
-
-            found = set()
-
-            for user_id, in txn:
-                match = regex.search(user_id)
-                if match:
-                    found.add(int(match.group(1)))
-            for i in range(len(found) + 1):
-                if i not in found:
-                    return i
-
-        defer.returnValue((yield self.runInteraction(
-            "find_next_generated_user_id",
-            _find_next_generated_user_id
-        )))
-
-    @defer.inlineCallbacks
-    def get_3pid_guest_access_token(self, medium, address):
-        ret = yield self._simple_select_one(
-            "threepid_guest_access_tokens",
-            {
-                "medium": medium,
-                "address": address
-            },
-            ["guest_access_token"], True, 'get_3pid_guest_access_token'
-        )
-        if ret:
-            defer.returnValue(ret["guest_access_token"])
-        defer.returnValue(None)
-
-    @defer.inlineCallbacks
     def save_or_get_3pid_guest_access_token(
             self, medium, address, access_token, inviter_user_id
     ):
diff --git a/synapse/storage/state.py b/synapse/storage/state.py
index d14a7b2538..6ddc4055d2 100644
--- a/synapse/storage/state.py
+++ b/synapse/storage/state.py
@@ -548,6 +548,31 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
             _get_filtered_current_state_ids_txn,
         )
 
+    @defer.inlineCallbacks
+    def get_canonical_alias_for_room(self, room_id):
+        """Get canonical alias for room, if any
+
+        Args:
+            room_id (str)
+
+        Returns:
+            Deferred[str|None]: The canonical alias, if any
+        """
+
+        state = yield self.get_filtered_current_state_ids(room_id, StateFilter.from_types(
+            [(EventTypes.CanonicalAlias, "")]
+        ))
+
+        event_id = state.get((EventTypes.CanonicalAlias, ""))
+        if not event_id:
+            return
+
+        event = yield self.get_event(event_id, allow_none=True)
+        if not event:
+            return
+
+        defer.returnValue(event.content.get("canonical_alias"))
+
     @cached(max_entries=10000, iterable=True)
     def get_state_group_delta(self, state_group):
         """Given a state group try to return a previous group and a delta between
diff --git a/synapse/storage/user_directory.py b/synapse/storage/user_directory.py
index e8b574ee5e..fea866c043 100644
--- a/synapse/storage/user_directory.py
+++ b/synapse/storage/user_directory.py
@@ -44,7 +44,7 @@ class UserDirectoryStore(SQLBaseStore):
         )
 
         current_state_ids = yield self.get_filtered_current_state_ids(
-            room_id, StateFilter.from_types(types_to_filter),
+            room_id, StateFilter.from_types(types_to_filter)
         )
 
         join_rules_id = current_state_ids.get((EventTypes.JoinRules, ""))
@@ -74,14 +74,8 @@ class UserDirectoryStore(SQLBaseStore):
         """
         yield self._simple_insert_many(
             table="users_in_public_rooms",
-            values=[
-                {
-                    "user_id": user_id,
-                    "room_id": room_id,
-                }
-                for user_id in user_ids
-            ],
-            desc="add_users_to_public_room"
+            values=[{"user_id": user_id, "room_id": room_id} for user_id in user_ids],
+            desc="add_users_to_public_room",
         )
         for user_id in user_ids:
             self.get_user_in_public_room.invalidate((user_id,))
@@ -107,7 +101,9 @@ class UserDirectoryStore(SQLBaseStore):
             """
             args = (
                 (
-                    user_id, get_localpart_from_id(user_id), get_domain_from_id(user_id),
+                    user_id,
+                    get_localpart_from_id(user_id),
+                    get_domain_from_id(user_id),
                     profile.display_name,
                 )
                 for user_id, profile in iteritems(users_with_profile)
@@ -120,7 +116,7 @@ class UserDirectoryStore(SQLBaseStore):
             args = (
                 (
                     user_id,
-                    "%s %s" % (user_id, p.display_name,) if p.display_name else user_id
+                    "%s %s" % (user_id, p.display_name) if p.display_name else user_id,
                 )
                 for user_id, p in iteritems(users_with_profile)
             )
@@ -141,12 +137,10 @@ class UserDirectoryStore(SQLBaseStore):
                         "avatar_url": profile.avatar_url,
                     }
                     for user_id, profile in iteritems(users_with_profile)
-                ]
+                ],
             )
             for user_id in users_with_profile:
-                txn.call_after(
-                    self.get_user_in_directory.invalidate, (user_id,)
-                )
+                txn.call_after(self.get_user_in_directory.invalidate, (user_id,))
 
         return self.runInteraction(
             "add_profiles_to_user_dir", _add_profiles_to_user_dir_txn
@@ -188,9 +182,11 @@ class UserDirectoryStore(SQLBaseStore):
                     txn.execute(
                         sql,
                         (
-                            user_id, get_localpart_from_id(user_id),
-                            get_domain_from_id(user_id), display_name,
-                        )
+                            user_id,
+                            get_localpart_from_id(user_id),
+                            get_domain_from_id(user_id),
+                            display_name,
+                        ),
                     )
                 else:
                     # TODO: Remove this code after we've bumped the minimum version
@@ -208,9 +204,11 @@ class UserDirectoryStore(SQLBaseStore):
                         txn.execute(
                             sql,
                             (
-                                user_id, get_localpart_from_id(user_id),
-                                get_domain_from_id(user_id), display_name,
-                            )
+                                user_id,
+                                get_localpart_from_id(user_id),
+                                get_domain_from_id(user_id),
+                                display_name,
+                            ),
                         )
                     elif new_entry is False:
                         sql = """
@@ -225,15 +223,16 @@ class UserDirectoryStore(SQLBaseStore):
                             (
                                 get_localpart_from_id(user_id),
                                 get_domain_from_id(user_id),
-                                display_name, user_id,
-                            )
+                                display_name,
+                                user_id,
+                            ),
                         )
                     else:
                         raise RuntimeError(
                             "upsert returned None when 'can_native_upsert' is False"
                         )
             elif isinstance(self.database_engine, Sqlite3Engine):
-                value = "%s %s" % (user_id, display_name,) if display_name else user_id
+                value = "%s %s" % (user_id, display_name) if display_name else user_id
                 self._simple_upsert_txn(
                     txn,
                     table="user_directory_search",
@@ -264,29 +263,18 @@ class UserDirectoryStore(SQLBaseStore):
     def remove_from_user_dir(self, user_id):
         def _remove_from_user_dir_txn(txn):
             self._simple_delete_txn(
-                txn,
-                table="user_directory",
-                keyvalues={"user_id": user_id},
+                txn, table="user_directory", keyvalues={"user_id": user_id}
             )
             self._simple_delete_txn(
-                txn,
-                table="user_directory_search",
-                keyvalues={"user_id": user_id},
+                txn, table="user_directory_search", keyvalues={"user_id": user_id}
             )
             self._simple_delete_txn(
-                txn,
-                table="users_in_public_rooms",
-                keyvalues={"user_id": user_id},
-            )
-            txn.call_after(
-                self.get_user_in_directory.invalidate, (user_id,)
-            )
-            txn.call_after(
-                self.get_user_in_public_room.invalidate, (user_id,)
+                txn, table="users_in_public_rooms", keyvalues={"user_id": user_id}
             )
-        return self.runInteraction(
-            "remove_from_user_dir", _remove_from_user_dir_txn,
-        )
+            txn.call_after(self.get_user_in_directory.invalidate, (user_id,))
+            txn.call_after(self.get_user_in_public_room.invalidate, (user_id,))
+
+        return self.runInteraction("remove_from_user_dir", _remove_from_user_dir_txn)
 
     @defer.inlineCallbacks
     def remove_from_user_in_public_room(self, user_id):
@@ -371,6 +359,7 @@ class UserDirectoryStore(SQLBaseStore):
             share_private (bool): Is the room private
             user_id_tuples([(str, str)]): iterable of 2-tuple of user IDs.
         """
+
         def _add_users_who_share_room_txn(txn):
             self._simple_insert_many_txn(
                 txn,
@@ -387,13 +376,12 @@ class UserDirectoryStore(SQLBaseStore):
             )
             for user_id, other_user_id in user_id_tuples:
                 txn.call_after(
-                    self.get_users_who_share_room_from_dir.invalidate,
-                    (user_id,),
+                    self.get_users_who_share_room_from_dir.invalidate, (user_id,)
                 )
                 txn.call_after(
-                    self.get_if_users_share_a_room.invalidate,
-                    (user_id, other_user_id),
+                    self.get_if_users_share_a_room.invalidate, (user_id, other_user_id)
                 )
+
         return self.runInteraction(
             "add_users_who_share_room", _add_users_who_share_room_txn
         )
@@ -407,6 +395,7 @@ class UserDirectoryStore(SQLBaseStore):
             share_private (bool): Is the room private
             user_id_tuples([(str, str)]): iterable of 2-tuple of user IDs.
         """
+
         def _update_users_who_share_room_txn(txn):
             sql = """
                 UPDATE users_who_share_rooms
@@ -414,21 +403,16 @@ class UserDirectoryStore(SQLBaseStore):
                 WHERE user_id = ? AND other_user_id = ?
             """
             txn.executemany(
-                sql,
-                (
-                    (room_id, share_private, uid, oid)
-                    for uid, oid in user_id_sets
-                )
+                sql, ((room_id, share_private, uid, oid) for uid, oid in user_id_sets)
             )
             for user_id, other_user_id in user_id_sets:
                 txn.call_after(
-                    self.get_users_who_share_room_from_dir.invalidate,
-                    (user_id,),
+                    self.get_users_who_share_room_from_dir.invalidate, (user_id,)
                 )
                 txn.call_after(
-                    self.get_if_users_share_a_room.invalidate,
-                    (user_id, other_user_id),
+                    self.get_if_users_share_a_room.invalidate, (user_id, other_user_id)
                 )
+
         return self.runInteraction(
             "update_users_who_share_room", _update_users_who_share_room_txn
         )
@@ -442,22 +426,18 @@ class UserDirectoryStore(SQLBaseStore):
             share_private (bool): Is the room private
             user_id_tuples([(str, str)]): iterable of 2-tuple of user IDs.
         """
+
         def _remove_user_who_share_room_txn(txn):
             self._simple_delete_txn(
                 txn,
                 table="users_who_share_rooms",
-                keyvalues={
-                    "user_id": user_id,
-                    "other_user_id": other_user_id,
-                },
+                keyvalues={"user_id": user_id, "other_user_id": other_user_id},
             )
             txn.call_after(
-                self.get_users_who_share_room_from_dir.invalidate,
-                (user_id,),
+                self.get_users_who_share_room_from_dir.invalidate, (user_id,)
             )
             txn.call_after(
-                self.get_if_users_share_a_room.invalidate,
-                (user_id, other_user_id),
+                self.get_if_users_share_a_room.invalidate, (user_id, other_user_id)
             )
 
         return self.runInteraction(
@@ -478,10 +458,7 @@ class UserDirectoryStore(SQLBaseStore):
         """
         return self._simple_select_one_onecol(
             table="users_who_share_rooms",
-            keyvalues={
-                "user_id": user_id,
-                "other_user_id": other_user_id,
-            },
+            keyvalues={"user_id": user_id, "other_user_id": other_user_id},
             retcol="share_private",
             allow_none=True,
             desc="get_if_users_share_a_room",
@@ -499,17 +476,12 @@ class UserDirectoryStore(SQLBaseStore):
         """
         rows = yield self._simple_select_list(
             table="users_who_share_rooms",
-            keyvalues={
-                "user_id": user_id,
-            },
-            retcols=("other_user_id", "share_private",),
+            keyvalues={"user_id": user_id},
+            retcols=("other_user_id", "share_private"),
             desc="get_users_who_share_room_with_user",
         )
 
-        defer.returnValue({
-            row["other_user_id"]: row["share_private"]
-            for row in rows
-        })
+        defer.returnValue({row["other_user_id"]: row["share_private"] for row in rows})
 
     def get_users_in_share_dir_with_room_id(self, user_id, room_id):
         """Get all user tuples that are in the users_who_share_rooms due to the
@@ -556,6 +528,7 @@ class UserDirectoryStore(SQLBaseStore):
     def delete_all_from_user_dir(self):
         """Delete the entire user directory
         """
+
         def _delete_all_from_user_dir_txn(txn):
             txn.execute("DELETE FROM user_directory")
             txn.execute("DELETE FROM user_directory_search")
@@ -565,6 +538,7 @@ class UserDirectoryStore(SQLBaseStore):
             txn.call_after(self.get_user_in_public_room.invalidate_all)
             txn.call_after(self.get_users_who_share_room_from_dir.invalidate_all)
             txn.call_after(self.get_if_users_share_a_room.invalidate_all)
+
         return self.runInteraction(
             "delete_all_from_user_dir", _delete_all_from_user_dir_txn
         )
@@ -574,7 +548,7 @@ class UserDirectoryStore(SQLBaseStore):
         return self._simple_select_one(
             table="user_directory",
             keyvalues={"user_id": user_id},
-            retcols=("room_id", "display_name", "avatar_url",),
+            retcols=("room_id", "display_name", "avatar_url"),
             allow_none=True,
             desc="get_user_in_directory",
         )
@@ -607,7 +581,9 @@ class UserDirectoryStore(SQLBaseStore):
 
     def get_current_state_deltas(self, prev_stream_id):
         prev_stream_id = int(prev_stream_id)
-        if not self._curr_state_delta_stream_cache.has_any_entity_changed(prev_stream_id):
+        if not self._curr_state_delta_stream_cache.has_any_entity_changed(
+            prev_stream_id
+        ):
             return []
 
         def get_current_state_deltas_txn(txn):
@@ -641,7 +617,7 @@ class UserDirectoryStore(SQLBaseStore):
                 WHERE ? < stream_id AND stream_id <= ?
                 ORDER BY stream_id ASC
             """
-            txn.execute(sql, (prev_stream_id, max_stream_id,))
+            txn.execute(sql, (prev_stream_id, max_stream_id))
             return self.cursor_to_dict(txn)
 
         return self.runInteraction(
@@ -731,8 +707,11 @@ class UserDirectoryStore(SQLBaseStore):
                     display_name IS NULL,
                     avatar_url IS NULL
                 LIMIT ?
-            """ % (join_clause, where_clause)
-            args = join_args + (full_query, exact_query, prefix_query, limit + 1,)
+            """ % (
+                join_clause,
+                where_clause,
+            )
+            args = join_args + (full_query, exact_query, prefix_query, limit + 1)
         elif isinstance(self.database_engine, Sqlite3Engine):
             search_query = _parse_query_sqlite(search_term)
 
@@ -749,7 +728,10 @@ class UserDirectoryStore(SQLBaseStore):
                     display_name IS NULL,
                     avatar_url IS NULL
                 LIMIT ?
-            """ % (join_clause, where_clause)
+            """ % (
+                join_clause,
+                where_clause,
+            )
             args = join_args + (search_query, limit + 1)
         else:
             # This should be unreachable.
@@ -761,10 +743,7 @@ class UserDirectoryStore(SQLBaseStore):
 
         limited = len(results) > limit
 
-        defer.returnValue({
-            "limited": limited,
-            "results": results,
-        })
+        defer.returnValue({"limited": limited, "results": results})
 
 
 def _parse_query_sqlite(search_term):
@@ -779,7 +758,7 @@ def _parse_query_sqlite(search_term):
 
     # Pull out the individual words, discarding any non-word characters.
     results = re.findall(r"([\w\-]+)", search_term, re.UNICODE)
-    return " & ".join("(%s* OR %s)" % (result, result,) for result in results)
+    return " & ".join("(%s* OR %s)" % (result, result) for result in results)
 
 
 def _parse_query_postgres(search_term):
@@ -792,7 +771,7 @@ def _parse_query_postgres(search_term):
     # Pull out the individual words, discarding any non-word characters.
     results = re.findall(r"([\w\-]+)", search_term, re.UNICODE)
 
-    both = " & ".join("(%s:* | %s)" % (result, result,) for result in results)
+    both = " & ".join("(%s:* | %s)" % (result, result) for result in results)
     exact = " & ".join("%s" % (result,) for result in results)
     prefix = " & ".join("%s:*" % (result,) for result in results)