Diffstat (limited to 'synapse')
-rw-r--r--  synapse/app/pusher.py                           | 172
-rw-r--r--  synapse/app/synchrotron.py                      | 136
-rw-r--r--  synapse/config/homeserver.py                    |   4
-rw-r--r--  synapse/config/logger.py                        | 102
-rw-r--r--  synapse/config/server.py                        |  33
-rw-r--r--  synapse/config/workers.py                       |  31
-rw-r--r--  synapse/federation/federation_server.py         |  15
-rw-r--r--  synapse/federation/transport/server.py          |   2
-rw-r--r--  synapse/handlers/auth.py                        |   2
-rw-r--r--  synapse/handlers/federation.py                  |  15
-rw-r--r--  synapse/handlers/profile.py                     |   7
-rw-r--r--  synapse/handlers/register.py                    |  24
-rw-r--r--  synapse/push/mailer.py                          |  29
-rw-r--r--  synapse/rest/media/v1/preview_url_resource.py   |  11
-rw-r--r--  synapse/storage/registration.py                 |  17
-rw-r--r--  synapse/util/distributor.py                     |   4
16 files changed, 249 insertions(+), 355 deletions(-)
diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py
index 4ec23d84c1..4f1d18ab5f 100644
--- a/synapse/app/pusher.py
+++ b/synapse/app/pusher.py
@@ -18,10 +18,8 @@ import synapse
 
 from synapse.server import HomeServer
 from synapse.config._base import ConfigError
-from synapse.config.database import DatabaseConfig
-from synapse.config.logger import LoggingConfig
-from synapse.config.emailconfig import EmailConfig
-from synapse.config.key import KeyConfig
+from synapse.config.logger import setup_logging
+from synapse.config.homeserver import HomeServerConfig
 from synapse.http.site import SynapseSite
 from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
 from synapse.storage.roommember import RoomMemberStore
@@ -43,98 +41,13 @@ from twisted.web.resource import Resource
 
 from daemonize import Daemonize
 
-import gc
 import sys
 import logging
+import gc
 
 logger = logging.getLogger("synapse.app.pusher")
 
 
-class SlaveConfig(DatabaseConfig):
-    def read_config(self, config):
-        self.replication_url = config["replication_url"]
-        self.server_name = config["server_name"]
-        self.use_insecure_ssl_client_just_for_testing_do_not_use = config.get(
-            "use_insecure_ssl_client_just_for_testing_do_not_use", False
-        )
-        self.user_agent_suffix = None
-        self.start_pushers = True
-        self.listeners = config["listeners"]
-        self.soft_file_limit = config.get("soft_file_limit")
-        self.daemonize = config.get("daemonize")
-        self.pid_file = self.abspath(config.get("pid_file"))
-        self.public_baseurl = config["public_baseurl"]
-
-        thresholds = config.get("gc_thresholds", None)
-        if thresholds is not None:
-            try:
-                assert len(thresholds) == 3
-                self.gc_thresholds = (
-                    int(thresholds[0]), int(thresholds[1]), int(thresholds[2]),
-                )
-            except:
-                raise ConfigError(
-                    "Value of `gc_threshold` must be a list of three integers if set"
-                )
-        else:
-            self.gc_thresholds = None
-
-        # some things used by the auth handler but not actually used in the
-        # pusher codebase
-        self.bcrypt_rounds = None
-        self.ldap_enabled = None
-        self.ldap_server = None
-        self.ldap_port = None
-        self.ldap_tls = None
-        self.ldap_search_base = None
-        self.ldap_search_property = None
-        self.ldap_email_property = None
-        self.ldap_full_name_property = None
-
-        # We would otherwise try to use the registration shared secret as the
-        # macaroon shared secret if there was no macaroon_shared_secret, but
-        # that means pulling in RegistrationConfig too. We don't need to be
-        # backwards compaitible in the pusher codebase so just make people set
-        # macaroon_shared_secret. We set this to None to prevent it referencing
-        # an undefined key.
-        self.registration_shared_secret = None
-
-    def default_config(self, server_name, **kwargs):
-        pid_file = self.abspath("pusher.pid")
-        return """\
-        # Slave configuration
-
-        # The replication listener on the synapse to talk to.
-        #replication_url: https://localhost:{replication_port}/_synapse/replication
-
-        server_name: "%(server_name)s"
-
-        listeners: []
-        # Enable a ssh manhole listener on the pusher.
-        # - type: manhole
-        #   port: {manhole_port}
-        #   bind_address: 127.0.0.1
-        # Enable a metric listener on the pusher.
-        # - type: http
-        #   port: {metrics_port}
-        #   bind_address: 127.0.0.1
-        #   resources:
-        #    - names: ["metrics"]
-        #      compress: False
-
-        report_stats: False
-
-        daemonize: False
-
-        pid_file: %(pid_file)s
-
-        """ % locals()
-
-
-class PusherSlaveConfig(SlaveConfig, LoggingConfig, EmailConfig, KeyConfig):
-    pass
-
-
 class PusherSlaveStore(
     SlavedEventStore, SlavedPusherStore, SlavedReceiptsStore,
     SlavedAccountDataStore
@@ -199,7 +112,7 @@ class PusherServer(HomeServer):
 
     def remove_pusher(self, app_id, push_key, user_id):
         http_client = self.get_simple_http_client()
-        replication_url = self.config.replication_url
+        replication_url = self.config.worker_replication_url
         url = replication_url + "/remove_pushers"
         return http_client.post_json_get_json(url, {
             "remove": [{
@@ -232,8 +145,8 @@ class PusherServer(HomeServer):
         )
         logger.info("Synapse pusher now listening on port %d", port)
 
-    def start_listening(self):
-        for listener in self.config.listeners:
+    def start_listening(self, listeners):
+        for listener in listeners:
             if listener["type"] == "http":
                 self._listen_http(listener)
             elif listener["type"] == "manhole":
@@ -253,7 +166,7 @@ class PusherServer(HomeServer):
     def replicate(self):
         http_client = self.get_simple_http_client()
         store = self.get_datastore()
-        replication_url = self.config.replication_url
+        replication_url = self.config.worker_replication_url
         pusher_pool = self.get_pusherpool()
         clock = self.get_clock()
 
@@ -329,19 +242,30 @@ class PusherServer(HomeServer):
                 yield sleep(30)
 
 
-def setup(config_options):
+def start(config_options):
     try:
-        config = PusherSlaveConfig.load_config(
+        config = HomeServerConfig.load_config(
             "Synapse pusher", config_options
         )
     except ConfigError as e:
         sys.stderr.write("\n" + e.message + "\n")
         sys.exit(1)
 
-    if not config:
-        sys.exit(0)
+    assert config.worker_app == "synapse.app.pusher"
+
+    setup_logging(config.worker_log_config, config.worker_log_file)
 
-    config.setup_logging()
+    if config.start_pushers:
+        sys.stderr.write(
+            "\nThe pushers must be disabled in the main synapse process"
+            "\nbefore they can be run in a separate worker."
+            "\nPlease add ``start_pushers: false`` to the main config"
+            "\n"
+        )
+        sys.exit(1)
+
+    # Force the pushers to start since they will be disabled in the main config
+    config.start_pushers = True
 
     database_engine = create_engine(config.database_config)
 
@@ -354,11 +278,15 @@ def setup(config_options):
     )
 
     ps.setup()
-    ps.start_listening()
-
-    change_resource_limit(ps.config.soft_file_limit)
-    if ps.config.gc_thresholds:
-        gc.set_threshold(*ps.config.gc_thresholds)
+    ps.start_listening(config.worker_listeners)
+
+    def run():
+        with LoggingContext("run"):
+            logger.info("Running")
+            change_resource_limit(config.soft_file_limit)
+            if config.gc_thresholds:
+                gc.set_threshold(*config.gc_thresholds)
+            reactor.run()
 
     def start():
         ps.replicate()
@@ -367,30 +295,20 @@ def setup(config_options):
 
     reactor.callWhenRunning(start)
 
-    return ps
+    if config.worker_daemonize:
+        daemon = Daemonize(
+            app="synapse-pusher",
+            pid=config.worker_pid_file,
+            action=run,
+            auto_close_fds=False,
+            verbose=True,
+            logger=logger,
+        )
+        daemon.start()
+    else:
+        run()
 
 
 if __name__ == '__main__':
     with LoggingContext("main"):
-        ps = setup(sys.argv[1:])
-
-        if ps.config.daemonize:
-            def run():
-                with LoggingContext("run"):
-                    change_resource_limit(ps.config.soft_file_limit)
-                    if ps.config.gc_thresholds:
-                        gc.set_threshold(*ps.config.gc_thresholds)
-                    reactor.run()
-
-            daemon = Daemonize(
-                app="synapse-pusher",
-                pid=ps.config.pid_file,
-                action=run,
-                auto_close_fds=False,
-                verbose=True,
-                logger=logger,
-            )
-
-            daemon.start()
-        else:
-            reactor.run()
+        ps = start(sys.argv[1:])
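Note on the rename that runs through this file: the replication base URL now comes from the worker block of the config rather than a standalone replication_url key. A small sketch of the resulting URL construction, with an illustrative value not taken from the diff:

    # Illustrative only: how the remove_pushers URL is built after the rename.
    replication_url = config.worker_replication_url   # e.g. "http://127.0.0.1:9092/_synapse/replication"
    url = replication_url + "/remove_pushers"          # POSTed to by PusherServer.remove_pusher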
diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py
index 297e199453..8cf5bbbb6d 100644
--- a/synapse/app/synchrotron.py
+++ b/synapse/app/synchrotron.py
@@ -18,9 +18,8 @@ import synapse
 
 from synapse.api.constants import EventTypes, PresenceState
 from synapse.config._base import ConfigError
-from synapse.config.database import DatabaseConfig
-from synapse.config.logger import LoggingConfig
-from synapse.config.appservice import AppServiceConfig
+from synapse.config.homeserver import HomeServerConfig
+from synapse.config.logger import setup_logging
 from synapse.events import FrozenEvent
 from synapse.handlers.presence import PresenceHandler
 from synapse.http.site import SynapseSite
@@ -63,70 +62,6 @@ import ujson as json
 logger = logging.getLogger("synapse.app.synchrotron")
 
 
-class SynchrotronConfig(DatabaseConfig, LoggingConfig, AppServiceConfig):
-    def read_config(self, config):
-        self.replication_url = config["replication_url"]
-        self.server_name = config["server_name"]
-        self.use_insecure_ssl_client_just_for_testing_do_not_use = config.get(
-            "use_insecure_ssl_client_just_for_testing_do_not_use", False
-        )
-        self.user_agent_suffix = None
-        self.listeners = config["listeners"]
-        self.soft_file_limit = config.get("soft_file_limit")
-        self.daemonize = config.get("daemonize")
-        self.pid_file = self.abspath(config.get("pid_file"))
-        self.macaroon_secret_key = config["macaroon_secret_key"]
-        self.expire_access_token = config.get("expire_access_token", False)
-
-        thresholds = config.get("gc_thresholds", None)
-        if thresholds is not None:
-            try:
-                assert len(thresholds) == 3
-                self.gc_thresholds = (
-                    int(thresholds[0]), int(thresholds[1]), int(thresholds[2]),
-                )
-            except:
-                raise ConfigError(
-                    "Value of `gc_threshold` must be a list of three integers if set"
-                )
-        else:
-            self.gc_thresholds = None
-
-    def default_config(self, server_name, **kwargs):
-        pid_file = self.abspath("synchroton.pid")
-        return """\
-        # Slave configuration
-
-        # The replication listener on the synapse to talk to.
-        #replication_url: https://localhost:{replication_port}/_synapse/replication
-
-        server_name: "%(server_name)s"
-
-        listeners:
-        # Enable a /sync listener on the synchrontron
-        #- type: http
-        #    port: {http_port}
-        #    bind_address: ""
-        # Enable a ssh manhole listener on the synchrotron
-        # - type: manhole
-        #   port: {manhole_port}
-        #   bind_address: 127.0.0.1
-        # Enable a metric listener on the synchrotron
-        # - type: http
-        #   port: {metrics_port}
-        #   bind_address: 127.0.0.1
-        #   resources:
-        #    - names: ["metrics"]
-        #      compress: False
-
-        report_stats: False
-
-        daemonize: False
-
-        pid_file: %(pid_file)s
-        """ % locals()
-
-
 class SynchrotronSlavedStore(
     SlavedPushRuleStore,
     SlavedEventStore,
@@ -163,7 +98,7 @@ class SynchrotronPresence(object):
         self.http_client = hs.get_simple_http_client()
         self.store = hs.get_datastore()
         self.user_to_num_current_syncs = {}
-        self.syncing_users_url = hs.config.replication_url + "/syncing_users"
+        self.syncing_users_url = hs.config.worker_replication_url + "/syncing_users"
         self.clock = hs.get_clock()
 
         active_presence = self.store.take_presence_startup_info()
@@ -350,8 +285,8 @@ class SynchrotronServer(HomeServer):
         )
         logger.info("Synapse synchrotron now listening on port %d", port)
 
-    def start_listening(self):
-        for listener in self.config.listeners:
+    def start_listening(self, listeners):
+        for listener in listeners:
             if listener["type"] == "http":
                 self._listen_http(listener)
             elif listener["type"] == "manhole":
@@ -371,7 +306,7 @@ class SynchrotronServer(HomeServer):
     def replicate(self):
         http_client = self.get_simple_http_client()
         store = self.get_datastore()
-        replication_url = self.config.replication_url
+        replication_url = self.config.worker_replication_url
         clock = self.get_clock()
         notifier = self.get_notifier()
         presence_handler = self.get_presence_handler()
@@ -470,19 +405,18 @@ class SynchrotronServer(HomeServer):
         return SynchrotronTyping(self)
 
 
-def setup(config_options):
+def start(config_options):
     try:
-        config = SynchrotronConfig.load_config(
+        config = HomeServerConfig.load_config(
             "Synapse synchrotron", config_options
         )
     except ConfigError as e:
         sys.stderr.write("\n" + e.message + "\n")
         sys.exit(1)
 
-    if not config:
-        sys.exit(0)
+    assert config.worker_app == "synapse.app.synchrotron"
 
-    config.setup_logging()
+    setup_logging(config.worker_log_config, config.worker_log_file)
 
     database_engine = create_engine(config.database_config)
 
@@ -496,11 +430,15 @@ def setup(config_options):
     )
 
     ss.setup()
-    ss.start_listening()
-
-    change_resource_limit(ss.config.soft_file_limit)
-    if ss.config.gc_thresholds:
-        ss.set_threshold(*ss.config.gc_thresholds)
+    ss.start_listening(config.worker_listeners)
+
+    def run():
+        with LoggingContext("run"):
+            logger.info("Running")
+            change_resource_limit(config.soft_file_limit)
+            if config.gc_thresholds:
+                gc.set_threshold(*config.gc_thresholds)
+            reactor.run()
 
     def start():
         ss.get_datastore().start_profiling()
@@ -508,30 +446,20 @@ def setup(config_options):
 
     reactor.callWhenRunning(start)
 
-    return ss
+    if config.worker_daemonize:
+        daemon = Daemonize(
+            app="synapse-synchrotron",
+            pid=config.worker_pid_file,
+            action=run,
+            auto_close_fds=False,
+            verbose=True,
+            logger=logger,
+        )
+        daemon.start()
+    else:
+        run()
 
 
 if __name__ == '__main__':
     with LoggingContext("main"):
-        ss = setup(sys.argv[1:])
-
-        if ss.config.daemonize:
-            def run():
-                with LoggingContext("run"):
-                    change_resource_limit(ss.config.soft_file_limit)
-                    if ss.config.gc_thresholds:
-                        gc.set_threshold(*ss.config.gc_thresholds)
-                    reactor.run()
-
-            daemon = Daemonize(
-                app="synapse-synchrotron",
-                pid=ss.config.pid_file,
-                action=run,
-                auto_close_fds=False,
-                verbose=True,
-                logger=logger,
-            )
-
-            daemon.start()
-        else:
-            reactor.run()
+        start(sys.argv[1:])
diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py
index fc2445484c..79b0534b3b 100644
--- a/synapse/config/homeserver.py
+++ b/synapse/config/homeserver.py
@@ -32,13 +32,15 @@ from .password import PasswordConfig
 from .jwt import JWTConfig
 from .ldap import LDAPConfig
 from .emailconfig import EmailConfig
+from .workers import WorkerConfig
 
 
 class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
                        RatelimitConfig, ContentRepositoryConfig, CaptchaConfig,
                        VoipConfig, RegistrationConfig, MetricsConfig, ApiConfig,
                        AppServiceConfig, KeyConfig, SAML2Config, CasConfig,
-                       JWTConfig, LDAPConfig, PasswordConfig, EmailConfig,):
+                       JWTConfig, LDAPConfig, PasswordConfig, EmailConfig,
+                       WorkerConfig,):
     pass
 
 
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index 5047db898f..dc68683fbc 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -126,54 +126,58 @@ class LoggingConfig(Config):
                 )
 
     def setup_logging(self):
-        log_format = (
-            "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s"
-            " - %(message)s"
-        )
-        if self.log_config is None:
-
-            level = logging.INFO
-            level_for_storage = logging.INFO
-            if self.verbosity:
-                level = logging.DEBUG
-                if self.verbosity > 1:
-                    level_for_storage = logging.DEBUG
-
-            # FIXME: we need a logging.WARN for a -q quiet option
-            logger = logging.getLogger('')
-            logger.setLevel(level)
-
-            logging.getLogger('synapse.storage').setLevel(level_for_storage)
-
-            formatter = logging.Formatter(log_format)
-            if self.log_file:
-                # TODO: Customisable file size / backup count
-                handler = logging.handlers.RotatingFileHandler(
-                    self.log_file, maxBytes=(1000 * 1000 * 100), backupCount=3
-                )
-
-                def sighup(signum, stack):
-                    logger.info("Closing log file due to SIGHUP")
-                    handler.doRollover()
-                    logger.info("Opened new log file due to SIGHUP")
-
-                # TODO(paul): obviously this is a terrible mechanism for
-                #   stealing SIGHUP, because it means no other part of synapse
-                #   can use it instead. If we want to catch SIGHUP anywhere
-                #   else as well, I'd suggest we find a nicer way to broadcast
-                #   it around.
-                if getattr(signal, "SIGHUP"):
-                    signal.signal(signal.SIGHUP, sighup)
-            else:
-                handler = logging.StreamHandler()
-            handler.setFormatter(formatter)
-
-            handler.addFilter(LoggingContextFilter(request=""))
-
-            logger.addHandler(handler)
+        setup_logging(self.log_config, self.log_file, self.verbosity)
+
+
+def setup_logging(log_config=None, log_file=None, verbosity=None):
+    log_format = (
+        "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s"
+        " - %(message)s"
+    )
+    if log_config is None:
+
+        level = logging.INFO
+        level_for_storage = logging.INFO
+        if verbosity:
+            level = logging.DEBUG
+            if verbosity > 1:
+                level_for_storage = logging.DEBUG
+
+        # FIXME: we need a logging.WARN for a -q quiet option
+        logger = logging.getLogger('')
+        logger.setLevel(level)
+
+        logging.getLogger('synapse.storage').setLevel(level_for_storage)
+
+        formatter = logging.Formatter(log_format)
+        if log_file:
+            # TODO: Customisable file size / backup count
+            handler = logging.handlers.RotatingFileHandler(
+                log_file, maxBytes=(1000 * 1000 * 100), backupCount=3
+            )
+
+            def sighup(signum, stack):
+                logger.info("Closing log file due to SIGHUP")
+                handler.doRollover()
+                logger.info("Opened new log file due to SIGHUP")
+
+            # TODO(paul): obviously this is a terrible mechanism for
+            #   stealing SIGHUP, because it means no other part of synapse
+            #   can use it instead. If we want to catch SIGHUP anywhere
+            #   else as well, I'd suggest we find a nicer way to broadcast
+            #   it around.
+            if getattr(signal, "SIGHUP"):
+                signal.signal(signal.SIGHUP, sighup)
         else:
-            with open(self.log_config, 'r') as f:
-                logging.config.dictConfig(yaml.load(f))
+            handler = logging.StreamHandler()
+        handler.setFormatter(formatter)
+
+        handler.addFilter(LoggingContextFilter(request=""))
+
+        logger.addHandler(handler)
+    else:
+        with open(log_config, 'r') as f:
+            logging.config.dictConfig(yaml.load(f))
 
-        observer = PythonLoggingObserver()
-        observer.start()
+    observer = PythonLoggingObserver()
+    observer.start()
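The worker entry points above call this new module-level helper directly. A minimal sketch of the two paths it takes (file names are illustrative):

    from synapse.config.logger import setup_logging

    # Without a structured log config, a RotatingFileHandler (or a stderr
    # StreamHandler when no log_file is given) is installed on the root logger,
    # plus Twisted's PythonLoggingObserver.
    setup_logging(log_config=None, log_file="pusher.log", verbosity=1)

    # With a YAML log config, the file is handed straight to
    # logging.config.dictConfig and the other arguments are ignored.
    setup_logging(log_config="pusher.log.config")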
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 44b8d422e0..7840dc3ad6 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -27,7 +27,7 @@ class ServerConfig(Config):
         self.daemonize = config.get("daemonize")
         self.print_pidfile = config.get("print_pidfile")
         self.user_agent_suffix = config.get("user_agent_suffix")
-        self.use_frozen_dicts = config.get("use_frozen_dicts", True)
+        self.use_frozen_dicts = config.get("use_frozen_dicts", False)
         self.public_baseurl = config.get("public_baseurl")
         self.secondary_directory_servers = config.get("secondary_directory_servers", [])
 
@@ -38,19 +38,7 @@ class ServerConfig(Config):
 
         self.listeners = config.get("listeners", [])
 
-        thresholds = config.get("gc_thresholds", None)
-        if thresholds is not None:
-            try:
-                assert len(thresholds) == 3
-                self.gc_thresholds = (
-                    int(thresholds[0]), int(thresholds[1]), int(thresholds[2]),
-                )
-            except:
-                raise ConfigError(
-                    "Value of `gc_threshold` must be a list of three integers if set"
-                )
-        else:
-            self.gc_thresholds = None
+        self.gc_thresholds = read_gc_thresholds(config.get("gc_thresholds", None))
 
         bind_port = config.get("bind_port")
         if bind_port:
@@ -264,3 +252,20 @@ class ServerConfig(Config):
                                   type=int,
                                   help="Turn on the twisted telnet manhole"
                                   " service on the given port.")
+
+
+def read_gc_thresholds(thresholds):
+    """Reads the three integer thresholds for garbage collection. Ensures that
+    the thresholds are integers if thresholds are supplied.
+    """
+    if thresholds is None:
+        return None
+    try:
+        assert len(thresholds) == 3
+        return (
+            int(thresholds[0]), int(thresholds[1]), int(thresholds[2]),
+        )
+    except:
+        raise ConfigError(
+            "Value of `gc_threshold` must be a list of three integers if set"
+        )
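For reference, the extracted helper behaves as follows (example values only):

    read_gc_thresholds(None)                 # -> None; gc is left at its defaults
    read_gc_thresholds([700, 10, 10])        # -> (700, 10, 10), later applied with gc.set_threshold(*...)
    read_gc_thresholds(["700", "10", "10"])  # -> (700, 10, 10); values are coerced with int()
    read_gc_thresholds([700, 10])            # raises ConfigError: must be a list of three integers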
diff --git a/synapse/config/workers.py b/synapse/config/workers.py
new file mode 100644
index 0000000000..904789d155
--- /dev/null
+++ b/synapse/config/workers.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+# Copyright 2016 matrix.org
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import Config
+
+
+class WorkerConfig(Config):
+    """The workers are processes run separately to the main synapse process.
+    They have their own pid_file and listener configuration. They use the
+    replication_url to talk to the main synapse process."""
+
+    def read_config(self, config):
+        self.worker_app = config.get("worker_app")
+        self.worker_listeners = config.get("worker_listeners")
+        self.worker_daemonize = config.get("worker_daemonize")
+        self.worker_pid_file = config.get("worker_pid_file")
+        self.worker_log_file = config.get("worker_log_file")
+        self.worker_log_config = config.get("worker_log_config")
+        self.worker_replication_url = config.get("worker_replication_url")
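The keys read here map one-to-one onto the worker_* options in a worker's own config file. A hypothetical fragment, shown as the parsed dict handed to read_config (every value below is illustrative, not taken from the diff):

    worker_config = {
        "worker_app": "synapse.app.pusher",
        "worker_replication_url": "http://127.0.0.1:9092/_synapse/replication",
        "worker_listeners": [
            {"type": "manhole", "port": 9093, "bind_address": "127.0.0.1"},
        ],
        "worker_daemonize": False,
        "worker_pid_file": "pusher.pid",
        "worker_log_file": "pusher.log",
        "worker_log_config": None,
    }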
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index fe92457ba1..2a589524a4 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -193,13 +193,16 @@ class FederationServer(FederationBase):
             )
 
             for event in auth_chain:
-                event.signatures.update(
-                    compute_event_signature(
-                        event,
-                        self.hs.hostname,
-                        self.hs.config.signing_key[0]
+                # We sign these again because there was a bug where we
+                # incorrectly signed things the first time round
+                if self.hs.is_mine_id(event.event_id):
+                    event.signatures.update(
+                        compute_event_signature(
+                            event,
+                            self.hs.hostname,
+                            self.hs.config.signing_key[0]
+                        )
                     )
-                )
         else:
             raise NotImplementedError("Specify an event")
 
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 6fc3e2207c..8a1965f45a 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -37,7 +37,7 @@ class TransportLayerServer(JsonResource):
         self.hs = hs
         self.clock = hs.get_clock()
 
-        super(TransportLayerServer, self).__init__(hs)
+        super(TransportLayerServer, self).__init__(hs, canonical_json=False)
 
         self.authenticator = Authenticator(hs)
         self.ratelimiter = FederationRateLimiter(
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 200793b5ed..b38f81e999 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -626,6 +626,6 @@ class AuthHandler(BaseHandler):
             Whether self.hash(password) == stored_hash (bool).
         """
         if stored_hash:
-            return bcrypt.hashpw(password, stored_hash) == stored_hash
+            return bcrypt.hashpw(password, stored_hash.encode('utf-8')) == stored_hash
         else:
             return False
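The one-line change above appears to guard against the stored hash coming back from the database as a unicode string while bcrypt.hashpw expects bytes for its salt argument (an assumption; the diff does not state the motivation). As a self-contained sketch:

    import bcrypt

    def validate_hash(password, stored_hash):
        # Mirror of the fixed comparison: encode the stored hash before handing
        # it to bcrypt.hashpw (assumption: the hash may arrive as unicode).
        if stored_hash:
            return bcrypt.hashpw(password, stored_hash.encode('utf-8')) == stored_hash
        return False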
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index c2df43e2f6..6c0bc7eafa 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -1018,13 +1018,16 @@ class FederationHandler(BaseHandler):
 
             res = results.values()
             for event in res:
-                event.signatures.update(
-                    compute_event_signature(
-                        event,
-                        self.hs.hostname,
-                        self.hs.config.signing_key[0]
+                # We sign these again because there was a bug where we
+                # incorrectly signed things the first time round
+                if self.hs.is_mine_id(event.event_id):
+                    event.signatures.update(
+                        compute_event_signature(
+                            event,
+                            self.hs.hostname,
+                            self.hs.config.signing_key[0]
+                        )
                     )
-                )
 
             defer.returnValue(res)
         else:
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index e37409170d..711a6a567f 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -36,13 +36,6 @@ class ProfileHandler(BaseHandler):
             "profile", self.on_profile_query
         )
 
-        distributor = hs.get_distributor()
-
-        distributor.observe("registered_user", self.registered_user)
-
-    def registered_user(self, user):
-        return self.store.create_profile(user.localpart)
-
     @defer.inlineCallbacks
     def get_displayname(self, target_user):
         if self.hs.is_mine(target_user):
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index e0aaefe7be..0b7517221d 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -23,7 +23,6 @@ from synapse.api.errors import (
 from ._base import BaseHandler
 from synapse.util.async import run_on_reactor
 from synapse.http.client import CaptchaServerHttpClient
-from synapse.util.distributor import registered_user
 
 import logging
 import urllib
@@ -37,8 +36,6 @@ class RegistrationHandler(BaseHandler):
         super(RegistrationHandler, self).__init__(hs)
 
         self.auth = hs.get_auth()
-        self.distributor = hs.get_distributor()
-        self.distributor.declare("registered_user")
         self.captcha_client = CaptchaServerHttpClient(hs)
 
         self._next_generated_user_id = None
@@ -140,9 +137,11 @@ class RegistrationHandler(BaseHandler):
                 password_hash=password_hash,
                 was_guest=was_guest,
                 make_guest=make_guest,
+                create_profile_with_localpart=(
+                    # If the user was a guest then they already have a profile
+                    None if was_guest else user.localpart
+                ),
             )
-
-            yield registered_user(self.distributor, user)
         else:
             # autogen a sequential user ID
             attempts = 0
@@ -160,7 +159,8 @@ class RegistrationHandler(BaseHandler):
                         user_id=user_id,
                         token=token,
                         password_hash=password_hash,
-                        make_guest=make_guest
+                        make_guest=make_guest,
+                        create_profile_with_localpart=user.localpart,
                     )
                 except SynapseError:
                     # if user id is taken, just generate another
@@ -168,7 +168,6 @@ class RegistrationHandler(BaseHandler):
                     user_id = None
                     token = None
                     attempts += 1
-            yield registered_user(self.distributor, user)
 
         # We used to generate default identicons here, but nowadays
         # we want clients to generate their own as part of their branding
@@ -201,8 +200,8 @@ class RegistrationHandler(BaseHandler):
             token=token,
             password_hash="",
             appservice_id=service_id,
+            create_profile_with_localpart=user.localpart,
         )
-        yield registered_user(self.distributor, user)
         defer.returnValue((user_id, token))
 
     @defer.inlineCallbacks
@@ -248,9 +247,9 @@ class RegistrationHandler(BaseHandler):
             yield self.store.register(
                 user_id=user_id,
                 token=token,
-                password_hash=None
+                password_hash=None,
+                create_profile_with_localpart=user.localpart,
             )
-            yield registered_user(self.distributor, user)
         except Exception as e:
             yield self.store.add_access_token_to_user(user_id, token)
             # Ignore Registration errors
@@ -395,10 +394,9 @@ class RegistrationHandler(BaseHandler):
             yield self.store.register(
                 user_id=user_id,
                 token=token,
-                password_hash=None
+                password_hash=None,
+                create_profile_with_localpart=user.localpart,
             )
-
-            yield registered_user(self.distributor, user)
         else:
             yield self.store.user_delete_access_tokens(user_id=user_id)
             yield self.store.add_access_token_to_user(user_id=user_id, token=token)
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index e5c3929cd7..1028731bc9 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -273,16 +273,16 @@ class Mailer(object):
 
         sender_state_event = room_state[("m.room.member", event.sender)]
         sender_name = name_from_member_event(sender_state_event)
-        sender_avatar_url = None
-        if "avatar_url" in sender_state_event.content:
-            sender_avatar_url = sender_state_event.content["avatar_url"]
+        sender_avatar_url = sender_state_event.content.get("avatar_url")
 
         # 'hash' for deterministically picking default images: use
         # sender_hash % the number of default images to choose from
         sender_hash = string_ordinal_total(event.sender)
 
+        msgtype = event.content.get("msgtype")
+
         ret = {
-            "msgtype": event.content["msgtype"],
+            "msgtype": msgtype,
             "is_historical": event.event_id != notif['event_id'],
             "id": event.event_id,
             "ts": event.origin_server_ts,
@@ -291,9 +291,9 @@ class Mailer(object):
             "sender_hash": sender_hash,
         }
 
-        if event.content["msgtype"] == "m.text":
+        if msgtype == "m.text":
             self.add_text_message_vars(ret, event)
-        elif event.content["msgtype"] == "m.image":
+        elif msgtype == "m.image":
             self.add_image_message_vars(ret, event)
 
         if "body" in event.content:
@@ -302,16 +302,17 @@ class Mailer(object):
         return ret
 
     def add_text_message_vars(self, messagevars, event):
-        if "format" in event.content:
-            msgformat = event.content["format"]
-        else:
-            msgformat = None
+        msgformat = event.content.get("format")
+
         messagevars["format"] = msgformat
 
-        if msgformat == "org.matrix.custom.html":
-            messagevars["body_text_html"] = safe_markup(event.content["formatted_body"])
-        else:
-            messagevars["body_text_html"] = safe_text(event.content["body"])
+        formatted_body = event.content.get("formatted_body")
+        body = event.content.get("body")
+
+        if msgformat == "org.matrix.custom.html" and formatted_body:
+            messagevars["body_text_html"] = safe_markup(formatted_body)
+        elif body:
+            messagevars["body_text_html"] = safe_text(body)
 
         return messagevars
 
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index 37dd1de899..74c64f1371 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -252,7 +252,8 @@ class PreviewUrlResource(Resource):
 
         og = {}
         for tag in tree.xpath("//*/meta[starts-with(@property, 'og:')]"):
-            og[tag.attrib['property']] = tag.attrib['content']
+            if 'content' in tag.attrib:
+                og[tag.attrib['property']] = tag.attrib['content']
 
         # TODO: grab article: meta tags too, e.g.:
 
@@ -279,7 +280,7 @@ class PreviewUrlResource(Resource):
                 # TODO: consider inlined CSS styles as well as width & height attribs
                 images = tree.xpath("//img[@src][number(@width)>10][number(@height)>10]")
                 images = sorted(images, key=lambda i: (
-                    -1 * int(i.attrib['width']) * int(i.attrib['height'])
+                    -1 * float(i.attrib['width']) * float(i.attrib['height'])
                 ))
                 if not images:
                     images = tree.xpath("//img[@src]")
@@ -287,9 +288,9 @@ class PreviewUrlResource(Resource):
                     og['og:image'] = images[0].attrib['src']
 
         # pre-cache the image for posterity
-        # FIXME: it might be cleaner to use the same flow as the main /preview_url request
-        # itself and benefit from the same caching etc.  But for now we just rely on the
-        # caching on the master request to speed things up.
+        # FIXME: it might be cleaner to use the same flow as the main /preview_url
+        # request itself and benefit from the same caching etc.  But for now we
+        # just rely on the caching on the master request to speed things up.
         if 'og:image' in og and og['og:image']:
             image_info = yield self._download_url(
                 self._rebase_url(og['og:image'], media_info['uri']), requester.user
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index bda84a744a..3de9e0f709 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -76,7 +76,8 @@ class RegistrationStore(SQLBaseStore):
 
     @defer.inlineCallbacks
     def register(self, user_id, token, password_hash,
-                 was_guest=False, make_guest=False, appservice_id=None):
+                 was_guest=False, make_guest=False, appservice_id=None,
+                 create_profile_with_localpart=None):
         """Attempts to register an account.
 
         Args:
@@ -88,6 +89,8 @@ class RegistrationStore(SQLBaseStore):
             make_guest (boolean): True if the the new user should be guest,
                 false to add a regular user account.
             appservice_id (str): The ID of the appservice registering the user.
+            create_profile_with_localpart (str): Optionally create a profile for
+                the given localpart.
         Raises:
             StoreError if the user_id could not be registered.
         """
@@ -99,7 +102,8 @@ class RegistrationStore(SQLBaseStore):
             password_hash,
             was_guest,
             make_guest,
-            appservice_id
+            appservice_id,
+            create_profile_with_localpart,
         )
         self.get_user_by_id.invalidate((user_id,))
         self.is_guest.invalidate((user_id,))
@@ -112,7 +116,8 @@ class RegistrationStore(SQLBaseStore):
         password_hash,
         was_guest,
         make_guest,
-        appservice_id
+        appservice_id,
+        create_profile_with_localpart,
     ):
         now = int(self.clock.time())
 
@@ -157,6 +162,12 @@ class RegistrationStore(SQLBaseStore):
                 (next_id, user_id, token,)
             )
 
+        if create_profile_with_localpart:
+            txn.execute(
+                "INSERT INTO profiles(user_id) VALUES (?)",
+                (create_profile_with_localpart,)
+            )
+
     @cached()
     def get_user_by_id(self, user_id):
         return self._simple_select_one(
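Together with the register.py changes above, the profile row is now created in the same transaction as the user row, replacing the old registered_user distributor signal. A registration call now looks roughly like this (identifiers are illustrative):

    yield self.store.register(
        user_id="@alice:example.com",
        token=access_token,
        password_hash=password_hash,
        create_profile_with_localpart="alice",  # None for ex-guests, who already have a profile
    )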
diff --git a/synapse/util/distributor.py b/synapse/util/distributor.py
index d7cccc06b1..e68f94ce77 100644
--- a/synapse/util/distributor.py
+++ b/synapse/util/distributor.py
@@ -27,10 +27,6 @@ import logging
 logger = logging.getLogger(__name__)
 
 
-def registered_user(distributor, user):
-    return distributor.fire("registered_user", user)
-
-
 def user_left_room(distributor, user, room_id):
     return preserve_context_over_fn(
         distributor.fire,