Diffstat (limited to 'synapse/config')
-rw-r--r--  synapse/config/_base.py                    242
-rw-r--r--  synapse/config/api.py                       26
-rw-r--r--  synapse/config/appservice.py                58
-rw-r--r--  synapse/config/captcha.py                    5
-rw-r--r--  synapse/config/cas.py                        4
-rw-r--r--  synapse/config/consent_config.py            31
-rw-r--r--  synapse/config/database.py                  35
-rw-r--r--  synapse/config/emailconfig.py              260
-rw-r--r--  synapse/config/groups.py                     4
-rw-r--r--  synapse/config/homeserver.py                44
-rw-r--r--  synapse/config/jwt_config.py                 9
-rw-r--r--  synapse/config/key.py                      269
-rw-r--r--  synapse/config/logger.py                    80
-rw-r--r--  synapse/config/metrics.py                   12
-rw-r--r--  synapse/config/password.py                   4
-rw-r--r--  synapse/config/password_auth_providers.py   20
-rw-r--r--  synapse/config/push.py                       4
-rw-r--r--  synapse/config/ratelimiting.py               4
-rw-r--r--  synapse/config/registration.py              49
-rw-r--r--  synapse/config/repository.py                84
-rw-r--r--  synapse/config/room_directory.py            23
-rw-r--r--  synapse/config/saml2_config.py              22
-rw-r--r--  synapse/config/server.py                   354
-rw-r--r--  synapse/config/server_notices_config.py     21
-rw-r--r--  synapse/config/spam_checker.py               4
-rw-r--r--  synapse/config/stats.py                     60
-rw-r--r--  synapse/config/third_party_event_rules.py   42
-rw-r--r--  synapse/config/tls.py                       80
-rw-r--r--  synapse/config/user_directory.py            18
-rw-r--r--  synapse/config/voip.py                       7
-rw-r--r--  synapse/config/workers.py                   19
31 files changed, 1210 insertions, 684 deletions
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index f7d7f153bb..965478d8d5 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2017-2018 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -134,11 +136,6 @@ class Config(object):
         with open(file_path) as file_stream:
             return file_stream.read()
 
-    @staticmethod
-    def read_config_file(file_path):
-        with open(file_path) as file_stream:
-            return yaml.safe_load(file_stream)
-
     def invoke_all(self, name, *args, **kargs):
         results = []
         for cls in type(self).mro():
@@ -153,12 +150,12 @@ class Config(object):
         server_name,
         generate_secrets=False,
         report_stats=None,
+        open_private_ports=False,
     ):
         """Build a default configuration file
 
-        This is used both when the user explicitly asks us to generate a config file
-        (eg with --generate_config), and before loading the config at runtime (to give
-        a base which the config files override)
+        This is used when the user explicitly asks us to generate a config file
+        (eg with --generate_config).
 
         Args:
             config_dir_path (str): The path where the config files are kept. Used to
@@ -177,25 +174,33 @@ class Config(object):
             report_stats (bool|None): Initial setting for the report_stats setting.
                 If None, report_stats will be left unset.
 
+            open_private_ports (bool): True to leave private ports (such as the non-TLS
+                HTTP listener) open to the internet.
+
         Returns:
             str: the yaml config file
         """
-        default_config = "\n\n".join(
+        return "\n\n".join(
             dedent(conf)
             for conf in self.invoke_all(
-                "default_config",
+                "generate_config_section",
                 config_dir_path=config_dir_path,
                 data_dir_path=data_dir_path,
                 server_name=server_name,
                 generate_secrets=generate_secrets,
                 report_stats=report_stats,
+                open_private_ports=open_private_ports,
             )
         )
 
-        return default_config
-
     @classmethod
     def load_config(cls, description, argv):
+        """Parse the commandline and config files
+
+        Doesn't support config-file-generation: used by the worker apps.
+
+        Returns: Config object.
+        """
         config_parser = argparse.ArgumentParser(description=description)
         config_parser.add_argument(
             "-c",
@@ -210,7 +215,7 @@ class Config(object):
             "--keys-directory",
             metavar="DIRECTORY",
             help="Where files such as certs and signing keys are stored when"
-            " their location is given explicitly in the config."
+            " their location is not given explicitly in the config."
             " Defaults to the directory containing the last config file",
         )
 
@@ -222,8 +227,19 @@ class Config(object):
 
         config_files = find_config_files(search_paths=config_args.config_path)
 
-        obj.read_config_files(
-            config_files, keys_directory=config_args.keys_directory, generate_keys=False
+        if not config_files:
+            config_parser.error("Must supply a config file.")
+
+        if config_args.keys_directory:
+            config_dir_path = config_args.keys_directory
+        else:
+            config_dir_path = os.path.dirname(config_files[-1])
+        config_dir_path = os.path.abspath(config_dir_path)
+        data_dir_path = os.getcwd()
+
+        config_dict = read_config_files(config_files)
+        obj.parse_config_dict(
+            config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
         )
 
         obj.invoke_all("read_arguments", config_args)
@@ -232,6 +248,12 @@ class Config(object):
 
     @classmethod
     def load_or_generate_config(cls, description, argv):
+        """Parse the commandline and config files
+
+        Supports generation of config files, so is used for the main homeserver app.
+
+        Returns: Config object, or None if --generate-config or --generate-keys was set
+        """
         config_parser = argparse.ArgumentParser(add_help=False)
         config_parser.add_argument(
             "-c",
@@ -241,37 +263,74 @@ class Config(object):
             help="Specify config file. Can be given multiple times and"
             " may specify directories containing *.yaml files.",
         )
-        config_parser.add_argument(
+
+        generate_group = config_parser.add_argument_group("Config generation")
+        generate_group.add_argument(
             "--generate-config",
             action="store_true",
-            help="Generate a config file for the server name",
+            help="Generate a config file, then exit.",
         )
-        config_parser.add_argument(
+        generate_group.add_argument(
+            "--generate-missing-configs",
+            "--generate-keys",
+            action="store_true",
+            help="Generate any missing additional config files, then exit.",
+        )
+        generate_group.add_argument(
+            "-H", "--server-name", help="The server name to generate a config file for."
+        )
+        generate_group.add_argument(
             "--report-stats",
             action="store",
-            help="Whether the generated config reports anonymized usage statistics",
+            help="Whether the generated config reports anonymized usage statistics.",
             choices=["yes", "no"],
         )
-        config_parser.add_argument(
-            "--generate-keys",
-            action="store_true",
-            help="Generate any missing key files then exit",
-        )
-        config_parser.add_argument(
+        generate_group.add_argument(
+            "--config-directory",
             "--keys-directory",
             metavar="DIRECTORY",
-            help="Used with 'generate-*' options to specify where files such as"
-            " signing keys should be stored, unless explicitly"
-            " specified in the config.",
+            help=(
+                "Specify where additional config files such as signing keys and log"
+                " config should be stored. Defaults to the same directory as the last"
+                " config file."
+            ),
         )
-        config_parser.add_argument(
-            "-H", "--server-name", help="The server name to generate a config file for"
+        generate_group.add_argument(
+            "--data-directory",
+            metavar="DIRECTORY",
+            help=(
+                "Specify where data such as the media store and database file should be"
+                " stored. Defaults to the current working directory."
+            ),
+        )
+        generate_group.add_argument(
+            "--open-private-ports",
+            action="store_true",
+            help=(
+                "Leave private ports (such as the non-TLS HTTP listener) open to the"
+                " internet. Do not use this unless you know what you are doing."
+            ),
         )
+
         config_args, remaining_args = config_parser.parse_known_args(argv)
 
         config_files = find_config_files(search_paths=config_args.config_path)
 
-        generate_keys = config_args.generate_keys
+        if not config_files:
+            config_parser.error(
+                "Must supply a config file.\nA config file can be automatically"
+                ' generated using "--generate-config -H SERVER_NAME'
+                ' -c CONFIG-FILE"'
+            )
+
+        if config_args.config_directory:
+            config_dir_path = config_args.config_directory
+        else:
+            config_dir_path = os.path.dirname(config_files[-1])
+        config_dir_path = os.path.abspath(config_dir_path)
+        data_dir_path = os.getcwd()
+
+        generate_missing_configs = config_args.generate_missing_configs
 
         obj = cls()
 
@@ -281,19 +340,16 @@ class Config(object):
                     "Please specify either --report-stats=yes or --report-stats=no\n\n"
                     + MISSING_REPORT_STATS_SPIEL
                 )
-            if not config_files:
-                config_parser.error(
-                    "Must supply a config file.\nA config file can be automatically"
-                    " generated using \"--generate-config -H SERVER_NAME"
-                    " -c CONFIG-FILE\""
-                )
+
             (config_path,) = config_files
             if not cls.path_exists(config_path):
-                if config_args.keys_directory:
-                    config_dir_path = config_args.keys_directory
+                print("Generating config file %s" % (config_path,))
+
+                if config_args.data_directory:
+                    data_dir_path = config_args.data_directory
                 else:
-                    config_dir_path = os.path.dirname(config_path)
-                config_dir_path = os.path.abspath(config_dir_path)
+                    data_dir_path = os.getcwd()
+                data_dir_path = os.path.abspath(data_dir_path)
 
                 server_name = config_args.server_name
                 if not server_name:
@@ -304,22 +360,21 @@ class Config(object):
 
                 config_str = obj.generate_config(
                     config_dir_path=config_dir_path,
-                    data_dir_path=os.getcwd(),
+                    data_dir_path=data_dir_path,
                     server_name=server_name,
                     report_stats=(config_args.report_stats == "yes"),
                     generate_secrets=True,
+                    open_private_ports=config_args.open_private_ports,
                 )
 
                 if not cls.path_exists(config_dir_path):
                     os.makedirs(config_dir_path)
                 with open(config_path, "w") as config_file:
-                    config_file.write(
-                        "# vim:ft=yaml\n\n"
-                    )
+                    config_file.write("# vim:ft=yaml\n\n")
                     config_file.write(config_str)
 
-                config = yaml.safe_load(config_str)
-                obj.invoke_all("generate_files", config)
+                config_dict = yaml.safe_load(config_str)
+                obj.generate_missing_files(config_dict, config_dir_path)
 
                 print(
                     (
@@ -333,12 +388,12 @@ class Config(object):
             else:
                 print(
                     (
-                        "Config file %r already exists. Generating any missing key"
+                        "Config file %r already exists. Generating any missing config"
                         " files."
                     )
                     % (config_path,)
                 )
-                generate_keys = True
+                generate_missing_configs = True
 
         parser = argparse.ArgumentParser(
             parents=[config_parser],
@@ -349,66 +404,63 @@ class Config(object):
         obj.invoke_all("add_arguments", parser)
         args = parser.parse_args(remaining_args)
 
-        if not config_files:
-            config_parser.error(
-                "Must supply a config file.\nA config file can be automatically"
-                " generated using \"--generate-config -H SERVER_NAME"
-                " -c CONFIG-FILE\""
-            )
-
-        obj.read_config_files(
-            config_files,
-            keys_directory=config_args.keys_directory,
-            generate_keys=generate_keys,
-        )
-
-        if generate_keys:
+        config_dict = read_config_files(config_files)
+        if generate_missing_configs:
+            obj.generate_missing_files(config_dict, config_dir_path)
             return None
 
+        obj.parse_config_dict(
+            config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
+        )
         obj.invoke_all("read_arguments", args)
 
         return obj
 
-    def read_config_files(self, config_files, keys_directory=None, generate_keys=False):
-        if not keys_directory:
-            keys_directory = os.path.dirname(config_files[-1])
+    def parse_config_dict(self, config_dict, config_dir_path, data_dir_path):
+        """Read the information from the config dict into this Config object.
 
-        self.config_dir_path = os.path.abspath(keys_directory)
-
-        specified_config = {}
-        for config_file in config_files:
-            yaml_config = self.read_config_file(config_file)
-            specified_config.update(yaml_config)
+        Args:
+            config_dict (dict): Configuration data, as read from the yaml
 
-        if "server_name" not in specified_config:
-            raise ConfigError(MISSING_SERVER_NAME)
+            config_dir_path (str): The path where the config files are kept. Used to
+                create filenames for things like the log config and the signing key.
 
-        server_name = specified_config["server_name"]
-        config_string = self.generate_config(
-            config_dir_path=self.config_dir_path,
-            data_dir_path=os.getcwd(),
-            server_name=server_name,
-            generate_secrets=False,
+            data_dir_path (str): The path where the data files are kept. Used to create
+                filenames for things like the database and media store.
+        """
+        self.invoke_all(
+            "read_config",
+            config_dict,
+            config_dir_path=config_dir_path,
+            data_dir_path=data_dir_path,
         )
-        config = yaml.safe_load(config_string)
-        config.pop("log_config")
-        config.update(specified_config)
 
-        if "report_stats" not in config:
-            raise ConfigError(
-                MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS
-                + "\n"
-                + MISSING_REPORT_STATS_SPIEL
-            )
+    def generate_missing_files(self, config_dict, config_dir_path):
+        self.invoke_all("generate_files", config_dict, config_dir_path)
 
-        if generate_keys:
-            self.invoke_all("generate_files", config)
-            return
 
-        self.parse_config_dict(config)
+def read_config_files(config_files):
+    """Read the config files into a dict
 
-    def parse_config_dict(self, config_dict):
-        self.invoke_all("read_config", config_dict)
+    Args:
+        config_files (iterable[str]): A list of the config files to read
+
+    Returns: dict
+    """
+    specified_config = {}
+    for config_file in config_files:
+        with open(config_file) as file_stream:
+            yaml_config = yaml.safe_load(file_stream)
+        specified_config.update(yaml_config)
+
+    if "server_name" not in specified_config:
+        raise ConfigError(MISSING_SERVER_NAME)
+
+    if "report_stats" not in specified_config:
+        raise ConfigError(
+            MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS + "\n" + MISSING_REPORT_STATS_SPIEL
+        )
+    return specified_config
 
 
 def find_config_files(search_paths):
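The net effect of the _base.py changes is that config loading is split into two free-standing steps: read_config_files() merges the yaml files and enforces server_name/report_stats, and parse_config_dict() pushes the resulting dict into each config section. A minimal sketch of driving that flow directly (the config path is a placeholder, and a fully generated homeserver.yaml with its keys is assumed):

    import os

    from synapse.config._base import read_config_files
    from synapse.config.homeserver import HomeServerConfig

    config_files = ["/etc/synapse/homeserver.yaml"]  # placeholder path

    # merge the yaml files; raises ConfigError if server_name or report_stats is missing
    config_dict = read_config_files(config_files)

    config = HomeServerConfig()
    config.parse_config_dict(
        config_dict,
        # used to build default filenames for the signing key, log config, etc.
        config_dir_path=os.path.dirname(os.path.abspath(config_files[-1])),
        # used to build default filenames for the database and media store
        data_dir_path=os.getcwd(),
    )
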
diff --git a/synapse/config/api.py b/synapse/config/api.py
index 5eb4f86fa2..dddea79a8a 100644
--- a/synapse/config/api.py
+++ b/synapse/config/api.py
@@ -18,17 +18,19 @@ from ._base import Config
 
 
 class ApiConfig(Config):
+    def read_config(self, config, **kwargs):
+        self.room_invite_state_types = config.get(
+            "room_invite_state_types",
+            [
+                EventTypes.JoinRules,
+                EventTypes.CanonicalAlias,
+                EventTypes.RoomAvatar,
+                EventTypes.RoomEncryption,
+                EventTypes.Name,
+            ],
+        )
 
-    def read_config(self, config):
-        self.room_invite_state_types = config.get("room_invite_state_types", [
-            EventTypes.JoinRules,
-            EventTypes.CanonicalAlias,
-            EventTypes.RoomAvatar,
-            EventTypes.RoomEncryption,
-            EventTypes.Name,
-        ])
-
-    def default_config(cls, **kwargs):
+    def generate_config_section(cls, **kwargs):
         return """\
         ## API Configuration ##
 
@@ -40,4 +42,6 @@ class ApiConfig(Config):
         #  - "{RoomAvatar}"
         #  - "{RoomEncryption}"
         #  - "{Name}"
-        """.format(**vars(EventTypes))
+        """.format(
+            **vars(EventTypes)
+        )
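Each section's read_config now takes **kwargs so that Config.invoke_all can hand every section the same config_dir_path/data_dir_path keyword arguments; sections that don't need them simply ignore them. A small sketch against ApiConfig (the dict here is hand-built purely for illustration):

    from synapse.api.constants import EventTypes
    from synapse.config.api import ApiConfig

    api = ApiConfig()
    api.read_config(
        {"room_invite_state_types": [EventTypes.Name]},
        config_dir_path="/tmp",  # ignored by this section
        data_dir_path="/tmp",
    )
    assert api.room_invite_state_types == [EventTypes.Name]
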
diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py
index 7e89d345d8..8387ff6805 100644
--- a/synapse/config/appservice.py
+++ b/synapse/config/appservice.py
@@ -29,13 +29,12 @@ logger = logging.getLogger(__name__)
 
 
 class AppServiceConfig(Config):
-
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         self.app_service_config_files = config.get("app_service_config_files", [])
         self.notify_appservices = config.get("notify_appservices", True)
         self.track_appservice_user_ips = config.get("track_appservice_user_ips", False)
 
-    def default_config(cls, **kwargs):
+    def generate_config_section(cls, **kwargs):
         return """\
         # A list of application service config files to use
         #
@@ -53,9 +52,7 @@ class AppServiceConfig(Config):
 def load_appservices(hostname, config_files):
     """Returns a list of Application Services from the config files."""
     if not isinstance(config_files, list):
-        logger.warning(
-            "Expected %s to be a list of AS config files.", config_files
-        )
+        logger.warning("Expected %s to be a list of AS config files.", config_files)
         return []
 
     # Dicts of value -> filename
@@ -66,22 +63,20 @@ def load_appservices(hostname, config_files):
 
     for config_file in config_files:
         try:
-            with open(config_file, 'r') as f:
-                appservice = _load_appservice(
-                    hostname, yaml.safe_load(f), config_file
-                )
+            with open(config_file, "r") as f:
+                appservice = _load_appservice(hostname, yaml.safe_load(f), config_file)
                 if appservice.id in seen_ids:
                     raise ConfigError(
                         "Cannot reuse ID across application services: "
-                        "%s (files: %s, %s)" % (
-                            appservice.id, config_file, seen_ids[appservice.id],
-                        )
+                        "%s (files: %s, %s)"
+                        % (appservice.id, config_file, seen_ids[appservice.id])
                     )
                 seen_ids[appservice.id] = config_file
                 if appservice.token in seen_as_tokens:
                     raise ConfigError(
                         "Cannot reuse as_token across application services: "
-                        "%s (files: %s, %s)" % (
+                        "%s (files: %s, %s)"
+                        % (
                             appservice.token,
                             config_file,
                             seen_as_tokens[appservice.token],
@@ -98,28 +93,26 @@ def load_appservices(hostname, config_files):
 
 
 def _load_appservice(hostname, as_info, config_filename):
-    required_string_fields = [
-        "id", "as_token", "hs_token", "sender_localpart"
-    ]
+    required_string_fields = ["id", "as_token", "hs_token", "sender_localpart"]
     for field in required_string_fields:
         if not isinstance(as_info.get(field), string_types):
-            raise KeyError("Required string field: '%s' (%s)" % (
-                field, config_filename,
-            ))
+            raise KeyError(
+                "Required string field: '%s' (%s)" % (field, config_filename)
+            )
 
     # 'url' must either be a string or explicitly null, not missing
     # to avoid accidentally turning off push for ASes.
-    if (not isinstance(as_info.get("url"), string_types) and
-            as_info.get("url", "") is not None):
+    if (
+        not isinstance(as_info.get("url"), string_types)
+        and as_info.get("url", "") is not None
+    ):
         raise KeyError(
             "Required string field or explicit null: 'url' (%s)" % (config_filename,)
         )
 
     localpart = as_info["sender_localpart"]
     if urlparse.quote(localpart) != localpart:
-        raise ValueError(
-            "sender_localpart needs characters which are not URL encoded."
-        )
+        raise ValueError("sender_localpart needs characters which are not URL encoded.")
     user = UserID(localpart, hostname)
     user_id = user.to_string()
 
@@ -138,13 +131,12 @@ def _load_appservice(hostname, as_info, config_filename):
             for regex_obj in as_info["namespaces"][ns]:
                 if not isinstance(regex_obj, dict):
                     raise ValueError(
-                        "Expected namespace entry in %s to be an object,"
-                        " but got %s", ns, regex_obj
+                        "Expected namespace entry in %s to be an object," " but got %s",
+                        ns,
+                        regex_obj,
                     )
                 if not isinstance(regex_obj.get("regex"), string_types):
-                    raise ValueError(
-                        "Missing/bad type 'regex' key in %s", regex_obj
-                    )
+                    raise ValueError("Missing/bad type 'regex' key in %s", regex_obj)
                 if not isinstance(regex_obj.get("exclusive"), bool):
                     raise ValueError(
                         "Missing/bad type 'exclusive' key in %s", regex_obj
@@ -167,10 +159,8 @@ def _load_appservice(hostname, as_info, config_filename):
         )
 
     ip_range_whitelist = None
-    if as_info.get('ip_range_whitelist'):
-        ip_range_whitelist = IPSet(
-            as_info.get('ip_range_whitelist')
-        )
+    if as_info.get("ip_range_whitelist"):
+        ip_range_whitelist = IPSet(as_info.get("ip_range_whitelist"))
 
     return ApplicationService(
         token=as_info["as_token"],
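The validation in _load_appservice above amounts to: four required string fields, a "url" that is a string or an explicit null, and namespace entries that each carry a regex string and an exclusive boolean. A registration that passes those checks might look like this sketch (hostname, tokens and URL are placeholders):

    from synapse.config.appservice import _load_appservice

    as_info = {
        "id": "example_bridge",
        "as_token": "<AS_TOKEN>",           # placeholder secrets
        "hs_token": "<HS_TOKEN>",
        "sender_localpart": "examplebot",   # must not need URL-encoding
        "url": "http://localhost:9000",     # a string, or an explicit null to disable push
        "namespaces": {
            "users": [{"regex": "@example_.*:example.com", "exclusive": True}],
        },
    }

    appservice = _load_appservice("example.com", as_info, "example_bridge.yaml")
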
diff --git a/synapse/config/captcha.py b/synapse/config/captcha.py
index f7eebf26d2..8dac8152cf 100644
--- a/synapse/config/captcha.py
+++ b/synapse/config/captcha.py
@@ -16,8 +16,7 @@ from ._base import Config
 
 
 class CaptchaConfig(Config):
-
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         self.recaptcha_private_key = config.get("recaptcha_private_key")
         self.recaptcha_public_key = config.get("recaptcha_public_key")
         self.enable_registration_captcha = config.get(
@@ -29,7 +28,7 @@ class CaptchaConfig(Config):
             "https://www.recaptcha.net/recaptcha/api/siteverify",
         )
 
-    def default_config(self, **kwargs):
+    def generate_config_section(self, **kwargs):
         return """\
         ## Captcha ##
         # See docs/CAPTCHA_SETUP for full details of configuring this.
diff --git a/synapse/config/cas.py b/synapse/config/cas.py
index 609c0815c8..ebe34d933b 100644
--- a/synapse/config/cas.py
+++ b/synapse/config/cas.py
@@ -22,7 +22,7 @@ class CasConfig(Config):
     cas_server_url: URL of CAS server
     """
 
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         cas_config = config.get("cas_config", None)
         if cas_config:
             self.cas_enabled = cas_config.get("enabled", True)
@@ -35,7 +35,7 @@ class CasConfig(Config):
             self.cas_service_url = None
             self.cas_required_attributes = {}
 
-    def default_config(self, config_dir_path, server_name, **kwargs):
+    def generate_config_section(self, config_dir_path, server_name, **kwargs):
         return """
         # Enable CAS for registration and login.
         #
diff --git a/synapse/config/consent_config.py b/synapse/config/consent_config.py
index abeb0180d3..94916f3a49 100644
--- a/synapse/config/consent_config.py
+++ b/synapse/config/consent_config.py
@@ -84,35 +84,32 @@ class ConsentConfig(Config):
         self.user_consent_at_registration = False
         self.user_consent_policy_name = "Privacy Policy"
 
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         consent_config = config.get("user_consent")
         if consent_config is None:
             return
         self.user_consent_version = str(consent_config["version"])
-        self.user_consent_template_dir = self.abspath(
-            consent_config["template_dir"]
-        )
+        self.user_consent_template_dir = self.abspath(consent_config["template_dir"])
         if not path.isdir(self.user_consent_template_dir):
             raise ConfigError(
-                "Could not find template directory '%s'" % (
-                    self.user_consent_template_dir,
-                ),
+                "Could not find template directory '%s'"
+                % (self.user_consent_template_dir,)
             )
         self.user_consent_server_notice_content = consent_config.get(
-            "server_notice_content",
+            "server_notice_content"
         )
         self.block_events_without_consent_error = consent_config.get(
-            "block_events_error",
+            "block_events_error"
+        )
+        self.user_consent_server_notice_to_guests = bool(
+            consent_config.get("send_server_notice_to_guests", False)
+        )
+        self.user_consent_at_registration = bool(
+            consent_config.get("require_at_registration", False)
         )
-        self.user_consent_server_notice_to_guests = bool(consent_config.get(
-            "send_server_notice_to_guests", False,
-        ))
-        self.user_consent_at_registration = bool(consent_config.get(
-            "require_at_registration", False,
-        ))
         self.user_consent_policy_name = consent_config.get(
-            "policy_name", "Privacy Policy",
+            "policy_name", "Privacy Policy"
         )
 
-    def default_config(self, **kwargs):
+    def generate_config_section(self, **kwargs):
         return DEFAULT_CONFIG
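With the consent settings now coerced via plain bool() calls, a sketch of the dict shape ConsentConfig.read_config expects (the template directory below is a throwaway stand-in; in practice it points at the privacy-policy templates):

    import tempfile

    from synapse.config.consent_config import ConsentConfig

    consent = ConsentConfig()
    consent.read_config(
        {
            "user_consent": {
                "version": "1.0",
                "template_dir": tempfile.mkdtemp(),  # placeholder directory
                "require_at_registration": True,
            }
        }
    )
    assert consent.user_consent_at_registration is True
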
diff --git a/synapse/config/database.py b/synapse/config/database.py
index 3c27ed6b4a..bcb2089dd7 100644
--- a/synapse/config/database.py
+++ b/synapse/config/database.py
@@ -18,37 +18,30 @@ from ._base import Config
 
 
 class DatabaseConfig(Config):
-
-    def read_config(self, config):
-        self.event_cache_size = self.parse_size(
-            config.get("event_cache_size", "10K")
-        )
+    def read_config(self, config, **kwargs):
+        self.event_cache_size = self.parse_size(config.get("event_cache_size", "10K"))
 
         self.database_config = config.get("database")
 
         if self.database_config is None:
-            self.database_config = {
-                "name": "sqlite3",
-                "args": {},
-            }
+            self.database_config = {"name": "sqlite3", "args": {}}
 
         name = self.database_config.get("name", None)
         if name == "psycopg2":
             pass
         elif name == "sqlite3":
-            self.database_config.setdefault("args", {}).update({
-                "cp_min": 1,
-                "cp_max": 1,
-                "check_same_thread": False,
-            })
+            self.database_config.setdefault("args", {}).update(
+                {"cp_min": 1, "cp_max": 1, "check_same_thread": False}
+            )
         else:
             raise RuntimeError("Unsupported database type '%s'" % (name,))
 
         self.set_databasepath(config.get("database_path"))
 
-    def default_config(self, data_dir_path, **kwargs):
+    def generate_config_section(self, data_dir_path, **kwargs):
         database_path = os.path.join(data_dir_path, "homeserver.db")
-        return """\
+        return (
+            """\
         ## Database ##
 
         database:
@@ -62,7 +55,9 @@ class DatabaseConfig(Config):
         # Number of events to cache in memory.
         #
         #event_cache_size: 10K
-        """ % locals()
+        """
+            % locals()
+        )
 
     def read_arguments(self, args):
         self.set_databasepath(args.database_path)
@@ -77,6 +72,8 @@ class DatabaseConfig(Config):
     def add_arguments(self, parser):
         db_group = parser.add_argument_group("database")
         db_group.add_argument(
-            "-d", "--database-path", metavar="SQLITE_DATABASE_PATH",
-            help="The path to a sqlite database to use."
+            "-d",
+            "--database-path",
+            metavar="SQLITE_DATABASE_PATH",
+            help="The path to a sqlite database to use.",
         )
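With the sqlite defaults collapsed into a single dict literal, the behaviour of DatabaseConfig.read_config for a config with no "database" section is easy to state; a sketch (the path is a placeholder):

    from synapse.config.database import DatabaseConfig

    db = DatabaseConfig()
    db.read_config({"database_path": "/var/lib/synapse/homeserver.db"})

    # no "database" section supplied, so the sqlite3 defaults are filled in
    assert db.database_config["name"] == "sqlite3"
    assert db.database_config["args"]["cp_min"] == 1
    assert db.database_config["args"]["check_same_thread"] is False
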
diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py
index 342a6ce5fd..cf39936da7 100644
--- a/synapse/config/emailconfig.py
+++ b/synapse/config/emailconfig.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
-# Copyright 2015, 2016 OpenMarket Ltd
+# Copyright 2015-2016 OpenMarket Ltd
+# Copyright 2017-2018 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,31 +19,151 @@ from __future__ import print_function
 
 # This file can't be called email.py because if it is, we cannot:
 import email.utils
-import logging
 import os
 
 import pkg_resources
 
 from ._base import Config, ConfigError
 
-logger = logging.getLogger(__name__)
-
 
 class EmailConfig(Config):
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
+        # TODO: We should separate better the email configuration from the notification
+        # and account validity config.
+
         self.email_enable_notifs = False
 
         email_config = config.get("email", {})
+
+        self.email_smtp_host = email_config.get("smtp_host", None)
+        self.email_smtp_port = email_config.get("smtp_port", None)
+        self.email_smtp_user = email_config.get("smtp_user", None)
+        self.email_smtp_pass = email_config.get("smtp_pass", None)
+        self.require_transport_security = email_config.get(
+            "require_transport_security", False
+        )
+        if "app_name" in email_config:
+            self.email_app_name = email_config["app_name"]
+        else:
+            self.email_app_name = "Matrix"
+
+        # TODO: Rename notif_from to something more generic, or have a separate
+        # from for password resets, message notifications, etc?
+        # Currently the email section is a bit bogged down with settings for
+        # multiple functions. Would be good to split it out into separate
+        # sections and only put the common ones under email:
+        self.email_notif_from = email_config.get("notif_from", None)
+        if self.email_notif_from is not None:
+            # make sure it's valid
+            parsed = email.utils.parseaddr(self.email_notif_from)
+            if parsed[1] == "":
+                raise RuntimeError("Invalid notif_from address")
+
+        template_dir = email_config.get("template_dir")
+        # we need an absolute path, because we change directory after starting (and
+        # we don't yet know what auxilliary templates like mail.css we will need).
+        # (Note that loading as package_resources with jinja.PackageLoader doesn't
+        # work for the same reason.)
+        if not template_dir:
+            template_dir = pkg_resources.resource_filename("synapse", "res/templates")
+
+        self.email_template_dir = os.path.abspath(template_dir)
+
         self.email_enable_notifs = email_config.get("enable_notifs", False)
+        account_validity_renewal_enabled = config.get("account_validity", {}).get(
+            "renew_at"
+        )
 
-        if self.email_enable_notifs:
+        email_trust_identity_server_for_password_resets = email_config.get(
+            "trust_identity_server_for_password_resets", False
+        )
+        self.email_password_reset_behaviour = (
+            "remote" if email_trust_identity_server_for_password_resets else "local"
+        )
+        self.password_resets_were_disabled_due_to_email_config = False
+        if self.email_password_reset_behaviour == "local" and email_config == {}:
+            # We cannot warn the user this has happened here
+            # Instead do so when a user attempts to reset their password
+            self.password_resets_were_disabled_due_to_email_config = True
+
+            self.email_password_reset_behaviour = "off"
+
+        # Get lifetime of a validation token in milliseconds
+        self.email_validation_token_lifetime = self.parse_duration(
+            email_config.get("validation_token_lifetime", "1h")
+        )
+
+        if (
+            self.email_enable_notifs
+            or account_validity_renewal_enabled
+            or self.email_password_reset_behaviour == "local"
+        ):
             # make sure we can import the required deps
             import jinja2
             import bleach
+
             # prevent unused warnings
             jinja2
             bleach
 
+        if self.email_password_reset_behaviour == "local":
+            required = ["smtp_host", "smtp_port", "notif_from"]
+
+            missing = []
+            for k in required:
+                if k not in email_config:
+                    missing.append(k)
+
+            if len(missing) > 0:
+                raise RuntimeError(
+                    "email.password_reset_behaviour is set to 'local' "
+                    "but required keys are missing: %s"
+                    % (", ".join(["email." + k for k in missing]),)
+                )
+
+            # Templates for password reset emails
+            self.email_password_reset_template_html = email_config.get(
+                "password_reset_template_html", "password_reset.html"
+            )
+            self.email_password_reset_template_text = email_config.get(
+                "password_reset_template_text", "password_reset.txt"
+            )
+            self.email_password_reset_failure_template = email_config.get(
+                "password_reset_failure_template", "password_reset_failure.html"
+            )
+            # This template does not support any replaceable variables, so we will
+            # read it from the disk once during setup
+            email_password_reset_success_template = email_config.get(
+                "password_reset_success_template", "password_reset_success.html"
+            )
+
+            # Check templates exist
+            for f in [
+                self.email_password_reset_template_html,
+                self.email_password_reset_template_text,
+                self.email_password_reset_failure_template,
+                email_password_reset_success_template,
+            ]:
+                p = os.path.join(self.email_template_dir, f)
+                if not os.path.isfile(p):
+                    raise ConfigError("Unable to find template file %s" % (p,))
+
+            # Retrieve content of web templates
+            filepath = os.path.join(
+                self.email_template_dir, email_password_reset_success_template
+            )
+            self.email_password_reset_success_html_content = self.read_file(
+                filepath, "email.password_reset_template_success_html"
+            )
+
+            if config.get("public_baseurl") is None:
+                raise RuntimeError(
+                    "email.password_reset_behaviour is set to 'local' but no "
+                    "public_baseurl is set. This is necessary to generate password "
+                    "reset links"
+                )
+
+        if self.email_enable_notifs:
             required = [
                 "smtp_host",
                 "smtp_port",
@@ -55,10 +177,10 @@ class EmailConfig(Config):
                 if k not in email_config:
                     missing.append(k)
 
-            if (len(missing) > 0):
+            if len(missing) > 0:
                 raise RuntimeError(
-                    "email.enable_notifs is True but required keys are missing: %s" %
-                    (", ".join(["email." + k for k in missing]),)
+                    "email.enable_notifs is True but required keys are missing: %s"
+                    % (", ".join(["email." + k for k in missing]),)
                 )
 
             if config.get("public_baseurl") is None:
@@ -66,70 +188,36 @@ class EmailConfig(Config):
                     "email.enable_notifs is True but no public_baseurl is set"
                 )
 
-            self.email_smtp_host = email_config["smtp_host"]
-            self.email_smtp_port = email_config["smtp_port"]
-            self.email_notif_from = email_config["notif_from"]
             self.email_notif_template_html = email_config["notif_template_html"]
             self.email_notif_template_text = email_config["notif_template_text"]
-            self.email_expiry_template_html = email_config.get(
-                "expiry_template_html", "notice_expiry.html",
-            )
-            self.email_expiry_template_text = email_config.get(
-                "expiry_template_text", "notice_expiry.txt",
-            )
-
-            template_dir = email_config.get("template_dir")
-            # we need an absolute path, because we change directory after starting (and
-            # we don't yet know what auxilliary templates like mail.css we will need).
-            # (Note that loading as package_resources with jinja.PackageLoader doesn't
-            # work for the same reason.)
-            if not template_dir:
-                template_dir = pkg_resources.resource_filename(
-                    'synapse', 'res/templates'
-                )
-            template_dir = os.path.abspath(template_dir)
 
             for f in self.email_notif_template_text, self.email_notif_template_html:
-                p = os.path.join(template_dir, f)
+                p = os.path.join(self.email_template_dir, f)
                 if not os.path.isfile(p):
-                    raise ConfigError("Unable to find email template file %s" % (p, ))
-            self.email_template_dir = template_dir
+                    raise ConfigError("Unable to find email template file %s" % (p,))
 
             self.email_notif_for_new_users = email_config.get(
                 "notif_for_new_users", True
             )
-            self.email_riot_base_url = email_config.get(
-                "riot_base_url", None
-            )
-            self.email_smtp_user = email_config.get(
-                "smtp_user", None
-            )
-            self.email_smtp_pass = email_config.get(
-                "smtp_pass", None
+            self.email_riot_base_url = email_config.get("riot_base_url", None)
+
+        if account_validity_renewal_enabled:
+            self.email_expiry_template_html = email_config.get(
+                "expiry_template_html", "notice_expiry.html"
             )
-            self.require_transport_security = email_config.get(
-                "require_transport_security", False
+            self.email_expiry_template_text = email_config.get(
+                "expiry_template_text", "notice_expiry.txt"
             )
-            if "app_name" in email_config:
-                self.email_app_name = email_config["app_name"]
-            else:
-                self.email_app_name = "Matrix"
 
-            # make sure it's valid
-            parsed = email.utils.parseaddr(self.email_notif_from)
-            if parsed[1] == '':
-                raise RuntimeError("Invalid notif_from address")
-        else:
-            self.email_enable_notifs = False
-            # Not much point setting defaults for the rest: it would be an
-            # error for them to be used.
+            for f in self.email_expiry_template_text, self.email_expiry_template_html:
+                p = os.path.join(self.email_template_dir, f)
+                if not os.path.isfile(p):
+                    raise ConfigError("Unable to find email template file %s" % (p,))
 
-    def default_config(self, config_dir_path, server_name, **kwargs):
+    def generate_config_section(self, config_dir_path, server_name, **kwargs):
         return """
-        # Enable sending emails for notification events or expiry notices
-        # Defining a custom URL for Riot is only needed if email notifications
-        # should contain links to a self-hosted installation of Riot; when set
-        # the "app_name" setting is ignored.
+        # Enable sending emails for password resets, notification events or
+        # account expiry notices
         #
         # If your SMTP server requires authentication, the optional smtp_user &
         # smtp_pass variables should be used
@@ -137,20 +225,62 @@ class EmailConfig(Config):
         #email:
         #   enable_notifs: false
         #   smtp_host: "localhost"
-        #   smtp_port: 25
+        #   smtp_port: 25 # SSL: 465, STARTTLS: 587
         #   smtp_user: "exampleusername"
         #   smtp_pass: "examplepassword"
         #   require_transport_security: False
         #   notif_from: "Your Friendly %(app)s Home Server <noreply@example.com>"
         #   app_name: Matrix
-        #   # if template_dir is unset, uses the example templates that are part of
-        #   # the Synapse distribution.
+        #
+        #   # Enable email notifications by default
+        #   notif_for_new_users: True
+        #
+        #   # Defining a custom URL for Riot is only needed if email notifications
+        #   # should contain links to a self-hosted installation of Riot; when set
+        #   # the "app_name" setting is ignored
+        #   riot_base_url: "http://localhost/riot"
+        #
+        #   # Enable sending password reset emails via the configured, trusted
+        #   # identity servers
+        #   #
+        #   # IMPORTANT! This will give a malicious or overtaken identity server
+        #   # the ability to reset passwords for your users! Make absolutely sure
+        #   # that you want to do this! It is strongly recommended that password
+        #   # reset emails be sent by the homeserver instead
+        #   #
+        #   # If this option is set to false and SMTP options have not been
+        #   # configured, resetting user passwords via email will be disabled
+        #   #trust_identity_server_for_password_resets: false
+        #
+        #   # Configure the time that a validation email or text message code
+        #   # will expire after sending
+        #   #
+        #   # This is currently used for password resets
+        #   #validation_token_lifetime: 1h
+        #
+        #   # Template directory. All template files should be stored within this
+        #   # directory
+        #   #
         #   #template_dir: res/templates
+        #
+        #   # Templates for email notifications
+        #   #
         #   notif_template_html: notif_mail.html
         #   notif_template_text: notif_mail.txt
-        #   # Templates for account expiry notices.
+        #
+        #   # Templates for account expiry notices
+        #   #
         #   expiry_template_html: notice_expiry.html
         #   expiry_template_text: notice_expiry.txt
-        #   notif_for_new_users: True
-        #   riot_base_url: "http://localhost/riot"
+        #
+        #   # Templates for password reset emails sent by the homeserver
+        #   #
+        #   #password_reset_template_html: password_reset.html
+        #   #password_reset_template_text: password_reset.txt
+        #
+        #   # Templates for password reset success and failure pages that a user
+        #   # will see after attempting to reset their password
+        #   #
+        #   #password_reset_template_success_html: password_reset_success.html
+        #   #password_reset_template_failure_html: password_reset_failure.html
         """
diff --git a/synapse/config/groups.py b/synapse/config/groups.py
index e4be172a79..2a522b5f44 100644
--- a/synapse/config/groups.py
+++ b/synapse/config/groups.py
@@ -17,11 +17,11 @@ from ._base import Config
 
 
 class GroupsConfig(Config):
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         self.enable_group_creation = config.get("enable_group_creation", False)
         self.group_creation_prefix = config.get("group_creation_prefix", "")
 
-    def default_config(self, **kwargs):
+    def generate_config_section(self, **kwargs):
         return """\
         # Uncomment to allow non-server-admin users to create groups on this server
         #
diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py
index 727fdc54d8..acadef4fd3 100644
--- a/synapse/config/homeserver.py
+++ b/synapse/config/homeserver.py
@@ -13,6 +13,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 from .api import ApiConfig
 from .appservice import AppServiceConfig
 from .captcha import CaptchaConfig
@@ -36,20 +37,43 @@ from .saml2_config import SAML2Config
 from .server import ServerConfig
 from .server_notices_config import ServerNoticesConfig
 from .spam_checker import SpamCheckerConfig
+from .stats import StatsConfig
+from .third_party_event_rules import ThirdPartyRulesConfig
 from .tls import TlsConfig
 from .user_directory import UserDirectoryConfig
 from .voip import VoipConfig
 from .workers import WorkerConfig
 
 
-class HomeServerConfig(ServerConfig, TlsConfig, DatabaseConfig, LoggingConfig,
-                       RatelimitConfig, ContentRepositoryConfig, CaptchaConfig,
-                       VoipConfig, RegistrationConfig, MetricsConfig, ApiConfig,
-                       AppServiceConfig, KeyConfig, SAML2Config, CasConfig,
-                       JWTConfig, PasswordConfig, EmailConfig,
-                       WorkerConfig, PasswordAuthProviderConfig, PushConfig,
-                       SpamCheckerConfig, GroupsConfig, UserDirectoryConfig,
-                       ConsentConfig,
-                       ServerNoticesConfig, RoomDirectoryConfig,
-                       ):
+class HomeServerConfig(
+    ServerConfig,
+    TlsConfig,
+    DatabaseConfig,
+    LoggingConfig,
+    RatelimitConfig,
+    ContentRepositoryConfig,
+    CaptchaConfig,
+    VoipConfig,
+    RegistrationConfig,
+    MetricsConfig,
+    ApiConfig,
+    AppServiceConfig,
+    KeyConfig,
+    SAML2Config,
+    CasConfig,
+    JWTConfig,
+    PasswordConfig,
+    EmailConfig,
+    WorkerConfig,
+    PasswordAuthProviderConfig,
+    PushConfig,
+    SpamCheckerConfig,
+    GroupsConfig,
+    UserDirectoryConfig,
+    ConsentConfig,
+    StatsConfig,
+    ServerNoticesConfig,
+    RoomDirectoryConfig,
+    ThirdPartyRulesConfig,
+):
     pass
diff --git a/synapse/config/jwt_config.py b/synapse/config/jwt_config.py
index ecb4124096..36d87cef03 100644
--- a/synapse/config/jwt_config.py
+++ b/synapse/config/jwt_config.py
@@ -15,17 +15,15 @@
 
 from ._base import Config, ConfigError
 
-MISSING_JWT = (
-    """Missing jwt library. This is required for jwt login.
+MISSING_JWT = """Missing jwt library. This is required for jwt login.
 
     Install by running:
         pip install pyjwt
     """
-)
 
 
 class JWTConfig(Config):
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         jwt_config = config.get("jwt_config", None)
         if jwt_config:
             self.jwt_enabled = jwt_config.get("enabled", False)
@@ -34,6 +32,7 @@ class JWTConfig(Config):
 
             try:
                 import jwt
+
                 jwt  # To stop unused lint.
             except ImportError:
                 raise ConfigError(MISSING_JWT)
@@ -42,7 +41,7 @@ class JWTConfig(Config):
             self.jwt_secret = None
             self.jwt_algorithm = None
 
-    def default_config(self, **kwargs):
+    def generate_config_section(self, **kwargs):
         return """\
         # The JWT needs to contain a globally unique "sub" (subject) claim.
         #
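For context, the jwt_config secret and algorithm feed straight into pyjwt when login tokens are validated. A sketch, assuming pyjwt is installed (the secret is a placeholder):

    import jwt  # pyjwt, the optional dependency checked for above

    from synapse.config.jwt_config import JWTConfig

    cfg = JWTConfig()
    cfg.read_config(
        {"jwt_config": {"enabled": True, "secret": "<JWT_SECRET>", "algorithm": "HS256"}}
    )

    # a token signed with the same secret/algorithm is what the JWT login flow verifies
    token = jwt.encode({"sub": "alice"}, cfg.jwt_secret, algorithm=cfg.jwt_algorithm)
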
diff --git a/synapse/config/key.py b/synapse/config/key.py
index eb10259818..8fc74f9cdf 100644
--- a/synapse/config/key.py
+++ b/synapse/config/key.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2015, 2016 OpenMarket Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,6 +18,8 @@ import hashlib
 import logging
 import os
 
+import attr
+import jsonschema
 from signedjson.key import (
     NACL_ED25519,
     decode_signing_key_base64,
@@ -32,33 +35,74 @@ from synapse.util.stringutils import random_string, random_string_with_symbols
 
 from ._base import Config, ConfigError
 
+INSECURE_NOTARY_ERROR = """\
+Your server is configured to accept key server responses without signature
+validation or TLS certificate validation. This is likely to be very insecure. If
+you are *sure* you want to do this, set 'accept_keys_insecurely' on the
+keyserver configuration."""
+
+RELYING_ON_MATRIX_KEY_ERROR = """\
+Your server is configured to accept key server responses without TLS certificate
+validation, and which are only signed by the old (possibly compromised)
+matrix.org signing key 'ed25519:auto'. This likely isn't what you want to do,
+and you should enable 'federation_verify_certificates' in your configuration.
+
+If you are *sure* you want to do this, set 'accept_keys_insecurely' on the
+trusted_key_server configuration."""
+
+
 logger = logging.getLogger(__name__)
 
 
-class KeyConfig(Config):
+@attr.s
+class TrustedKeyServer(object):
+    # string: name of the server.
+    server_name = attr.ib()
+
+    # dict[str,VerifyKey]|None: map from key id to key object, or None to disable
+    # signature verification.
+    verify_keys = attr.ib(default=None)
+
 
-    def read_config(self, config):
+class KeyConfig(Config):
+    def read_config(self, config, config_dir_path, **kwargs):
         # the signing key can be specified inline or in a separate file
         if "signing_key" in config:
             self.signing_key = read_signing_keys([config["signing_key"]])
         else:
-            self.signing_key_path = config["signing_key_path"]
-            self.signing_key = self.read_signing_key(self.signing_key_path)
+            signing_key_path = config.get("signing_key_path")
+            if signing_key_path is None:
+                signing_key_path = os.path.join(
+                    config_dir_path, config["server_name"] + ".signing.key"
+                )
+
+            self.signing_key = self.read_signing_key(signing_key_path)
 
         self.old_signing_keys = self.read_old_signing_keys(
             config.get("old_signing_keys", {})
         )
         self.key_refresh_interval = self.parse_duration(
-            config.get("key_refresh_interval", "1d"),
+            config.get("key_refresh_interval", "1d")
         )
-        self.perspectives = self.read_perspectives(
-            config.get("perspectives", {}).get("servers", {
-                "matrix.org": {"verify_keys": {
-                    "ed25519:auto": {
-                        "key": "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw",
-                    }
-                }}
-            })
+
+        # if neither trusted_key_servers nor perspectives are given, use the default.
+        if "perspectives" not in config and "trusted_key_servers" not in config:
+            key_servers = [{"server_name": "matrix.org"}]
+        else:
+            key_servers = config.get("trusted_key_servers", [])
+
+            if not isinstance(key_servers, list):
+                raise ConfigError(
+                    "trusted_key_servers, if given, must be a list, not a %s"
+                    % (type(key_servers).__name__,)
+                )
+
+            # merge the 'perspectives' config into the 'trusted_key_servers' config.
+            key_servers.extend(_perspectives_to_key_servers(config))
+
+        # list of TrustedKeyServer objects
+        self.key_servers = list(
+            _parse_key_servers(key_servers, self.federation_verify_certificates)
         )
 
         self.macaroon_secret_key = config.get(
@@ -78,8 +122,9 @@ class KeyConfig(Config):
         # falsification of values
         self.form_secret = config.get("form_secret", None)
 
-    def default_config(self, config_dir_path, server_name, generate_secrets=False,
-                       **kwargs):
+    def generate_config_section(
+        self, config_dir_path, server_name, generate_secrets=False, **kwargs
+    ):
         base_key_name = os.path.join(config_dir_path, server_name)
 
         if generate_secrets:
@@ -91,7 +136,8 @@ class KeyConfig(Config):
             macaroon_secret_key = "# macaroon_secret_key: <PRIVATE STRING>"
             form_secret = "# form_secret: <PRIVATE STRING>"
 
-        return """\
+        return (
+            """\
         # a secret which is used to sign access tokens. If none is specified,
         # the registration_shared_secret is used, if one is given; otherwise,
         # a secret key is derived from the signing key.
@@ -133,33 +179,53 @@ class KeyConfig(Config):
 
         # The trusted servers to download signing keys from.
         #
-        #perspectives:
-        #  servers:
-        #    "matrix.org":
-        #      verify_keys:
-        #        "ed25519:auto":
-        #          key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
-        """ % locals()
-
-    def read_perspectives(self, perspectives_servers):
-        servers = {}
-        for server_name, server_config in perspectives_servers.items():
-            for key_id, key_data in server_config["verify_keys"].items():
-                if is_signing_algorithm_supported(key_id):
-                    key_base64 = key_data["key"]
-                    key_bytes = decode_base64(key_base64)
-                    verify_key = decode_verify_key_bytes(key_id, key_bytes)
-                    servers.setdefault(server_name, {})[key_id] = verify_key
-        return servers
+        # When we need to fetch a signing key, each server is tried in parallel.
+        #
+        # Normally, the connection to the key server is validated via TLS certificates.
+        # Additional security can be provided by configuring a `verify key`, which
+        # will make synapse check that the response is signed by that key.
+        #
+        # This setting supercedes an older setting named `perspectives`. The old format
+        # is still supported for backwards-compatibility, but it is deprecated.
+        #
+        # Options for each entry in the list include:
+        #
+        #    server_name: the name of the server. required.
+        #
+        #    verify_keys: an optional map from key id to base64-encoded public key.
+        #       If specified, we will check that the response is signed by at least
+        #       one of the given keys.
+        #
+        #    accept_keys_insecurely: a boolean. Normally, if `verify_keys` is unset,
+        #       and federation_verify_certificates is not `true`, synapse will refuse
+        #       to start, because this would allow anyone who can spoof DNS responses
+        #       to masquerade as the trusted key server. If you know what you are doing
+        #       and are sure that your network environment provides a secure connection
+        #       to the key server, you can set this to `true` to override this
+        #       behaviour.
+        #
+        # An example configuration might look like:
+        #
+        #trusted_key_servers:
+        #  - server_name: "my_trusted_server.example.com"
+        #    verify_keys:
+        #      "ed25519:auto": "abcdefghijklmnopqrstuvwxyzabcdefghijklmopqr"
+        #  - server_name: "my_other_trusted_server.example.com"
+        #
+        # The default configuration is:
+        #
+        #trusted_key_servers:
+        #  - server_name: "matrix.org"
+        """
+            % locals()
+        )
 
     def read_signing_key(self, signing_key_path):
         signing_keys = self.read_file(signing_key_path, "signing_key")
         try:
             return read_signing_keys(signing_keys.splitlines(True))
         except Exception as e:
-            raise ConfigError(
-                "Error reading signing_key: %s" % (str(e))
-            )
+            raise ConfigError("Error reading signing_key: %s" % (str(e)))
 
     def read_old_signing_keys(self, old_signing_keys):
         keys = {}
@@ -176,15 +242,21 @@ class KeyConfig(Config):
                 )
         return keys
 
-    def generate_files(self, config):
-        signing_key_path = config["signing_key_path"]
+    def generate_files(self, config, config_dir_path):
+        if "signing_key" in config:
+            return
+
+        signing_key_path = config.get("signing_key_path")
+        if signing_key_path is None:
+            signing_key_path = os.path.join(
+                config_dir_path, config["server_name"] + ".signing.key"
+            )
 
         if not self.path_exists(signing_key_path):
+            print("Generating signing key file %s" % (signing_key_path,))
             with open(signing_key_path, "w") as signing_key_file:
                 key_id = "a_" + random_string(4)
-                write_signing_keys(
-                    signing_key_file, (generate_signing_key(key_id),),
-                )
+                write_signing_keys(signing_key_file, (generate_signing_key(key_id),))
         else:
             signing_keys = self.read_file(signing_key_path, "signing_key")
             if len(signing_keys.split("\n")[0].split()) == 1:
@@ -194,6 +266,115 @@ class KeyConfig(Config):
                     NACL_ED25519, key_id, signing_keys.split("\n")[0]
                 )
                 with open(signing_key_path, "w") as signing_key_file:
-                    write_signing_keys(
-                        signing_key_file, (key,),
+                    write_signing_keys(signing_key_file, (key,))
+
+
+def _perspectives_to_key_servers(config):
+    """Convert old-style 'perspectives' configs into new-style 'trusted_key_servers'
+
+    Returns an iterable of entries to add to trusted_key_servers.
+    """
+
+    # 'perspectives' looks like:
+    #
+    # {
+    #     "servers": {
+    #         "matrix.org": {
+    #             "verify_keys": {
+    #                 "ed25519:auto": {
+    #                     "key": "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
+    #                 }
+    #             }
+    #         }
+    #     }
+    # }
+    #
+    # 'trusted_key_servers' looks like:
+    #
+    # [
+    #     {
+    #         "server_name": "matrix.org",
+    #         "verify_keys": {
+    #             "ed25519:auto": "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw",
+    #         }
+    #     }
+    # ]
+
+    perspectives_servers = config.get("perspectives", {}).get("servers", {})
+
+    for server_name, server_opts in perspectives_servers.items():
+        trusted_key_server_entry = {"server_name": server_name}
+        verify_keys = server_opts.get("verify_keys")
+        if verify_keys is not None:
+            trusted_key_server_entry["verify_keys"] = {
+                key_id: key_data["key"] for key_id, key_data in verify_keys.items()
+            }
+        yield trusted_key_server_entry
+
+
+TRUSTED_KEY_SERVERS_SCHEMA = {
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "description": "schema for the trusted_key_servers setting",
+    "type": "array",
+    "items": {
+        "type": "object",
+        "properties": {
+            "server_name": {"type": "string"},
+            "verify_keys": {
+                "type": "object",
+                # each key must be a base64 string
+                "additionalProperties": {"type": "string"},
+            },
+        },
+        "required": ["server_name"],
+    },
+}
+
+
+def _parse_key_servers(key_servers, federation_verify_certificates):
+    try:
+        jsonschema.validate(key_servers, TRUSTED_KEY_SERVERS_SCHEMA)
+    except jsonschema.ValidationError as e:
+        raise ConfigError("Unable to parse 'trusted_key_servers': " + e.message)
+
+    for server in key_servers:
+        server_name = server["server_name"]
+        result = TrustedKeyServer(server_name=server_name)
+
+        verify_keys = server.get("verify_keys")
+        if verify_keys is not None:
+            result.verify_keys = {}
+            for key_id, key_base64 in verify_keys.items():
+                if not is_signing_algorithm_supported(key_id):
+                    raise ConfigError(
+                        "Unsupported signing algorithm on key %s for server %s in "
+                        "trusted_key_servers" % (key_id, server_name)
+                    )
+                try:
+                    key_bytes = decode_base64(key_base64)
+                    verify_key = decode_verify_key_bytes(key_id, key_bytes)
+                except Exception as e:
+                    raise ConfigError(
+                        "Unable to parse key %s for server %s in "
+                        "trusted_key_servers: %s" % (key_id, server_name, e)
                     )
+
+                result.verify_keys[key_id] = verify_key
+
+        if not federation_verify_certificates and not server.get(
+            "accept_keys_insecurely"
+        ):
+            _assert_keyserver_has_verify_keys(result)
+
+        yield result
+
+
+def _assert_keyserver_has_verify_keys(trusted_key_server):
+    if not trusted_key_server.verify_keys:
+        raise ConfigError(INSECURE_NOTARY_ERROR)
+
+    # also check that they are not blindly checking the old matrix.org key
+    if trusted_key_server.server_name == "matrix.org" and any(
+        key_id == "ed25519:auto" for key_id in trusted_key_server.verify_keys
+    ):
+        raise ConfigError(RELYING_ON_MATRIX_KEY_ERROR)
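The conversion performed by _perspectives_to_key_servers above can be exercised on its own. A minimal sketch, re-implementing the generator standalone so it runs without the rest of the config module; the example key is the placeholder value from the old sample config:

def perspectives_to_key_servers(config):
    # Mirror of _perspectives_to_key_servers: flatten the old nested
    # 'perspectives' block into 'trusted_key_servers' entries.
    servers = config.get("perspectives", {}).get("servers", {})
    for server_name, server_opts in servers.items():
        entry = {"server_name": server_name}
        verify_keys = server_opts.get("verify_keys")
        if verify_keys is not None:
            entry["verify_keys"] = {
                key_id: key_data["key"] for key_id, key_data in verify_keys.items()
            }
        yield entry


old_style = {
    "perspectives": {
        "servers": {
            "matrix.org": {
                "verify_keys": {
                    "ed25519:auto": {
                        "key": "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
                    }
                }
            }
        }
    }
}

print(list(perspectives_to_key_servers(old_style)))
# [{'server_name': 'matrix.org',
#   'verify_keys': {'ed25519:auto': 'Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw'}}]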
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index c1febbe9d3..931aec41c0 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -29,7 +29,8 @@ from synapse.util.versionstring import get_version_string
 
 from ._base import Config
 
-DEFAULT_LOG_CONFIG = Template("""
+DEFAULT_LOG_CONFIG = Template(
+    """
 version: 1
 
 formatters:
@@ -68,26 +69,29 @@ loggers:
 root:
     level: INFO
     handlers: [file, console]
-""")
+"""
+)
 
 
 class LoggingConfig(Config):
-
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         self.verbosity = config.get("verbose", 0)
         self.no_redirect_stdio = config.get("no_redirect_stdio", False)
         self.log_config = self.abspath(config.get("log_config"))
         self.log_file = self.abspath(config.get("log_file"))
 
-    def default_config(self, config_dir_path, server_name, **kwargs):
+    def generate_config_section(self, config_dir_path, server_name, **kwargs):
         log_config = os.path.join(config_dir_path, server_name + ".log.config")
-        return """\
+        return (
+            """\
         ## Logging ##
 
         # A yaml python logging config file
         #
         log_config: "%(log_config)s"
-        """ % locals()
+        """
+            % locals()
+        )
 
     def read_arguments(self, args):
         if args.verbose is not None:
@@ -102,32 +106,43 @@ class LoggingConfig(Config):
     def add_arguments(cls, parser):
         logging_group = parser.add_argument_group("logging")
         logging_group.add_argument(
-            '-v', '--verbose', dest="verbose", action='count',
+            "-v",
+            "--verbose",
+            dest="verbose",
+            action="count",
             help="The verbosity level. Specify multiple times to increase "
-            "verbosity. (Ignored if --log-config is specified.)"
+            "verbosity. (Ignored if --log-config is specified.)",
         )
         logging_group.add_argument(
-            '-f', '--log-file', dest="log_file",
-            help="File to log to. (Ignored if --log-config is specified.)"
+            "-f",
+            "--log-file",
+            dest="log_file",
+            help="File to log to. (Ignored if --log-config is specified.)",
         )
         logging_group.add_argument(
-            '--log-config', dest="log_config", default=None,
-            help="Python logging config file"
+            "--log-config",
+            dest="log_config",
+            default=None,
+            help="Python logging config file",
         )
         logging_group.add_argument(
-            '-n', '--no-redirect-stdio',
-            action='store_true', default=None,
-            help="Do not redirect stdout/stderr to the log"
+            "-n",
+            "--no-redirect-stdio",
+            action="store_true",
+            default=None,
+            help="Do not redirect stdout/stderr to the log",
         )
 
-    def generate_files(self, config):
+    def generate_files(self, config, config_dir_path):
         log_config = config.get("log_config")
         if log_config and not os.path.exists(log_config):
             log_file = self.abspath("homeserver.log")
+            print(
+                "Generating log config file %s which will log to %s"
+                % (log_config, log_file)
+            )
             with open(log_config, "w") as log_config_file:
-                log_config_file.write(
-                    DEFAULT_LOG_CONFIG.substitute(log_file=log_file)
-                )
+                log_config_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=log_file))
 
 
 def setup_logging(config, use_worker_options=False):
@@ -143,10 +158,8 @@ def setup_logging(config, use_worker_options=False):
         register_sighup (func | None): Function to call to register a
             sighup handler.
     """
-    log_config = (config.worker_log_config if use_worker_options
-                  else config.log_config)
-    log_file = (config.worker_log_file if use_worker_options
-                else config.log_file)
+    log_config = config.worker_log_config if use_worker_options else config.log_config
+    log_file = config.worker_log_file if use_worker_options else config.log_file
 
     log_format = (
         "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s"
@@ -164,23 +177,23 @@ def setup_logging(config, use_worker_options=False):
             if config.verbosity > 1:
                 level_for_storage = logging.DEBUG
 
-        logger = logging.getLogger('')
+        logger = logging.getLogger("")
         logger.setLevel(level)
 
-        logging.getLogger('synapse.storage.SQL').setLevel(level_for_storage)
+        logging.getLogger("synapse.storage.SQL").setLevel(level_for_storage)
 
         formatter = logging.Formatter(log_format)
         if log_file:
             # TODO: Customisable file size / backup count
             handler = logging.handlers.RotatingFileHandler(
-                log_file, maxBytes=(1000 * 1000 * 100), backupCount=3,
-                encoding='utf8'
+                log_file, maxBytes=(1000 * 1000 * 100), backupCount=3, encoding="utf8"
             )
 
             def sighup(signum, stack):
                 logger.info("Closing log file due to SIGHUP")
                 handler.doRollover()
                 logger.info("Opened new log file due to SIGHUP")
+
         else:
             handler = logging.StreamHandler()
 
@@ -193,8 +206,9 @@ def setup_logging(config, use_worker_options=False):
 
         logger.addHandler(handler)
     else:
+
         def load_log_config():
-            with open(log_config, 'r') as f:
+            with open(log_config, "r") as f:
                 logging.config.dictConfig(yaml.safe_load(f))
 
         def sighup(*args):
@@ -209,10 +223,7 @@ def setup_logging(config, use_worker_options=False):
     # make sure that the first thing we log is a thing we can grep backwards
     # for
     logging.warn("***** STARTING SERVER *****")
-    logging.warn(
-        "Server %s version %s",
-        sys.argv[0], get_version_string(synapse),
-    )
+    logging.warn("Server %s version %s", sys.argv[0], get_version_string(synapse))
     logging.info("Server hostname: %s", config.server_name)
 
     # It's critical to point twisted's internal logging somewhere, otherwise it
@@ -242,8 +253,7 @@ def setup_logging(config, use_worker_options=False):
         return observer(event)
 
     globalLogBeginner.beginLoggingTo(
-        [_log],
-        redirectStandardIO=not config.no_redirect_stdio,
+        [_log], redirectStandardIO=not config.no_redirect_stdio
     )
     if not config.no_redirect_stdio:
         print("Redirected stdout/stderr to logs")
diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py
index 2de51979d8..3698441963 100644
--- a/synapse/config/metrics.py
+++ b/synapse/config/metrics.py
@@ -15,15 +15,13 @@
 
 from ._base import Config, ConfigError
 
-MISSING_SENTRY = (
-    """Missing sentry-sdk library. This is required to enable sentry
+MISSING_SENTRY = """Missing sentry-sdk library. This is required to enable sentry
     integration.
     """
-)
 
 
 class MetricsConfig(Config):
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         self.enable_metrics = config.get("enable_metrics", False)
         self.report_stats = config.get("report_stats", None)
         self.metrics_port = config.get("metrics_port")
@@ -39,10 +37,10 @@ class MetricsConfig(Config):
             self.sentry_dsn = config["sentry"].get("dsn")
             if not self.sentry_dsn:
                 raise ConfigError(
-                    "sentry.dsn field is required when sentry integration is enabled",
+                    "sentry.dsn field is required when sentry integration is enabled"
                 )
 
-    def default_config(self, report_stats=None, **kwargs):
+    def generate_config_section(self, report_stats=None, **kwargs):
         res = """\
         ## Metrics ##
 
@@ -66,6 +64,6 @@ class MetricsConfig(Config):
         if report_stats is None:
             res += "# report_stats: true|false\n"
         else:
-            res += "report_stats: %s\n" % ('true' if report_stats else 'false')
+            res += "report_stats: %s\n" % ("true" if report_stats else "false")
 
         return res
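The sentry check above only requires that a dsn accompanies the sentry section. A minimal sketch of that validation in isolation; the DSN value is illustrative and ConfigError here is a stand-in for synapse's own exception:

class ConfigError(Exception):
    pass


def check_sentry(config):
    # A 'sentry' section without a 'dsn' is rejected, mirroring read_config above.
    if "sentry" not in config:
        return None
    dsn = config["sentry"].get("dsn")
    if not dsn:
        raise ConfigError(
            "sentry.dsn field is required when sentry integration is enabled"
        )
    return dsn


check_sentry({"sentry": {"dsn": "https://public@sentry.example.com/1"}})  # accepted
# check_sentry({"sentry": {}})  # raises ConfigError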
diff --git a/synapse/config/password.py b/synapse/config/password.py
index eea59e772b..598f84fc0c 100644
--- a/synapse/config/password.py
+++ b/synapse/config/password.py
@@ -20,7 +20,7 @@ class PasswordConfig(Config):
     """Password login configuration
     """
 
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         password_config = config.get("password_config", {})
         if password_config is None:
             password_config = {}
@@ -28,7 +28,7 @@ class PasswordConfig(Config):
         self.password_enabled = password_config.get("enabled", True)
         self.password_pepper = password_config.get("pepper", "")
 
-    def default_config(self, config_dir_path, server_name, **kwargs):
+    def generate_config_section(self, config_dir_path, server_name, **kwargs):
         return """\
         password_config:
            # Uncomment to disable password login
diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py
index f0a6be0679..788c39c9fb 100644
--- a/synapse/config/password_auth_providers.py
+++ b/synapse/config/password_auth_providers.py
@@ -17,11 +17,11 @@ from synapse.util.module_loader import load_module
 
 from ._base import Config
 
-LDAP_PROVIDER = 'ldap_auth_provider.LdapAuthProvider'
+LDAP_PROVIDER = "ldap_auth_provider.LdapAuthProvider"
 
 
 class PasswordAuthProviderConfig(Config):
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         self.password_providers = []
         providers = []
 
@@ -29,28 +29,24 @@ class PasswordAuthProviderConfig(Config):
         # param.
         ldap_config = config.get("ldap_config", {})
         if ldap_config.get("enabled", False):
-            providers.append({
-                'module': LDAP_PROVIDER,
-                'config': ldap_config,
-            })
+            providers.append({"module": LDAP_PROVIDER, "config": ldap_config})
 
         providers.extend(config.get("password_providers", []))
         for provider in providers:
-            mod_name = provider['module']
+            mod_name = provider["module"]
 
             # This is for backwards compat when the ldap auth provider resided
             # in this package.
             if mod_name == "synapse.util.ldap_auth_provider.LdapAuthProvider":
                 mod_name = LDAP_PROVIDER
 
-            (provider_class, provider_config) = load_module({
-                "module": mod_name,
-                "config": provider['config'],
-            })
+            (provider_class, provider_config) = load_module(
+                {"module": mod_name, "config": provider["config"]}
+            )
 
             self.password_providers.append((provider_class, provider_config))
 
-    def default_config(self, **kwargs):
+    def generate_config_section(self, **kwargs):
         return """\
         #password_providers:
         #    - module: "ldap_auth_provider.LdapAuthProvider"
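read_config above rewrites the legacy module path for the LDAP provider before handing it to load_module. A minimal sketch of that normalisation step on its own; load_module itself is left out, only the rename is shown:

LDAP_PROVIDER = "ldap_auth_provider.LdapAuthProvider"


def normalise_provider(provider):
    # Map the old in-tree module path onto the external package name.
    mod_name = provider["module"]
    if mod_name == "synapse.util.ldap_auth_provider.LdapAuthProvider":
        mod_name = LDAP_PROVIDER
    return {"module": mod_name, "config": provider["config"]}


print(normalise_provider(
    {"module": "synapse.util.ldap_auth_provider.LdapAuthProvider", "config": {}}
))
# {'module': 'ldap_auth_provider.LdapAuthProvider', 'config': {}}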
diff --git a/synapse/config/push.py b/synapse/config/push.py
index 62c0060c9c..1b932722a5 100644
--- a/synapse/config/push.py
+++ b/synapse/config/push.py
@@ -18,7 +18,7 @@ from ._base import Config
 
 
 class PushConfig(Config):
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         push_config = config.get("push", {})
         self.push_include_content = push_config.get("include_content", True)
 
@@ -42,7 +42,7 @@ class PushConfig(Config):
             )
             self.push_include_content = not redact_content
 
-    def default_config(self, config_dir_path, server_name, **kwargs):
+    def generate_config_section(self, config_dir_path, server_name, **kwargs):
         return """
         # Clients requesting push notifications can either have the body of
         # the message sent in the notification poke along with other details
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index 5a9adac480..8c587f3fd2 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -36,7 +36,7 @@ class FederationRateLimitConfig(object):
 
 
 class RatelimitConfig(Config):
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
 
         # Load the new-style messages config if it exists. Otherwise fall back
         # to the old method.
@@ -80,7 +80,7 @@ class RatelimitConfig(Config):
             "federation_rr_transactions_per_room_per_second", 50
         )
 
-    def default_config(self, **kwargs):
+    def generate_config_section(self, **kwargs):
         return """\
         ## Ratelimiting ##
 
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index 693288f938..4a59e6ec90 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -23,7 +23,7 @@ from synapse.util.stringutils import random_string_with_symbols
 class AccountValidityConfig(Config):
     def __init__(self, config, synapse_config):
         self.enabled = config.get("enabled", False)
-        self.renew_by_email_enabled = ("renew_at" in config)
+        self.renew_by_email_enabled = "renew_at" in config
 
         if self.enabled:
             if "period" in config:
@@ -39,13 +39,14 @@ class AccountValidityConfig(Config):
             else:
                 self.renew_email_subject = "Renew your %(app)s account"
 
+            self.startup_job_max_delta = self.period * 10.0 / 100.0
+
         if self.renew_by_email_enabled and "public_baseurl" not in synapse_config:
             raise ConfigError("Can't send renewal emails without 'public_baseurl'")
 
 
 class RegistrationConfig(Config):
-
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         self.enable_registration = bool(
             strtobool(str(config.get("enable_registration", False)))
         )
@@ -55,7 +56,7 @@ class RegistrationConfig(Config):
             )
 
         self.account_validity = AccountValidityConfig(
-            config.get("account_validity", {}), config,
+            config.get("account_validity", {}), config
         )
 
         self.registrations_require_3pid = config.get("registrations_require_3pid", [])
@@ -65,35 +66,37 @@ class RegistrationConfig(Config):
 
         self.bcrypt_rounds = config.get("bcrypt_rounds", 12)
         self.trusted_third_party_id_servers = config.get(
-            "trusted_third_party_id_servers",
-            ["matrix.org", "vector.im"],
+            "trusted_third_party_id_servers", ["matrix.org", "vector.im"]
         )
         self.default_identity_server = config.get("default_identity_server")
         self.allow_guest_access = config.get("allow_guest_access", False)
 
-        self.invite_3pid_guest = (
-            self.allow_guest_access and config.get("invite_3pid_guest", False)
+        self.invite_3pid_guest = self.allow_guest_access and config.get(
+            "invite_3pid_guest", False
         )
 
         self.auto_join_rooms = config.get("auto_join_rooms", [])
         for room_alias in self.auto_join_rooms:
             if not RoomAlias.is_valid(room_alias):
-                raise ConfigError('Invalid auto_join_rooms entry %s' % (room_alias,))
+                raise ConfigError("Invalid auto_join_rooms entry %s" % (room_alias,))
         self.autocreate_auto_join_rooms = config.get("autocreate_auto_join_rooms", True)
 
-        self.disable_msisdn_registration = (
-            config.get("disable_msisdn_registration", False)
+        self.disable_msisdn_registration = config.get(
+            "disable_msisdn_registration", False
         )
 
-    def default_config(self, generate_secrets=False, **kwargs):
+    def generate_config_section(self, generate_secrets=False, **kwargs):
         if generate_secrets:
             registration_shared_secret = 'registration_shared_secret: "%s"' % (
                 random_string_with_symbols(50),
             )
         else:
-            registration_shared_secret = '# registration_shared_secret: <PRIVATE STRING>'
+            registration_shared_secret = (
+                "# registration_shared_secret: <PRIVATE STRING>"
+            )
 
-        return """\
+        return (
+            """\
         ## Registration ##
         #
         # Registration can be rate-limited using the parameters in the "Ratelimiting"
@@ -129,7 +132,9 @@ class RegistrationConfig(Config):
         # This means that, if a validity period is set, and Synapse is restarted (it will
         # then derive an expiration date from the current validity period), and some time
         # after that the validity period changes and Synapse is restarted, the users'
-        # expiration dates won't be updated unless their account is manually renewed.
+        # expiration dates won't be updated unless their account is manually renewed. The
+        # expiration date derived at startup is randomly selected within the range
+        # [now + period - d ; now + period], where d is 10%% of the validity period.
         #
         #account_validity:
         #  enabled: True
@@ -213,17 +218,19 @@ class RegistrationConfig(Config):
         # users cannot be auto-joined since they do not exist.
         #
         #autocreate_auto_join_rooms: true
-        """ % locals()
+        """
+            % locals()
+        )
 
     def add_arguments(self, parser):
         reg_group = parser.add_argument_group("registration")
         reg_group.add_argument(
-            "--enable-registration", action="store_true", default=None,
-            help="Enable registration for new users."
+            "--enable-registration",
+            action="store_true",
+            default=None,
+            help="Enable registration for new users.",
         )
 
     def read_arguments(self, args):
         if args.enable_registration is not None:
-            self.enable_registration = bool(
-                strtobool(str(args.enable_registration))
-            )
+            self.enable_registration = bool(strtobool(str(args.enable_registration)))
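The account-validity comment above describes expiration dates picked in the range [now + period - d, now + period], with d equal to 10% of the period (the startup_job_max_delta set in AccountValidityConfig). A minimal sketch of that arithmetic; the 30-day period and the use of random.randrange are illustrative assumptions, not the exact draw Synapse performs:

import random
import time

period_ms = 30 * 24 * 60 * 60 * 1000              # assumed 30-day validity period, in ms
startup_job_max_delta = period_ms * 10.0 / 100.0  # d = 10% of the period

now_ms = int(time.time() * 1000)
# Pick an expiration timestamp uniformly within [now + period - d, now + period].
expiration_ts = now_ms + period_ms - random.randrange(int(startup_job_max_delta))

assert now_ms + period_ms - startup_job_max_delta <= expiration_ts <= now_ms + period_ms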
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index fbfcecc240..80a628d9b0 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -20,27 +20,11 @@ from synapse.util.module_loader import load_module
 from ._base import Config, ConfigError
 
 DEFAULT_THUMBNAIL_SIZES = [
-    {
-        "width": 32,
-        "height": 32,
-        "method": "crop",
-    }, {
-        "width": 96,
-        "height": 96,
-        "method": "crop",
-    }, {
-        "width": 320,
-        "height": 240,
-        "method": "scale",
-    }, {
-        "width": 640,
-        "height": 480,
-        "method": "scale",
-    }, {
-        "width": 800,
-        "height": 600,
-        "method": "scale"
-    },
+    {"width": 32, "height": 32, "method": "crop"},
+    {"width": 96, "height": 96, "method": "crop"},
+    {"width": 320, "height": 240, "method": "scale"},
+    {"width": 640, "height": 480, "method": "scale"},
+    {"width": 800, "height": 600, "method": "scale"},
 ]
 
 THUMBNAIL_SIZE_YAML = """\
@@ -49,19 +33,15 @@ THUMBNAIL_SIZE_YAML = """\
         #    method: %(method)s
 """
 
-MISSING_NETADDR = (
-    "Missing netaddr library. This is required for URL preview API."
-)
+MISSING_NETADDR = "Missing netaddr library. This is required for URL preview API."
 
-MISSING_LXML = (
-    """Missing lxml library. This is required for URL preview API.
+MISSING_LXML = """Missing lxml library. This is required for URL preview API.
 
     Install by running:
         pip install lxml
 
     Requires libxslt1-dev system package.
     """
-)
 
 
 ThumbnailRequirement = namedtuple(
@@ -69,7 +49,8 @@ ThumbnailRequirement = namedtuple(
 )
 
 MediaStorageProviderConfig = namedtuple(
-    "MediaStorageProviderConfig", (
+    "MediaStorageProviderConfig",
+    (
         "store_local",  # Whether to store newly uploaded local files
         "store_remote",  # Whether to store newly downloaded remote files
         "store_synchronous",  # Whether to wait for successful storage for local uploads
@@ -100,18 +81,19 @@ def parse_thumbnail_requirements(thumbnail_sizes):
         requirements.setdefault("image/gif", []).append(png_thumbnail)
         requirements.setdefault("image/png", []).append(png_thumbnail)
     return {
-        media_type: tuple(thumbnails)
-        for media_type, thumbnails in requirements.items()
+        media_type: tuple(thumbnails) for media_type, thumbnails in requirements.items()
     }
 
 
 class ContentRepositoryConfig(Config):
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         self.max_upload_size = self.parse_size(config.get("max_upload_size", "10M"))
         self.max_image_pixels = self.parse_size(config.get("max_image_pixels", "32M"))
         self.max_spider_size = self.parse_size(config.get("max_spider_size", "10M"))
 
-        self.media_store_path = self.ensure_directory(config["media_store_path"])
+        self.media_store_path = self.ensure_directory(
+            config.get("media_store_path", "media_store")
+        )
 
         backup_media_store_path = config.get("backup_media_store_path")
 
@@ -127,15 +109,15 @@ class ContentRepositoryConfig(Config):
                     "Cannot use both 'backup_media_store_path' and 'storage_providers'"
                 )
 
-            storage_providers = [{
-                "module": "file_system",
-                "store_local": True,
-                "store_synchronous": synchronous_backup_media_store,
-                "store_remote": True,
-                "config": {
-                    "directory": backup_media_store_path,
+            storage_providers = [
+                {
+                    "module": "file_system",
+                    "store_local": True,
+                    "store_synchronous": synchronous_backup_media_store,
+                    "store_remote": True,
+                    "config": {"directory": backup_media_store_path},
                 }
-            }]
+            ]
 
         # This is a list of config that can be used to create the storage
         # providers. The entries are tuples of (Class, class_config,
@@ -165,18 +147,19 @@ class ContentRepositoryConfig(Config):
             )
 
             self.media_storage_providers.append(
-                (provider_class, parsed_config, wrapper_config,)
+                (provider_class, parsed_config, wrapper_config)
             )
 
-        self.uploads_path = self.ensure_directory(config["uploads_path"])
+        self.uploads_path = self.ensure_directory(config.get("uploads_path", "uploads"))
         self.dynamic_thumbnails = config.get("dynamic_thumbnails", False)
         self.thumbnail_requirements = parse_thumbnail_requirements(
-            config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES),
+            config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES)
         )
         self.url_preview_enabled = config.get("url_preview_enabled", False)
         if self.url_preview_enabled:
             try:
                 import lxml
+
                 lxml  # To stop unused lint.
             except ImportError:
                 raise ConfigError(MISSING_LXML)
@@ -199,17 +182,15 @@ class ContentRepositoryConfig(Config):
 
             # we always blacklist '0.0.0.0' and '::', which are supposed to be
             # unroutable addresses.
-            self.url_preview_ip_range_blacklist.update(['0.0.0.0', '::'])
+            self.url_preview_ip_range_blacklist.update(["0.0.0.0", "::"])
 
             self.url_preview_ip_range_whitelist = IPSet(
                 config.get("url_preview_ip_range_whitelist", ())
             )
 
-            self.url_preview_url_blacklist = config.get(
-                "url_preview_url_blacklist", ()
-            )
+            self.url_preview_url_blacklist = config.get("url_preview_url_blacklist", ())
 
-    def default_config(self, data_dir_path, **kwargs):
+    def generate_config_section(self, data_dir_path, **kwargs):
         media_store = os.path.join(data_dir_path, "media_store")
         uploads_path = os.path.join(data_dir_path, "uploads")
 
@@ -219,7 +200,8 @@ class ContentRepositoryConfig(Config):
         # strip final NL
         formatted_thumbnail_sizes = formatted_thumbnail_sizes[:-1]
 
-        return r"""
+        return (
+            r"""
         # Directory where uploaded images and attachments are stored.
         #
         media_store_path: "%(media_store)s"
@@ -342,4 +324,6 @@ class ContentRepositoryConfig(Config):
         # The largest allowed URL preview spidering size in bytes
         #
         #max_spider_size: 10M
-        """ % locals()
+        """
+            % locals()
+        )
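The backwards-compat branch above turns a legacy backup_media_store_path into a single file_system storage provider. A minimal sketch of that conversion with the two inputs passed explicitly; the directory path is illustrative:

def backup_path_to_storage_providers(backup_media_store_path, store_synchronous):
    # Equivalent single-provider list built by read_config above.
    return [
        {
            "module": "file_system",
            "store_local": True,
            "store_synchronous": store_synchronous,
            "store_remote": True,
            "config": {"directory": backup_media_store_path},
        }
    ]


print(backup_path_to_storage_providers("/data/media_backup", False))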
diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py
index 8a9fded4c5..a92693017b 100644
--- a/synapse/config/room_directory.py
+++ b/synapse/config/room_directory.py
@@ -19,10 +19,8 @@ from ._base import Config, ConfigError
 
 
 class RoomDirectoryConfig(Config):
-    def read_config(self, config):
-        self.enable_room_list_search = config.get(
-            "enable_room_list_search", True,
-        )
+    def read_config(self, config, **kwargs):
+        self.enable_room_list_search = config.get("enable_room_list_search", True)
 
         alias_creation_rules = config.get("alias_creation_rules")
 
@@ -33,11 +31,7 @@ class RoomDirectoryConfig(Config):
             ]
         else:
             self._alias_creation_rules = [
-                _RoomDirectoryRule(
-                    "alias_creation_rules", {
-                        "action": "allow",
-                    }
-                )
+                _RoomDirectoryRule("alias_creation_rules", {"action": "allow"})
             ]
 
         room_list_publication_rules = config.get("room_list_publication_rules")
@@ -49,14 +43,10 @@ class RoomDirectoryConfig(Config):
             ]
         else:
             self._room_list_publication_rules = [
-                _RoomDirectoryRule(
-                    "room_list_publication_rules", {
-                        "action": "allow",
-                    }
-                )
+                _RoomDirectoryRule("room_list_publication_rules", {"action": "allow"})
             ]
 
-    def default_config(self, config_dir_path, server_name, **kwargs):
+    def generate_config_section(self, config_dir_path, server_name, **kwargs):
         return """
         # Uncomment to disable searching the public room list. When disabled
         # blocks searching local and remote room lists for local and remote
@@ -178,8 +168,7 @@ class _RoomDirectoryRule(object):
             self.action = action
         else:
             raise ConfigError(
-                "%s rules can only have action of 'allow'"
-                " or 'deny'" % (option_name,)
+                "%s rules can only have action of 'allow'" " or 'deny'" % (option_name,)
             )
 
         self._alias_matches_all = alias == "*"
diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py
index aa6eac271f..872a1ba934 100644
--- a/synapse/config/saml2_config.py
+++ b/synapse/config/saml2_config.py
@@ -17,7 +17,7 @@ from ._base import Config, ConfigError
 
 
 class SAML2Config(Config):
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         self.saml2_enabled = False
 
         saml2_config = config.get("saml2_config")
@@ -28,6 +28,7 @@ class SAML2Config(Config):
         self.saml2_enabled = True
 
         import saml2.config
+
         self.saml2_sp_config = saml2.config.SPConfig()
         self.saml2_sp_config.load(self._default_saml_config_dict())
         self.saml2_sp_config.load(saml2_config.get("sp_config", {}))
@@ -41,29 +42,26 @@ class SAML2Config(Config):
 
         public_baseurl = self.public_baseurl
         if public_baseurl is None:
-            raise ConfigError(
-                "saml2_config requires a public_baseurl to be set"
-            )
+            raise ConfigError("saml2_config requires a public_baseurl to be set")
 
         metadata_url = public_baseurl + "_matrix/saml2/metadata.xml"
         response_url = public_baseurl + "_matrix/saml2/authn_response"
         return {
             "entityid": metadata_url,
-
             "service": {
                 "sp": {
                     "endpoints": {
                         "assertion_consumer_service": [
-                            (response_url, saml2.BINDING_HTTP_POST),
-                        ],
+                            (response_url, saml2.BINDING_HTTP_POST)
+                        ]
                     },
                     "required_attributes": ["uid"],
                     "optional_attributes": ["mail", "surname", "givenname"],
-                },
-            }
+                }
+            },
         }
 
-    def default_config(self, config_dir_path, server_name, **kwargs):
+    def generate_config_section(self, config_dir_path, server_name, **kwargs):
         return """\
         # Enable SAML2 for registration and login. Uses pysaml2.
         #
@@ -106,4 +104,6 @@ class SAML2Config(Config):
         #  # separate pysaml2 configuration file:
         #  #
         #  config_path: "%(config_dir_path)s/sp_conf.py"
-        """ % {"config_dir_path": config_dir_path}
+        """ % {
+            "config_dir_path": config_dir_path
+        }
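The endpoint URLs in _default_saml_config_dict above are derived directly from public_baseurl. A minimal sketch of that construction, assuming an example base URL whose trailing slash has already been normalised by ServerConfig:

public_baseurl = "https://example.com/"

metadata_url = public_baseurl + "_matrix/saml2/metadata.xml"
response_url = public_baseurl + "_matrix/saml2/authn_response"

print(metadata_url)  # https://example.com/_matrix/saml2/metadata.xml
print(response_url)  # https://example.com/_matrix/saml2/authn_response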
diff --git a/synapse/config/server.py b/synapse/config/server.py
index f34aa42afa..2a74dea2ea 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -20,6 +20,7 @@ import os.path
 
 from netaddr import IPSet
 
+from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
 from synapse.http.endpoint import parse_and_validate_server_name
 from synapse.python_dependencies import DependencyException, check_requirements
 
@@ -33,12 +34,13 @@ logger = logging.Logger(__name__)
 #
 # We later check for errors when binding to 0.0.0.0 and ignore them if :: is also in
 # in the list.
-DEFAULT_BIND_ADDRESSES = ['::', '0.0.0.0']
+DEFAULT_BIND_ADDRESSES = ["::", "0.0.0.0"]
 
+DEFAULT_ROOM_VERSION = "4"
 
-class ServerConfig(Config):
 
-    def read_config(self, config):
+class ServerConfig(Config):
+    def read_config(self, config, **kwargs):
         self.server_name = config["server_name"]
         self.server_context = config.get("server_context", None)
 
@@ -55,7 +57,6 @@ class ServerConfig(Config):
         self.user_agent_suffix = config.get("user_agent_suffix")
         self.use_frozen_dicts = config.get("use_frozen_dicts", False)
         self.public_baseurl = config.get("public_baseurl")
-        self.cpu_affinity = config.get("cpu_affinity")
 
         # Whether to send federation traffic out in this process. This only
         # applies to some federation traffic, and so shouldn't be used to
@@ -78,15 +79,49 @@ class ServerConfig(Config):
         # Whether to require authentication to retrieve profile data (avatars,
         # display names) of other users through the client API.
         self.require_auth_for_profile_requests = config.get(
-            "require_auth_for_profile_requests", False,
+            "require_auth_for_profile_requests", False
         )
 
-        # If set to 'True', requires authentication to access the server's
-        # public rooms directory through the client API, and forbids any other
-        # homeserver to fetch it via federation.
-        self.restrict_public_rooms_to_local_users = config.get(
-            "restrict_public_rooms_to_local_users", False,
-        )
+        if "restrict_public_rooms_to_local_users" in config and (
+            "allow_public_rooms_without_auth" in config
+            or "allow_public_rooms_over_federation" in config
+        ):
+            raise ConfigError(
+                "Can't use 'restrict_public_rooms_to_local_users' if"
+                " 'allow_public_rooms_without_auth' and/or"
+                " 'allow_public_rooms_over_federation' is set."
+            )
+
+        # Check if the legacy "restrict_public_rooms_to_local_users" flag is set. This
+        # flag is now obsolete but we need to check it for backward-compatibility.
+        if config.get("restrict_public_rooms_to_local_users", False):
+            self.allow_public_rooms_without_auth = False
+            self.allow_public_rooms_over_federation = False
+        else:
+            # If set to 'False', requires authentication to access the server's public
+            # rooms directory through the client API. Defaults to 'True'.
+            self.allow_public_rooms_without_auth = config.get(
+                "allow_public_rooms_without_auth", True
+            )
+            # If set to 'False', forbids any other homeserver to fetch the server's public
+            # rooms directory via federation. Defaults to 'True'.
+            self.allow_public_rooms_over_federation = config.get(
+                "allow_public_rooms_over_federation", True
+            )
+
+        default_room_version = config.get("default_room_version", DEFAULT_ROOM_VERSION)
+
+        # Ensure room version is a str
+        default_room_version = str(default_room_version)
+
+        if default_room_version not in KNOWN_ROOM_VERSIONS:
+            raise ConfigError(
+                "Unknown default_room_version: %s, known room versions: %s"
+                % (default_room_version, list(KNOWN_ROOM_VERSIONS.keys()))
+            )
+
+        # Get the actual room version object rather than just the identifier
+        self.default_room_version = KNOWN_ROOM_VERSIONS[default_room_version]
 
         # whether to enable search. If disabled, new entries will not be inserted
         # into the search tables and they will not be indexed. Users will receive
@@ -97,31 +132,25 @@ class ServerConfig(Config):
 
         # Whether we should block invites sent to users on this server
         # (other than those sent by local server admins)
-        self.block_non_admin_invites = config.get(
-            "block_non_admin_invites", False,
-        )
+        self.block_non_admin_invites = config.get("block_non_admin_invites", False)
 
         # Whether to enable experimental MSC1849 (aka relations) support
         self.experimental_msc1849_support_enabled = config.get(
-            "experimental_msc1849_support_enabled", False,
+            "experimental_msc1849_support_enabled", False
         )
 
         # Options to control access by tracking MAU
         self.limit_usage_by_mau = config.get("limit_usage_by_mau", False)
         self.max_mau_value = 0
         if self.limit_usage_by_mau:
-            self.max_mau_value = config.get(
-                "max_mau_value", 0,
-            )
+            self.max_mau_value = config.get("max_mau_value", 0)
         self.mau_stats_only = config.get("mau_stats_only", False)
 
         self.mau_limits_reserved_threepids = config.get(
             "mau_limit_reserved_threepids", []
         )
 
-        self.mau_trial_days = config.get(
-            "mau_trial_days", 0,
-        )
+        self.mau_trial_days = config.get("mau_trial_days", 0)
 
         # Options to disable HS
         self.hs_disabled = config.get("hs_disabled", False)
@@ -134,9 +163,7 @@ class ServerConfig(Config):
 
         # FIXME: federation_domain_whitelist needs sytests
         self.federation_domain_whitelist = None
-        federation_domain_whitelist = config.get(
-            "federation_domain_whitelist", None,
-        )
+        federation_domain_whitelist = config.get("federation_domain_whitelist", None)
 
         if federation_domain_whitelist is not None:
             # turn the whitelist into a hash for speed of lookup
@@ -146,7 +173,7 @@ class ServerConfig(Config):
                 self.federation_domain_whitelist[domain] = True
 
         self.federation_ip_range_blacklist = config.get(
-            "federation_ip_range_blacklist", [],
+            "federation_ip_range_blacklist", []
         )
 
         # Attempt to create an IPSet from the given ranges
@@ -159,13 +186,12 @@ class ServerConfig(Config):
             self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
         except Exception as e:
             raise ConfigError(
-                "Invalid range(s) provided in "
-                "federation_ip_range_blacklist: %s" % e
+                "Invalid range(s) provided in " "federation_ip_range_blacklist: %s" % e
             )
 
         if self.public_baseurl is not None:
-            if self.public_baseurl[-1] != '/':
-                self.public_baseurl += '/'
+            if self.public_baseurl[-1] != "/":
+                self.public_baseurl += "/"
         self.start_pushers = config.get("start_pushers", True)
 
         # (undocumented) option for torturing the worker-mode replication a bit,
@@ -176,7 +202,7 @@ class ServerConfig(Config):
         # Whether to require a user to be in the room to add an alias to it.
         # Defaults to True.
         self.require_membership_for_aliases = config.get(
-            "require_membership_for_aliases", True,
+            "require_membership_for_aliases", True
         )
 
         # Whether to allow per-room membership profiles through the send of membership
@@ -208,9 +234,9 @@ class ServerConfig(Config):
 
             # if we still have an empty list of addresses, use the default list
             if not bind_addresses:
-                if listener['type'] == 'metrics':
+                if listener["type"] == "metrics":
                     # the metrics listener doesn't support IPv6
-                    bind_addresses.append('0.0.0.0')
+                    bind_addresses.append("0.0.0.0")
                 else:
                     bind_addresses.extend(DEFAULT_BIND_ADDRESSES)
 
@@ -230,78 +256,80 @@ class ServerConfig(Config):
             bind_host = config.get("bind_host", "")
             gzip_responses = config.get("gzip_responses", True)
 
-            self.listeners.append({
-                "port": bind_port,
-                "bind_addresses": [bind_host],
-                "tls": True,
-                "type": "http",
-                "resources": [
-                    {
-                        "names": ["client"],
-                        "compress": gzip_responses,
-                    },
-                    {
-                        "names": ["federation"],
-                        "compress": False,
-                    }
-                ]
-            })
-
-            unsecure_port = config.get("unsecure_port", bind_port - 400)
-            if unsecure_port:
-                self.listeners.append({
-                    "port": unsecure_port,
+            self.listeners.append(
+                {
+                    "port": bind_port,
                     "bind_addresses": [bind_host],
-                    "tls": False,
+                    "tls": True,
                     "type": "http",
                     "resources": [
-                        {
-                            "names": ["client"],
-                            "compress": gzip_responses,
-                        },
-                        {
-                            "names": ["federation"],
-                            "compress": False,
-                        }
-                    ]
-                })
+                        {"names": ["client"], "compress": gzip_responses},
+                        {"names": ["federation"], "compress": False},
+                    ],
+                }
+            )
+
+            unsecure_port = config.get("unsecure_port", bind_port - 400)
+            if unsecure_port:
+                self.listeners.append(
+                    {
+                        "port": unsecure_port,
+                        "bind_addresses": [bind_host],
+                        "tls": False,
+                        "type": "http",
+                        "resources": [
+                            {"names": ["client"], "compress": gzip_responses},
+                            {"names": ["federation"], "compress": False},
+                        ],
+                    }
+                )
 
         manhole = config.get("manhole")
         if manhole:
-            self.listeners.append({
-                "port": manhole,
-                "bind_addresses": ["127.0.0.1"],
-                "type": "manhole",
-                "tls": False,
-            })
+            self.listeners.append(
+                {
+                    "port": manhole,
+                    "bind_addresses": ["127.0.0.1"],
+                    "type": "manhole",
+                    "tls": False,
+                }
+            )
 
         metrics_port = config.get("metrics_port")
         if metrics_port:
             logger.warn(
-                ("The metrics_port configuration option is deprecated in Synapse 0.31 "
-                 "in favour of a listener. Please see "
-                 "http://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst"
-                 " on how to configure the new listener."))
-
-            self.listeners.append({
-                "port": metrics_port,
-                "bind_addresses": [config.get("metrics_bind_host", "127.0.0.1")],
-                "tls": False,
-                "type": "http",
-                "resources": [
-                    {
-                        "names": ["metrics"],
-                        "compress": False,
-                    },
-                ]
-            })
+                (
+                    "The metrics_port configuration option is deprecated in Synapse 0.31 "
+                    "in favour of a listener. Please see "
+                    "http://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst"
+                    " on how to configure the new listener."
+                )
+            )
+
+            self.listeners.append(
+                {
+                    "port": metrics_port,
+                    "bind_addresses": [config.get("metrics_bind_host", "127.0.0.1")],
+                    "tls": False,
+                    "type": "http",
+                    "resources": [{"names": ["metrics"], "compress": False}],
+                }
+            )
 
         _check_resource_config(self.listeners)
 
+        # An experimental option to try and periodically clean up extremities
+        # by sending dummy events.
+        self.cleanup_extremities_with_dummy_events = config.get(
+            "cleanup_extremities_with_dummy_events", False
+        )
+
     def has_tls_listener(self):
         return any(l["tls"] for l in self.listeners)
 
-    def default_config(self, server_name, data_dir_path, **kwargs):
+    def generate_config_section(
+        self, server_name, data_dir_path, open_private_ports, **kwargs
+    ):
         _, bind_port = parse_and_validate_server_name(server_name)
         if bind_port is not None:
             unsecure_port = bind_port - 400
@@ -310,7 +338,19 @@ class ServerConfig(Config):
             unsecure_port = 8008
 
         pid_file = os.path.join(data_dir_path, "homeserver.pid")
-        return """\
+
+        # Bring DEFAULT_ROOM_VERSION into the local-scope for use in the
+        # default config string
+        default_room_version = DEFAULT_ROOM_VERSION
+
+        unsecure_http_binding = "port: %i\n            tls: false" % (unsecure_port,)
+        if not open_private_ports:
+            unsecure_http_binding += (
+                "\n            bind_addresses: ['::1', '127.0.0.1']"
+            )
+
+        return (
+            """\
         ## Server ##
 
         # The domain name of the server, with optional explicit port.
@@ -324,29 +364,6 @@ class ServerConfig(Config):
         #
         pid_file: %(pid_file)s
 
-        # CPU affinity mask. Setting this restricts the CPUs on which the
-        # process will be scheduled. It is represented as a bitmask, with the
-        # lowest order bit corresponding to the first logical CPU and the
-        # highest order bit corresponding to the last logical CPU. Not all CPUs
-        # may exist on a given system but a mask may specify more CPUs than are
-        # present.
-        #
-        # For example:
-        #    0x00000001  is processor #0,
-        #    0x00000003  is processors #0 and #1,
-        #    0xFFFFFFFF  is all processors (#0 through #31).
-        #
-        # Pinning a Python process to a single CPU is desirable, because Python
-        # is inherently single-threaded due to the GIL, and can suffer a
-        # 30-40%% slowdown due to cache blow-out and thread context switching
-        # if the scheduler happens to schedule the underlying threads across
-        # different cores. See
-        # https://www.mirantis.com/blog/improve-performance-python-programs-restricting-single-cpu/.
-        #
-        # This setting requires the affinity package to be installed!
-        #
-        #cpu_affinity: 0xFFFFFFFF
-
         # The path to the web client which will be served at /_matrix/client/
         # if 'webclient' is configured under the 'listeners' configuration.
         #
@@ -378,11 +395,25 @@ class ServerConfig(Config):
         #
         #require_auth_for_profile_requests: true
 
-        # If set to 'true', requires authentication to access the server's
-        # public rooms directory through the client API, and forbids any other
-        # homeserver to fetch it via federation. Defaults to 'false'.
+        # If set to 'false', requires authentication to access the server's public rooms
+        # directory through the client API. Defaults to 'true'.
         #
-        #restrict_public_rooms_to_local_users: true
+        #allow_public_rooms_without_auth: false
+
+        # If set to 'false', forbids any other homeserver to fetch the server's public
+        # rooms directory via federation. Defaults to 'true'.
+        #
+        #allow_public_rooms_over_federation: false
+
+        # The default room version for newly created rooms.
+        #
+        # Known room versions are listed here:
+        # https://matrix.org/docs/spec/#complete-list-of-room-versions
+        #
+        # For example, for room version 1, default_room_version should be set
+        # to "1".
+        #
+        #default_room_version: "%(default_room_version)s"
 
         # The GC threshold parameters to pass to `gc.set_threshold`, if defined
         #
@@ -513,9 +544,7 @@ class ServerConfig(Config):
           # If you plan to use a reverse proxy, please see
           # https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.rst.
           #
-          - port: %(unsecure_port)s
-            tls: false
-            bind_addresses: ['::1', '127.0.0.1']
+          - %(unsecure_http_binding)s
             type: http
             x_forwarded: true
 
@@ -523,7 +552,7 @@ class ServerConfig(Config):
               - names: [client, federation]
                 compress: false
 
-            # example additonal_resources:
+            # example additional_resources:
             #
             #additional_resources:
             #  "/_matrix/my/custom/endpoint":
@@ -552,6 +581,22 @@ class ServerConfig(Config):
 
         # Monthly Active User Blocking
         #
+        # Used in cases where the admin or server owner wants to limit the
+        # number of monthly active users.
+        #
+        # 'limit_usage_by_mau' enables or disables monthly active user blocking. When
+        # enabled and the limit is reached, the server returns a 'ResourceLimitError'
+        # with error type Codes.RESOURCE_LIMIT_EXCEEDED.
+        #
+        # 'max_mau_value' is the hard limit of monthly active users above which
+        # the server will start blocking user actions.
+        #
+        # 'mau_trial_days' is a means to add a grace period for active users: they
+        # must be active for this number of days before they count towards the
+        # monthly active user limit. This guards against the case where lots of
+        # users sign up in a short space of time and never return after their
+        # initial session.
+        #
         #limit_usage_by_mau: False
         #max_mau_value: 50
         #mau_trial_days: 2
@@ -582,7 +627,9 @@ class ServerConfig(Config):
         # Defaults to 'true'.
         #
         #allow_per_room_profiles: false
-        """ % locals()
+        """
+            % locals()
+        )
 
     def read_arguments(self, args):
         if args.manhole is not None:
@@ -594,17 +641,26 @@ class ServerConfig(Config):
 
     def add_arguments(self, parser):
         server_group = parser.add_argument_group("server")
-        server_group.add_argument("-D", "--daemonize", action='store_true',
-                                  default=None,
-                                  help="Daemonize the home server")
-        server_group.add_argument("--print-pidfile", action='store_true',
-                                  default=None,
-                                  help="Print the path to the pidfile just"
-                                  " before daemonizing")
-        server_group.add_argument("--manhole", metavar="PORT", dest="manhole",
-                                  type=int,
-                                  help="Turn on the twisted telnet manhole"
-                                  " service on the given port.")
+        server_group.add_argument(
+            "-D",
+            "--daemonize",
+            action="store_true",
+            default=None,
+            help="Daemonize the home server",
+        )
+        server_group.add_argument(
+            "--print-pidfile",
+            action="store_true",
+            default=None,
+            help="Print the path to the pidfile just" " before daemonizing",
+        )
+        server_group.add_argument(
+            "--manhole",
+            metavar="PORT",
+            dest="manhole",
+            type=int,
+            help="Turn on the twisted telnet manhole" " service on the given port.",
+        )
 
 
 def is_threepid_reserved(reserved_threepids, threepid):
@@ -618,7 +674,7 @@ def is_threepid_reserved(reserved_threepids, threepid):
     """
 
     for tp in reserved_threepids:
-        if (threepid['medium'] == tp['medium'] and threepid['address'] == tp['address']):
+        if threepid["medium"] == tp["medium"] and threepid["address"] == tp["address"]:
             return True
     return False
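A usage sketch for is_threepid_reserved(); both the reserved entries and the
candidate threepid are plain dicts with 'medium' and 'address' keys (the example
addresses are made up):

    from synapse.config.server import is_threepid_reserved

    reserved = [{"medium": "email", "address": "admin@example.com"}]

    assert is_threepid_reserved(reserved, {"medium": "email", "address": "admin@example.com"})
    assert not is_threepid_reserved(reserved, {"medium": "email", "address": "other@example.com"})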
 
@@ -631,9 +687,7 @@ def read_gc_thresholds(thresholds):
         return None
     try:
         assert len(thresholds) == 3
-        return (
-            int(thresholds[0]), int(thresholds[1]), int(thresholds[2]),
-        )
+        return (int(thresholds[0]), int(thresholds[1]), int(thresholds[2]))
     except Exception:
         raise ConfigError(
             "Value of `gc_threshold` must be a list of three integers if set"
@@ -651,22 +705,22 @@ def _warn_if_webclient_configured(listeners):
     for listener in listeners:
         for res in listener.get("resources", []):
             for name in res.get("names", []):
-                if name == 'webclient':
+                if name == "webclient":
                     logger.warning(NO_MORE_WEB_CLIENT_WARNING)
                     return
 
 
 KNOWN_RESOURCES = (
-    'client',
-    'consent',
-    'federation',
-    'keys',
-    'media',
-    'metrics',
-    'openid',
-    'replication',
-    'static',
-    'webclient',
+    "client",
+    "consent",
+    "federation",
+    "keys",
+    "media",
+    "metrics",
+    "openid",
+    "replication",
+    "static",
+    "webclient",
 )
 
 
@@ -680,11 +734,9 @@ def _check_resource_config(listeners):
 
     for resource in resource_names:
         if resource not in KNOWN_RESOURCES:
-            raise ConfigError(
-                "Unknown listener resource '%s'" % (resource, )
-            )
+            raise ConfigError("Unknown listener resource '%s'" % (resource,))
         if resource == "consent":
             try:
-                check_requirements('resources.consent')
+                check_requirements("resources.consent")
             except DependencyException as e:
                 raise ConfigError(e.message)
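The check boils down to the loop below: every name listed under a listener's
resources must appear in KNOWN_RESOURCES, otherwise a ConfigError is raised.
Re-stated here in isolation with an illustrative listener block:

    KNOWN_RESOURCES = ("client", "consent", "federation", "keys", "media",
                       "metrics", "openid", "replication", "static", "webclient")

    listeners = [
        {
            "port": 8008,
            "type": "http",
            "resources": [{"names": ["client", "federation"], "compress": False}],
        }
    ]

    for listener in listeners:
        for res in listener.get("resources", []):
            for name in res.get("names", []):
                if name not in KNOWN_RESOURCES:
                    # Synapse raises ConfigError here; ValueError stands in for it.
                    raise ValueError("Unknown listener resource %r" % (name,))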
diff --git a/synapse/config/server_notices_config.py b/synapse/config/server_notices_config.py
index 529dc0a617..eaac3d73bc 100644
--- a/synapse/config/server_notices_config.py
+++ b/synapse/config/server_notices_config.py
@@ -58,6 +58,7 @@ class ServerNoticesConfig(Config):
             The name to use for the server notices room.
             None if server notices are not enabled.
     """
+
     def __init__(self):
         super(ServerNoticesConfig, self).__init__()
         self.server_notices_mxid = None
@@ -65,23 +66,17 @@ class ServerNoticesConfig(Config):
         self.server_notices_mxid_avatar_url = None
         self.server_notices_room_name = None
 
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         c = config.get("server_notices")
         if c is None:
             return
 
-        mxid_localpart = c['system_mxid_localpart']
-        self.server_notices_mxid = UserID(
-            mxid_localpart, self.server_name,
-        ).to_string()
-        self.server_notices_mxid_display_name = c.get(
-            'system_mxid_display_name', None,
-        )
-        self.server_notices_mxid_avatar_url = c.get(
-            'system_mxid_avatar_url', None,
-        )
+        mxid_localpart = c["system_mxid_localpart"]
+        self.server_notices_mxid = UserID(mxid_localpart, self.server_name).to_string()
+        self.server_notices_mxid_display_name = c.get("system_mxid_display_name", None)
+        self.server_notices_mxid_avatar_url = c.get("system_mxid_avatar_url", None)
         # todo: i18n
-        self.server_notices_room_name = c.get('room_name', "Server Notices")
+        self.server_notices_room_name = c.get("room_name", "Server Notices")
 
-    def default_config(self, **kwargs):
+    def generate_config_section(self, **kwargs):
         return DEFAULT_CONFIG
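For reference, the MXID derived by read_config() is simply the configured
localpart qualified with the homeserver's name (the server name below is assumed):

    config = {
        "server_notices": {
            "system_mxid_localpart": "notices",
            "system_mxid_display_name": "Server Notices",
        }
    }

    server_name = "example.com"  # assumed homeserver name
    localpart = config["server_notices"]["system_mxid_localpart"]

    # Equivalent to UserID(localpart, server_name).to_string()
    server_notices_mxid = "@%s:%s" % (localpart, server_name)
    assert server_notices_mxid == "@notices:example.com"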
diff --git a/synapse/config/spam_checker.py b/synapse/config/spam_checker.py
index 1502e9faba..e40797ab50 100644
--- a/synapse/config/spam_checker.py
+++ b/synapse/config/spam_checker.py
@@ -19,14 +19,14 @@ from ._base import Config
 
 
 class SpamCheckerConfig(Config):
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         self.spam_checker = None
 
         provider = config.get("spam_checker", None)
         if provider is not None:
             self.spam_checker = load_module(provider)
 
-    def default_config(self, **kwargs):
+    def generate_config_section(self, **kwargs):
         return """\
         #spam_checker:
         #  module: "my_custom_project.SuperSpamChecker"
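A hedged sketch of what a module referenced by `spam_checker` might look like;
the authoritative callback names live in synapse/events/spamcheck.py, so treat
check_event_for_spam() and parse_config() below as assumptions about that interface:

    class SuperSpamChecker(object):
        def __init__(self, config):
            self.blocked_words = config.get("blocked_words", [])

        @staticmethod
        def parse_config(config):
            # load_module() is expected to call this to validate the `config` block.
            return config or {}

        def check_event_for_spam(self, event):
            # Return True to reject the event as spam.
            body = event.get("content", {}).get("body", "")
            return any(word in body for word in self.blocked_words)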
diff --git a/synapse/config/stats.py b/synapse/config/stats.py
new file mode 100644
index 0000000000..b518a3ed9c
--- /dev/null
+++ b/synapse/config/stats.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import division
+
+import sys
+
+from ._base import Config
+
+
+class StatsConfig(Config):
+    """Stats Configuration
+    Configuration for the behaviour of synapse's stats engine
+    """
+
+    def read_config(self, config, **kwargs):
+        self.stats_enabled = True
+        self.stats_bucket_size = 86400
+        self.stats_retention = sys.maxsize
+        stats_config = config.get("stats", None)
+        if stats_config:
+            self.stats_enabled = stats_config.get("enabled", self.stats_enabled)
+            self.stats_bucket_size = (
+                self.parse_duration(stats_config.get("bucket_size", "1d")) / 1000
+            )
+            self.stats_retention = (
+                self.parse_duration(
+                    stats_config.get("retention", "%ds" % (sys.maxsize,))
+                )
+                / 1000
+            )
+
+    def generate_config_section(self, config_dir_path, server_name, **kwargs):
+        return """
+        # Local statistics collection. Used in populating the room directory.
+        #
+        # 'bucket_size' controls how large each statistics timeslice is. It can
+        # be defined in a human-readable short form -- e.g. "1d", "1y".
+        #
+        # 'retention' controls how long historical statistics will be kept for.
+        # It can be defined in a human-readable short form -- e.g. "1d", "1y".
+        #
+        #stats:
+        #   enabled: true
+        #   bucket_size: 1d
+        #   retention: 1y
+        """
diff --git a/synapse/config/third_party_event_rules.py b/synapse/config/third_party_event_rules.py
new file mode 100644
index 0000000000..b3431441b9
--- /dev/null
+++ b/synapse/config/third_party_event_rules.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.util.module_loader import load_module
+
+from ._base import Config
+
+
+class ThirdPartyRulesConfig(Config):
+    def read_config(self, config, **kwargs):
+        self.third_party_event_rules = None
+
+        provider = config.get("third_party_event_rules", None)
+        if provider is not None:
+            self.third_party_event_rules = load_module(provider)
+
+    def generate_config_section(self, **kwargs):
+        return """\
+        # Server admins can define a Python module that implements extra rules for
+        # allowing or denying incoming events. In order to work, this module needs to
+        # implement the methods defined in synapse/events/third_party_rules.py.
+        #
+        # This feature is designed to be used in closed federations only, where each
+        # participating server enforces the same rules.
+        #
+        #third_party_event_rules:
+        #  module: "my_custom_project.SuperRulesSet"
+        #  config:
+        #    example_option: 'things'
+        """
diff --git a/synapse/config/tls.py b/synapse/config/tls.py
index 72dd5926f9..8fcf801418 100644
--- a/synapse/config/tls.py
+++ b/synapse/config/tls.py
@@ -33,7 +33,7 @@ logger = logging.getLogger(__name__)
 
 
 class TlsConfig(Config):
-    def read_config(self, config):
+    def read_config(self, config, config_dir_path, **kwargs):
 
         acme_config = config.get("acme", None)
         if acme_config is None:
@@ -42,14 +42,18 @@ class TlsConfig(Config):
         self.acme_enabled = acme_config.get("enabled", False)
 
         # hyperlink complains on py2 if this is not a Unicode
-        self.acme_url = six.text_type(acme_config.get(
-            "url", u"https://acme-v01.api.letsencrypt.org/directory"
-        ))
+        self.acme_url = six.text_type(
+            acme_config.get("url", "https://acme-v01.api.letsencrypt.org/directory")
+        )
         self.acme_port = acme_config.get("port", 80)
-        self.acme_bind_addresses = acme_config.get("bind_addresses", ['::', '0.0.0.0'])
+        self.acme_bind_addresses = acme_config.get("bind_addresses", ["::", "0.0.0.0"])
         self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30)
         self.acme_domain = acme_config.get("domain", config.get("server_name"))
 
+        self.acme_account_key_file = self.abspath(
+            acme_config.get("account_key_file", config_dir_path + "/client.key")
+        )
+
         self.tls_certificate_file = self.abspath(config.get("tls_certificate_path"))
         self.tls_private_key_file = self.abspath(config.get("tls_private_key_path"))
 
@@ -74,12 +78,12 @@ class TlsConfig(Config):
 
         # Whether to verify certificates on outbound federation traffic
         self.federation_verify_certificates = config.get(
-            "federation_verify_certificates", False,
+            "federation_verify_certificates", True
         )
 
         # Whitelist of domains to not verify certificates for
         fed_whitelist_entries = config.get(
-            "federation_certificate_verification_whitelist", [],
+            "federation_certificate_verification_whitelist", []
         )
 
         # Support globs (*) in whitelist values
@@ -90,9 +94,7 @@ class TlsConfig(Config):
             self.federation_certificate_verification_whitelist.append(entry_regex)
 
         # List of custom certificate authorities for federation traffic validation
-        custom_ca_list = config.get(
-            "federation_custom_ca_list", None,
-        )
+        custom_ca_list = config.get("federation_custom_ca_list", None)
 
         # Read in and parse custom CA certificates
         self.federation_ca_trust_root = None
@@ -101,21 +103,24 @@ class TlsConfig(Config):
                 # A trustroot cannot be generated without any CA certificates.
                 # Raise an error if this option has been specified without any
                 # corresponding certificates.
-                raise ConfigError("federation_custom_ca_list specified without "
-                                  "any certificate files")
+                raise ConfigError(
+                    "federation_custom_ca_list specified without "
+                    "any certificate files"
+                )
 
             certs = []
             for ca_file in custom_ca_list:
                 logger.debug("Reading custom CA certificate file: %s", ca_file)
-                content = self.read_file(ca_file)
+                content = self.read_file(ca_file, "federation_custom_ca_list")
 
                 # Parse the CA certificates
                 try:
                     cert_base = Certificate.loadPEM(content)
                     certs.append(cert_base)
                 except Exception as e:
-                    raise ConfigError("Error parsing custom CA certificate file %s: %s"
-                                      % (ca_file, e))
+                    raise ConfigError(
+                        "Error parsing custom CA certificate file %s: %s" % (ca_file, e)
+                    )
 
             self.federation_ca_trust_root = trustRootFromCertificates(certs)
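The same PEM-to-trust-root conversion in isolation, using the Twisted helpers the
code above relies on (the certificate path is illustrative):

    from twisted.internet.ssl import Certificate, trustRootFromCertificates

    with open("/etc/ssl/certs/my-federation-ca.pem") as f:  # illustrative path
        pem = f.read()

    trust_root = trustRootFromCertificates([Certificate.loadPEM(pem)])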
 
@@ -146,17 +151,21 @@ class TlsConfig(Config):
             return None
 
         try:
-            with open(self.tls_certificate_file, 'rb') as f:
+            with open(self.tls_certificate_file, "rb") as f:
                 cert_pem = f.read()
         except Exception as e:
-            raise ConfigError("Failed to read existing certificate file %s: %s"
-                              % (self.tls_certificate_file, e))
+            raise ConfigError(
+                "Failed to read existing certificate file %s: %s"
+                % (self.tls_certificate_file, e)
+            )
 
         try:
             tls_certificate = crypto.load_certificate(crypto.FILETYPE_PEM, cert_pem)
         except Exception as e:
-            raise ConfigError("Failed to parse existing certificate file %s: %s"
-                              % (self.tls_certificate_file, e))
+            raise ConfigError(
+                "Failed to parse existing certificate file %s: %s"
+                % (self.tls_certificate_file, e)
+            )
 
         if not allow_self_signed:
             if tls_certificate.get_subject() == tls_certificate.get_issuer():
@@ -166,7 +175,7 @@ class TlsConfig(Config):
 
         # YYYYMMDDhhmmssZ -- in UTC
         expires_on = datetime.strptime(
-            tls_certificate.get_notAfter().decode('ascii'), "%Y%m%d%H%M%SZ"
+            tls_certificate.get_notAfter().decode("ascii"), "%Y%m%d%H%M%SZ"
         )
         now = datetime.utcnow()
         days_remaining = (expires_on - now).days
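The notAfter handling in isolation: pyOpenSSL exposes the expiry timestamp as
bytes in the form b"YYYYMMDDhhmmssZ" (UTC), hence the strptime format above (the
literal bytes here are illustrative):

    from datetime import datetime

    not_after = b"20301231235959Z"  # e.g. tls_certificate.get_notAfter()
    expires_on = datetime.strptime(not_after.decode("ascii"), "%Y%m%d%H%M%SZ")
    days_remaining = (expires_on - datetime.utcnow()).days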
@@ -191,7 +200,8 @@ class TlsConfig(Config):
             except Exception as e:
                 logger.info(
                     "Unable to read TLS certificate (%s). Ignoring as no "
-                    "tls listeners enabled.", e,
+                    "tls listeners enabled.",
+                    e,
                 )
 
         self.tls_fingerprints = list(self._original_tls_fingerprints)
@@ -205,18 +215,21 @@ class TlsConfig(Config):
             sha256_fingerprint = encode_base64(sha256(x509_certificate_bytes).digest())
             sha256_fingerprints = set(f["sha256"] for f in self.tls_fingerprints)
             if sha256_fingerprint not in sha256_fingerprints:
-                self.tls_fingerprints.append({u"sha256": sha256_fingerprint})
+                self.tls_fingerprints.append({"sha256": sha256_fingerprint})
 
-    def default_config(self, config_dir_path, server_name, **kwargs):
+    def generate_config_section(
+        self, config_dir_path, server_name, data_dir_path, **kwargs
+    ):
         base_key_name = os.path.join(config_dir_path, server_name)
 
         tls_certificate_path = base_key_name + ".tls.crt"
         tls_private_key_path = base_key_name + ".tls.key"
+        default_acme_account_file = os.path.join(data_dir_path, "acme_account.key")
 
         # this is to avoid the max line length. Sorrynotsorry
         proxypassline = (
-            'ProxyPass /.well-known/acme-challenge '
-            'http://localhost:8009/.well-known/acme-challenge'
+            "ProxyPass /.well-known/acme-challenge "
+            "http://localhost:8009/.well-known/acme-challenge"
         )
 
         return (
@@ -241,12 +254,12 @@ class TlsConfig(Config):
         #
         #tls_private_key_path: "%(tls_private_key_path)s"
 
-        # Whether to verify TLS certificates when sending federation traffic.
+        # Whether to verify TLS server certificates for outbound federation requests.
         #
-        # This currently defaults to `false`, however this will change in
-        # Synapse 1.0 when valid federation certificates will be required.
+        # Defaults to `true`. To disable certificate verification, uncomment the
+        # following line.
         #
-        #federation_verify_certificates: true
+        #federation_verify_certificates: false
 
         # Skip federation certificate verification on the following whitelist
         # of domains.
@@ -337,6 +350,13 @@ class TlsConfig(Config):
             #
             #domain: matrix.example.com
 
+            # File to use for the ACME account key. This will be generated if it
+            # doesn't exist.
+            #
+            # If unspecified, we will use CONFDIR/client.key.
+            #
+            account_key_file: %(default_acme_account_file)s
+
         # List of allowed TLS fingerprints for this server to publish along
         # with the signing keys for this server. Other matrix servers that
         # make HTTPS requests to this server will check that the TLS
diff --git a/synapse/config/user_directory.py b/synapse/config/user_directory.py
index 142754a7dc..f6313e17d4 100644
--- a/synapse/config/user_directory.py
+++ b/synapse/config/user_directory.py
@@ -21,19 +21,19 @@ class UserDirectoryConfig(Config):
     Configuration for the behaviour of the /user_directory API
     """
 
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         self.user_directory_search_enabled = True
         self.user_directory_search_all_users = False
         user_directory_config = config.get("user_directory", None)
         if user_directory_config:
-            self.user_directory_search_enabled = (
-                user_directory_config.get("enabled", True)
+            self.user_directory_search_enabled = user_directory_config.get(
+                "enabled", True
             )
-            self.user_directory_search_all_users = (
-                user_directory_config.get("search_all_users", False)
+            self.user_directory_search_all_users = user_directory_config.get(
+                "search_all_users", False
             )
 
-    def default_config(self, config_dir_path, server_name, **kwargs):
+    def generate_config_section(self, config_dir_path, server_name, **kwargs):
         return """
         # User Directory configuration
         #
@@ -43,9 +43,9 @@ class UserDirectoryConfig(Config):
         #
         # 'search_all_users' defines whether to search all users visible to your HS
         # when searching the user directory, rather than limiting to users visible
-        # in public rooms.  Defaults to false.  If you set it True, you'll have to run
-        # UPDATE user_directory_stream_pos SET stream_id = NULL;
-        # on your database to tell it to rebuild the user_directory search indexes.
+        # in public rooms.  Defaults to false.  If you set it to true, you'll have to
+        # rebuild the user_directory search indexes, see
+        # https://github.com/matrix-org/synapse/blob/master/docs/user_directory.md
         #
         #user_directory:
         #  enabled: true
diff --git a/synapse/config/voip.py b/synapse/config/voip.py
index 2a1f005a37..2ca0e1cf70 100644
--- a/synapse/config/voip.py
+++ b/synapse/config/voip.py
@@ -16,18 +16,17 @@ from ._base import Config
 
 
 class VoipConfig(Config):
-
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         self.turn_uris = config.get("turn_uris", [])
         self.turn_shared_secret = config.get("turn_shared_secret")
         self.turn_username = config.get("turn_username")
         self.turn_password = config.get("turn_password")
         self.turn_user_lifetime = self.parse_duration(
-            config.get("turn_user_lifetime", "1h"),
+            config.get("turn_user_lifetime", "1h")
         )
         self.turn_allow_guests = config.get("turn_allow_guests", True)
 
-    def default_config(self, **kwargs):
+    def generate_config_section(self, **kwargs):
         return """\
         ## TURN ##
 
diff --git a/synapse/config/workers.py b/synapse/config/workers.py
index bfbd8b6c91..3b75471d85 100644
--- a/synapse/config/workers.py
+++ b/synapse/config/workers.py
@@ -21,7 +21,7 @@ class WorkerConfig(Config):
     They have their own pid_file and listener configuration. They use the
     replication_url to talk to the main synapse process."""
 
-    def read_config(self, config):
+    def read_config(self, config, **kwargs):
         self.worker_app = config.get("worker_app")
 
         # Canonicalise worker_app so that master always has None
@@ -46,18 +46,19 @@ class WorkerConfig(Config):
         self.worker_name = config.get("worker_name", self.worker_app)
 
         self.worker_main_http_uri = config.get("worker_main_http_uri", None)
-        self.worker_cpu_affinity = config.get("worker_cpu_affinity")
 
         # This option is really only here to support `--manhole` command line
         # argument.
         manhole = config.get("worker_manhole")
         if manhole:
-            self.worker_listeners.append({
-                "port": manhole,
-                "bind_addresses": ["127.0.0.1"],
-                "type": "manhole",
-                "tls": False,
-            })
+            self.worker_listeners.append(
+                {
+                    "port": manhole,
+                    "bind_addresses": ["127.0.0.1"],
+                    "type": "manhole",
+                    "tls": False,
+                }
+            )
 
         if self.worker_listeners:
             for listener in self.worker_listeners:
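In practice the `worker_manhole` shortcut above is equivalent to appending this
listener to `worker_listeners` by hand (the port is illustrative):

    worker_manhole = 9000

    manhole_listener = {
        "port": worker_manhole,
        "bind_addresses": ["127.0.0.1"],
        "type": "manhole",
        "tls": False,
    }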
@@ -67,7 +68,7 @@ class WorkerConfig(Config):
                 if bind_address:
                     bind_addresses.append(bind_address)
                 elif not bind_addresses:
-                    bind_addresses.append('')
+                    bind_addresses.append("")
 
     def read_arguments(self, args):
         # We support a bunch of command line arguments that override options in