author     Brendan Abolivier <babolivier@matrix.org>  2020-06-10 11:42:30 +0100
committer  Brendan Abolivier <babolivier@matrix.org>  2020-06-10 11:42:30 +0100
commit     ec0a7b9034806d6b2ba086bae58f5c6b0fd14672 (patch)
tree       f2af547b1342795e10548f8fb7a9cfc93e03df37 /synapse/config
parent     changelog (diff)
parent     1.15.0rc1 (diff)
download   synapse-ec0a7b9034806d6b2ba086bae58f5c6b0fd14672.tar.xz
Merge branch 'develop' into babolivier/mark_unread
Diffstat (limited to 'synapse/config')
-rw-r--r--  synapse/config/_base.py                    | 239
-rw-r--r--  synapse/config/_base.pyi                   | 139
-rw-r--r--  synapse/config/api.py                      |   2
-rw-r--r--  synapse/config/appservice.py               |  11
-rw-r--r--  synapse/config/cache.py                    | 198
-rw-r--r--  synapse/config/captcha.py                  |  24
-rw-r--r--  synapse/config/cas.py                      |   5
-rw-r--r--  synapse/config/consent_config.py           |  11
-rw-r--r--  synapse/config/database.py                 | 217
-rw-r--r--  synapse/config/emailconfig.py              | 293
-rw-r--r--  synapse/config/groups.py                   |   2
-rw-r--r--  synapse/config/homeserver.py               |  76
-rw-r--r--  synapse/config/jwt_config.py               |   2
-rw-r--r--  synapse/config/key.py                      |  79
-rw-r--r--  synapse/config/logger.py                   |  40
-rw-r--r--  synapse/config/metrics.py                  |   9
-rw-r--r--  synapse/config/oidc_config.py              | 203
-rw-r--r--  synapse/config/password.py                 |  41
-rw-r--r--  synapse/config/password_auth_providers.py  |  22
-rw-r--r--  synapse/config/push.py                     |   4
-rw-r--r--  synapse/config/ratelimiting.py             |  13
-rw-r--r--  synapse/config/redis.py                    |  34
-rw-r--r--  synapse/config/registration.py             | 140
-rw-r--r--  synapse/config/repository.py               |  51
-rw-r--r--  synapse/config/room_directory.py           |   4
-rw-r--r--  synapse/config/saml2_config.py             | 348
-rw-r--r--  synapse/config/server.py                   | 396
-rw-r--r--  synapse/config/server_notices_config.py    |   8
-rw-r--r--  synapse/config/spam_checker.py             |  40
-rw-r--r--  synapse/config/sso.py                      | 148
-rw-r--r--  synapse/config/stats.py                    |   2
-rw-r--r--  synapse/config/third_party_event_rules.py  |   2
-rw-r--r--  synapse/config/tls.py                      |  41
-rw-r--r--  synapse/config/tracer.py                   |   2
-rw-r--r--  synapse/config/user_directory.py           |   2
-rw-r--r--  synapse/config/voip.py                     |   4
-rw-r--r--  synapse/config/workers.py                  |  49
37 files changed, 2401 insertions, 500 deletions
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index 31f6530978..30d1050a91 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -18,7 +18,9 @@
 import argparse
 import errno
 import os
+from collections import OrderedDict
 from textwrap import dedent
+from typing import Any, MutableMapping, Optional
 
 from six import integer_types
 
@@ -51,7 +53,68 @@ Missing mandatory `server_name` config option.
 """
 
 
+CONFIG_FILE_HEADER = """\
+# Configuration file for Synapse.
+#
+# This is a YAML file: see [1] for a quick introduction. Note in particular
+# that *indentation is important*: all the elements of a list or dictionary
+# should have the same indentation.
+#
+# [1] https://docs.ansible.com/ansible/latest/reference_appendices/YAMLSyntax.html
+
+"""
+
+
+def path_exists(file_path):
+    """Check if a file exists
+
+    Unlike os.path.exists, this throws an exception if there is an error
+    checking if the file exists (for example, if there is a perms error on
+    the parent dir).
+
+    Returns:
+        bool: True if the file exists; False if not.
+    """
+    try:
+        os.stat(file_path)
+        return True
+    except OSError as e:
+        if e.errno != errno.ENOENT:
+            raise e
+        return False
+
+
 class Config(object):
+    """
+    A configuration section, containing configuration keys and values.
+
+    Attributes:
+        section (str): The section title of this config object, such as
+            "tls" or "logger". This is used to refer to it on the root
+            config object (for example, `config.tls.some_option`). Must be
+            defined in subclasses.
+    """
+
+    section = None
+
+    def __init__(self, root_config=None):
+        self.root = root_config
+
+    def __getattr__(self, item: str) -> Any:
+        """
+        Try and fetch a configuration option that does not exist on this class.
+
+        This is so that existing configs that rely on `self.value`, where value
+        is actually from a different config section, continue to work.
+        """
+        if item in ["generate_config_section", "read_config"]:
+            raise AttributeError(item)
+
+        if self.root is None:
+            raise AttributeError(item)
+        else:
+            return self.root._get_unclassed_config(self.section, item)
+
     @staticmethod
     def parse_size(value):
         if isinstance(value, integer_types):
@@ -88,22 +151,7 @@ class Config(object):
 
     @classmethod
     def path_exists(cls, file_path):
-        """Check if a file exists
-
-        Unlike os.path.exists, this throws an exception if there is an error
-        checking if the file exists (for example, if there is a perms error on
-        the parent dir).
-
-        Returns:
-            bool: True if the file exists; False if not.
-        """
-        try:
-            os.stat(file_path)
-            return True
-        except OSError as e:
-            if e.errno != errno.ENOENT:
-                raise e
-            return False
+        return path_exists(file_path)
 
     @classmethod
     def check_file(cls, file_path, config_name):
@@ -136,42 +184,106 @@ class Config(object):
         with open(file_path) as file_stream:
             return file_stream.read()
 
-    def invoke_all(self, name, *args, **kargs):
-        """Invoke all instance methods with the given name and arguments in the
-        class's MRO.
+
+class RootConfig(object):
+    """
+    Holder of an application's configuration.
+
+    What configuration this object holds is defined by `config_classes`, a list
+    of Config classes that will be instantiated and given the contents of a
+    configuration file to read. They can then be accessed on this class by their
+    section name, defined in the Config or dynamically set to be the name of the
+    class, lower-cased and with "Config" removed.
+    """
+
+    config_classes = []
+
+    def __init__(self):
+        self._configs = OrderedDict()
+
+        for config_class in self.config_classes:
+            if config_class.section is None:
+                raise ValueError("%r requires a section name" % (config_class,))
+
+            try:
+                conf = config_class(self)
+            except Exception as e:
+                raise Exception("Failed making %s: %r" % (config_class.section, e))
+            self._configs[config_class.section] = conf
+
+    def __getattr__(self, item: str) -> Any:
+        """
+        Redirect lookups on this object either to config objects, or values on
+        config objects, so that `config.tls.blah` works, as well as legacy uses
+        of things like `config.server_name`. It will first look up the config
+        section name, and then values on those config classes.
+        """
+        if item in self._configs.keys():
+            return self._configs[item]
+
+        return self._get_unclassed_config(None, item)
+
+    def _get_unclassed_config(self, asking_section: Optional[str], item: str):
+        """
+        Fetch a config value from one of the instantiated config classes that
+        has not been fetched directly.
 
         Args:
-            name (str): Name of function to invoke
+            asking_section: The section the lookup came from, if it came from
+                a Config child. That section itself is skipped in the search.
+            item: The configuration value key.
+
+        Raises:
+            AttributeError if no config classes have the config key. The body
+                will contain what sections were checked.
+        """
+        for key, val in self._configs.items():
+            if key == asking_section:
+                continue
+
+            if item in dir(val):
+                return getattr(val, item)
+
+        raise AttributeError(item, "not found in %s" % (list(self._configs.keys()),))
+
+    def invoke_all(self, func_name: str, *args, **kwargs) -> MutableMapping[str, Any]:
+        """
+        Invoke a function on all instantiated config objects this RootConfig is
+        configured to use.
+
+        Args:
+            func_name: Name of function to invoke
             *args
             **kwargs
-
         Returns:
-            list: The list of the return values from each method called
+            ordered dictionary of config section name and the result of the
+            function from it.
         """
-        results = []
-        for cls in type(self).mro():
-            if name in cls.__dict__:
-                results.append(getattr(cls, name)(self, *args, **kargs))
-        return results
+        res = OrderedDict()
+
+        for name, config in self._configs.items():
+            if hasattr(config, func_name):
+                res[name] = getattr(config, func_name)(*args, **kwargs)
+
+        return res
 
     @classmethod
-    def invoke_all_static(cls, name, *args, **kargs):
-        """Invoke all static methods with the given name and arguments in the
-        class's MRO.
+    def invoke_all_static(cls, func_name: str, *args, **kwargs):
+        """
+        Invoke a static function on config objects this RootConfig is
+        configured to use.
 
         Args:
-            name (str): Name of function to invoke
+            func_name: Name of function to invoke
             *args
             **kwargs
-
         Returns:
-            list: The list of the return values from each method called
+            None. Nothing is collected; the named static method is simply
+            invoked on each config class that defines it.
         """
-        results = []
-        for c in cls.mro():
-            if name in c.__dict__:
-                results.append(getattr(c, name)(*args, **kargs))
-        return results
+        for config in cls.config_classes:
+            if hasattr(config, func_name):
+                getattr(config, func_name)(*args, **kwargs)
 
     def generate_config(
         self,
@@ -182,12 +294,12 @@ class Config(object):
         report_stats=None,
         open_private_ports=False,
         listeners=None,
-        database_conf=None,
         tls_certificate_path=None,
         tls_private_key_path=None,
         acme_domain=None,
     ):
-        """Build a default configuration file
+        """
+        Build a default configuration file
 
         This is used when the user explicitly asks us to generate a config file
         (e.g. with --generate-config).
@@ -242,7 +354,8 @@ class Config(object):
         Returns:
             str: the yaml config file
         """
-        return "\n\n".join(
+
+        return CONFIG_FILE_HEADER + "\n\n".join(
             dedent(conf)
             for conf in self.invoke_all(
                 "generate_config_section",
@@ -253,11 +366,10 @@ class Config(object):
                 report_stats=report_stats,
                 open_private_ports=open_private_ports,
                 listeners=listeners,
-                database_conf=database_conf,
                 tls_certificate_path=tls_certificate_path,
                 tls_private_key_path=tls_private_key_path,
                 acme_domain=acme_domain,
-            )
+            ).values()
         )
 
     @classmethod
@@ -356,8 +468,8 @@ class Config(object):
 
         Returns: Config object, or None if --generate-config or --generate-keys was set
         """
-        config_parser = argparse.ArgumentParser(add_help=False)
-        config_parser.add_argument(
+        parser = argparse.ArgumentParser(description=description)
+        parser.add_argument(
             "-c",
             "--config-path",
             action="append",
@@ -366,7 +478,7 @@ class Config(object):
             " may specify directories containing *.yaml files.",
         )
 
-        generate_group = config_parser.add_argument_group("Config generation")
+        generate_group = parser.add_argument_group("Config generation")
         generate_group.add_argument(
             "--generate-config",
             action="store_true",
@@ -414,12 +526,13 @@ class Config(object):
             ),
         )
 
-        config_args, remaining_args = config_parser.parse_known_args(argv)
+        cls.invoke_all_static("add_arguments", parser)
+        config_args = parser.parse_args(argv)
 
         config_files = find_config_files(search_paths=config_args.config_path)
 
         if not config_files:
-            config_parser.error(
+            parser.error(
                 "Must supply a config file.\nA config file can be automatically"
                 ' generated using "--generate-config -H SERVER_NAME'
                 ' -c CONFIG-FILE"'
@@ -438,13 +551,13 @@ class Config(object):
 
         if config_args.generate_config:
             if config_args.report_stats is None:
-                config_parser.error(
+                parser.error(
                     "Please specify either --report-stats=yes or --report-stats=no\n\n"
                     + MISSING_REPORT_STATS_SPIEL
                 )
 
             (config_path,) = config_files
-            if not cls.path_exists(config_path):
+            if not path_exists(config_path):
                 print("Generating config file %s" % (config_path,))
 
                 if config_args.data_directory:
@@ -469,11 +582,11 @@ class Config(object):
                     open_private_ports=config_args.open_private_ports,
                 )
 
-                if not cls.path_exists(config_dir_path):
+                if not path_exists(config_dir_path):
                     os.makedirs(config_dir_path)
                 with open(config_path, "w") as config_file:
-                    config_file.write("# vim:ft=yaml\n\n")
                     config_file.write(config_str)
+                    config_file.write("\n\n# vim:ft=yaml")
 
                 config_dict = yaml.safe_load(config_str)
                 obj.generate_missing_files(config_dict, config_dir_path)
@@ -497,15 +610,6 @@ class Config(object):
                 )
                 generate_missing_configs = True
 
-        parser = argparse.ArgumentParser(
-            parents=[config_parser],
-            description=description,
-            formatter_class=argparse.RawDescriptionHelpFormatter,
-        )
-
-        obj.invoke_all_static("add_arguments", parser)
-        args = parser.parse_args(remaining_args)
-
         config_dict = read_config_files(config_files)
         if generate_missing_configs:
             obj.generate_missing_files(config_dict, config_dir_path)
@@ -514,11 +618,11 @@ class Config(object):
         obj.parse_config_dict(
             config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
         )
-        obj.invoke_all("read_arguments", args)
+        obj.invoke_all("read_arguments", config_args)
 
         return obj
 
-    def parse_config_dict(self, config_dict, config_dir_path, data_dir_path):
+    def parse_config_dict(self, config_dict, config_dir_path=None, data_dir_path=None):
         """Read the information from the config dict into this Config object.
 
         Args:
@@ -553,6 +657,12 @@ def read_config_files(config_files):
     for config_file in config_files:
         with open(config_file) as file_stream:
             yaml_config = yaml.safe_load(file_stream)
+
+        if not isinstance(yaml_config, dict):
+            err = "File %r is empty or doesn't parse into a key-value map. IGNORING."
+            print(err % (config_file,))
+            continue
+
         specified_config.update(yaml_config)
 
     if "server_name" not in specified_config:
@@ -607,3 +717,6 @@ def find_config_files(search_paths):
             else:
                 config_files.append(config_path)
     return config_files
+
+
+__all__ = ["Config", "RootConfig"]
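The `RootConfig` introduced above owns one instance of each `Config` subclass and routes attribute access in both directions: `config.<section>.<key>` reaches a specific section, while legacy flat lookups such as `config.server_name` fall back to searching every section via `_get_unclassed_config`. Below is a minimal, self-contained sketch of that pattern; the `Root`, `Section`, `ServerSection` and `TlsSection` names are illustrative stand-ins, not Synapse's own classes.

from collections import OrderedDict
from typing import Any


class Section:
    """Simplified stand-in for Config: one named block of settings."""

    section = None  # subclasses must set this

    def __init__(self, root=None):
        self.root = root


class ServerSection(Section):
    section = "server"

    def read_config(self, config):
        self.server_name = config.get("server_name")


class TlsSection(Section):
    section = "tls"

    def read_config(self, config):
        self.tls_certificate_path = config.get("tls_certificate_path")


class Root:
    """Simplified stand-in for RootConfig."""

    config_classes = [ServerSection, TlsSection]

    def __init__(self):
        self._configs = OrderedDict(
            (cls.section, cls(self)) for cls in self.config_classes
        )

    def __getattr__(self, item: str) -> Any:
        # Section names first, then fall back to searching each section
        # for the attribute (the legacy flat access path).
        if item in self._configs:
            return self._configs[item]
        for section in self._configs.values():
            if item in dir(section):
                return getattr(section, item)
        raise AttributeError(item)

    def invoke_all(self, func_name, *args, **kwargs):
        # Like RootConfig.invoke_all: call func_name on every section that
        # defines it and key the results by section name.
        return OrderedDict(
            (name, getattr(conf, func_name)(*args, **kwargs))
            for name, conf in self._configs.items()
            if hasattr(conf, func_name)
        )


root = Root()
root.invoke_all("read_config", {"server_name": "example.com"})
print(root.server.server_name)  # "example.com", section-style access
print(root.server_name)         # same value via the legacy flat lookup
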
diff --git a/synapse/config/_base.pyi b/synapse/config/_base.pyi
new file mode 100644
index 0000000000..9e576060d4
--- /dev/null
+++ b/synapse/config/_base.pyi
@@ -0,0 +1,139 @@
+from typing import Any, List, Optional
+
+from synapse.config import (
+    api,
+    appservice,
+    captcha,
+    cas,
+    consent_config,
+    database,
+    emailconfig,
+    groups,
+    jwt_config,
+    key,
+    logger,
+    metrics,
+    oidc_config,
+    password,
+    password_auth_providers,
+    push,
+    ratelimiting,
+    registration,
+    repository,
+    room_directory,
+    saml2_config,
+    server,
+    server_notices_config,
+    spam_checker,
+    sso,
+    stats,
+    third_party_event_rules,
+    tls,
+    tracer,
+    user_directory,
+    voip,
+    workers,
+)
+
+class ConfigError(Exception): ...
+
+MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS: str
+MISSING_REPORT_STATS_SPIEL: str
+MISSING_SERVER_NAME: str
+
+def path_exists(file_path: str): ...
+
+class RootConfig:
+    server: server.ServerConfig
+    tls: tls.TlsConfig
+    database: database.DatabaseConfig
+    logging: logger.LoggingConfig
+    ratelimit: ratelimiting.RatelimitConfig
+    media: repository.ContentRepositoryConfig
+    captcha: captcha.CaptchaConfig
+    voip: voip.VoipConfig
+    registration: registration.RegistrationConfig
+    metrics: metrics.MetricsConfig
+    api: api.ApiConfig
+    appservice: appservice.AppServiceConfig
+    key: key.KeyConfig
+    saml2: saml2_config.SAML2Config
+    cas: cas.CasConfig
+    sso: sso.SSOConfig
+    oidc: oidc_config.OIDCConfig
+    jwt: jwt_config.JWTConfig
+    password: password.PasswordConfig
+    email: emailconfig.EmailConfig
+    worker: workers.WorkerConfig
+    authproviders: password_auth_providers.PasswordAuthProviderConfig
+    push: push.PushConfig
+    spamchecker: spam_checker.SpamCheckerConfig
+    groups: groups.GroupsConfig
+    userdirectory: user_directory.UserDirectoryConfig
+    consent: consent_config.ConsentConfig
+    stats: stats.StatsConfig
+    servernotices: server_notices_config.ServerNoticesConfig
+    roomdirectory: room_directory.RoomDirectoryConfig
+    thirdpartyrules: third_party_event_rules.ThirdPartyRulesConfig
+    tracer: tracer.TracerConfig
+
+    config_classes: List = ...
+    def __init__(self) -> None: ...
+    def invoke_all(self, func_name: str, *args: Any, **kwargs: Any): ...
+    @classmethod
+    def invoke_all_static(cls, func_name: str, *args: Any, **kwargs: Any) -> None: ...
+    def __getattr__(self, item: str): ...
+    def parse_config_dict(
+        self,
+        config_dict: Any,
+        config_dir_path: Optional[Any] = ...,
+        data_dir_path: Optional[Any] = ...,
+    ) -> None: ...
+    read_config: Any = ...
+    def generate_config(
+        self,
+        config_dir_path: str,
+        data_dir_path: str,
+        server_name: str,
+        generate_secrets: bool = ...,
+        report_stats: Optional[str] = ...,
+        open_private_ports: bool = ...,
+        listeners: Optional[Any] = ...,
+        database_conf: Optional[Any] = ...,
+        tls_certificate_path: Optional[str] = ...,
+        tls_private_key_path: Optional[str] = ...,
+        acme_domain: Optional[str] = ...,
+    ): ...
+    @classmethod
+    def load_or_generate_config(cls, description: Any, argv: Any): ...
+    @classmethod
+    def load_config(cls, description: Any, argv: Any): ...
+    @classmethod
+    def add_arguments_to_parser(cls, config_parser: Any) -> None: ...
+    @classmethod
+    def load_config_with_parser(cls, parser: Any, argv: Any): ...
+    def generate_missing_files(
+        self, config_dict: dict, config_dir_path: str
+    ) -> None: ...
+
+class Config:
+    root: RootConfig
+    def __init__(self, root_config: Optional[RootConfig] = ...) -> None: ...
+    def __getattr__(self, item: str, from_root: bool = ...): ...
+    @staticmethod
+    def parse_size(value: Any): ...
+    @staticmethod
+    def parse_duration(value: Any): ...
+    @staticmethod
+    def abspath(file_path: Optional[str]): ...
+    @classmethod
+    def path_exists(cls, file_path: str): ...
+    @classmethod
+    def check_file(cls, file_path: str, config_name: str): ...
+    @classmethod
+    def ensure_directory(cls, dir_path: str): ...
+    @classmethod
+    def read_file(cls, file_path: str, config_name: str): ...
+
+def read_config_files(config_files: List[str]): ...
+def find_config_files(search_paths: List[str]): ...
diff --git a/synapse/config/api.py b/synapse/config/api.py
index dddea79a8a..74cd53a8ed 100644
--- a/synapse/config/api.py
+++ b/synapse/config/api.py
@@ -18,6 +18,8 @@ from ._base import Config
 
 
 class ApiConfig(Config):
+    section = "api"
+
     def read_config(self, config, **kwargs):
         self.room_invite_state_types = config.get(
             "room_invite_state_types",
diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py
index 8387ff6805..ca43e96bd1 100644
--- a/synapse/config/appservice.py
+++ b/synapse/config/appservice.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import logging
+from typing import Dict
 
 from six import string_types
 from six.moves.urllib import parse as urlparse
@@ -29,6 +30,8 @@ logger = logging.getLogger(__name__)
 
 
 class AppServiceConfig(Config):
+    section = "appservice"
+
     def read_config(self, config, **kwargs):
         self.app_service_config_files = config.get("app_service_config_files", [])
         self.notify_appservices = config.get("notify_appservices", True)
@@ -45,7 +48,7 @@ class AppServiceConfig(Config):
         # Uncomment to enable tracking of application service IP addresses. Implicitly
         # enables MAU tracking for application service users.
         #
-        #track_appservice_user_ips: True
+        #track_appservice_user_ips: true
         """
 
 
@@ -56,8 +59,8 @@ def load_appservices(hostname, config_files):
         return []
 
     # Dicts of value -> filename
-    seen_as_tokens = {}
-    seen_ids = {}
+    seen_as_tokens = {}  # type: Dict[str, str]
+    seen_ids = {}  # type: Dict[str, str]
 
     appservices = []
 
@@ -131,7 +134,7 @@ def _load_appservice(hostname, as_info, config_filename):
             for regex_obj in as_info["namespaces"][ns]:
                 if not isinstance(regex_obj, dict):
                     raise ValueError(
-                        "Expected namespace entry in %s to be an object," " but got %s",
+                        "Expected namespace entry in %s to be an object, but got %s",
                         ns,
                         regex_obj,
                     )
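For context on the ValueError whose message is tidied above: every entry under an appservice registration's `namespaces` must be a mapping, not a bare string. A trimmed, entirely made-up registration of the kind `load_appservices` reads from the files listed in `app_service_config_files` might look like this in Python form:

# Hypothetical appservice registration; in practice this lives in a YAML
# file referenced by app_service_config_files.
as_info = {
    "id": "irc-bridge",
    "url": "http://localhost:9000",
    "as_token": "as_secret_token",
    "hs_token": "hs_secret_token",
    "sender_localpart": "ircbot",
    "namespaces": {
        # Each entry must be a mapping like the one below; a bare string
        # here is what triggers the "Expected namespace entry ... to be an
        # object" error in _load_appservice.
        "users": [{"exclusive": True, "regex": "@irc_.*:example\\.com"}],
        "aliases": [],
        "rooms": [],
    },
}
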
diff --git a/synapse/config/cache.py b/synapse/config/cache.py
new file mode 100644
index 0000000000..0672538796
--- /dev/null
+++ b/synapse/config/cache.py
@@ -0,0 +1,198 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import re
+from typing import Callable, Dict
+
+from ._base import Config, ConfigError
+
+# The prefix for all cache factor-related environment variables
+_CACHE_PREFIX = "SYNAPSE_CACHE_FACTOR"
+
+# Map from canonicalised cache name to cache.
+_CACHES = {}
+
+_DEFAULT_FACTOR_SIZE = 0.5
+_DEFAULT_EVENT_CACHE_SIZE = "10K"
+
+
+class CacheProperties(object):
+    def __init__(self):
+        # The default factor size for all caches
+        self.default_factor_size = float(
+            os.environ.get(_CACHE_PREFIX, _DEFAULT_FACTOR_SIZE)
+        )
+        self.resize_all_caches_func = None
+
+
+properties = CacheProperties()
+
+
+def _canonicalise_cache_name(cache_name: str) -> str:
+    """Gets the canonical form of the cache name.
+
+    Since we specify cache names in config and environment variables we need to
+    ignore case and special characters. For example, some caches have asterisks
+    in their name to denote that they're not attached to a particular database
+    function, and these asterisks need to be stripped out
+    """
+
+    cache_name = re.sub(r"[^A-Za-z_1-9]", "", cache_name)
+
+    return cache_name.lower()
+
+
+def add_resizable_cache(cache_name: str, cache_resize_callback: Callable):
+    """Register a cache whose size can dynamically change
+
+    Args:
+        cache_name: A reference to the cache
+        cache_resize_callback: A callback function that will be run whenever
+            the cache needs to be resized
+    """
+    # Some caches have '*' in them which we strip out.
+    cache_name = _canonicalise_cache_name(cache_name)
+
+    _CACHES[cache_name] = cache_resize_callback
+
+    # Ensure all loaded caches are sized appropriately
+    #
+    # This method should only run once the config has been read,
+    # as it uses values read from it
+    if properties.resize_all_caches_func:
+        properties.resize_all_caches_func()
+
+
+class CacheConfig(Config):
+    section = "caches"
+    _environ = os.environ
+
+    @staticmethod
+    def reset():
+        """Resets the caches to their defaults. Used for tests."""
+        properties.default_factor_size = float(
+            os.environ.get(_CACHE_PREFIX, _DEFAULT_FACTOR_SIZE)
+        )
+        properties.resize_all_caches_func = None
+        _CACHES.clear()
+
+    def generate_config_section(self, **kwargs):
+        return """\
+        ## Caching ##
+
+        # Caching can be configured through the following options.
+        #
+        # A cache 'factor' is a multiplier that can be applied to each of
+        # Synapse's caches in order to increase or decrease the maximum
+        # number of entries that can be stored.
+
+        # The number of events to cache in memory. Not affected by
+        # caches.global_factor.
+        #
+        #event_cache_size: 10K
+
+        caches:
+           # Controls the global cache factor, which is the default cache factor
+           # for all caches if a specific factor for that cache is not otherwise
+           # set.
+           #
+           # This can also be set by the "SYNAPSE_CACHE_FACTOR" environment
+           # variable. Setting by environment variable takes priority over
+           # setting through the config file.
+           #
+           # Defaults to 0.5, which will halve the size of all caches.
+           #
+           #global_factor: 1.0
+
+           # A dictionary of cache name to cache factor for that individual
+           # cache. Overrides the global cache factor for a given cache.
+           #
+           # These can also be set through environment variables comprised
+           # of "SYNAPSE_CACHE_FACTOR_" + the name of the cache in capital
+           # letters and underscores. Setting by environment variable
+           # takes priority over setting through the config file.
+           # Ex. SYNAPSE_CACHE_FACTOR_GET_USERS_WHO_SHARE_ROOM_WITH_USER=2.0
+           #
+           # Some caches have '*' and other characters that are not
+           # alphanumeric or underscores. These caches can be named with or
+           # without the special characters stripped. For example, to specify
+           # the cache factor for `*stateGroupCache*` via an environment
+           # variable would be `SYNAPSE_CACHE_FACTOR_STATEGROUPCACHE=2.0`.
+           #
+           per_cache_factors:
+             #get_users_who_share_room_with_user: 2.0
+        """
+
+    def read_config(self, config, **kwargs):
+        self.event_cache_size = self.parse_size(
+            config.get("event_cache_size", _DEFAULT_EVENT_CACHE_SIZE)
+        )
+        self.cache_factors = {}  # type: Dict[str, float]
+
+        cache_config = config.get("caches") or {}
+        self.global_factor = cache_config.get(
+            "global_factor", properties.default_factor_size
+        )
+        if not isinstance(self.global_factor, (int, float)):
+            raise ConfigError("caches.global_factor must be a number.")
+
+        # Set the global one so that it's reflected in new caches
+        properties.default_factor_size = self.global_factor
+
+        # Load cache factors from the config
+        individual_factors = cache_config.get("per_cache_factors") or {}
+        if not isinstance(individual_factors, dict):
+            raise ConfigError("caches.per_cache_factors must be a dictionary")
+
+        # Canonicalise the cache names *before* updating with the environment
+        # variables.
+        individual_factors = {
+            _canonicalise_cache_name(key): val
+            for key, val in individual_factors.items()
+        }
+
+        # Override factors from environment if necessary
+        individual_factors.update(
+            {
+                _canonicalise_cache_name(key[len(_CACHE_PREFIX) + 1 :]): float(val)
+                for key, val in self._environ.items()
+                if key.startswith(_CACHE_PREFIX + "_")
+            }
+        )
+
+        for cache, factor in individual_factors.items():
+            if not isinstance(factor, (int, float)):
+                raise ConfigError(
+                    "caches.per_cache_factors.%s must be a number" % (cache,)
+                )
+            self.cache_factors[cache] = factor
+
+        # Resize all caches (if necessary) with the new factors we've loaded
+        self.resize_all_caches()
+
+        # Store this function so that it can be called from other classes without
+        # needing an instance of Config
+        properties.resize_all_caches_func = self.resize_all_caches
+
+    def resize_all_caches(self):
+        """Ensure all cache sizes are up to date
+
+        For each cache, run the mapped callback function with either
+        a specific cache factor or the default, global one.
+        """
+        for cache_name, callback in _CACHES.items():
+            new_factor = self.cache_factors.get(cache_name, self.global_factor)
+            callback(new_factor)
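To make the environment-variable override concrete: per-cache factors from `caches.per_cache_factors` are canonicalised first, then updated from any `SYNAPSE_CACHE_FACTOR_<NAME>` variables, so the environment wins for any individual cache it names. The stand-alone sketch below mirrors `_canonicalise_cache_name` and the override step in `read_config`; the cache names and values are examples only.

import os
import re
from typing import Dict, Mapping

_CACHE_PREFIX = "SYNAPSE_CACHE_FACTOR"


def canonicalise(cache_name: str) -> str:
    # Same normalisation as _canonicalise_cache_name: drop characters such
    # as '*' and lower-case the result.
    return re.sub(r"[^A-Za-z_1-9]", "", cache_name).lower()


def resolve_factors(
    cache_config: dict, environ: Mapping[str, str] = os.environ
) -> Dict[str, float]:
    """Merge per-cache factors from the config file with environment
    overrides; the environment takes priority for caches it names."""
    factors = {
        canonicalise(name): float(value)
        for name, value in (cache_config.get("per_cache_factors") or {}).items()
    }
    factors.update(
        {
            canonicalise(key[len(_CACHE_PREFIX) + 1 :]): float(value)
            for key, value in environ.items()
            if key.startswith(_CACHE_PREFIX + "_")
        }
    )
    return factors


# Example inputs: one factor from the config file, one from the environment.
config = {"per_cache_factors": {"*stateGroupCache*": 1.5}}
env = {"SYNAPSE_CACHE_FACTOR_GET_USERS_WHO_SHARE_ROOM_WITH_USER": "2.0"}
print(resolve_factors(config, env))
# {'stategroupcache': 1.5, 'get_users_who_share_room_with_user': 2.0}
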
diff --git a/synapse/config/captcha.py b/synapse/config/captcha.py
index 8dac8152cf..82f04d7966 100644
--- a/synapse/config/captcha.py
+++ b/synapse/config/captcha.py
@@ -16,13 +16,14 @@ from ._base import Config
 
 
 class CaptchaConfig(Config):
+    section = "captcha"
+
     def read_config(self, config, **kwargs):
         self.recaptcha_private_key = config.get("recaptcha_private_key")
         self.recaptcha_public_key = config.get("recaptcha_public_key")
         self.enable_registration_captcha = config.get(
             "enable_registration_captcha", False
         )
-        self.captcha_bypass_secret = config.get("captcha_bypass_secret")
         self.recaptcha_siteverify_api = config.get(
             "recaptcha_siteverify_api",
             "https://www.recaptcha.net/recaptcha/api/siteverify",
@@ -31,27 +32,26 @@ class CaptchaConfig(Config):
     def generate_config_section(self, **kwargs):
         return """\
         ## Captcha ##
-        # See docs/CAPTCHA_SETUP for full details of configuring this.
+        # See docs/CAPTCHA_SETUP.md for full details of configuring this.
 
-        # This Home Server's ReCAPTCHA public key.
+        # This homeserver's ReCAPTCHA public key. Must be specified if
+        # enable_registration_captcha is enabled.
         #
         #recaptcha_public_key: "YOUR_PUBLIC_KEY"
 
-        # This Home Server's ReCAPTCHA private key.
+        # This homeserver's ReCAPTCHA private key. Must be specified if
+        # enable_registration_captcha is enabled.
         #
         #recaptcha_private_key: "YOUR_PRIVATE_KEY"
 
-        # Enables ReCaptcha checks when registering, preventing signup
+        # Uncomment to enable ReCaptcha checks when registering, preventing signup
         # unless a captcha is answered. Requires a valid ReCaptcha
-        # public/private key.
-        #
-        #enable_registration_captcha: false
-
-        # A secret key used to bypass the captcha test entirely.
+        # public/private key. Defaults to 'false'.
         #
-        #captcha_bypass_secret: "YOUR_SECRET_HERE"
+        #enable_registration_captcha: true
 
         # The API endpoint to use for verifying m.login.recaptcha responses.
+        # Defaults to "https://www.recaptcha.net/recaptcha/api/siteverify".
         #
-        #recaptcha_siteverify_api: "https://www.recaptcha.net/recaptcha/api/siteverify"
+        #recaptcha_siteverify_api: "https://my.recaptcha.site"
         """
diff --git a/synapse/config/cas.py b/synapse/config/cas.py
index ebe34d933b..4526c1a67b 100644
--- a/synapse/config/cas.py
+++ b/synapse/config/cas.py
@@ -22,17 +22,21 @@ class CasConfig(Config):
     cas_server_url: URL of CAS server
     """
 
+    section = "cas"
+
     def read_config(self, config, **kwargs):
         cas_config = config.get("cas_config", None)
         if cas_config:
             self.cas_enabled = cas_config.get("enabled", True)
             self.cas_server_url = cas_config["server_url"]
             self.cas_service_url = cas_config["service_url"]
+            self.cas_displayname_attribute = cas_config.get("displayname_attribute")
             self.cas_required_attributes = cas_config.get("required_attributes", {})
         else:
             self.cas_enabled = False
             self.cas_server_url = None
             self.cas_service_url = None
+            self.cas_displayname_attribute = None
             self.cas_required_attributes = {}
 
     def generate_config_section(self, config_dir_path, server_name, **kwargs):
@@ -43,6 +47,7 @@ class CasConfig(Config):
         #   enabled: true
         #   server_url: "https://cas-server.com"
         #   service_url: "https://homeserver.domain.com:8448"
+        #   #displayname_attribute: name
         #   #required_attributes:
         #   #    name: value
         """
diff --git a/synapse/config/consent_config.py b/synapse/config/consent_config.py
index 94916f3a49..aec9c4bbce 100644
--- a/synapse/config/consent_config.py
+++ b/synapse/config/consent_config.py
@@ -62,19 +62,22 @@ DEFAULT_CONFIG = """\
 #    body: >-
 #      To continue using this homeserver you must review and agree to the
 #      terms and conditions at %(consent_uri)s
-#  send_server_notice_to_guests: True
+#  send_server_notice_to_guests: true
 #  block_events_error: >-
 #    To continue using this homeserver you must review and agree to the
 #    terms and conditions at %(consent_uri)s
-#  require_at_registration: False
+#  require_at_registration: false
 #  policy_name: Privacy Policy
 #
 """
 
 
 class ConsentConfig(Config):
-    def __init__(self):
-        super(ConsentConfig, self).__init__()
+
+    section = "consent"
+
+    def __init__(self, *args):
+        super(ConsentConfig, self).__init__(*args)
 
         self.user_consent_version = None
         self.user_consent_template_dir = None
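The `__init__(self, *args)` change above exists because `RootConfig` now instantiates every section as `config_class(self)`, so a Config subclass that overrides `__init__` must accept and forward the root. A hedged sketch of a hypothetical section following the same pattern (assuming `synapse.config._base` is importable as in the tree above):

from synapse.config._base import Config


class MyExtraConfig(Config):
    """Hypothetical example section, not part of Synapse."""

    section = "myextra"

    def __init__(self, *args):
        # Forward the root config so that self.root keeps working.
        super().__init__(*args)
        self.my_setting = None

    def read_config(self, config, **kwargs):
        self.my_setting = config.get("my_setting")
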
diff --git a/synapse/config/database.py b/synapse/config/database.py
index 118aafbd4a..1064c2697b 100644
--- a/synapse/config/database.py
+++ b/synapse/config/database.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2020 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,73 +13,185 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import logging
 import os
-from textwrap import indent
 
-import yaml
+from synapse.config._base import Config, ConfigError
 
-from ._base import Config
+logger = logging.getLogger(__name__)
 
+NON_SQLITE_DATABASE_PATH_WARNING = """\
+Ignoring 'database_path' setting: not using a sqlite3 database.
+--------------------------------------------------------------------------------
+"""
 
-class DatabaseConfig(Config):
-    def read_config(self, config, **kwargs):
-        self.event_cache_size = self.parse_size(config.get("event_cache_size", "10K"))
+DEFAULT_CONFIG = """\
+## Database ##
+
+# The 'database' setting defines the database that synapse uses to store all of
+# its data.
+#
+# 'name' gives the database engine to use: either 'sqlite3' (for SQLite) or
+# 'psycopg2' (for PostgreSQL).
+#
+# 'args' gives options which are passed through to the database engine,
+# except for options starting 'cp_', which are used to configure the Twisted
+# connection pool. For a reference to valid arguments, see:
+#   * for sqlite: https://docs.python.org/3/library/sqlite3.html#sqlite3.connect
+#   * for postgres: https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS
+#   * for the connection pool: https://twistedmatrix.com/documents/current/api/twisted.enterprise.adbapi.ConnectionPool.html#__init__
+#
+#
+# Example SQLite configuration:
+#
+#database:
+#  name: sqlite3
+#  args:
+#    database: /path/to/homeserver.db
+#
+#
+# Example Postgres configuration:
+#
+#database:
+#  name: psycopg2
+#  args:
+#    user: synapse
+#    password: secretpassword
+#    database: synapse
+#    host: localhost
+#    cp_min: 5
+#    cp_max: 10
+#
+# For more information on using Synapse with Postgres, see `docs/postgres.md`.
+#
+database:
+  name: sqlite3
+  args:
+    database: %(database_path)s
+"""
+
+
+class DatabaseConnectionConfig:
+    """Contains the connection config for a particular database.
 
-        self.database_config = config.get("database")
+    Args:
+        name: A label for the database, used for logging.
+        db_config: The config for a particular database, as per `database`
+            section of main config. Has three fields: `name` for database
+            module name, `args` for the args to give to the database
+            connector, and optional `data_stores` that is a list of stores to
+            provision on this database (defaulting to all).
+    """
 
-        if self.database_config is None:
-            self.database_config = {"name": "sqlite3", "args": {}}
+    def __init__(self, name: str, db_config: dict):
+        db_engine = db_config.get("name", "sqlite3")
 
-        name = self.database_config.get("name", None)
-        if name == "psycopg2":
-            pass
-        elif name == "sqlite3":
-            self.database_config.setdefault("args", {}).update(
+        if db_engine not in ("sqlite3", "psycopg2"):
+            raise ConfigError("Unsupported database type %r" % (db_engine,))
+
+        if db_engine == "sqlite3":
+            db_config.setdefault("args", {}).update(
                 {"cp_min": 1, "cp_max": 1, "check_same_thread": False}
             )
-        else:
-            raise RuntimeError("Unsupported database type '%s'" % (name,))
-
-        self.set_databasepath(config.get("database_path"))
-
-    def generate_config_section(self, data_dir_path, database_conf, **kwargs):
-        if not database_conf:
-            database_path = os.path.join(data_dir_path, "homeserver.db")
-            database_conf = (
-                """# The database engine name
-          name: "sqlite3"
-          # Arguments to pass to the engine
-          args:
-            # Path to the database
-            database: "%(database_path)s"
-            """
-                % locals()
-            )
-        else:
-            database_conf = indent(yaml.dump(database_conf), " " * 10).lstrip()
 
-        return (
-            """\
-        ## Database ##
+        data_stores = db_config.get("data_stores")
+        if data_stores is None:
+            data_stores = ["main", "state"]
+
+        self.name = name
+        self.config = db_config
+        self.data_stores = data_stores
+
+
+class DatabaseConfig(Config):
+    section = "database"
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
 
-        database:
-          %(database_conf)s
-        # Number of events to cache in memory.
+        self.databases = []
+
+    def read_config(self, config, **kwargs):
+        # We *experimentally* support specifying multiple databases via the
+        # `databases` key. This is a map from a label to database config in the
+        # same format as the `database` config option, plus an extra
+        # `data_stores` key to specify which data store goes where. For example:
         #
-        #event_cache_size: 10K
-        """
-            % locals()
-        )
+        #   databases:
+        #       master:
+        #           name: psycopg2
+        #           data_stores: ["main"]
+        #           args: {}
+        #       state:
+        #           name: psycopg2
+        #           data_stores: ["state"]
+        #           args: {}
+
+        multi_database_config = config.get("databases")
+        database_config = config.get("database")
+        database_path = config.get("database_path")
+
+        if multi_database_config and database_config:
+            raise ConfigError("Can't specify both 'database' and 'databases' in config")
+
+        if multi_database_config:
+            if database_path:
+                raise ConfigError("Can't specify 'database_path' with 'databases'")
+
+            self.databases = [
+                DatabaseConnectionConfig(name, db_conf)
+                for name, db_conf in multi_database_config.items()
+            ]
+
+        if database_config:
+            self.databases = [DatabaseConnectionConfig("master", database_config)]
+
+        if database_path:
+            if self.databases and self.databases[0].name != "sqlite3":
+                logger.warning(NON_SQLITE_DATABASE_PATH_WARNING)
+                return
+
+            database_config = {"name": "sqlite3", "args": {}}
+            self.databases = [DatabaseConnectionConfig("master", database_config)]
+            self.set_databasepath(database_path)
+
+    def generate_config_section(self, data_dir_path, **kwargs):
+        return DEFAULT_CONFIG % {
+            "database_path": os.path.join(data_dir_path, "homeserver.db")
+        }
 
     def read_arguments(self, args):
-        self.set_databasepath(args.database_path)
+        """
+        Cases for the cli input:
+          - If no databases are configured and no database_path is set, raise.
+          - If no databases are configured but a database_path is given, fall
+            back to a sqlite3 database at that path.
+          - If the (first) configured database is sqlite, overwrite its path
+            with the command line argument.
+          - Otherwise, log a warning and ignore the database_path argument.
+        """
+
+        if args.database_path is None:
+            if not self.databases:
+                raise ConfigError("No database config provided")
+            return
+
+        if len(self.databases) == 0:
+            database_config = {"name": "sqlite3", "args": {}}
+            self.databases = [DatabaseConnectionConfig("master", database_config)]
+            self.set_databasepath(args.database_path)
+            return
+
+        if self.get_single_database().name == "sqlite3":
+            self.set_databasepath(args.database_path)
+        else:
+            logger.warning(NON_SQLITE_DATABASE_PATH_WARNING)
 
     def set_databasepath(self, database_path):
+
         if database_path != ":memory:":
             database_path = self.abspath(database_path)
-        if self.database_config.get("name", None) == "sqlite3":
-            if database_path is not None:
-                self.database_config["args"]["database"] = database_path
+
+        self.databases[0].config["args"]["database"] = database_path
 
     @staticmethod
     def add_arguments(parser):
@@ -89,3 +202,11 @@ class DatabaseConfig(Config):
             metavar="SQLITE_DATABASE_PATH",
             help="The path to a sqlite database to use.",
         )
+
+    def get_single_database(self) -> DatabaseConnectionConfig:
+        """Returns the database if there is only one, useful for e.g. tests
+        """
+        if not self.databases:
+            raise Exception("No databases have been configured")
+
+        return self.databases[0]
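A short usage sketch for the experimental `databases` map described in `read_config` above, assuming the post-merge `synapse.config.database` module is importable; the labels and engine choices are examples only.

from synapse.config.database import DatabaseConnectionConfig

# The layout from the comment in read_config: one label per database, each
# with an engine name, connector args and the data stores it should host.
multi_database_config = {
    "master": {"name": "psycopg2", "data_stores": ["main"], "args": {}},
    "state": {"name": "psycopg2", "data_stores": ["state"], "args": {}},
}

databases = [
    DatabaseConnectionConfig(name, db_conf)
    for name, db_conf in multi_database_config.items()
]

for db in databases:
    print(db.name, db.config["name"], db.data_stores)
# master psycopg2 ['main']
# state psycopg2 ['state']
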
diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py
index e5de768b0c..ca61214454 100644
--- a/synapse/config/emailconfig.py
+++ b/synapse/config/emailconfig.py
@@ -21,23 +21,34 @@ from __future__ import print_function
 import email.utils
 import os
 from enum import Enum
+from typing import Optional
 
 import pkg_resources
 
 from ._base import Config, ConfigError
 
+MISSING_PASSWORD_RESET_CONFIG_ERROR = """\
+Password reset emails are enabled on this homeserver due to a partial
+'email' block. However, the following required keys are missing:
+    %s
+"""
+
 
 class EmailConfig(Config):
+    section = "email"
+
     def read_config(self, config, **kwargs):
         # TODO: We should separate better the email configuration from the notification
         # and account validity config.
 
         self.email_enable_notifs = False
 
-        email_config = config.get("email", {})
+        email_config = config.get("email")
+        if email_config is None:
+            email_config = {}
 
-        self.email_smtp_host = email_config.get("smtp_host", None)
-        self.email_smtp_port = email_config.get("smtp_port", None)
+        self.email_smtp_host = email_config.get("smtp_host", "localhost")
+        self.email_smtp_port = email_config.get("smtp_port", 25)
         self.email_smtp_user = email_config.get("smtp_user", None)
         self.email_smtp_pass = email_config.get("smtp_pass", None)
         self.require_transport_security = email_config.get(
@@ -71,9 +82,9 @@ class EmailConfig(Config):
         self.email_template_dir = os.path.abspath(template_dir)
 
         self.email_enable_notifs = email_config.get("enable_notifs", False)
-        account_validity_renewal_enabled = config.get("account_validity", {}).get(
-            "renew_at"
-        )
+
+        account_validity_config = config.get("account_validity") or {}
+        account_validity_renewal_enabled = account_validity_config.get("renew_at")
 
         self.threepid_behaviour_email = (
             # Have Synapse handle the email sending if account_threepid_delegates.email
@@ -97,9 +108,14 @@ class EmailConfig(Config):
             if self.trusted_third_party_id_servers:
                 # XXX: It's a little confusing that account_threepid_delegate_email is modified
                 # both in RegistrationConfig and here. We should factor this bit out
-                self.account_threepid_delegate_email = self.trusted_third_party_id_servers[
-                    0
-                ]
+
+                first_trusted_identity_server = self.trusted_third_party_id_servers[0]
+
+                # trusted_third_party_id_servers does not contain a scheme whereas
+                # account_threepid_delegate_email is expected to. Presume https
+                self.account_threepid_delegate_email = (
+                    "https://" + first_trusted_identity_server
+                )  # type: Optional[str]
                 self.using_identity_server_from_trusted_list = True
             else:
                 raise ConfigError(
@@ -137,22 +153,18 @@ class EmailConfig(Config):
             bleach
 
         if self.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
-            required = ["smtp_host", "smtp_port", "notif_from"]
-
             missing = []
-            for k in required:
-                if k not in email_config:
-                    missing.append("email." + k)
+            if not self.email_notif_from:
+                missing.append("email.notif_from")
 
+            # public_baseurl is required to build password reset and validation links that
+            # will be emailed to users
             if config.get("public_baseurl") is None:
                 missing.append("public_baseurl")
 
-            if len(missing) > 0:
-                raise RuntimeError(
-                    "Password resets emails are configured to be sent from "
-                    "this homeserver due to a partial 'email' block. "
-                    "However, the following required keys are missing: %s"
-                    % (", ".join(missing),)
+            if missing:
+                raise ConfigError(
+                    MISSING_PASSWORD_RESET_CONFIG_ERROR % (", ".join(missing),)
                 )
 
             # These email templates have placeholders in them, and thus must be
@@ -169,12 +181,22 @@ class EmailConfig(Config):
             self.email_registration_template_text = email_config.get(
                 "registration_template_text", "registration.txt"
             )
+            self.email_add_threepid_template_html = email_config.get(
+                "add_threepid_template_html", "add_threepid.html"
+            )
+            self.email_add_threepid_template_text = email_config.get(
+                "add_threepid_template_text", "add_threepid.txt"
+            )
+
             self.email_password_reset_template_failure_html = email_config.get(
                 "password_reset_template_failure_html", "password_reset_failure.html"
             )
             self.email_registration_template_failure_html = email_config.get(
                 "registration_template_failure_html", "registration_failure.html"
             )
+            self.email_add_threepid_template_failure_html = email_config.get(
+                "add_threepid_template_failure_html", "add_threepid_failure.html"
+            )
 
             # These templates do not support any placeholder variables, so we
             # will read them from disk once during setup
@@ -184,6 +206,9 @@ class EmailConfig(Config):
             email_registration_template_success_html = email_config.get(
                 "registration_template_success_html", "registration_success.html"
             )
+            email_add_threepid_template_success_html = email_config.get(
+                "add_threepid_template_success_html", "add_threepid_success.html"
+            )
 
             # Check templates exist
             for f in [
@@ -191,9 +216,14 @@ class EmailConfig(Config):
                 self.email_password_reset_template_text,
                 self.email_registration_template_html,
                 self.email_registration_template_text,
+                self.email_add_threepid_template_html,
+                self.email_add_threepid_template_text,
                 self.email_password_reset_template_failure_html,
+                self.email_registration_template_failure_html,
+                self.email_add_threepid_template_failure_html,
                 email_password_reset_template_success_html,
                 email_registration_template_success_html,
+                email_add_threepid_template_success_html,
             ]:
                 p = os.path.join(self.email_template_dir, f)
                 if not os.path.isfile(p):
@@ -212,34 +242,33 @@ class EmailConfig(Config):
             self.email_registration_template_success_html_content = self.read_file(
                 filepath, "email.registration_template_success_html"
             )
+            filepath = os.path.join(
+                self.email_template_dir, email_add_threepid_template_success_html
+            )
+            self.email_add_threepid_template_success_html_content = self.read_file(
+                filepath, "email.add_threepid_template_success_html"
+            )
 
         if self.email_enable_notifs:
-            required = [
-                "smtp_host",
-                "smtp_port",
-                "notif_from",
-                "notif_template_html",
-                "notif_template_text",
-            ]
-
             missing = []
-            for k in required:
-                if k not in email_config:
-                    missing.append(k)
-
-            if len(missing) > 0:
-                raise RuntimeError(
-                    "email.enable_notifs is True but required keys are missing: %s"
-                    % (", ".join(["email." + k for k in missing]),)
-                )
+            if not self.email_notif_from:
+                missing.append("email.notif_from")
 
             if config.get("public_baseurl") is None:
-                raise RuntimeError(
-                    "email.enable_notifs is True but no public_baseurl is set"
+                missing.append("public_baseurl")
+
+            if missing:
+                raise ConfigError(
+                    "email.enable_notifs is True but required keys are missing: %s"
+                    % (", ".join(missing),)
                 )
 
-            self.email_notif_template_html = email_config["notif_template_html"]
-            self.email_notif_template_text = email_config["notif_template_text"]
+            self.email_notif_template_html = email_config.get(
+                "notif_template_html", "notif_mail.html"
+            )
+            self.email_notif_template_text = email_config.get(
+                "notif_template_text", "notif_mail.txt"
+            )
 
             for f in self.email_notif_template_text, self.email_notif_template_html:
                 p = os.path.join(self.email_template_dir, f)
@@ -249,7 +278,9 @@ class EmailConfig(Config):
             self.email_notif_for_new_users = email_config.get(
                 "notif_for_new_users", True
             )
-            self.email_riot_base_url = email_config.get("riot_base_url", None)
+            self.email_riot_base_url = email_config.get(
+                "client_base_url", email_config.get("riot_base_url", None)
+            )
 
         if account_validity_renewal_enabled:
             self.email_expiry_template_html = email_config.get(
@@ -265,80 +296,112 @@ class EmailConfig(Config):
                     raise ConfigError("Unable to find email template file %s" % (p,))
 
     def generate_config_section(self, config_dir_path, server_name, **kwargs):
-        return """
-        # Enable sending emails for password resets, notification events or
-        # account expiry notices
-        #
-        # If your SMTP server requires authentication, the optional smtp_user &
-        # smtp_pass variables should be used
-        #
-        #email:
-        #   enable_notifs: false
-        #   smtp_host: "localhost"
-        #   smtp_port: 25 # SSL: 465, STARTTLS: 587
-        #   smtp_user: "exampleusername"
-        #   smtp_pass: "examplepassword"
-        #   require_transport_security: False
-        #   notif_from: "Your Friendly %(app)s Home Server <noreply@example.com>"
-        #   app_name: Matrix
-        #
-        #   # Enable email notifications by default
-        #   #
-        #   notif_for_new_users: True
-        #
-        #   # Defining a custom URL for Riot is only needed if email notifications
-        #   # should contain links to a self-hosted installation of Riot; when set
-        #   # the "app_name" setting is ignored
-        #   #
-        #   riot_base_url: "http://localhost/riot"
-        #
-        #   # Configure the time that a validation email or text message code
-        #   # will expire after sending
-        #   #
-        #   # This is currently used for password resets
-        #   #
-        #   #validation_token_lifetime: 1h
-        #
-        #   # Template directory. All template files should be stored within this
-        #   # directory. If not set, default templates from within the Synapse
-        #   # package will be used
-        #   #
-        #   # For the list of default templates, please see
-        #   # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
-        #   #
-        #   #template_dir: res/templates
-        #
-        #   # Templates for email notifications
-        #   #
-        #   notif_template_html: notif_mail.html
-        #   notif_template_text: notif_mail.txt
-        #
-        #   # Templates for account expiry notices
-        #   #
-        #   expiry_template_html: notice_expiry.html
-        #   expiry_template_text: notice_expiry.txt
-        #
-        #   # Templates for password reset emails sent by the homeserver
-        #   #
-        #   #password_reset_template_html: password_reset.html
-        #   #password_reset_template_text: password_reset.txt
-        #
-        #   # Templates for registration emails sent by the homeserver
-        #   #
-        #   #registration_template_html: registration.html
-        #   #registration_template_text: registration.txt
-        #
-        #   # Templates for password reset success and failure pages that a user
-        #   # will see after attempting to reset their password
-        #   #
-        #   #password_reset_template_success_html: password_reset_success.html
-        #   #password_reset_template_failure_html: password_reset_failure.html
+        return """\
+        # Configuration for sending emails from Synapse.
         #
-        #   # Templates for registration success and failure pages that a user
-        #   # will see after attempting to register using an email or phone
-        #   #
-        #   #registration_template_success_html: registration_success.html
-        #   #registration_template_failure_html: registration_failure.html
+        email:
+          # The hostname of the outgoing SMTP server to use. Defaults to 'localhost'.
+          #
+          #smtp_host: mail.server
+
+          # The port on the mail server for outgoing SMTP. Defaults to 25.
+          #
+          #smtp_port: 587
+
+          # Username/password for authentication to the SMTP server. By default, no
+          # authentication is attempted.
+          #
+          #smtp_user: "exampleusername"
+          #smtp_pass: "examplepassword"
+
+          # Uncomment the following to require TLS transport security for SMTP.
+          # By default, Synapse will connect over plain text, and will then switch to
+          # TLS via STARTTLS *if the SMTP server supports it*. If this option is set,
+          # Synapse will refuse to connect unless the server supports STARTTLS.
+          #
+          #require_transport_security: true
+
+          # notif_from defines the "From" address to use when sending emails.
+          # It must be set if email sending is enabled.
+          #
+          # The placeholder '%(app)s' will be replaced by the application name,
+          # which is normally 'app_name' (below), but may be overridden by the
+          # Matrix client application.
+          #
+          # Note that the placeholder must be written '%(app)s', including the
+          # trailing 's'.
+          #
+          #notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>"
+
+          # app_name defines the default value for '%(app)s' in notif_from. It
+          # defaults to 'Matrix'.
+          #
+          #app_name: my_branded_matrix_server
+
+          # Uncomment the following to enable sending emails for messages that the user
+          # has missed. Disabled by default.
+          #
+          #enable_notifs: true
+
+          # Uncomment the following to disable automatic subscription to email
+          # notifications for new users. Enabled by default.
+          #
+          #notif_for_new_users: false
+
+          # Custom URL for client links within the email notifications. By default
+          # links will be based on "https://matrix.to".
+          #
+          # (This setting used to be called riot_base_url; the old name is still
+          # supported for backwards-compatibility but is now deprecated.)
+          #
+          #client_base_url: "http://localhost/riot"
+
+          # Configure the time that a validation email will expire after sending.
+          # Defaults to 1h.
+          #
+          #validation_token_lifetime: 15m
+
+          # Directory in which Synapse will try to find the template files below.
+          # If not set, default templates from within the Synapse package will be used.
+          #
+          # DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
+          # If you *do* uncomment it, you will need to make sure that all the templates
+          # below are in the directory.
+          #
+          # Synapse will look for the following templates in this directory:
+          #
+          # * The contents of email notifications of missed events: 'notif_mail.html' and
+          #   'notif_mail.txt'.
+          #
+          # * The contents of account expiry notice emails: 'notice_expiry.html' and
+          #   'notice_expiry.txt'.
+          #
+          # * The contents of password reset emails sent by the homeserver:
+          #   'password_reset.html' and 'password_reset.txt'
+          #
+          # * HTML pages for success and failure that a user will see when they follow
+          #   the link in the password reset email: 'password_reset_success.html' and
+          #   'password_reset_failure.html'
+          #
+          # * The contents of address verification emails sent during registration:
+          #   'registration.html' and 'registration.txt'
+          #
+          # * HTML pages for success and failure that a user will see when they follow
+          #   the link in an address verification email sent during registration:
+          #   'registration_success.html' and 'registration_failure.html'
+          #
+          # * The contents of address verification emails sent when an address is added
+          #   to a Matrix account: 'add_threepid.html' and 'add_threepid.txt'
+          #
+          # * HTML pages for success and failure that a user will see when they follow
+          #   the link in an address verification email sent when an address is added
+          #   to a Matrix account: 'add_threepid_success.html' and
+          #   'add_threepid_failure.html'
+          #
+          # You can see the default templates at:
+          # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
+          #
+          #template_dir: "res/templates"
         """
 
 
diff --git a/synapse/config/groups.py b/synapse/config/groups.py
index 2a522b5f44..d6862d9a64 100644
--- a/synapse/config/groups.py
+++ b/synapse/config/groups.py
@@ -17,6 +17,8 @@ from ._base import Config
 
 
 class GroupsConfig(Config):
+    section = "groups"
+
     def read_config(self, config, **kwargs):
         self.enable_group_creation = config.get("enable_group_creation", False)
         self.group_creation_prefix = config.get("group_creation_prefix", "")
diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py
index 72acad4f18..2c7b3a699f 100644
--- a/synapse/config/homeserver.py
+++ b/synapse/config/homeserver.py
@@ -14,8 +14,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from ._base import RootConfig
 from .api import ApiConfig
 from .appservice import AppServiceConfig
+from .cache import CacheConfig
 from .captcha import CaptchaConfig
 from .cas import CasConfig
 from .consent_config import ConsentConfig
@@ -26,10 +28,12 @@ from .jwt_config import JWTConfig
 from .key import KeyConfig
 from .logger import LoggingConfig
 from .metrics import MetricsConfig
+from .oidc_config import OIDCConfig
 from .password import PasswordConfig
 from .password_auth_providers import PasswordAuthProviderConfig
 from .push import PushConfig
 from .ratelimiting import RatelimitConfig
+from .redis import RedisConfig
 from .registration import RegistrationConfig
 from .repository import ContentRepositoryConfig
 from .room_directory import RoomDirectoryConfig
@@ -37,6 +41,7 @@ from .saml2_config import SAML2Config
 from .server import ServerConfig
 from .server_notices_config import ServerNoticesConfig
 from .spam_checker import SpamCheckerConfig
+from .sso import SSOConfig
 from .stats import StatsConfig
 from .third_party_event_rules import ThirdPartyRulesConfig
 from .tls import TlsConfig
@@ -46,36 +51,41 @@ from .voip import VoipConfig
 from .workers import WorkerConfig
 
 
-class HomeServerConfig(
-    ServerConfig,
-    TlsConfig,
-    DatabaseConfig,
-    LoggingConfig,
-    RatelimitConfig,
-    ContentRepositoryConfig,
-    CaptchaConfig,
-    VoipConfig,
-    RegistrationConfig,
-    MetricsConfig,
-    ApiConfig,
-    AppServiceConfig,
-    KeyConfig,
-    SAML2Config,
-    CasConfig,
-    JWTConfig,
-    PasswordConfig,
-    EmailConfig,
-    WorkerConfig,
-    PasswordAuthProviderConfig,
-    PushConfig,
-    SpamCheckerConfig,
-    GroupsConfig,
-    UserDirectoryConfig,
-    ConsentConfig,
-    StatsConfig,
-    ServerNoticesConfig,
-    RoomDirectoryConfig,
-    ThirdPartyRulesConfig,
-    TracerConfig,
-):
-    pass
+class HomeServerConfig(RootConfig):
+
+    config_classes = [
+        ServerConfig,
+        TlsConfig,
+        CacheConfig,
+        DatabaseConfig,
+        LoggingConfig,
+        RatelimitConfig,
+        ContentRepositoryConfig,
+        CaptchaConfig,
+        VoipConfig,
+        RegistrationConfig,
+        MetricsConfig,
+        ApiConfig,
+        AppServiceConfig,
+        KeyConfig,
+        SAML2Config,
+        OIDCConfig,
+        CasConfig,
+        SSOConfig,
+        JWTConfig,
+        PasswordConfig,
+        EmailConfig,
+        WorkerConfig,
+        PasswordAuthProviderConfig,
+        PushConfig,
+        SpamCheckerConfig,
+        GroupsConfig,
+        UserDirectoryConfig,
+        ConsentConfig,
+        StatsConfig,
+        ServerNoticesConfig,
+        RoomDirectoryConfig,
+        ThirdPartyRulesConfig,
+        TracerConfig,
+        RedisConfig,
+    ]
diff --git a/synapse/config/jwt_config.py b/synapse/config/jwt_config.py
index 36d87cef03..a568726985 100644
--- a/synapse/config/jwt_config.py
+++ b/synapse/config/jwt_config.py
@@ -23,6 +23,8 @@ MISSING_JWT = """Missing jwt library. This is required for jwt login.
 
 
 class JWTConfig(Config):
+    section = "jwt"
+
     def read_config(self, config, **kwargs):
         jwt_config = config.get("jwt_config", None)
         if jwt_config:
diff --git a/synapse/config/key.py b/synapse/config/key.py
index ba2199bceb..b529ea5da0 100644
--- a/synapse/config/key.py
+++ b/synapse/config/key.py
@@ -50,6 +50,33 @@ and you should enable 'federation_verify_certificates' in your configuration.
 If you are *sure* you want to do this, set 'accept_keys_insecurely' on the
 trusted_key_server configuration."""
 
+TRUSTED_KEY_SERVER_NOT_CONFIGURED_WARN = """\
+Synapse requires that a list of trusted key servers are specified in order to
+provide signing keys for other servers in the federation.
+
+This homeserver does not have a trusted key server configured in
+homeserver.yaml and will fall back to the default of 'matrix.org'.
+
+Trusted key servers should be long-lived and stable which makes matrix.org a
+good choice for many admins, but some admins may wish to choose another. To
+suppress this warning, the admin should set 'trusted_key_servers' in
+homeserver.yaml to their desired key server and 'suppress_key_server_warning'
+to 'true'.
+
+In a future release the software-defined default will be removed entirely and
+the trusted key server will be defined exclusively by the value of
+'trusted_key_servers'.
+--------------------------------------------------------------------------------"""
+
+TRUSTED_KEY_SERVER_CONFIGURED_AS_M_ORG_WARN = """\
+This server is configured to use 'matrix.org' as its trusted key server via the
+'trusted_key_servers' config option. 'matrix.org' is a good choice for a key
+server since it is long-lived, stable and trusted. However, some admins may
+wish to use another server for this purpose.
+
+To suppress this warning and continue using 'matrix.org', admins should set
+'suppress_key_server_warning' to 'true' in homeserver.yaml.
+--------------------------------------------------------------------------------"""
 
 logger = logging.getLogger(__name__)
 
@@ -65,6 +92,8 @@ class TrustedKeyServer(object):
 
 
 class KeyConfig(Config):
+    section = "key"
+
     def read_config(self, config, config_dir_path, **kwargs):
         # the signing key can be specified inline or in a separate file
         if "signing_key" in config:
@@ -79,12 +108,13 @@ class KeyConfig(Config):
             self.signing_key = self.read_signing_keys(signing_key_path, "signing_key")
 
         self.old_signing_keys = self.read_old_signing_keys(
-            config.get("old_signing_keys", {})
+            config.get("old_signing_keys")
         )
         self.key_refresh_interval = self.parse_duration(
             config.get("key_refresh_interval", "1d")
         )
 
+        suppress_key_server_warning = config.get("suppress_key_server_warning", False)
         key_server_signing_keys_path = config.get("key_server_signing_keys_path")
         if key_server_signing_keys_path:
             self.key_server_signing_keys = self.read_signing_keys(
@@ -95,6 +125,7 @@ class KeyConfig(Config):
 
         # if neither trusted_key_servers nor perspectives are given, use the default.
         if "perspectives" not in config and "trusted_key_servers" not in config:
+            logger.warning(TRUSTED_KEY_SERVER_NOT_CONFIGURED_WARN)
             key_servers = [{"server_name": "matrix.org"}]
         else:
             key_servers = config.get("trusted_key_servers", [])
@@ -108,6 +139,11 @@ class KeyConfig(Config):
             # merge the 'perspectives' config into the 'trusted_key_servers' config.
             key_servers.extend(_perspectives_to_key_servers(config))
 
+            if not suppress_key_server_warning and "matrix.org" in (
+                s["server_name"] for s in key_servers
+            ):
+                logger.warning(TRUSTED_KEY_SERVER_CONFIGURED_AS_M_ORG_WARN)
+
         # list of TrustedKeyServer objects
         self.key_servers = list(
             _parse_key_servers(key_servers, self.federation_verify_certificates)
@@ -120,7 +156,7 @@ class KeyConfig(Config):
         if not self.macaroon_secret_key:
             # Unfortunately, there are people out there that don't have this
             # set. Lets just be "nice" and derive one from their secret key.
-            logger.warn("Config is missing macaroon_secret_key")
+            logger.warning("Config is missing macaroon_secret_key")
             seed = bytes(self.signing_key[0])
             self.macaroon_secret_key = hashlib.sha256(seed).digest()
 
@@ -139,8 +175,8 @@ class KeyConfig(Config):
             )
             form_secret = 'form_secret: "%s"' % random_string_with_symbols(50)
         else:
-            macaroon_secret_key = "# macaroon_secret_key: <PRIVATE STRING>"
-            form_secret = "# form_secret: <PRIVATE STRING>"
+            macaroon_secret_key = "#macaroon_secret_key: <PRIVATE STRING>"
+            form_secret = "#form_secret: <PRIVATE STRING>"
 
         return (
             """\
@@ -163,14 +199,19 @@ class KeyConfig(Config):
         signing_key_path: "%(base_key_name)s.signing.key"
 
         # The keys that the server used to sign messages with but won't use
-        # to sign new messages. E.g. it has lost its private key
+        # to sign new messages.
         #
-        #old_signing_keys:
-        #  "ed25519:auto":
-        #    # Base64 encoded public key
-        #    key: "The public part of your old signing key."
-        #    # Millisecond POSIX timestamp when the key expired.
-        #    expired_ts: 123456789123
+        old_signing_keys:
+          # For each key, `key` should be the base64-encoded public key, and
+          # `expired_ts` should be the time (in milliseconds since the unix epoch) that
+          # it was last used.
+          #
+          # It is possible to build an entry from an old signing.key file using the
+          # `export_signing_key` script which is provided with synapse.
+          #
+          # For example:
+          #
+          #"ed25519:id": { key: "base64string", expired_ts: 123456789123 }
 
         # How long key response published by this server is valid for.
         # Used to set the valid_until_ts in /key/v2 APIs.
@@ -190,6 +231,10 @@ class KeyConfig(Config):
         # This setting supersedes an older setting named `perspectives`. The old format
         # is still supported for backwards-compatibility, but it is deprecated.
         #
+        # 'trusted_key_servers' defaults to matrix.org, but using it will generate a
+        # warning on start-up. To suppress this warning, set
+        # 'suppress_key_server_warning' to true.
+        #
         # Options for each entry in the list include:
         #
         #    server_name: the name of the server. required.
@@ -214,11 +259,13 @@ class KeyConfig(Config):
         #      "ed25519:auto": "abcdefghijklmnopqrstuvwxyzabcdefghijklmopqr"
         #  - server_name: "my_other_trusted_server.example.com"
         #
-        # The default configuration is:
-        #
-        #trusted_key_servers:
-        #  - server_name: "matrix.org"
+        trusted_key_servers:
+          - server_name: "matrix.org"
+
+        # Uncomment the following to disable the warning that is emitted when the
+        # trusted_key_servers include 'matrix.org'. See above.
         #
+        #suppress_key_server_warning: true
 
         # The signing keys to use when acting as a trusted key server. If not specified
         # defaults to the server signing key.
@@ -248,6 +295,8 @@ class KeyConfig(Config):
             raise ConfigError("Error reading %s: %s" % (name, str(e)))
 
     def read_old_signing_keys(self, old_signing_keys):
+        if old_signing_keys is None:
+            return {}
         keys = {}
         for key_id, key_data in old_signing_keys.items():
             if is_signing_algorithm_supported(key_id):
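A small standalone sketch of the new matrix.org warning check (hypothetical values; the real code logs TRUSTED_KEY_SERVER_CONFIGURED_AS_M_ORG_WARN via the module logger):

    key_servers = [{"server_name": "matrix.org"}]
    suppress_key_server_warning = False

    # Warn when matrix.org is among the configured trusted key servers and the
    # admin has not opted out via suppress_key_server_warning.
    if not suppress_key_server_warning and "matrix.org" in (
        s["server_name"] for s in key_servers
    ):
        print("warning: trusted_key_servers includes matrix.org")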
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index 767ecfdf09..49f6c32beb 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -12,7 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
+import argparse
 import logging
 import logging.config
 import os
@@ -37,10 +37,17 @@ from synapse.logging._structured import (
 from synapse.logging.context import LoggingContextFilter
 from synapse.util.versionstring import get_version_string
 
-from ._base import Config
+from ._base import Config, ConfigError
 
 DEFAULT_LOG_CONFIG = Template(
-    """
+    """\
+# Log configuration for Synapse.
+#
+# This is a YAML file containing a standard Python logging configuration
+# dictionary. See [1] for details on the valid settings.
+#
+# [1]: https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema
+
 version: 1
 
 formatters:
@@ -68,9 +75,6 @@ handlers:
         filters: [context]
 
 loggers:
-    synapse:
-        level: INFO
-
     synapse.storage.SQL:
         # beware: increasing this to DEBUG will make synapse log sensitive
         # information such as access tokens.
@@ -79,12 +83,23 @@ loggers:
 root:
     level: INFO
     handlers: [file, console]
+
+disable_existing_loggers: false
 """
 )
 
+LOG_FILE_ERROR = """\
+Support for the log_file configuration option and --log-file command-line option was
+removed in Synapse 1.3.0. You should instead set up a separate log configuration file.
+"""
+
 
 class LoggingConfig(Config):
+    section = "logging"
+
     def read_config(self, config, **kwargs):
+        if config.get("log_file"):
+            raise ConfigError(LOG_FILE_ERROR)
         self.log_config = self.abspath(config.get("log_config"))
         self.no_redirect_stdio = config.get("no_redirect_stdio", False)
 
@@ -105,6 +120,8 @@ class LoggingConfig(Config):
     def read_arguments(self, args):
         if args.no_redirect_stdio is not None:
             self.no_redirect_stdio = args.no_redirect_stdio
+        if args.log_file is not None:
+            raise ConfigError(LOG_FILE_ERROR)
 
     @staticmethod
     def add_arguments(parser):
@@ -117,6 +134,10 @@ class LoggingConfig(Config):
             help="Do not redirect stdout/stderr to the log",
         )
 
+        logging_group.add_argument(
+            "-f", "--log-file", dest="log_file", help=argparse.SUPPRESS,
+        )
+
     def generate_files(self, config, config_dir_path):
         log_config = config.get("log_config")
         if log_config and not os.path.exists(log_config):
@@ -181,7 +202,7 @@ def _reload_stdlib_logging(*args, log_config=None):
     logger = logging.getLogger("")
 
     if not log_config:
-        logger.warn("Reloaded a blank config?")
+        logger.warning("Reloaded a blank config?")
 
     logging.config.dictConfig(log_config)
 
@@ -233,8 +254,9 @@ def setup_logging(
 
     # make sure that the first thing we log is a thing we can grep backwards
     # for
-    logging.warn("***** STARTING SERVER *****")
-    logging.warn("Server %s version %s", sys.argv[0], get_version_string(synapse))
+    logging.warning("***** STARTING SERVER *****")
+    logging.warning("Server %s version %s", sys.argv[0], get_version_string(synapse))
     logging.info("Server hostname: %s", config.server_name)
+    logging.info("Instance name: %s", hs.get_instance_name())
 
     return logger
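The removed --log-file option is kept registered (hidden via argparse.SUPPRESS) so that using it fails loudly instead of being silently ignored; a minimal sketch of that pattern, with an illustrative error type and message:

    import argparse

    parser = argparse.ArgumentParser()
    # Hidden from --help output, but still parsed so it can be rejected clearly.
    parser.add_argument("-f", "--log-file", dest="log_file", help=argparse.SUPPRESS)

    args = parser.parse_args(["--log-file", "homeserver.log"])
    if args.log_file is not None:
        raise SystemExit(
            "Support for --log-file was removed; set up a separate log config file."
        )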
diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py
index ec35a6b868..6aad0d37c0 100644
--- a/synapse/config/metrics.py
+++ b/synapse/config/metrics.py
@@ -34,6 +34,8 @@ class MetricsFlags(object):
 
 
 class MetricsConfig(Config):
+    section = "metrics"
+
     def read_config(self, config, **kwargs):
         self.enable_metrics = config.get("enable_metrics", False)
         self.report_stats = config.get("report_stats", None)
@@ -68,7 +70,7 @@ class MetricsConfig(Config):
 
         # Enable collection and rendering of performance metrics
         #
-        #enable_metrics: False
+        #enable_metrics: false
 
         # Enable sentry integration
         # NOTE: While attempts are made to ensure that the logs don't contain
@@ -84,17 +86,18 @@ class MetricsConfig(Config):
         # enabled by default, either for performance reasons or limited use.
         #
         metrics_flags:
-            # Publish synapse_federation_known_servers, a g auge of the number of
+            # Publish synapse_federation_known_servers, a gauge of the number of
             # servers this homeserver knows about, including itself. May cause
             # performance problems on large homeservers.
             #
             #known_servers: true
 
         # Whether or not to report anonymized homeserver usage statistics.
+        #
         """
 
         if report_stats is None:
-            res += "# report_stats: true|false\n"
+            res += "#report_stats: true|false\n"
         else:
             res += "report_stats: %s\n" % ("true" if report_stats else "false")
 
diff --git a/synapse/config/oidc_config.py b/synapse/config/oidc_config.py
new file mode 100644
index 0000000000..e24dd637bc
--- /dev/null
+++ b/synapse/config/oidc_config.py
@@ -0,0 +1,203 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Quentin Gliech
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.python_dependencies import DependencyException, check_requirements
+from synapse.util.module_loader import load_module
+
+from ._base import Config, ConfigError
+
+DEFAULT_USER_MAPPING_PROVIDER = "synapse.handlers.oidc_handler.JinjaOidcMappingProvider"
+
+
+class OIDCConfig(Config):
+    section = "oidc"
+
+    def read_config(self, config, **kwargs):
+        self.oidc_enabled = False
+
+        oidc_config = config.get("oidc_config")
+
+        if not oidc_config or not oidc_config.get("enabled", False):
+            return
+
+        try:
+            check_requirements("oidc")
+        except DependencyException as e:
+            raise ConfigError(e.message)
+
+        public_baseurl = self.public_baseurl
+        if public_baseurl is None:
+            raise ConfigError("oidc_config requires a public_baseurl to be set")
+        self.oidc_callback_url = public_baseurl + "_synapse/oidc/callback"
+
+        self.oidc_enabled = True
+        self.oidc_discover = oidc_config.get("discover", True)
+        self.oidc_issuer = oidc_config["issuer"]
+        self.oidc_client_id = oidc_config["client_id"]
+        self.oidc_client_secret = oidc_config["client_secret"]
+        self.oidc_client_auth_method = oidc_config.get(
+            "client_auth_method", "client_secret_basic"
+        )
+        self.oidc_scopes = oidc_config.get("scopes", ["openid"])
+        self.oidc_authorization_endpoint = oidc_config.get("authorization_endpoint")
+        self.oidc_token_endpoint = oidc_config.get("token_endpoint")
+        self.oidc_userinfo_endpoint = oidc_config.get("userinfo_endpoint")
+        self.oidc_jwks_uri = oidc_config.get("jwks_uri")
+        self.oidc_skip_verification = oidc_config.get("skip_verification", False)
+
+        ump_config = oidc_config.get("user_mapping_provider", {})
+        ump_config.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
+        ump_config.setdefault("config", {})
+
+        (
+            self.oidc_user_mapping_provider_class,
+            self.oidc_user_mapping_provider_config,
+        ) = load_module(ump_config)
+
+        # Ensure loaded user mapping module has defined all necessary methods
+        required_methods = [
+            "get_remote_user_id",
+            "map_user_attributes",
+        ]
+        missing_methods = [
+            method
+            for method in required_methods
+            if not hasattr(self.oidc_user_mapping_provider_class, method)
+        ]
+        if missing_methods:
+            raise ConfigError(
+                "Class specified by oidc_config."
+                "user_mapping_provider.module is missing required "
+                "methods: %s" % (", ".join(missing_methods),)
+            )
+
+    def generate_config_section(self, config_dir_path, server_name, **kwargs):
+        return """\
+        # OpenID Connect integration. The following settings can be used to make Synapse
+        # use an OpenID Connect Provider for authentication, instead of its internal
+        # password database.
+        #
+        # See https://github.com/matrix-org/synapse/blob/master/openid.md.
+        #
+        oidc_config:
+          # Uncomment the following to enable authorization against an OpenID Connect
+          # server. Defaults to false.
+          #
+          #enabled: true
+
+          # Uncomment the following to disable use of the OIDC discovery mechanism to
+          # discover endpoints. Defaults to true.
+          #
+          #discover: false
+
+          # the OIDC issuer. Used to validate tokens and (if discovery is enabled) to
+          # discover the provider's endpoints.
+          #
+          # Required if 'enabled' is true.
+          #
+          #issuer: "https://accounts.example.com/"
+
+          # oauth2 client id to use.
+          #
+          # Required if 'enabled' is true.
+          #
+          #client_id: "provided-by-your-issuer"
+
+          # oauth2 client secret to use.
+          #
+          # Required if 'enabled' is true.
+          #
+          #client_secret: "provided-by-your-issuer"
+
+          # auth method to use when exchanging the token.
+          # Valid values are 'client_secret_basic' (default), 'client_secret_post' and
+          # 'none'.
+          #
+          #client_auth_method: client_secret_post
+
+          # list of scopes to request. This should normally include the "openid" scope.
+          # Defaults to ["openid"].
+          #
+          #scopes: ["openid", "profile"]
+
+          # the oauth2 authorization endpoint. Required if provider discovery is disabled.
+          #
+          #authorization_endpoint: "https://accounts.example.com/oauth2/auth"
+
+          # the oauth2 token endpoint. Required if provider discovery is disabled.
+          #
+          #token_endpoint: "https://accounts.example.com/oauth2/token"
+
+          # the OIDC userinfo endpoint. Required if discovery is disabled and the
+          # "openid" scope is not requested.
+          #
+          #userinfo_endpoint: "https://accounts.example.com/userinfo"
+
+          # URI where to fetch the JWKS. Required if discovery is disabled and the
+          # "openid" scope is used.
+          #
+          #jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
+
+          # Uncomment to skip metadata verification. Defaults to false.
+          #
+          # Use this if you are connecting to a provider that is not OpenID Connect
+          # compliant.
+          # Avoid this in production.
+          #
+          #skip_verification: true
+
+          # An external module can be provided here as a custom solution to mapping
+          # attributes returned from an OIDC provider onto a Matrix user.
+          #
+          user_mapping_provider:
+            # The custom module's class. Uncomment to use a custom module.
+            # Default is {mapping_provider!r}.
+            #
+            # See https://github.com/matrix-org/synapse/blob/master/docs/sso_mapping_providers.md#openid-mapping-providers
+            # for information on implementing a custom mapping provider.
+            #
+            #module: mapping_provider.OidcMappingProvider
+
+            # Custom configuration values for the module. This section will be passed as
+            # a Python dictionary to the user mapping provider module's `parse_config`
+            # method.
+            #
+            # The examples below are intended for the default provider: they should be
+            # changed if using a custom provider.
+            #
+            config:
+              # name of the claim containing a unique identifier for the user.
+              # Defaults to `sub`, which OpenID Connect compliant providers should provide.
+              #
+              #subject_claim: "sub"
+
+              # Jinja2 template for the localpart of the MXID.
+              #
+              # When rendering, this template is given the following variables:
+              #   * user: The claims returned by the UserInfo Endpoint and/or in the ID
+              #     Token
+              #
+              # This must be configured if using the default mapping provider.
+              #
+              localpart_template: "{{{{ user.preferred_username }}}}"
+
+              # Jinja2 template for the display name to set on first login.
+              #
+              # If unset, no displayname will be set.
+              #
+              #display_name_template: "{{{{ user.given_name }}}} {{{{ user.last_name }}}}"
+        """.format(
+            mapping_provider=DEFAULT_USER_MAPPING_PROVIDER
+        )
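A standalone sketch of the required-method check applied to the OIDC user mapping provider; the provider class here is hypothetical and deliberately incomplete:

    class IncompleteProvider:
        # Omits map_user_attributes on purpose, to trip the check.
        def get_remote_user_id(self, userinfo):
            return userinfo["sub"]

    required_methods = ["get_remote_user_id", "map_user_attributes"]
    missing_methods = [
        m for m in required_methods if not hasattr(IncompleteProvider, m)
    ]
    if missing_methods:
        print("missing required methods: %s" % ", ".join(missing_methods))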
diff --git a/synapse/config/password.py b/synapse/config/password.py
index d5b5953f2f..9c0ea8c30a 100644
--- a/synapse/config/password.py
+++ b/synapse/config/password.py
@@ -20,6 +20,8 @@ class PasswordConfig(Config):
     """Password login configuration
     """
 
+    section = "password"
+
     def read_config(self, config, **kwargs):
         password_config = config.get("password_config", {})
         if password_config is None:
@@ -29,6 +31,10 @@ class PasswordConfig(Config):
         self.password_localdb_enabled = password_config.get("localdb_enabled", True)
         self.password_pepper = password_config.get("pepper", "")
 
+        # Password policy
+        self.password_policy = password_config.get("policy") or {}
+        self.password_policy_enabled = self.password_policy.get("enabled", False)
+
     def generate_config_section(self, config_dir_path, server_name, **kwargs):
         return """\
         password_config:
@@ -46,4 +52,39 @@ class PasswordConfig(Config):
            # DO NOT CHANGE THIS AFTER INITIAL SETUP!
            #
            #pepper: "EVEN_MORE_SECRET"
+
+           # Define and enforce a password policy. Each parameter is optional.
+           # This is an implementation of MSC2000.
+           #
+           policy:
+              # Whether to enforce the password policy.
+              # Defaults to 'false'.
+              #
+              #enabled: true
+
+              # Minimum accepted length for a password.
+              # Defaults to 0.
+              #
+              #minimum_length: 15
+
+              # Whether a password must contain at least one digit.
+              # Defaults to 'false'.
+              #
+              #require_digit: true
+
+              # Whether a password must contain at least one symbol.
+              # A symbol is any character that's not a number or a letter.
+              # Defaults to 'false'.
+              #
+              #require_symbol: true
+
+              # Whether a password must contain at least one lowercase letter.
+              # Defaults to 'false'.
+              #
+              #require_lowercase: true
+
+              # Whether a password must contain at least one uppercase letter.
+              # Defaults to 'false'.
+              #
+              #require_uppercase: true
         """
diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py
index 788c39c9fb..4fda8ae987 100644
--- a/synapse/config/password_auth_providers.py
+++ b/synapse/config/password_auth_providers.py
@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from typing import Any, List
+
 from synapse.util.module_loader import load_module
 
 from ._base import Config
@@ -21,8 +23,10 @@ LDAP_PROVIDER = "ldap_auth_provider.LdapAuthProvider"
 
 
 class PasswordAuthProviderConfig(Config):
+    section = "authproviders"
+
     def read_config(self, config, **kwargs):
-        self.password_providers = []
+        self.password_providers = []  # type: List[Any]
         providers = []
 
         # We want to be backwards compatible with the old `ldap_config`
@@ -31,7 +35,7 @@ class PasswordAuthProviderConfig(Config):
         if ldap_config.get("enabled", False):
             providers.append({"module": LDAP_PROVIDER, "config": ldap_config})
 
-        providers.extend(config.get("password_providers", []))
+        providers.extend(config.get("password_providers") or [])
         for provider in providers:
             mod_name = provider["module"]
 
@@ -48,7 +52,19 @@ class PasswordAuthProviderConfig(Config):
 
     def generate_config_section(self, **kwargs):
         return """\
-        #password_providers:
+        # Password providers allow homeserver administrators to integrate
+        # their Synapse installation with existing authentication methods
+        # e.g. LDAP, external tokens, etc.
+        #
+        # For more information and known implementations, please see
+        # https://github.com/matrix-org/synapse/blob/master/docs/password_auth_providers.md
+        #
+        # Note: instances wishing to use SAML or CAS authentication should
+        # instead use the `saml2_config` or `cas_config` options,
+        # respectively.
+        #
+        password_providers:
+        #    # Example config for an LDAP auth provider
         #    - module: "ldap_auth_provider.LdapAuthProvider"
         #      config:
         #        enabled: true
diff --git a/synapse/config/push.py b/synapse/config/push.py
index 1b932722a5..6f2b3a7faa 100644
--- a/synapse/config/push.py
+++ b/synapse/config/push.py
@@ -18,6 +18,8 @@ from ._base import Config
 
 
 class PushConfig(Config):
+    section = "push"
+
     def read_config(self, config, **kwargs):
         push_config = config.get("push", {})
         self.push_include_content = push_config.get("include_content", True)
@@ -33,7 +35,7 @@ class PushConfig(Config):
 
         # Now check for the one in the 'email' section and honour it,
         # with a warning.
-        push_config = config.get("email", {})
+        push_config = config.get("email") or {}
         redact_content = push_config.get("redact_content")
         if redact_content is not None:
             print(
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index 587e2862b7..2dd94bae2b 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -12,11 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from typing import Dict
+
 from ._base import Config
 
 
 class RateLimitConfig(object):
-    def __init__(self, config, defaults={"per_second": 0.17, "burst_count": 3.0}):
+    def __init__(
+        self,
+        config: Dict[str, float],
+        defaults={"per_second": 0.17, "burst_count": 3.0},
+    ):
         self.per_second = config.get("per_second", defaults["per_second"])
         self.burst_count = config.get("burst_count", defaults["burst_count"])
 
@@ -36,6 +42,8 @@ class FederationRateLimitConfig(object):
 
 
 class RatelimitConfig(Config):
+    section = "ratelimiting"
+
     def read_config(self, config, **kwargs):
 
         # Load the new-style messages config if it exists. Otherwise fall back
@@ -81,10 +89,9 @@ class RatelimitConfig(Config):
         )
 
         rc_admin_redaction = config.get("rc_admin_redaction")
+        self.rc_admin_redaction = None
         if rc_admin_redaction:
             self.rc_admin_redaction = RateLimitConfig(rc_admin_redaction)
-        else:
-            self.rc_admin_redaction = None
 
     def generate_config_section(self, **kwargs):
         return """\
diff --git a/synapse/config/redis.py b/synapse/config/redis.py
new file mode 100644
index 0000000000..d5d3ca1c9e
--- /dev/null
+++ b/synapse/config/redis.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.config._base import Config
+from synapse.python_dependencies import check_requirements
+
+
+class RedisConfig(Config):
+    section = "redis"
+
+    def read_config(self, config, **kwargs):
+        redis_config = config.get("redis", {})
+        self.redis_enabled = redis_config.get("enabled", False)
+
+        if not self.redis_enabled:
+            return
+
+        check_requirements("redis")
+
+        self.redis_host = redis_config.get("host", "localhost")
+        self.redis_port = redis_config.get("port", 6379)
+        self.redis_password = redis_config.get("password")
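A sketch of how the new section reads with an illustrative config dict (the check_requirements call is omitted here):

    redis_config = {"enabled": True, "host": "redis.internal"}  # hypothetical values

    redis_enabled = redis_config.get("enabled", False)
    if redis_enabled:
        redis_host = redis_config.get("host", "localhost")
        redis_port = redis_config.get("port", 6379)
        redis_password = redis_config.get("password")
        print(redis_host, redis_port, redis_password)  # -> redis.internal 6379 None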
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index d4654e99b3..fecced2d57 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -24,7 +24,12 @@ from synapse.util.stringutils import random_string_with_symbols
 
 
 class AccountValidityConfig(Config):
+    section = "accountvalidity"
+
     def __init__(self, config, synapse_config):
+        if config is None:
+            return
+        super(AccountValidityConfig, self).__init__()
         self.enabled = config.get("enabled", False)
         self.renew_by_email_enabled = "renew_at" in config
 
@@ -77,6 +82,8 @@ class AccountValidityConfig(Config):
 
 
 class RegistrationConfig(Config):
+    section = "registration"
+
     def read_config(self, config, **kwargs):
         self.enable_registration = bool(
             strtobool(str(config.get("enable_registration", False)))
@@ -87,7 +94,7 @@ class RegistrationConfig(Config):
             )
 
         self.account_validity = AccountValidityConfig(
-            config.get("account_validity", {}), config
+            config.get("account_validity") or {}, config
         )
 
         self.registrations_require_3pid = config.get("registrations_require_3pid", [])
@@ -102,6 +109,13 @@ class RegistrationConfig(Config):
         account_threepid_delegates = config.get("account_threepid_delegates") or {}
         self.account_threepid_delegate_email = account_threepid_delegates.get("email")
         self.account_threepid_delegate_msisdn = account_threepid_delegates.get("msisdn")
+        if self.account_threepid_delegate_msisdn and not self.public_baseurl:
+            raise ConfigError(
+                "The configuration option `public_baseurl` is required if "
+                "`account_threepid_delegate.msisdn` is set, such that "
+                "clients know where to submit validation tokens to. Please "
+                "configure `public_baseurl`."
+            )
 
         self.default_identity_server = config.get("default_identity_server")
         self.allow_guest_access = config.get("allow_guest_access", False)
@@ -114,6 +128,11 @@ class RegistrationConfig(Config):
             if not RoomAlias.is_valid(room_alias):
                 raise ConfigError("Invalid auto_join_rooms entry %s" % (room_alias,))
         self.autocreate_auto_join_rooms = config.get("autocreate_auto_join_rooms", True)
+        self.auto_join_rooms_for_guests = config.get("auto_join_rooms_for_guests", True)
+
+        self.enable_set_displayname = config.get("enable_set_displayname", True)
+        self.enable_set_avatar_url = config.get("enable_set_avatar_url", True)
+        self.enable_3pid_changes = config.get("enable_3pid_changes", True)
 
         self.disable_msisdn_registration = config.get(
             "disable_msisdn_registration", False
@@ -130,9 +149,7 @@ class RegistrationConfig(Config):
                 random_string_with_symbols(50),
             )
         else:
-            registration_shared_secret = (
-                "# registration_shared_secret: <PRIVATE STRING>"
-            )
+            registration_shared_secret = "#registration_shared_secret: <PRIVATE STRING>"
 
         return (
             """\
@@ -148,23 +165,6 @@ class RegistrationConfig(Config):
         # Optional account validity configuration. This allows for accounts to be denied
         # any request after a given period.
         #
-        # ``enabled`` defines whether the account validity feature is enabled. Defaults
-        # to False.
-        #
-        # ``period`` allows setting the period after which an account is valid
-        # after its registration. When renewing the account, its validity period
-        # will be extended by this amount of time. This parameter is required when using
-        # the account validity feature.
-        #
-        # ``renew_at`` is the amount of time before an account's expiry date at which
-        # Synapse will send an email to the account's email address with a renewal link.
-        # This needs the ``email`` and ``public_baseurl`` configuration sections to be
-        # filled.
-        #
-        # ``renew_email_subject`` is the subject of the email sent out with the renewal
-        # link. ``%%(app)s`` can be used as a placeholder for the ``app_name`` parameter
-        # from the ``email`` section.
-        #
         # Once this feature is enabled, Synapse will look for registered users without an
         # expiration date at startup and will add one to every account it found using the
         # current settings at that time.
@@ -175,21 +175,55 @@ class RegistrationConfig(Config):
         # date will be randomly selected within a range [now + period - d ; now + period],
         # where d is equal to 10%% of the validity period.
         #
-        #account_validity:
-        #  enabled: True
-        #  period: 6w
-        #  renew_at: 1w
-        #  renew_email_subject: "Renew your %%(app)s account"
-        #  # Directory in which Synapse will try to find the HTML files to serve to the
-        #  # user when trying to renew an account. Optional, defaults to
-        #  # synapse/res/templates.
-        #  template_dir: "res/templates"
-        #  # HTML to be displayed to the user after they successfully renewed their
-        #  # account. Optional.
-        #  account_renewed_html_path: "account_renewed.html"
-        #  # HTML to be displayed when the user tries to renew an account with an invalid
-        #  # renewal token. Optional.
-        #  invalid_token_html_path: "invalid_token.html"
+        account_validity:
+          # The account validity feature is disabled by default. Uncomment the
+          # following line to enable it.
+          #
+          #enabled: true
+
+          # The period after which an account is valid after its registration. When
+          # renewing the account, its validity period will be extended by this amount
+          # of time. This parameter is required when using the account validity
+          # feature.
+          #
+          #period: 6w
+
+          # The amount of time before an account's expiry date at which Synapse will
+          # send an email to the account's email address with a renewal link. By
+          # default, no such emails are sent.
+          #
+          # If you enable this setting, you will also need to fill out the 'email' and
+          # 'public_baseurl' configuration sections.
+          #
+          #renew_at: 1w
+
+          # The subject of the email sent out with the renewal link. '%%(app)s' can be
+          # used as a placeholder for the 'app_name' parameter from the 'email'
+          # section.
+          #
+          # Note that the placeholder must be written '%%(app)s', including the
+          # trailing 's'.
+          #
+          # If this is not set, a default value is used.
+          #
+          #renew_email_subject: "Renew your %%(app)s account"
+
+          # Directory in which Synapse will try to find templates for the HTML files to
+          # serve to the user when trying to renew an account. If not set, default
+          # templates from within the Synapse package will be used.
+          #
+          #template_dir: "res/templates"
+
+          # File within 'template_dir' giving the HTML to be displayed to the user after
+          # they successfully renewed their account. If not set, default text is used.
+          #
+          #account_renewed_html_path: "account_renewed.html"
+
+          # File within 'template_dir' giving the HTML to be displayed when the user
+          # tries to renew an account with an invalid renewal token. If not set,
+          # default text is used.
+          #
+          #invalid_token_html_path: "invalid_token.html"
 
         # Time that a user's session remains valid for, after they log in.
         #
@@ -293,10 +327,35 @@ class RegistrationConfig(Config):
         # by the Matrix Identity Service API specification:
         # https://matrix.org/docs/spec/identity_service/latest
         #
+        # If a delegate is specified, the config option public_baseurl must also be filled out.
+        #
         account_threepid_delegates:
-            #email: https://example.com     # Delegate email sending to example.org
+            #email: https://example.com     # Delegate email sending to example.com
             #msisdn: http://localhost:8090  # Delegate SMS sending to this local process
 
+        # Whether users are allowed to change their displayname after it has
+        # been initially set. Useful when provisioning users based on the
+        # contents of a third-party directory.
+        #
+        # Does not apply to server administrators. Defaults to 'true'
+        #
+        #enable_set_displayname: false
+
+        # Whether users are allowed to change their avatar after it has been
+        # initially set. Useful when provisioning users based on the contents
+        # of a third-party directory.
+        #
+        # Does not apply to server administrators. Defaults to 'true'
+        #
+        #enable_set_avatar_url: false
+
+        # Whether users can change the 3PIDs associated with their accounts
+        # (email address and msisdn).
+        #
+        # Defaults to 'true'
+        #
+        #enable_3pid_changes: false
+
         # Users who register on this homeserver will automatically be joined
         # to these rooms
         #
@@ -310,6 +369,13 @@ class RegistrationConfig(Config):
         # users cannot be auto-joined since they do not exist.
         #
         #autocreate_auto_join_rooms: true
+
+        # When auto_join_rooms is specified, setting this flag to false prevents
+        # guest accounts from being automatically joined to the rooms.
+        #
+        # Defaults to true.
+        #
+        #auto_join_rooms_for_guests: false
         """
             % locals()
         )
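A minimal sketch of the new msisdn-delegate validation above (ValueError stands in for Synapse's ConfigError):

    account_threepid_delegates = {"msisdn": "http://localhost:8090"}
    public_baseurl = None  # not configured

    # Delegating SMS verification requires public_baseurl so that clients know
    # where to submit validation tokens.
    if account_threepid_delegates.get("msisdn") and not public_baseurl:
        raise ValueError(
            "public_baseurl must be set when account_threepid_delegates.msisdn is used"
        )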
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index 34f1a9a92d..b751d02d37 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014, 2015 matrix.org
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,6 +15,7 @@
 
 import os
 from collections import namedtuple
+from typing import Dict, List
 
 from synapse.python_dependencies import DependencyException, check_requirements
 from synapse.util.module_loader import load_module
@@ -61,7 +62,7 @@ def parse_thumbnail_requirements(thumbnail_sizes):
         Dictionary mapping from media type string to list of
         ThumbnailRequirement tuples.
     """
-    requirements = {}
+    requirements = {}  # type: Dict[str, List]
     for size in thumbnail_sizes:
         width = size["width"]
         height = size["height"]
@@ -69,6 +70,7 @@ def parse_thumbnail_requirements(thumbnail_sizes):
         jpeg_thumbnail = ThumbnailRequirement(width, height, method, "image/jpeg")
         png_thumbnail = ThumbnailRequirement(width, height, method, "image/png")
         requirements.setdefault("image/jpeg", []).append(jpeg_thumbnail)
+        requirements.setdefault("image/webp", []).append(jpeg_thumbnail)
         requirements.setdefault("image/gif", []).append(png_thumbnail)
         requirements.setdefault("image/png", []).append(png_thumbnail)
     return {
@@ -77,6 +79,8 @@ def parse_thumbnail_requirements(thumbnail_sizes):
 
 
 class ContentRepositoryConfig(Config):
+    section = "media"
+
     def read_config(self, config, **kwargs):
 
         # Only enable the media repo if either the media repo is enabled or the
@@ -130,7 +134,7 @@ class ContentRepositoryConfig(Config):
         #
         # We don't create the storage providers here as not all workers need
         # them to be started.
-        self.media_storage_providers = []
+        self.media_storage_providers = []  # type: List[tuple]
 
         for provider_config in storage_providers:
             # We special case the module "file_system" so as not to need to
@@ -153,7 +157,6 @@ class ContentRepositoryConfig(Config):
                 (provider_class, parsed_config, wrapper_config)
             )
 
-        self.uploads_path = self.ensure_directory(config.get("uploads_path", "uploads"))
         self.dynamic_thumbnails = config.get("dynamic_thumbnails", False)
         self.thumbnail_requirements = parse_thumbnail_requirements(
             config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES)
@@ -190,6 +193,10 @@ class ContentRepositoryConfig(Config):
 
             self.url_preview_url_blacklist = config.get("url_preview_url_blacklist", ())
 
+            self.url_preview_accept_language = config.get(
+                "url_preview_accept_language"
+            ) or ["en"]
+
     def generate_config_section(self, data_dir_path, **kwargs):
         media_store = os.path.join(data_dir_path, "media_store")
         uploads_path = os.path.join(data_dir_path, "uploads")
@@ -218,20 +225,15 @@ class ContentRepositoryConfig(Config):
         #
         #media_storage_providers:
         #  - module: file_system
-        #    # Whether to write new local files.
+        #    # Whether to store newly uploaded local files
         #    store_local: false
-        #    # Whether to write new remote media
+        #    # Whether to store newly downloaded remote files
         #    store_remote: false
-        #    # Whether to block upload requests waiting for write to this
-        #    # provider to complete
+        #    # Whether to wait for successful storage for local uploads
         #    store_synchronous: false
         #    config:
         #       directory: /mnt/some/other/directory
 
-        # Directory where in-progress uploads are stored.
-        #
-        uploads_path: "%(uploads_path)s"
-
         # The largest allowed upload size in bytes
         #
         #max_upload_size: 10M
@@ -331,6 +333,31 @@ class ContentRepositoryConfig(Config):
         # The largest allowed URL preview spidering size in bytes
         #
         #max_spider_size: 10M
+
+        # A list of values for the Accept-Language HTTP header used when
+        # downloading webpages during URL preview generation. This allows
+        # Synapse to specify the preferred languages that URL previews should
+        # be in when communicating with remote servers.
+        #
+        # Each value is an IETF language tag; a 2-3 letter identifier for a
+        # language, optionally followed by subtags separated by '-', specifying
+        # a country or region variant.
+        #
+        # Multiple values can be provided, and a weight can be added to each by
+        # using quality value syntax (;q=). '*' translates to any language.
+        #
+        # Defaults to "en".
+        #
+        # Example:
+        #
+        # url_preview_accept_language:
+        #   - en-UK
+        #   - en-US;q=0.9
+        #   - fr;q=0.8
+        #   - *;q=0.7
+        #
+        url_preview_accept_language:
+        #   - en
         """
             % locals()
         )
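A standalone sketch of the thumbnail-requirements change above, assuming ThumbnailRequirement is the (width, height, method, media_type) namedtuple used by this module: WebP sources are now thumbnailed as JPEG alongside the existing rules.

    from collections import namedtuple

    ThumbnailRequirement = namedtuple(
        "ThumbnailRequirement", ["width", "height", "method", "media_type"]
    )


    def parse_thumbnail_requirements(thumbnail_sizes):
        requirements = {}
        for size in thumbnail_sizes:
            width, height, method = size["width"], size["height"], size["method"]
            jpeg = ThumbnailRequirement(width, height, method, "image/jpeg")
            png = ThumbnailRequirement(width, height, method, "image/png")
            requirements.setdefault("image/jpeg", []).append(jpeg)
            requirements.setdefault("image/webp", []).append(jpeg)  # new in this change
            requirements.setdefault("image/gif", []).append(png)
            requirements.setdefault("image/png", []).append(png)
        return {k: tuple(v) for k, v in requirements.items()}


    print(parse_thumbnail_requirements([{"width": 32, "height": 32, "method": "crop"}]))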
diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py
index a92693017b..7ac7699676 100644
--- a/synapse/config/room_directory.py
+++ b/synapse/config/room_directory.py
@@ -19,6 +19,8 @@ from ._base import Config, ConfigError
 
 
 class RoomDirectoryConfig(Config):
+    section = "roomdirectory"
+
     def read_config(self, config, **kwargs):
         self.enable_room_list_search = config.get("enable_room_list_search", True)
 
@@ -168,7 +170,7 @@ class _RoomDirectoryRule(object):
             self.action = action
         else:
             raise ConfigError(
-                "%s rules can only have action of 'allow'" " or 'deny'" % (option_name,)
+                "%s rules can only have action of 'allow' or 'deny'" % (option_name,)
             )
 
         self._alias_matches_all = alias == "*"
diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py
index 6a8161547a..d0a19751e8 100644
--- a/synapse/config/saml2_config.py
+++ b/synapse/config/saml2_config.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2018 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,12 +13,55 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
+import logging
+
+import jinja2
+import pkg_resources
+
 from synapse.python_dependencies import DependencyException, check_requirements
+from synapse.util.module_loader import load_module, load_python_module
 
 from ._base import Config, ConfigError
 
+logger = logging.getLogger(__name__)
+
+DEFAULT_USER_MAPPING_PROVIDER = (
+    "synapse.handlers.saml_handler.DefaultSamlMappingProvider"
+)
+
+
+def _dict_merge(merge_dict, into_dict):
+    """Do a deep merge of two dicts
+
+    Recursively merges `merge_dict` into `into_dict`:
+      * For keys where both `merge_dict` and `into_dict` have a dict value, the values
+        are recursively merged
+      * For all other keys, the values in `into_dict` (if any) are overwritten with
+        the value from `merge_dict`.
+
+    Args:
+        merge_dict (dict): dict to merge
+        into_dict (dict): target dict
+    """
+    for k, v in merge_dict.items():
+        if k not in into_dict:
+            into_dict[k] = v
+            continue
+
+        current_val = into_dict[k]
+
+        if isinstance(v, dict) and isinstance(current_val, dict):
+            _dict_merge(v, current_val)
+            continue
+
+        # otherwise we just overwrite
+        into_dict[k] = v
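A usage sketch for the _dict_merge helper above, with illustrative dicts: nested dict values are merged recursively, while any other key in into_dict is overwritten by the value from merge_dict.

    into = {"service": {"sp": {"allow_unsolicited": True}}, "debug": 0}
    merge = {"service": {"sp": {"want_response_signed": False}}, "debug": 1}

    _dict_merge(merge_dict=merge, into_dict=into)
    # into == {"service": {"sp": {"allow_unsolicited": True,
    #                             "want_response_signed": False}},
    #          "debug": 1}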
+
 
 class SAML2Config(Config):
+    section = "saml2"
+
     def read_config(self, config, **kwargs):
         self.saml2_enabled = False
 
@@ -26,6 +70,9 @@ class SAML2Config(Config):
         if not saml2_config or not saml2_config.get("enabled", True):
             return
 
+        if not saml2_config.get("sp_config") and not saml2_config.get("config_path"):
+            return
+
         try:
             check_requirements("saml2")
         except DependencyException as e:
@@ -33,28 +80,124 @@ class SAML2Config(Config):
 
         self.saml2_enabled = True
 
-        import saml2.config
+        self.saml2_grandfathered_mxid_source_attribute = saml2_config.get(
+            "grandfathered_mxid_source_attribute", "uid"
+        )
 
-        self.saml2_sp_config = saml2.config.SPConfig()
-        self.saml2_sp_config.load(self._default_saml_config_dict())
-        self.saml2_sp_config.load(saml2_config.get("sp_config", {}))
+        # user_mapping_provider may be None if the key is present but has no value
+        ump_dict = saml2_config.get("user_mapping_provider") or {}
+
+        # Use the default user mapping provider if not set
+        ump_dict.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
+
+        # Ensure a config is present
+        ump_dict["config"] = ump_dict.get("config") or {}
+
+        if ump_dict["module"] == DEFAULT_USER_MAPPING_PROVIDER:
+            # Load deprecated options for use by the default module
+            old_mxid_source_attribute = saml2_config.get("mxid_source_attribute")
+            if old_mxid_source_attribute:
+                logger.warning(
+                    "The config option saml2_config.mxid_source_attribute is deprecated. "
+                    "Please use saml2_config.user_mapping_provider.config"
+                    ".mxid_source_attribute instead."
+                )
+                ump_dict["config"]["mxid_source_attribute"] = old_mxid_source_attribute
+
+            old_mxid_mapping = saml2_config.get("mxid_mapping")
+            if old_mxid_mapping:
+                logger.warning(
+                    "The config option saml2_config.mxid_mapping is deprecated. Please "
+                    "use saml2_config.user_mapping_provider.config.mxid_mapping instead."
+                )
+                ump_dict["config"]["mxid_mapping"] = old_mxid_mapping
+
+        # Retrieve an instance of the module's class
+        # Pass the config dictionary to the module for processing
+        (
+            self.saml2_user_mapping_provider_class,
+            self.saml2_user_mapping_provider_config,
+        ) = load_module(ump_dict)
+
+        # Ensure loaded user mapping module has defined all necessary methods
+        # Note parse_config() is already checked during the call to load_module
+        required_methods = [
+            "get_saml_attributes",
+            "saml_response_to_user_attributes",
+            "get_remote_user_id",
+        ]
+        missing_methods = [
+            method
+            for method in required_methods
+            if not hasattr(self.saml2_user_mapping_provider_class, method)
+        ]
+        if missing_methods:
+            raise ConfigError(
+                "Class specified by saml2_config."
+                "user_mapping_provider.module is missing required "
+                "methods: %s" % (", ".join(missing_methods),)
+            )
+
+        # Get the desired saml auth response attributes from the module
+        saml2_config_dict = self._default_saml_config_dict(
+            *self.saml2_user_mapping_provider_class.get_saml_attributes(
+                self.saml2_user_mapping_provider_config
+            )
+        )
+        _dict_merge(
+            merge_dict=saml2_config.get("sp_config", {}), into_dict=saml2_config_dict
+        )
 
         config_path = saml2_config.get("config_path", None)
         if config_path is not None:
-            self.saml2_sp_config.load_file(config_path)
+            mod = load_python_module(config_path)
+            _dict_merge(merge_dict=mod.CONFIG, into_dict=saml2_config_dict)
+
+        import saml2.config
+
+        self.saml2_sp_config = saml2.config.SPConfig()
+        self.saml2_sp_config.load(saml2_config_dict)
 
         # session lifetime: in milliseconds
         self.saml2_session_lifetime = self.parse_duration(
             saml2_config.get("saml_session_lifetime", "5m")
         )
 
-    def _default_saml_config_dict(self):
+        template_dir = saml2_config.get("template_dir")
+        if not template_dir:
+            template_dir = pkg_resources.resource_filename("synapse", "res/templates",)
+
+        loader = jinja2.FileSystemLoader(template_dir)
+        # enable auto-escape here, to avoid having to remember to escape manually
+        # in the template
+        env = jinja2.Environment(loader=loader, autoescape=True)
+        self.saml2_error_html_template = env.get_template("saml_error.html")
+
+    def _default_saml_config_dict(
+        self, required_attributes: set, optional_attributes: set
+    ):
+        """Generate a configuration dictionary with required and optional attributes that
+        will be needed to process new user registration
+
+        Args:
+            required_attributes: SAML auth response attributes that are
+                necessary to function
+            optional_attributes: SAML auth response attributes that can be used to add
+                additional information to Synapse user accounts, but are not required
+
+        Returns:
+            dict: A SAML configuration dictionary
+        """
         import saml2
 
         public_baseurl = self.public_baseurl
         if public_baseurl is None:
             raise ConfigError("saml2_config requires a public_baseurl to be set")
 
+        if self.saml2_grandfathered_mxid_source_attribute:
+            optional_attributes.add(self.saml2_grandfathered_mxid_source_attribute)
+        optional_attributes -= required_attributes
+
         metadata_url = public_baseurl + "_matrix/saml2/metadata.xml"
         response_url = public_baseurl + "_matrix/saml2/authn_response"
         return {
@@ -66,22 +209,26 @@ class SAML2Config(Config):
                             (response_url, saml2.BINDING_HTTP_POST)
                         ]
                     },
-                    "required_attributes": ["uid"],
-                    "optional_attributes": ["mail", "surname", "givenname"],
+                    "required_attributes": list(required_attributes),
+                    "optional_attributes": list(optional_attributes),
+                    # "name_id_format": saml2.saml.NAMEID_FORMAT_PERSISTENT,
                 }
             },
         }
 
     def generate_config_section(self, config_dir_path, server_name, **kwargs):
         return """\
+        ## Single sign-on integration ##
+
         # Enable SAML2 for registration and login. Uses pysaml2.
         #
-        # `sp_config` is the configuration for the pysaml2 Service Provider.
-        # See pysaml2 docs for format of config.
+        # At least one of `sp_config` or `config_path` must be set in this section to
+        # enable SAML login.
         #
-        # Default values will be used for the 'entityid' and 'service' settings,
-        # so it is not normally necessary to specify them unless you need to
-        # override them.
+        # (You will probably also want to set the following options to `false` to
+        # disable the regular login/registration flows:
+        #   * enable_registration
+        #   * password_config.enabled)
         #
         # Once SAML support is enabled, a metadata file will be exposed at
         # https://<server>:<port>/_matrix/saml2/metadata.xml, which you may be able to
@@ -89,52 +236,135 @@ class SAML2Config(Config):
         # the IdP to use an ACS location of
         # https://<server>:<port>/_matrix/saml2/authn_response.
         #
-        #saml2_config:
-        #  sp_config:
-        #    # point this to the IdP's metadata. You can use either a local file or
-        #    # (preferably) a URL.
-        #    metadata:
-        #      #local: ["saml2/idp.xml"]
-        #      remote:
-        #        - url: https://our_idp/metadata.xml
-        #
-        #    # By default, the user has to go to our login page first. If you'd like to
-        #    # allow IdP-initiated login, set 'allow_unsolicited: True' in a
-        #    # 'service.sp' section:
-        #    #
-        #    #service:
-        #    #  sp:
-        #    #    allow_unsolicited: True
-        #
-        #    # The examples below are just used to generate our metadata xml, and you
-        #    # may well not need it, depending on your setup. Alternatively you
-        #    # may need a whole lot more detail - see the pysaml2 docs!
-        #
-        #    description: ["My awesome SP", "en"]
-        #    name: ["Test SP", "en"]
-        #
-        #    organization:
-        #      name: Example com
-        #      display_name:
-        #        - ["Example co", "en"]
-        #      url: "http://example.com"
-        #
-        #    contact_person:
-        #      - given_name: Bob
-        #        sur_name: "the Sysadmin"
-        #        email_address": ["admin@example.com"]
-        #        contact_type": technical
-        #
-        #  # Instead of putting the config inline as above, you can specify a
-        #  # separate pysaml2 configuration file:
-        #  #
-        #  config_path: "%(config_dir_path)s/sp_conf.py"
-        #
-        #  # the lifetime of a SAML session. This defines how long a user has to
-        #  # complete the authentication process, if allow_unsolicited is unset.
-        #  # The default is 5 minutes.
-        #  #
-        #  # saml_session_lifetime: 5m
+        saml2_config:
+          # `sp_config` is the configuration for the pysaml2 Service Provider.
+          # See pysaml2 docs for format of config.
+          #
+          # Default values will be used for the 'entityid' and 'service' settings,
+          # so it is not normally necessary to specify them unless you need to
+          # override them.
+          #
+          #sp_config:
+          #  # point this to the IdP's metadata. You can use either a local file or
+          #  # (preferably) a URL.
+          #  metadata:
+          #    #local: ["saml2/idp.xml"]
+          #    remote:
+          #      - url: https://our_idp/metadata.xml
+          #
+          #  # By default, the user has to go to our login page first. If you'd like
+          #  # to allow IdP-initiated login, set 'allow_unsolicited: true' in a
+          #  # 'service.sp' section:
+          #  #
+          #  #service:
+          #  #  sp:
+          #  #    allow_unsolicited: true
+          #
+          #  # The examples below are just used to generate our metadata xml, and you
+          #  # may well not need them, depending on your setup. Alternatively you
+          #  # may need a whole lot more detail - see the pysaml2 docs!
+          #
+          #  description: ["My awesome SP", "en"]
+          #  name: ["Test SP", "en"]
+          #
+          #  organization:
+          #    name: Example com
+          #    display_name:
+          #      - ["Example co", "en"]
+          #    url: "http://example.com"
+          #
+          #  contact_person:
+          #    - given_name: Bob
+          #      sur_name: "the Sysadmin"
+          #      email_address: ["admin@example.com"]
+          #      contact_type: technical
+
+          # Instead of putting the config inline as above, you can specify a
+          # separate pysaml2 configuration file:
+          #
+          #config_path: "%(config_dir_path)s/sp_conf.py"
+
+          # The lifetime of a SAML session. This defines how long a user has to
+          # complete the authentication process, if allow_unsolicited is unset.
+          # The default is 5 minutes.
+          #
+          #saml_session_lifetime: 5m
+
+          # An external module can be provided here as a custom solution for
+          # mapping attributes returned from a SAML provider onto a Matrix user.
+          #
+          user_mapping_provider:
+            # The custom module's class. Uncomment to use a custom module.
+            #
+            #module: mapping_provider.SamlMappingProvider
+
+            # Custom configuration values for the module. The options below are
+            # intended for the built-in provider; they should be changed if
+            # using a custom module. This section will be passed as a Python
+            # dictionary to the module's `parse_config` method.
+            #
+            config:
+              # The SAML attribute (after mapping via the attribute maps) to use
+              # to derive the Matrix ID from. 'uid' by default.
+              #
+              # Note: This used to be configured by the
+              # saml2_config.mxid_source_attribute option. If that is still
+              # defined, its value will be used instead.
+              #
+              #mxid_source_attribute: displayName
+
+              # The mapping system to use for mapping the saml attribute onto a
+              # matrix ID.
+              #
+              # Options include:
+              #  * 'hexencode' (which maps unpermitted characters to '=xx')
+              #  * 'dotreplace' (which replaces unpermitted characters with
+              #     '.').
+              # The default is 'hexencode'.
+              #
+              # Note: This used to be configured by the
+              # saml2_config.mxid_mapping option. If that is still defined, its
+              # value will be used instead.
+              #
+              #mxid_mapping: dotreplace
+
+          # In previous versions of synapse, the mapping from SAML attribute to
+          # MXID was always calculated dynamically rather than stored in a
+          # table. For backwards-compatibility, we will look for user_ids
+          # matching such a pattern before creating a new account.
+          #
+          # This setting controls the SAML attribute which will be used for this
+          # backwards-compatibility lookup. Typically it should be 'uid', but if
+          # the attribute maps are changed, it may be necessary to change it.
+          #
+          # The default is 'uid'.
+          #
+          #grandfathered_mxid_source_attribute: upn
+
+          # Directory in which Synapse will try to find the template files below.
+          # If not set, default templates from within the Synapse package will be used.
+          #
+          # DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
+          # If you *do* uncomment it, you will need to make sure that all the templates
+          # below are in the directory.
+          #
+          # Synapse will look for the following templates in this directory:
+          #
+          # * HTML page to display to users if something goes wrong during the
+          #   authentication process: 'saml_error.html'.
+          #
+          #   When rendering, this template is given the following variables:
+          #     * code: an HTTP error code corresponding to the error that is being
+          #       returned (typically 400 or 500)
+          #
+          #     * msg: a textual message describing the error.
+          #
+          #   The variables will automatically be HTML-escaped.
+          #
+          # You can see the default templates at:
+          # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
+          #
+          #template_dir: "res/templates"
         """ % {
             "config_dir_path": config_dir_path
         }
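For readers wondering what the user_mapping_provider interface looks like in practice, here is a hedged skeleton based only on what the checks above reveal: the method names come from the required_methods list, get_saml_attributes must return a (required, optional) pair of sets (it is unpacked into _default_saml_config_dict), and parse_config receives the config dictionary (validated by load_module). The parameters of the remaining two methods are not shown in this diff, so their signatures below are assumptions.

class SamlMappingProvider:
    """Hypothetical custom mapping provider sketch; not shipped with Synapse."""

    def __init__(self, parsed_config):
        # Assumption: instantiated with the result of parse_config().
        self._config = parsed_config

    @staticmethod
    def parse_config(config):
        # Receives the user_mapping_provider.config dictionary.
        return config

    @staticmethod
    def get_saml_attributes(config):
        # Must return (required_attributes, optional_attributes) as two sets,
        # which read_config feeds into _default_saml_config_dict above.
        return {"uid"}, {"mail", "displayName"}

    def get_remote_user_id(self, saml_response, client_redirect_url):
        # Assumed signature: return a stable identifier for the authenticating user.
        return saml_response.ava["uid"][0]

    def saml_response_to_user_attributes(self, saml_response, failures, client_redirect_url):
        # Assumed signature: derive registration attributes for the Matrix user.
        return {"mxid_localpart": saml_response.ava["uid"][0], "displayname": None}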
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 7f8d315954..f57eefc99c 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -19,6 +19,7 @@ import logging
 import os.path
 import re
 from textwrap import indent
+from typing import Dict, List, Optional
 
 import attr
 import yaml
@@ -40,7 +41,7 @@ logger = logging.Logger(__name__)
 # in the list.
 DEFAULT_BIND_ADDRESSES = ["::", "0.0.0.0"]
 
-DEFAULT_ROOM_VERSION = "4"
+DEFAULT_ROOM_VERSION = "5"
 
 ROOM_COMPLEXITY_TOO_GREAT = (
     "Your homeserver is unable to join rooms this large or complex. "
@@ -48,8 +49,17 @@ ROOM_COMPLEXITY_TOO_GREAT = (
     "to join this room."
 )
 
+METRICS_PORT_WARNING = """\
+The metrics_port configuration option is deprecated in Synapse 0.31 in favour of
+a listener. Please see
+https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md
+on how to configure the new listener.
+--------------------------------------------------------------------------------"""
+
 
 class ServerConfig(Config):
+    section = "server"
+
     def read_config(self, config, **kwargs):
         self.server_name = config["server_name"]
         self.server_context = config.get("server_context", None)
@@ -92,6 +102,12 @@ class ServerConfig(Config):
             "require_auth_for_profile_requests", False
         )
 
+        # Whether to require sharing a room with a user to retrieve their
+        # profile data
+        self.limit_profile_requests_to_users_who_share_rooms = config.get(
+            "limit_profile_requests_to_users_who_share_rooms", False,
+        )
+
         if "restrict_public_rooms_to_local_users" in config and (
             "allow_public_rooms_without_auth" in config
             or "allow_public_rooms_over_federation" in config
@@ -108,15 +124,16 @@ class ServerConfig(Config):
             self.allow_public_rooms_without_auth = False
             self.allow_public_rooms_over_federation = False
         else:
-            # If set to 'False', requires authentication to access the server's public
-            # rooms directory through the client API. Defaults to 'True'.
+            # If set to 'true', removes the need for authentication to access the server's
+            # public rooms directory through the client API, meaning that anyone can
+            # query the room directory. Defaults to 'false'.
             self.allow_public_rooms_without_auth = config.get(
-                "allow_public_rooms_without_auth", True
+                "allow_public_rooms_without_auth", False
             )
-            # If set to 'False', forbids any other homeserver to fetch the server's public
-            # rooms directory via federation. Defaults to 'True'.
+            # If set to 'true', allows any other homeserver to fetch the server's public
+            # rooms directory via federation. Defaults to 'false'.
             self.allow_public_rooms_over_federation = config.get(
-                "allow_public_rooms_over_federation", True
+                "allow_public_rooms_over_federation", False
             )
 
         default_room_version = config.get("default_room_version", DEFAULT_ROOM_VERSION)
@@ -161,6 +178,7 @@ class ServerConfig(Config):
         )
 
         self.mau_trial_days = config.get("mau_trial_days", 0)
+        self.mau_limit_alerting = config.get("mau_limit_alerting", True)
 
         # How long to keep redacted events in the database in unredacted form
         # before redacting them.
@@ -172,17 +190,23 @@ class ServerConfig(Config):
         else:
             self.redaction_retention_period = None
 
+        # How long to keep entries in the `users_ips` table.
+        user_ips_max_age = config.get("user_ips_max_age", "28d")
+        if user_ips_max_age is not None:
+            self.user_ips_max_age = self.parse_duration(user_ips_max_age)
+        else:
+            self.user_ips_max_age = None
+
         # Options to disable HS
         self.hs_disabled = config.get("hs_disabled", False)
         self.hs_disabled_message = config.get("hs_disabled_message", "")
-        self.hs_disabled_limit_type = config.get("hs_disabled_limit_type", "")
 
         # Admin uri to direct users at should their instance become blocked
         # due to resource constraints
         self.admin_contact = config.get("admin_contact", None)
 
         # FIXME: federation_domain_whitelist needs sytests
-        self.federation_domain_whitelist = None
+        self.federation_domain_whitelist = None  # type: Optional[dict]
         federation_domain_whitelist = config.get("federation_domain_whitelist", None)
 
         if federation_domain_whitelist is not None:
@@ -206,7 +230,7 @@ class ServerConfig(Config):
             self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
         except Exception as e:
             raise ConfigError(
-                "Invalid range(s) provided in " "federation_ip_range_blacklist: %s" % e
+                "Invalid range(s) provided in federation_ip_range_blacklist: %s" % e
             )
 
         if self.public_baseurl is not None:
@@ -229,7 +253,133 @@ class ServerConfig(Config):
         # events with profile information that differ from the target's global profile.
         self.allow_per_room_profiles = config.get("allow_per_room_profiles", True)
 
-        self.listeners = []
+        retention_config = config.get("retention")
+        if retention_config is None:
+            retention_config = {}
+
+        self.retention_enabled = retention_config.get("enabled", False)
+
+        retention_default_policy = retention_config.get("default_policy")
+
+        if retention_default_policy is not None:
+            self.retention_default_min_lifetime = retention_default_policy.get(
+                "min_lifetime"
+            )
+            if self.retention_default_min_lifetime is not None:
+                self.retention_default_min_lifetime = self.parse_duration(
+                    self.retention_default_min_lifetime
+                )
+
+            self.retention_default_max_lifetime = retention_default_policy.get(
+                "max_lifetime"
+            )
+            if self.retention_default_max_lifetime is not None:
+                self.retention_default_max_lifetime = self.parse_duration(
+                    self.retention_default_max_lifetime
+                )
+
+            if (
+                self.retention_default_min_lifetime is not None
+                and self.retention_default_max_lifetime is not None
+                and (
+                    self.retention_default_min_lifetime
+                    > self.retention_default_max_lifetime
+                )
+            ):
+                raise ConfigError(
+                    "The default retention policy's 'min_lifetime' can not be greater"
+                    " than its 'max_lifetime'"
+                )
+        else:
+            self.retention_default_min_lifetime = None
+            self.retention_default_max_lifetime = None
+
+        if self.retention_enabled:
+            logger.info(
+                "Message retention policies support enabled with the following default"
+                " policy: min_lifetime = %s ; max_lifetime = %s",
+                self.retention_default_min_lifetime,
+                self.retention_default_max_lifetime,
+            )
+
+        self.retention_allowed_lifetime_min = retention_config.get(
+            "allowed_lifetime_min"
+        )
+        if self.retention_allowed_lifetime_min is not None:
+            self.retention_allowed_lifetime_min = self.parse_duration(
+                self.retention_allowed_lifetime_min
+            )
+
+        self.retention_allowed_lifetime_max = retention_config.get(
+            "allowed_lifetime_max"
+        )
+        if self.retention_allowed_lifetime_max is not None:
+            self.retention_allowed_lifetime_max = self.parse_duration(
+                self.retention_allowed_lifetime_max
+            )
+
+        if (
+            self.retention_allowed_lifetime_min is not None
+            and self.retention_allowed_lifetime_max is not None
+            and self.retention_allowed_lifetime_min
+            > self.retention_allowed_lifetime_max
+        ):
+            raise ConfigError(
+                "Invalid retention policy limits: 'allowed_lifetime_min' can not be"
+                " greater than 'allowed_lifetime_max'"
+            )
+
+        self.retention_purge_jobs = []  # type: List[Dict[str, Optional[int]]]
+        for purge_job_config in retention_config.get("purge_jobs", []):
+            interval_config = purge_job_config.get("interval")
+
+            if interval_config is None:
+                raise ConfigError(
+                    "A retention policy's purge jobs configuration must have the"
+                    " 'interval' key set."
+                )
+
+            interval = self.parse_duration(interval_config)
+
+            shortest_max_lifetime = purge_job_config.get("shortest_max_lifetime")
+
+            if shortest_max_lifetime is not None:
+                shortest_max_lifetime = self.parse_duration(shortest_max_lifetime)
+
+            longest_max_lifetime = purge_job_config.get("longest_max_lifetime")
+
+            if longest_max_lifetime is not None:
+                longest_max_lifetime = self.parse_duration(longest_max_lifetime)
+
+            if (
+                shortest_max_lifetime is not None
+                and longest_max_lifetime is not None
+                and shortest_max_lifetime > longest_max_lifetime
+            ):
+                raise ConfigError(
+                    "A retention policy's purge jobs configuration's"
+                    " 'shortest_max_lifetime' value can not be greater than its"
+                    " 'longest_max_lifetime' value."
+                )
+
+            self.retention_purge_jobs.append(
+                {
+                    "interval": interval,
+                    "shortest_max_lifetime": shortest_max_lifetime,
+                    "longest_max_lifetime": longest_max_lifetime,
+                }
+            )
+
+        if not self.retention_purge_jobs:
+            self.retention_purge_jobs = [
+                {
+                    "interval": self.parse_duration("1d"),
+                    "shortest_max_lifetime": None,
+                    "longest_max_lifetime": None,
+                }
+            ]
+
+        self.listeners = []  # type: List[dict]
         for listener in config.get("listeners", []):
             if not isinstance(listener.get("port", None), int):
                 raise ConfigError(
@@ -273,7 +423,10 @@ class ServerConfig(Config):
                 validator=attr.validators.instance_of(bool), default=False
             )
             complexity = attr.ib(
-                validator=attr.validators.instance_of((int, float)), default=1.0
+                validator=attr.validators.instance_of(
+                    (float, int)  # type: ignore[arg-type] # noqa
+                ),
+                default=1.0,
             )
             complexity_error = attr.ib(
                 validator=attr.validators.instance_of(str),
@@ -281,7 +434,7 @@ class ServerConfig(Config):
             )
 
         self.limit_remote_rooms = LimitRemoteRoomsConfig(
-            **config.get("limit_remote_rooms", {})
+            **(config.get("limit_remote_rooms") or {})
         )
 
         bind_port = config.get("bind_port")
@@ -334,14 +487,7 @@ class ServerConfig(Config):
 
         metrics_port = config.get("metrics_port")
         if metrics_port:
-            logger.warn(
-                (
-                    "The metrics_port configuration option is deprecated in Synapse 0.31 "
-                    "in favour of a listener. Please see "
-                    "http://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md"
-                    " on how to configure the new listener."
-                )
-            )
+            logger.warning(METRICS_PORT_WARNING)
 
             self.listeners.append(
                 {
@@ -355,14 +501,28 @@ class ServerConfig(Config):
 
         _check_resource_config(self.listeners)
 
-        # An experimental option to try and periodically clean up extremities
-        # by sending dummy events.
         self.cleanup_extremities_with_dummy_events = config.get(
-            "cleanup_extremities_with_dummy_events", False
+            "cleanup_extremities_with_dummy_events", True
+        )
+
+        # The number of forward extremities in a room needed to send a dummy event.
+        self.dummy_events_threshold = config.get("dummy_events_threshold", 10)
+
+        self.enable_ephemeral_messages = config.get("enable_ephemeral_messages", False)
+
+        # Inhibits the /requestToken endpoints from returning an error that might leak
+        # information about whether an e-mail address is in use or not on this
+        # homeserver, making them instead return a 200 with a fake sid if this kind of
+        # error is met, without sending anything.
+        # This is a compromise between sending an email, which could be a spam vector,
+        # and letting the client know which email address is bound to an account and
+        # which one isn't.
+        self.request_token_inhibit_3pid_errors = config.get(
+            "request_token_inhibit_3pid_errors", False,
         )
 
-    def has_tls_listener(self):
-        return any(l["tls"] for l in self.listeners)
+    def has_tls_listener(self) -> bool:
+        return any(listener["tls"] for listener in self.listeners)
 
     def generate_config_section(
         self, server_name, data_dir_path, open_private_ports, listeners, **kwargs
@@ -458,10 +618,15 @@ class ServerConfig(Config):
         #
         pid_file: %(pid_file)s
 
-        # The path to the web client which will be served at /_matrix/client/
-        # if 'webclient' is configured under the 'listeners' configuration.
+        # The absolute URL to the web client which /_matrix/client will redirect
+        # to if 'webclient' is configured under the 'listeners' configuration.
+        #
+        # This option can also be set to the filesystem path to the web client
+        # which will be served at /_matrix/client/ if 'webclient' is configured
+        # under the 'listeners' configuration, however this is a security risk:
+        # https://github.com/matrix-org/synapse#security-note
         #
-        #web_client_location: "/path/to/web/root"
+        #web_client_location: https://riot.example.com/
 
         # The public-facing base URL that clients use to access this HS
         # (not including _matrix/...). This is the same URL a user would
@@ -489,15 +654,23 @@ class ServerConfig(Config):
         #
         #require_auth_for_profile_requests: true
 
-        # If set to 'false', requires authentication to access the server's public rooms
-        # directory through the client API. Defaults to 'true'.
+        # Uncomment to require a user to share a room with another user in order
+        # to retrieve their profile information. Only checked on Client-Server
+        # requests. Profile requests from other servers should be checked by the
+        # requesting server. Defaults to 'false'.
         #
-        #allow_public_rooms_without_auth: false
+        #limit_profile_requests_to_users_who_share_rooms: true
 
-        # If set to 'false', forbids any other homeserver to fetch the server's public
-        # rooms directory via federation. Defaults to 'true'.
+        # If set to 'true', removes the need for authentication to access the server's
+        # public rooms directory through the client API, meaning that anyone can
+        # query the room directory. Defaults to 'false'.
         #
-        #allow_public_rooms_over_federation: false
+        #allow_public_rooms_without_auth: true
+
+        # If set to 'true', allows any other homeserver to fetch the server's public
+        # rooms directory via federation. Defaults to 'false'.
+        #
+        #allow_public_rooms_over_federation: true
 
         # The default room version for newly created rooms.
         #
@@ -521,7 +694,7 @@ class ServerConfig(Config):
         # Whether room invites to users on this server should be blocked
         # (except those sent by local server admins). The default is False.
         #
-        #block_non_admin_invites: True
+        #block_non_admin_invites: true
 
         # Room searching
         #
@@ -545,6 +718,9 @@ class ServerConfig(Config):
         # blacklist IP address CIDR ranges. If this option is not specified, or
         # specified with an empty list, no ip range blacklist will be enforced.
         #
+        # As of Synapse v1.4.0 this option also affects any outbound requests to identity
+        # servers provided by user input.
+        #
         # (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
         # listed here, since they correspond to unroutable addresses.)
         #
@@ -650,6 +826,18 @@ class ServerConfig(Config):
           #  bind_addresses: ['::1', '127.0.0.1']
           #  type: manhole
 
+        # Forward extremities can build up in a room due to networking delays between
+        # homeservers. Once this happens in a large room, calculation of the state of
+        # that room can become quite expensive. To mitigate this, once the number of
+        # forward extremities reaches a given threshold, Synapse will send an
+        # org.matrix.dummy_event event, which will reduce the forward extremities
+        # in the room.
+        #
+        # This setting defines the threshold (i.e. number of forward extremities in the
+        # room) at which dummy events are sent. The default value is 10.
+        #
+        #dummy_events_threshold: 5
+
 
         ## Homeserver blocking ##
 
@@ -659,9 +847,8 @@ class ServerConfig(Config):
 
         # Global blocking
         #
-        #hs_disabled: False
+        #hs_disabled: false
         #hs_disabled_message: 'Human readable reason for why the HS is blocked'
-        #hs_disabled_limit_type: 'error code(str), to help clients decode reason'
 
         # Monthly Active User Blocking
         #
@@ -681,15 +868,22 @@ class ServerConfig(Config):
         # sign up in a short space of time never to return after their initial
         # session.
         #
-        #limit_usage_by_mau: False
+        # 'mau_limit_alerting' is a means of limiting client-side alerting
+        # should the mau limit be reached. This is useful for small instances
+        # where the admin has 5 mau seats (say) for 5 specific people and no
+        # interest in increasing the mau limit further. Defaults to True, which
+        # means that alerting is enabled.
+        #
+        #limit_usage_by_mau: false
         #max_mau_value: 50
         #mau_trial_days: 2
+        #mau_limit_alerting: false
 
         # If enabled, the metrics for the number of monthly active users will
         # be populated, however no one will be limited. If limit_usage_by_mau
         # is true, this is implied to be true.
         #
-        #mau_stats_only: False
+        #mau_stats_only: false
 
         # Sometimes the server admin will want to ensure certain accounts are
         # never blocked by mau checking. These accounts are specified here.
@@ -701,22 +895,27 @@ class ServerConfig(Config):
         # Used by phonehome stats to group together related servers.
         #server_context: context
 
-        # Resource-constrained Homeserver Settings
+        # Resource-constrained homeserver settings
         #
-        # If limit_remote_rooms.enabled is True, the room complexity will be
-        # checked before a user joins a new remote room. If it is above
-        # limit_remote_rooms.complexity, it will disallow joining or
-        # instantly leave.
+        # When this is enabled, the room "complexity" will be checked before a user
+        # joins a new remote room. If it is above the complexity limit, the server will
+        # disallow joining, or will instantly leave.
         #
-        # limit_remote_rooms.complexity_error can be set to customise the text
-        # displayed to the user when a room above the complexity threshold has
-        # its join cancelled.
+        # Room complexity is an arbitrary measure based on factors such as the number of
+        # users in the room.
         #
-        # Uncomment the below lines to enable:
-        #limit_remote_rooms:
-        #  enabled: True
-        #  complexity: 1.0
-        #  complexity_error: "This room is too complex."
+        limit_remote_rooms:
+          # Uncomment to enable room complexity checking.
+          #
+          #enabled: true
+
+          # The limit above which rooms cannot be joined. The default is 1.0.
+          #
+          #complexity: 0.5
+
+          # Override the error which is returned when the room is too complex.
+          #
+          #complexity_error: "This room is too complex."
 
         # Whether to require a user to be in the room to add an alias to it.
         # Defaults to 'true'.
@@ -734,7 +933,86 @@ class ServerConfig(Config):
         #
         # Defaults to `7d`. Set to `null` to disable.
         #
-        redaction_retention_period: 7d
+        #redaction_retention_period: 28d
+
+        # How long to track users' last seen time and IPs in the database.
+        #
+        # Defaults to `28d`. Set to `null` to disable clearing out of old rows.
+        #
+        #user_ips_max_age: 14d
+
+        # Message retention policy at the server level.
+        #
+        # Room admins and mods can define a retention period for their rooms using the
+        # 'm.room.retention' state event, and server admins can cap this period by setting
+        # the 'allowed_lifetime_min' and 'allowed_lifetime_max' config options.
+        #
+        # If this feature is enabled, Synapse will regularly look for and purge events
+        # which are older than the room's maximum retention period. Synapse will also
+        # filter events received over federation so that events that should have been
+        # purged are ignored and not stored again.
+        #
+        retention:
+          # The message retention policies feature is disabled by default. Uncomment the
+          # following line to enable it.
+          #
+          #enabled: true
+
+          # Default retention policy. If set, Synapse will apply it to rooms that lack the
+          # 'm.room.retention' state event. Currently, the value of 'min_lifetime' doesn't
+          # matter much because Synapse doesn't take it into account yet.
+          #
+          #default_policy:
+          #  min_lifetime: 1d
+          #  max_lifetime: 1y
+
+          # Retention policy limits. If set, a user won't be able to send a
+          # 'm.room.retention' event which features a 'min_lifetime' or a 'max_lifetime'
+          # that's not within this range. This is especially useful in closed federations,
+          # in which server admins can make sure every federating server applies the same
+          # rules.
+          #
+          #allowed_lifetime_min: 1d
+          #allowed_lifetime_max: 1y
+
+          # Server admins can define the settings of the background jobs purging the
+          # events whose lifetime has expired under the 'purge_jobs' section.
+          #
+          # If no configuration is provided, a single job will be set up to delete expired
+          # events in every room daily.
+          #
+          # Each job's configuration defines which range of message lifetimes the job
+          # takes care of. For example, if 'shortest_max_lifetime' is '2d' and
+          # 'longest_max_lifetime' is '3d', the job will handle purging expired events in
+          # rooms whose state defines a 'max_lifetime' that's both higher than 2 days, and
+          # lower than or equal to 3 days. Both the minimum and the maximum value of a
+          # range are optional, e.g. a job with no 'shortest_max_lifetime' and a
+          # 'longest_max_lifetime' of '3d' will handle every room with a retention policy
+          # whose 'max_lifetime' is lower than or equal to three days.
+          #
+          # The rationale for this per-job configuration is that some rooms might have a
+          # retention policy with a low 'max_lifetime', where history needs to be purged
+          # of outdated messages on a more frequent basis than for the rest of the rooms
+          # (e.g. every 12h), without wanting that purge to be performed by a job that's
+          # iterating over every room it knows, which could be heavy on the server.
+          #
+          #purge_jobs:
+          #  - shortest_max_lifetime: 1d
+          #    longest_max_lifetime: 3d
+          #    interval: 12h
+          #  - shortest_max_lifetime: 3d
+          #    longest_max_lifetime: 1y
+          #    interval: 1d
+
+        # Inhibits the /requestToken endpoints from returning an error that might leak
+        # information about whether an e-mail address is in use or not on this
+        # homeserver.
+        # Note that for some endpoints the error situation is the e-mail already being
+        # used, and for others the error is the e-mail being unused.
+        # If this option is enabled, instead of returning an error, these endpoints will
+        # act as if no error happened and return a fake session ID ('sid') to clients.
+        #
+        #request_token_inhibit_3pid_errors: true
         """
             % locals()
         )
@@ -755,20 +1033,20 @@ class ServerConfig(Config):
             "--daemonize",
             action="store_true",
             default=None,
-            help="Daemonize the home server",
+            help="Daemonize the homeserver",
         )
         server_group.add_argument(
             "--print-pidfile",
             action="store_true",
             default=None,
-            help="Print the path to the pidfile just" " before daemonizing",
+            help="Print the path to the pidfile just before daemonizing",
         )
         server_group.add_argument(
             "--manhole",
             metavar="PORT",
             dest="manhole",
             type=int,
-            help="Turn on the twisted telnet manhole" " service on the given port.",
+            help="Turn on the twisted telnet manhole service on the given port.",
         )
 
 
@@ -834,12 +1112,12 @@ KNOWN_RESOURCES = (
 
 
 def _check_resource_config(listeners):
-    resource_names = set(
+    resource_names = {
         res_name
         for listener in listeners
         for res in listener.get("resources", [])
         for res_name in res.get("names", [])
-    )
+    }
 
     for resource in resource_names:
         if resource not in KNOWN_RESOURCES:
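To make the purge-job range semantics described in the retention section above concrete, here is a small standalone sketch (not Synapse's actual scheduler) of how a room's max_lifetime selects a job, using the half-open (shortest_max_lifetime, longest_max_lifetime] ranges that the comments describe:

def job_for_max_lifetime(purge_jobs, max_lifetime_ms):
    # Illustrative only: a job covers rooms whose max_lifetime is strictly greater
    # than shortest_max_lifetime and lower than or equal to longest_max_lifetime,
    # with None meaning "unbounded" on that side.
    for job in purge_jobs:
        lower = job["shortest_max_lifetime"]
        upper = job["longest_max_lifetime"]
        if lower is not None and max_lifetime_ms <= lower:
            continue
        if upper is not None and max_lifetime_ms > upper:
            continue
        return job
    return None

DAY_MS = 24 * 60 * 60 * 1000
jobs = [
    {"interval": 12 * 3600 * 1000, "shortest_max_lifetime": DAY_MS, "longest_max_lifetime": 3 * DAY_MS},
    {"interval": DAY_MS, "shortest_max_lifetime": 3 * DAY_MS, "longest_max_lifetime": 365 * DAY_MS},
]
print(job_for_max_lifetime(jobs, 2 * DAY_MS)["interval"])  # 43200000, i.e. the 12h job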
diff --git a/synapse/config/server_notices_config.py b/synapse/config/server_notices_config.py
index eaac3d73bc..6c427b6f92 100644
--- a/synapse/config/server_notices_config.py
+++ b/synapse/config/server_notices_config.py
@@ -51,7 +51,7 @@ class ServerNoticesConfig(Config):
             None if server notices are not enabled.
 
         server_notices_mxid_avatar_url (str|None):
-            The display name to use for the server notices user.
+            The MXC URL for the avatar of the server notices user.
             None if server notices are not enabled.
 
         server_notices_room_name (str|None):
@@ -59,8 +59,10 @@ class ServerNoticesConfig(Config):
             None if server notices are not enabled.
     """
 
-    def __init__(self):
-        super(ServerNoticesConfig, self).__init__()
+    section = "servernotices"
+
+    def __init__(self, *args):
+        super(ServerNoticesConfig, self).__init__(*args)
         self.server_notices_mxid = None
         self.server_notices_mxid_display_name = None
         self.server_notices_mxid_avatar_url = None
diff --git a/synapse/config/spam_checker.py b/synapse/config/spam_checker.py
index e40797ab50..3d067d29db 100644
--- a/synapse/config/spam_checker.py
+++ b/synapse/config/spam_checker.py
@@ -13,23 +13,47 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from typing import Any, Dict, List, Tuple
+
+from synapse.config import ConfigError
 from synapse.util.module_loader import load_module
 
 from ._base import Config
 
 
 class SpamCheckerConfig(Config):
+    section = "spamchecker"
+
     def read_config(self, config, **kwargs):
-        self.spam_checker = None
+        self.spam_checkers = []  # type: List[Tuple[Any, Dict]]
+
+        spam_checkers = config.get("spam_checker") or []
+        if isinstance(spam_checkers, dict):
+            # The spam_checker config option used to only support one
+            # spam checker, and thus was simply a dictionary with module
+            # and config keys. Support this old behaviour by checking
+            # to see if the option resolves to a dictionary
+            self.spam_checkers.append(load_module(spam_checkers))
+        elif isinstance(spam_checkers, list):
+            for spam_checker in spam_checkers:
+                if not isinstance(spam_checker, dict):
+                    raise ConfigError("spam_checker syntax is incorrect")
 
-        provider = config.get("spam_checker", None)
-        if provider is not None:
-            self.spam_checker = load_module(provider)
+                self.spam_checkers.append(load_module(spam_checker))
+        else:
+            raise ConfigError("spam_checker syntax is incorrect")
 
     def generate_config_section(self, **kwargs):
         return """\
-        #spam_checker:
-        #  module: "my_custom_project.SuperSpamChecker"
-        #  config:
-        #    example_option: 'things'
+        # Spam checkers are third-party modules that can block specific actions
+        # of local users, such as creating rooms and registering undesirable
+        # usernames, and can also act on remote users by redacting incoming events.
+        #
+        spam_checker:
+           #- module: "my_custom_project.SuperSpamChecker"
+           #  config:
+           #    example_option: 'things'
+           #- module: "some_other_project.BadEventStopper"
+           #  config:
+           #    example_stop_events_from: ['@bad:example.com']
         """
diff --git a/synapse/config/sso.py b/synapse/config/sso.py
new file mode 100644
index 0000000000..73b7296399
--- /dev/null
+++ b/synapse/config/sso.py
@@ -0,0 +1,148 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+from typing import Any, Dict
+
+import pkg_resources
+
+from ._base import Config
+
+
+class SSOConfig(Config):
+    """SSO Configuration
+    """
+
+    section = "sso"
+
+    def read_config(self, config, **kwargs):
+        sso_config = config.get("sso") or {}  # type: Dict[str, Any]
+
+        # Pick a template directory in order of:
+        # * The sso-specific template_dir
+        # * /path/to/synapse/install/res/templates
+        template_dir = sso_config.get("template_dir")
+        if not template_dir:
+            template_dir = pkg_resources.resource_filename("synapse", "res/templates",)
+
+        self.sso_template_dir = template_dir
+        self.sso_account_deactivated_template = self.read_file(
+            os.path.join(self.sso_template_dir, "sso_account_deactivated.html"),
+            "sso_account_deactivated_template",
+        )
+        self.sso_auth_success_template = self.read_file(
+            os.path.join(self.sso_template_dir, "sso_auth_success.html"),
+            "sso_auth_success_template",
+        )
+
+        self.sso_client_whitelist = sso_config.get("client_whitelist") or []
+
+        # Attempt to also whitelist the server's login fallback, since that fallback sets
+        # the redirect URL to itself (so it can process the login token then return
+        # gracefully to the client). This would make it pointless to ask the user for
+        # confirmation, since the URL the confirmation page would be showing wouldn't be
+        # the client's.
+        # public_baseurl is an optional setting, so we only add the fallback's URL to the
+        # list if it's provided (because we can't figure out what that URL is otherwise).
+        if self.public_baseurl:
+            login_fallback_url = self.public_baseurl + "_matrix/static/client/login"
+            self.sso_client_whitelist.append(login_fallback_url)
+
+    def generate_config_section(self, **kwargs):
+        return """\
+        # Additional settings to use with single-sign on systems such as OpenID Connect,
+        # SAML2 and CAS.
+        #
+        sso:
+            # A list of client URLs which are whitelisted so that the user does not
+            # have to confirm giving access to their account to the URL. Any client
+            # whose URL starts with an entry in the following list will not be subject
+            # to an additional confirmation step after the SSO login is completed.
+            #
+            # WARNING: An entry such as "https://my.client" is insecure, because it
+            # will also match "https://my.client.evil.site", exposing your users to
+            # phishing attacks from evil.site. To avoid this, include a slash after the
+            # hostname: "https://my.client/".
+            #
+            # If public_baseurl is set, then the login fallback page (used by clients
+            # that don't natively support the required login flows) is whitelisted in
+            # addition to any URLs in this list.
+            #
+            # By default, this list is empty.
+            #
+            #client_whitelist:
+            #  - https://riot.im/develop
+            #  - https://my.custom.client/
+
+            # Directory in which Synapse will try to find the template files below.
+            # If not set, default templates from within the Synapse package will be used.
+            #
+            # DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
+            # If you *do* uncomment it, you will need to make sure that all the templates
+            # below are in the directory.
+            #
+            # Synapse will look for the following templates in this directory:
+            #
+            # * HTML page for a confirmation step before redirecting back to the client
+            #   with the login token: 'sso_redirect_confirm.html'.
+            #
+            #   When rendering, this template is given three variables:
+            #     * redirect_url: the URL the user is about to be redirected to. Needs
+            #                     manual escaping (see
+            #                     https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+            #
+            #     * display_url: the same as `redirect_url`, but with the query
+            #                    parameters stripped. The intention is to have a
+            #                    human-readable URL to show to users, not to use it as
+            #                    the final address to redirect to. Needs manual escaping
+            #                    (see https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+            #
+            #     * server_name: the homeserver's name.
+            #
+            # * HTML page which notifies the user that they are authenticating to confirm
+            #   an operation on their account during the user interactive authentication
+            #   process: 'sso_auth_confirm.html'.
+            #
+            #   When rendering, this template is given the following variables:
+            #     * redirect_url: the URL the user is about to be redirected to. Needs
+            #                     manual escaping (see
+            #                     https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
+            #
+            #     * description: the operation which the user is being asked to confirm
+            #
+            # * HTML page shown after a successful user interactive authentication session:
+            #   'sso_auth_success.html'.
+            #
+            #   Note that this page must include the JavaScript which notifies of a successful authentication
+            #   (see https://matrix.org/docs/spec/client_server/r0.6.0#fallback).
+            #
+            #   This template has no additional variables.
+            #
+            # * HTML page shown during single sign-on if a deactivated user (according to Synapse's database)
+            #   attempts to login: 'sso_account_deactivated.html'.
+            #
+            #   This template has no additional variables.
+            #
+            # * HTML page to display to users if something goes wrong during the
+            #   OpenID Connect authentication process: 'sso_error.html'.
+            #
+            #   When rendering, this template is given two variables:
+            #     * error: the technical name of the error
+            #     * error_description: a human-readable message for the error
+            #
+            # You can see the default templates at:
+            # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
+            #
+            #template_dir: "res/templates"
+        """
diff --git a/synapse/config/stats.py b/synapse/config/stats.py
index b18ddbd1fa..62485189ea 100644
--- a/synapse/config/stats.py
+++ b/synapse/config/stats.py
@@ -25,6 +25,8 @@ class StatsConfig(Config):
     Configuration for the behaviour of synapse's stats engine
     """
 
+    section = "stats"
+
     def read_config(self, config, **kwargs):
         self.stats_enabled = True
         self.stats_bucket_size = 86400 * 1000
diff --git a/synapse/config/third_party_event_rules.py b/synapse/config/third_party_event_rules.py
index b3431441b9..10a99c792e 100644
--- a/synapse/config/third_party_event_rules.py
+++ b/synapse/config/third_party_event_rules.py
@@ -19,6 +19,8 @@ from ._base import Config
 
 
 class ThirdPartyRulesConfig(Config):
+    section = "thirdpartyrules"
+
     def read_config(self, config, **kwargs):
         self.third_party_event_rules = None
 
diff --git a/synapse/config/tls.py b/synapse/config/tls.py
index fc47ba3e9a..a65538562b 100644
--- a/synapse/config/tls.py
+++ b/synapse/config/tls.py
@@ -18,6 +18,7 @@ import os
 import warnings
 from datetime import datetime
 from hashlib import sha256
+from typing import List
 
 import six
 
@@ -31,9 +32,22 @@ from synapse.util import glob_to_regex
 
 logger = logging.getLogger(__name__)
 
+ACME_SUPPORT_ENABLED_WARN = """\
+This server uses Synapse's built-in ACME support. Note that ACME v1 has been
+deprecated by Let's Encrypt, and that Synapse doesn't currently support ACME v2,
+which means that this feature will not work with Synapse installs set up after
+November 2019, and that it may stop working on June 2020 for installs set up
+before that date.
+
+For more info and alternative solutions, see
+https://github.com/matrix-org/synapse/blob/master/docs/ACME.md#deprecation-of-acme-v1
+--------------------------------------------------------------------------------"""
+
 
 class TlsConfig(Config):
-    def read_config(self, config, config_dir_path, **kwargs):
+    section = "tls"
+
+    def read_config(self, config: dict, config_dir_path: str, **kwargs):
 
         acme_config = config.get("acme", None)
         if acme_config is None:
@@ -41,6 +55,9 @@ class TlsConfig(Config):
 
         self.acme_enabled = acme_config.get("enabled", False)
 
+        if self.acme_enabled:
+            logger.warning(ACME_SUPPORT_ENABLED_WARN)
+
         # hyperlink complains on py2 if this is not a Unicode
         self.acme_url = six.text_type(
             acme_config.get("url", "https://acme-v01.api.letsencrypt.org/directory")
@@ -57,7 +74,7 @@ class TlsConfig(Config):
         self.tls_certificate_file = self.abspath(config.get("tls_certificate_path"))
         self.tls_private_key_file = self.abspath(config.get("tls_private_key_path"))
 
-        if self.has_tls_listener():
+        if self.root.server.has_tls_listener():
             if not self.tls_certificate_file:
                 raise ConfigError(
                     "tls_certificate_path must be specified if TLS-enabled listeners are "
@@ -106,9 +123,11 @@ class TlsConfig(Config):
         fed_whitelist_entries = config.get(
             "federation_certificate_verification_whitelist", []
         )
+        if fed_whitelist_entries is None:
+            fed_whitelist_entries = []
 
         # Support globs (*) in whitelist values
-        self.federation_certificate_verification_whitelist = []
+        self.federation_certificate_verification_whitelist = []  # type: List[str]
         for entry in fed_whitelist_entries:
             try:
                 entry_regex = glob_to_regex(entry.encode("ascii").decode("ascii"))
@@ -241,7 +260,7 @@ class TlsConfig(Config):
                 crypto.FILETYPE_ASN1, self.tls_certificate
             )
             sha256_fingerprint = encode_base64(sha256(x509_certificate_bytes).digest())
-            sha256_fingerprints = set(f["sha256"] for f in self.tls_fingerprints)
+            sha256_fingerprints = {f["sha256"] for f in self.tls_fingerprints}
             if sha256_fingerprint not in sha256_fingerprints:
                 self.tls_fingerprints.append({"sha256": sha256_fingerprint})
 
@@ -286,6 +305,9 @@ class TlsConfig(Config):
             "http://localhost:8009/.well-known/acme-challenge"
         )
 
+        # flake8 doesn't recognise that variables are used in the below string
+        _ = tls_enabled, proxypassline, acme_enabled, default_acme_account_file
+
         return (
             """\
         ## TLS ##
@@ -354,6 +376,11 @@ class TlsConfig(Config):
         # ACME support: This will configure Synapse to request a valid TLS certificate
         # for your configured `server_name` via Let's Encrypt.
         #
+        # Note that ACME v1 is now deprecated, and Synapse currently doesn't support
+        # ACME v2. This means that this feature currently won't work with installs set
+        # up after November 2019. For more info, and alternative solutions, see
+        # https://github.com/matrix-org/synapse/blob/master/docs/ACME.md#deprecation-of-acme-v1
+        #
         # Note that provisioning a certificate in this way requires port 80 to be
         # routed to Synapse so that it can complete the http-01 ACME challenge.
         # By default, if you enable ACME support, Synapse will attempt to listen on
@@ -448,7 +475,11 @@ class TlsConfig(Config):
         #tls_fingerprints: [{"sha256": "<base64_encoded_sha256_fingerprint>"}]
 
         """
-            % locals()
+            # Lowercase the string representation of boolean values
+            % {
+                x[0]: str(x[1]).lower() if isinstance(x[1], bool) else x[1]
+                for x in locals().items()
+            }
         )
 
     def read_tls_certificate(self):
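
The whitelist handling in the hunks above treats an explicit `null` for `federation_certificate_verification_whitelist` the same as an absent key and compiles each glob entry to a regex via Synapse's `glob_to_regex` helper. The sketch below mirrors that logic with the standard library's fnmatch so it runs without Synapse installed; it is an approximation of the behaviour, not the code from tls.py.

import fnmatch
import re


def compile_whitelist(entries):
    # As in the hunk above: an explicit `null` in the YAML is treated the
    # same as leaving the option out entirely.
    entries = entries or []
    compiled = []
    for entry in entries:
        # Synapse uses its own glob_to_regex(); fnmatch.translate is only a
        # stand-in here so the sketch is self-contained.
        compiled.append(re.compile(fnmatch.translate(entry)))
    return compiled


whitelist = compile_whitelist(["*.matrix.org", "example.com"])
print(any(r.match("federation.matrix.org") for r in whitelist))  # True
print(any(r.match("evil.example.net") for r in whitelist))       # False
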
diff --git a/synapse/config/tracer.py b/synapse/config/tracer.py
index 85d99a3166..8be1346113 100644
--- a/synapse/config/tracer.py
+++ b/synapse/config/tracer.py
@@ -19,6 +19,8 @@ from ._base import Config, ConfigError
 
 
 class TracerConfig(Config):
+    section = "tracing"
+
     def read_config(self, config, **kwargs):
         opentracing_config = config.get("opentracing")
         if opentracing_config is None:
diff --git a/synapse/config/user_directory.py b/synapse/config/user_directory.py
index f6313e17d4..c8d19c5d6b 100644
--- a/synapse/config/user_directory.py
+++ b/synapse/config/user_directory.py
@@ -21,6 +21,8 @@ class UserDirectoryConfig(Config):
     Configuration for the behaviour of the /user_directory API
     """
 
+    section = "userdirectory"
+
     def read_config(self, config, **kwargs):
         self.user_directory_search_enabled = True
         self.user_directory_search_all_users = False
diff --git a/synapse/config/voip.py b/synapse/config/voip.py
index 2ca0e1cf70..b313bff140 100644
--- a/synapse/config/voip.py
+++ b/synapse/config/voip.py
@@ -16,6 +16,8 @@ from ._base import Config
 
 
 class VoipConfig(Config):
+    section = "voip"
+
     def read_config(self, config, **kwargs):
         self.turn_uris = config.get("turn_uris", [])
         self.turn_shared_secret = config.get("turn_shared_secret")
@@ -54,5 +56,5 @@ class VoipConfig(Config):
         # connect to arbitrary endpoints without having first signed up for a
         # valid account (e.g. by passing a CAPTCHA).
         #
-        #turn_allow_guests: True
+        #turn_allow_guests: true
         """
diff --git a/synapse/config/workers.py b/synapse/config/workers.py
index bc0fc165e3..ed06b91a54 100644
--- a/synapse/config/workers.py
+++ b/synapse/config/workers.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2016 matrix.org
+# Copyright 2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,7 +13,29 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from ._base import Config
+import attr
+
+from ._base import Config, ConfigError
+
+
+@attr.s
+class InstanceLocationConfig:
+    """The host and port to talk to an instance via HTTP replication.
+    """
+
+    host = attr.ib(type=str)
+    port = attr.ib(type=int)
+
+
+@attr.s
+class WriterLocations:
+    """Specifies the instances that write various streams.
+
+    Attributes:
+        events: The instance that writes to the event and backfill streams.
+    """
+
+    events = attr.ib(default="master", type=str)
 
 
 class WorkerConfig(Config):
@@ -21,6 +43,8 @@ class WorkerConfig(Config):
     They have their own pid_file and listener configuration. They use the
     replication_url to talk to the main synapse process."""
 
+    section = "worker"
+
     def read_config(self, config, **kwargs):
         self.worker_app = config.get("worker_app")
 
@@ -69,6 +93,27 @@ class WorkerConfig(Config):
                 elif not bind_addresses:
                     bind_addresses.append("")
 
+        # A map from instance name to host/port of their HTTP replication endpoint.
+        instance_map = config.get("instance_map") or {}
+        self.instance_map = {
+            name: InstanceLocationConfig(**c) for name, c in instance_map.items()
+        }
+
+        # Map from type of streams to source, c.f. WriterLocations.
+        writers = config.get("stream_writers") or {}
+        self.writers = WriterLocations(**writers)
+
+        # Check that the configured writer for events also appears in
+        # `instance_map`.
+        if (
+            self.writers.events != "master"
+            and self.writers.events not in self.instance_map
+        ):
+            raise ConfigError(
+                "Instance %r is configured to write events but does not appear in `instance_map` config."
+                % (self.writers.events,)
+            )
+
     def read_arguments(self, args):
         # We support a bunch of command line arguments that override options in
         # the config. A lot of these options have a worker_* prefix when running
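
The new `instance_map` and `stream_writers` options read by WorkerConfig fit together as in the sketch below: `instance_map` names the HTTP replication endpoint of each worker, `stream_writers.events` picks which instance persists events, and the final check mirrors the ConfigError raised in the hunk above. Only the option keys come from the diff; the instance name `event_persister1` and the `parse_workers` helper are illustrative.

import attr
import yaml


@attr.s
class InstanceLocationConfig:
    # Copied from the hunk above: host/port of an instance's replication endpoint.
    host = attr.ib(type=str)
    port = attr.ib(type=int)


@attr.s
class WriterLocations:
    events = attr.ib(default="master", type=str)


EXAMPLE_YAML = """
instance_map:
  event_persister1:      # hypothetical worker name
    host: 127.0.0.1
    port: 9091
stream_writers:
  events: event_persister1
"""


def parse_workers(config):
    instance_map = {
        name: InstanceLocationConfig(**c)
        for name, c in (config.get("instance_map") or {}).items()
    }
    writers = WriterLocations(**(config.get("stream_writers") or {}))
    # Same sanity check as WorkerConfig.read_config above: a non-default
    # events writer must have a known replication endpoint.
    if writers.events != "master" and writers.events not in instance_map:
        raise ValueError(
            "Instance %r writes events but is missing from instance_map"
            % (writers.events,)
        )
    return instance_map, writers


instances, writers = parse_workers(yaml.safe_load(EXAMPLE_YAML))
print(writers.events, instances[writers.events].port)  # event_persister1 9091
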