-rw-r--r--  changelog.d/3484.misc | 1
-rw-r--r--  changelog.d/5039.bugfix | 1
-rw-r--r--  changelog.d/5043.feature | 1
-rw-r--r--  changelog.d/5171.misc | 1
-rw-r--r--  changelog.d/5174.bugfix | 1
-rw-r--r--  changelog.d/5181.feature | 1
-rw-r--r--  changelog.d/5183.misc | 1
-rw-r--r--  changelog.d/5184.misc | 1
-rw-r--r--  changelog.d/5185.misc | 1
-rw-r--r--  changelog.d/5187.bugfix | 1
-rw-r--r--  changelog.d/5190.feature | 1
-rw-r--r--  changelog.d/5196.feature | 1
-rw-r--r--  docs/sample_config.yaml | 77
-rw-r--r--  synapse/api/urls.py | 3
-rw-r--r--  synapse/config/ratelimiting.py | 115
-rw-r--r--  synapse/config/server.py | 49
-rw-r--r--  synapse/events/utils.py | 44
-rw-r--r--  synapse/federation/transport/server.py | 6
-rw-r--r--  synapse/handlers/_base.py | 4
-rw-r--r--  synapse/handlers/events.py | 8
-rw-r--r--  synapse/handlers/initial_sync.py | 44
-rw-r--r--  synapse/handlers/message.py | 7
-rw-r--r--  synapse/handlers/pagination.py | 22
-rw-r--r--  synapse/handlers/room_member.py | 9
-rw-r--r--  synapse/handlers/search.py | 42
-rw-r--r--  synapse/handlers/sync.py | 44
-rw-r--r--  synapse/http/client.py | 6
-rw-r--r--  synapse/http/matrixfederationclient.py | 48
-rw-r--r--  synapse/python_dependencies.py | 2
-rw-r--r--  synapse/rest/client/v1/base.py | 8
-rw-r--r--  synapse/rest/client/v1/events.py | 5
-rw-r--r--  synapse/rest/client/v1/room.py | 29
-rw-r--r--  synapse/rest/client/v2_alpha/_base.py | 9
-rw-r--r--  synapse/rest/client/v2_alpha/auth.py | 18
-rw-r--r--  synapse/rest/client/v2_alpha/devices.py | 6
-rw-r--r--  synapse/rest/client/v2_alpha/notifications.py | 10
-rw-r--r--  synapse/rest/client/v2_alpha/register.py | 85
-rw-r--r--  synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py | 1
-rw-r--r--  synapse/rest/client/v2_alpha/sendtodevice.py | 1
-rw-r--r--  synapse/rest/client/v2_alpha/sync.py | 59
-rw-r--r--  synapse/rest/media/v1/media_repository.py | 9
-rw-r--r--  synapse/rest/media/v1/storage_provider.py | 1
-rw-r--r--  synapse/rest/media/v1/thumbnailer.py | 35
-rw-r--r--  synapse/server.py | 5
-rw-r--r--  synapse/storage/appservice.py | 4
-rw-r--r--  synapse/storage/event_federation.py | 6
-rw-r--r--  synapse/storage/events_worker.py | 50
-rw-r--r--  synapse/storage/search.py | 4
-rw-r--r--  synapse/storage/stream.py | 18
-rw-r--r--  synapse/util/async_helpers.py | 19
-rw-r--r--  synapse/util/distributor.py | 1
-rw-r--r--  synapse/util/ratelimitutils.py | 47
-rw-r--r--  tests/handlers/test_register.py | 8
-rw-r--r--  tests/handlers/test_user_directory.py | 4
-rw-r--r--  tests/http/federation/test_matrix_federation_agent.py | 4
-rw-r--r--  tests/http/test_fedclient.py | 71
-rw-r--r--  tests/push/test_email.py | 36
-rw-r--r--  tests/push/test_http.py | 2
-rw-r--r--  tests/rest/client/test_consent.py | 11
-rw-r--r--  tests/rest/client/test_identity.py | 2
-rw-r--r--  tests/rest/client/v1/test_directory.py | 2
-rw-r--r--  tests/rest/client/v1/test_events.py | 6
-rw-r--r--  tests/rest/client/v1/test_profile.py | 2
-rw-r--r--  tests/rest/client/v1/test_rooms.py | 72
-rw-r--r--  tests/rest/client/v2_alpha/test_auth.py | 15
-rw-r--r--  tests/rest/client/v2_alpha/test_register.py | 51
-rw-r--r--  tests/rest/media/v1/test_media_storage.py | 17
-rw-r--r--  tests/rest/media/v1/test_url_preview.py | 40
-rw-r--r--  tests/server.py | 61
-rw-r--r--  tests/server_notices/test_consent.py | 32
-rw-r--r--  tests/server_notices/test_resource_limits_server_notices.py | 9
-rw-r--r--  tests/storage/test_appservice.py | 2
-rw-r--r--  tests/test_state.py | 2
-rw-r--r--  tests/test_terms_auth.py | 2
-rw-r--r--  tests/unittest.py | 12
-rw-r--r--  tests/utils.py | 141
76 files changed, 1031 insertions, 545 deletions
diff --git a/changelog.d/3484.misc b/changelog.d/3484.misc
new file mode 100644
index 0000000000..3645849844
--- /dev/null
+++ b/changelog.d/3484.misc
@@ -0,0 +1 @@
+Make /sync attempt to return device updates for both joined and invited users. Note that this doesn't currently work correctly due to other bugs.
diff --git a/changelog.d/5039.bugfix b/changelog.d/5039.bugfix
new file mode 100644
index 0000000000..212cff7ae8
--- /dev/null
+++ b/changelog.d/5039.bugfix
@@ -0,0 +1 @@
+Fix image orientation when generating thumbnails (needs pillow>=4.3.0). Contributed by Pau Rodriguez-Estivill.
diff --git a/changelog.d/5043.feature b/changelog.d/5043.feature
new file mode 100644
index 0000000000..0f1e0ee30e
--- /dev/null
+++ b/changelog.d/5043.feature
@@ -0,0 +1 @@
+Add ability to blacklist IP ranges for the federation client.
diff --git a/changelog.d/5171.misc b/changelog.d/5171.misc
new file mode 100644
index 0000000000..d148b03b51
--- /dev/null
+++ b/changelog.d/5171.misc
@@ -0,0 +1 @@
+Update tests to consistently be configured via the same code that is used when loading from configuration files.
diff --git a/changelog.d/5174.bugfix b/changelog.d/5174.bugfix
new file mode 100644
index 0000000000..0f26d46b2c
--- /dev/null
+++ b/changelog.d/5174.bugfix
@@ -0,0 +1 @@
+Re-order stages in registration flows such that msisdn and email verification are done last.
diff --git a/changelog.d/5181.feature b/changelog.d/5181.feature
new file mode 100644
index 0000000000..5ce13aa2ea
--- /dev/null
+++ b/changelog.d/5181.feature
@@ -0,0 +1 @@
+Ratelimiting configuration for clients sending messages and the federation server has been altered to match login ratelimiting. The old configuration names will continue working. Check the sample config for details of the new names.
diff --git a/changelog.d/5183.misc b/changelog.d/5183.misc
new file mode 100644
index 0000000000..a8970f29eb
--- /dev/null
+++ b/changelog.d/5183.misc
@@ -0,0 +1 @@
+Allow client event serialization to be async.
diff --git a/changelog.d/5184.misc b/changelog.d/5184.misc
new file mode 100644
index 0000000000..1588bdef6c
--- /dev/null
+++ b/changelog.d/5184.misc
@@ -0,0 +1 @@
+Expose DataStore._get_events as get_events_as_list.
diff --git a/changelog.d/5185.misc b/changelog.d/5185.misc
new file mode 100644
index 0000000000..d148b03b51
--- /dev/null
+++ b/changelog.d/5185.misc
@@ -0,0 +1 @@
+Update tests to consistently be configured via the same code that is used when loading from configuration files.
diff --git a/changelog.d/5187.bugfix b/changelog.d/5187.bugfix
new file mode 100644
index 0000000000..df176cf5b2
--- /dev/null
+++ b/changelog.d/5187.bugfix
@@ -0,0 +1 @@
+Fix a bug where the register endpoint would fail with M_THREEPID_IN_USE instead of returning an account previously registered in the same session.
diff --git a/changelog.d/5190.feature b/changelog.d/5190.feature
new file mode 100644
index 0000000000..34904aa7a8
--- /dev/null
+++ b/changelog.d/5190.feature
@@ -0,0 +1 @@
+Drop support for the undocumented /_matrix/client/v2_alpha API prefix.
diff --git a/changelog.d/5196.feature b/changelog.d/5196.feature
new file mode 100644
index 0000000000..1ffb928f62
--- /dev/null
+++ b/changelog.d/5196.feature
@@ -0,0 +1 @@
+Add an option to disable per-room profiles.
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml
index bdfc34c6bd..d218aefee5 100644
--- a/docs/sample_config.yaml
+++ b/docs/sample_config.yaml
@@ -115,6 +115,24 @@ pid_file: DATADIR/homeserver.pid
 #  - nyc.example.com
 #  - syd.example.com
 
+# Prevent federation requests from being sent to the following
+# blacklisted IP address CIDR ranges. If this option is not specified, or
+# specified with an empty list, no IP range blacklist will be enforced.
+#
+# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
+# listed here, since they correspond to unroutable addresses.)
+#
+federation_ip_range_blacklist:
+  - '127.0.0.0/8'
+  - '10.0.0.0/8'
+  - '172.16.0.0/12'
+  - '192.168.0.0/16'
+  - '100.64.0.0/10'
+  - '169.254.0.0/16'
+  - '::1/128'
+  - 'fe80::/64'
+  - 'fc00::/7'
+
 # List of ports that Synapse should listen on, their purpose and their
 # configuration.
 #
@@ -258,6 +276,12 @@ listeners:
 #
 #require_membership_for_aliases: false
 
+# Whether to allow per-room membership profiles through the sending of membership
+# events with profile information that differs from the target's global profile.
+# Defaults to 'true'.
+#
+#allow_per_room_profiles: false
+
 
 ## TLS ##
 
@@ -428,21 +452,15 @@ log_config: "CONFDIR/SERVERNAME.log.config"
 
 ## Ratelimiting ##
 
-# Number of messages a client can send per second
-#
-#rc_messages_per_second: 0.2
-
-# Number of message a client can send before being throttled
-#
-#rc_message_burst_count: 10.0
-
-# Ratelimiting settings for registration and login.
+# Ratelimiting settings for client actions (registration, login, messaging).
 #
 # Each ratelimiting configuration is made of two parameters:
 #   - per_second: number of requests a client can send per second.
 #   - burst_count: number of requests a client can send before being throttled.
 #
 # Synapse currently uses the following configurations:
+#   - one for messages that ratelimits sending based on the account the client
+#     is using
 #   - one for registration that ratelimits registration requests based on the
 #     client's IP address.
 #   - one for login that ratelimits login requests based on the client's IP
@@ -455,6 +473,10 @@ log_config: "CONFDIR/SERVERNAME.log.config"
 #
 # The defaults are as shown below.
 #
+#rc_message:
+#  per_second: 0.2
+#  burst_count: 10
+#
 #rc_registration:
 #  per_second: 0.17
 #  burst_count: 3
@@ -470,29 +492,28 @@ log_config: "CONFDIR/SERVERNAME.log.config"
 #    per_second: 0.17
 #    burst_count: 3
 
-# The federation window size in milliseconds
-#
-#federation_rc_window_size: 1000
 
-# The number of federation requests from a single server in a window
-# before the server will delay processing the request.
+# Ratelimiting settings for incoming federation
 #
-#federation_rc_sleep_limit: 10
-
-# The duration in milliseconds to delay processing events from
-# remote servers by if they go over the sleep limit.
+# The rc_federation configuration is made up of the following settings:
+#   - window_size: window size in milliseconds
+#   - sleep_limit: number of federation requests from a single server in
+#     a window before the server will delay processing the request.
+#   - sleep_delay: duration in milliseconds to delay processing events
+#     from remote servers by if they go over the sleep limit.
+#   - reject_limit: maximum number of concurrent federation requests
+#     allowed from a single server
+#   - concurrent: number of federation requests to concurrently process
+#     from a single server
 #
-#federation_rc_sleep_delay: 500
-
-# The maximum number of concurrent federation requests allowed
-# from a single server
-#
-#federation_rc_reject_limit: 50
-
-# The number of federation requests to concurrently process from a
-# single server
+# The defaults are as shown below.
 #
-#federation_rc_concurrent: 3
+#rc_federation:
+#  window_size: 1000
+#  sleep_limit: 10
+#  sleep_delay: 500
+#  reject_limit: 50
+#  concurrent: 3
 
 # Target outgoing federation transaction frequency for sending read-receipts,
 # per-room.
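
The blacklist above is turned into a netaddr IPSet when the config is loaded (see the
synapse/config/server.py hunk below), with 0.0.0.0 and :: always added. As a rough
illustration of the membership check this enables, here is a minimal sketch using netaddr
directly; the addresses being tested are made up for the example.

    from netaddr import IPAddress, IPSet

    # CIDR ranges from the sample config above, plus the always-blacklisted
    # unroutable addresses.
    blacklist = IPSet([
        '127.0.0.0/8', '10.0.0.0/8', '172.16.0.0/12', '192.168.0.0/16',
        '100.64.0.0/10', '169.254.0.0/16', '::1/128', 'fe80::/64', 'fc00::/7',
    ])
    blacklist.update(['0.0.0.0', '::'])

    # A federation request that resolves to a private address would be refused,
    # while a publicly routable address is allowed through.
    print(IPAddress('10.1.2.3') in blacklist)       # True  -> blocked
    print(IPAddress('198.51.100.17') in blacklist)  # False -> allowed
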
diff --git a/synapse/api/urls.py b/synapse/api/urls.py
index cb71d80875..3c6bddff7a 100644
--- a/synapse/api/urls.py
+++ b/synapse/api/urls.py
@@ -22,8 +22,7 @@ from six.moves.urllib.parse import urlencode
 
 from synapse.config import ConfigError
 
-CLIENT_PREFIX = "/_matrix/client/api/v1"
-CLIENT_V2_ALPHA_PREFIX = "/_matrix/client/v2_alpha"
+CLIENT_API_PREFIX = "/_matrix/client"
 FEDERATION_PREFIX = "/_matrix/federation"
 FEDERATION_V1_PREFIX = FEDERATION_PREFIX + "/v1"
 FEDERATION_V2_PREFIX = FEDERATION_PREFIX + "/v2"
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index 5a68399e63..5a9adac480 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -16,16 +16,56 @@ from ._base import Config
 
 
 class RateLimitConfig(object):
-    def __init__(self, config):
-        self.per_second = config.get("per_second", 0.17)
-        self.burst_count = config.get("burst_count", 3.0)
+    def __init__(self, config, defaults={"per_second": 0.17, "burst_count": 3.0}):
+        self.per_second = config.get("per_second", defaults["per_second"])
+        self.burst_count = config.get("burst_count", defaults["burst_count"])
 
 
-class RatelimitConfig(Config):
+class FederationRateLimitConfig(object):
+    _items_and_default = {
+        "window_size": 10000,
+        "sleep_limit": 10,
+        "sleep_delay": 500,
+        "reject_limit": 50,
+        "concurrent": 3,
+    }
+
+    def __init__(self, **kwargs):
+        for i in self._items_and_default.keys():
+            setattr(self, i, kwargs.get(i) or self._items_and_default[i])
+
 
+class RatelimitConfig(Config):
     def read_config(self, config):
-        self.rc_messages_per_second = config.get("rc_messages_per_second", 0.2)
-        self.rc_message_burst_count = config.get("rc_message_burst_count", 10.0)
+
+        # Load the new-style messages config if it exists. Otherwise fall back
+        # to the old method.
+        if "rc_message" in config:
+            self.rc_message = RateLimitConfig(
+                config["rc_message"], defaults={"per_second": 0.2, "burst_count": 10.0}
+            )
+        else:
+            self.rc_message = RateLimitConfig(
+                {
+                    "per_second": config.get("rc_messages_per_second", 0.2),
+                    "burst_count": config.get("rc_message_burst_count", 10.0),
+                }
+            )
+
+        # Load the new-style federation config, if it exists. Otherwise, fall
+        # back to the old method.
+        if "federation_rc" in config:
+            self.rc_federation = FederationRateLimitConfig(**config["rc_federation"])
+        else:
+            self.rc_federation = FederationRateLimitConfig(
+                **{
+                    "window_size": config.get("federation_rc_window_size"),
+                    "sleep_limit": config.get("federation_rc_sleep_limit"),
+                    "sleep_delay": config.get("federation_rc_sleep_delay"),
+                    "reject_limit": config.get("federation_rc_reject_limit"),
+                    "concurrent": config.get("federation_rc_concurrent"),
+                }
+            )
 
         self.rc_registration = RateLimitConfig(config.get("rc_registration", {}))
 
@@ -33,38 +73,26 @@ class RatelimitConfig(Config):
         self.rc_login_address = RateLimitConfig(rc_login_config.get("address", {}))
         self.rc_login_account = RateLimitConfig(rc_login_config.get("account", {}))
         self.rc_login_failed_attempts = RateLimitConfig(
-            rc_login_config.get("failed_attempts", {}),
+            rc_login_config.get("failed_attempts", {})
         )
 
-        self.federation_rc_window_size = config.get("federation_rc_window_size", 1000)
-        self.federation_rc_sleep_limit = config.get("federation_rc_sleep_limit", 10)
-        self.federation_rc_sleep_delay = config.get("federation_rc_sleep_delay", 500)
-        self.federation_rc_reject_limit = config.get("federation_rc_reject_limit", 50)
-        self.federation_rc_concurrent = config.get("federation_rc_concurrent", 3)
-
         self.federation_rr_transactions_per_room_per_second = config.get(
-            "federation_rr_transactions_per_room_per_second", 50,
+            "federation_rr_transactions_per_room_per_second", 50
         )
 
     def default_config(self, **kwargs):
         return """\
         ## Ratelimiting ##
 
-        # Number of messages a client can send per second
-        #
-        #rc_messages_per_second: 0.2
-
-        # Number of message a client can send before being throttled
-        #
-        #rc_message_burst_count: 10.0
-
-        # Ratelimiting settings for registration and login.
+        # Ratelimiting settings for client actions (registration, login, messaging).
         #
         # Each ratelimiting configuration is made of two parameters:
         #   - per_second: number of requests a client can send per second.
         #   - burst_count: number of requests a client can send before being throttled.
         #
         # Synapse currently uses the following configurations:
+        #   - one for messages that ratelimits sending based on the account the client
+        #     is using
         #   - one for registration that ratelimits registration requests based on the
         #     client's IP address.
         #   - one for login that ratelimits login requests based on the client's IP
@@ -77,6 +105,10 @@ class RatelimitConfig(Config):
         #
         # The defaults are as shown below.
         #
+        #rc_message:
+        #  per_second: 0.2
+        #  burst_count: 10
+        #
         #rc_registration:
         #  per_second: 0.17
         #  burst_count: 3
@@ -92,29 +124,28 @@ class RatelimitConfig(Config):
         #    per_second: 0.17
         #    burst_count: 3
 
-        # The federation window size in milliseconds
-        #
-        #federation_rc_window_size: 1000
-
-        # The number of federation requests from a single server in a window
-        # before the server will delay processing the request.
-        #
-        #federation_rc_sleep_limit: 10
 
-        # The duration in milliseconds to delay processing events from
-        # remote servers by if they go over the sleep limit.
+        # Ratelimiting settings for incoming federation
         #
-        #federation_rc_sleep_delay: 500
-
-        # The maximum number of concurrent federation requests allowed
-        # from a single server
+        # The rc_federation configuration is made up of the following settings:
+        #   - window_size: window size in milliseconds
+        #   - sleep_limit: number of federation requests from a single server in
+        #     a window before the server will delay processing the request.
+        #   - sleep_delay: duration in milliseconds to delay processing events
+        #     from remote servers by if they go over the sleep limit.
+        #   - reject_limit: maximum number of concurrent federation requests
+        #     allowed from a single server
+        #   - concurrent: number of federation requests to concurrently process
+        #     from a single server
         #
-        #federation_rc_reject_limit: 50
-
-        # The number of federation requests to concurrently process from a
-        # single server
+        # The defaults are as shown below.
         #
-        #federation_rc_concurrent: 3
+        #rc_federation:
+        #  window_size: 1000
+        #  sleep_limit: 10
+        #  sleep_delay: 500
+        #  reject_limit: 50
+        #  concurrent: 3
 
         # Target outgoing federation transaction frequency for sending read-receipts,
         # per-room.
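
Both the old flat options and the new nested blocks end up on the same attributes, so the
rest of the codebase reads hs.config.rc_message and hs.config.rc_federation regardless of
which style the admin supplied. A minimal sketch of that equivalence, using the classes
from this file (it assumes this commit's synapse.config.ratelimiting module is importable;
the YAML has already been parsed into plain dicts at this point):

    from synapse.config.ratelimiting import FederationRateLimitConfig, RateLimitConfig

    # New style: the rc_federation block is passed straight through as kwargs.
    new_style = FederationRateLimitConfig(window_size=1000, sleep_limit=10)

    # Old style: the legacy federation_rc_* keys are mapped onto the same kwargs,
    # and anything unset falls back to the class defaults.
    legacy = {"federation_rc_window_size": 1000, "federation_rc_sleep_limit": 10}
    old_style = FederationRateLimitConfig(
        window_size=legacy.get("federation_rc_window_size"),
        sleep_limit=legacy.get("federation_rc_sleep_limit"),
    )

    assert new_style.window_size == old_style.window_size == 1000
    assert new_style.reject_limit == old_style.reject_limit == 50  # class default

    # rc_message works the same way, with message-specific defaults.
    rc_message = RateLimitConfig({}, defaults={"per_second": 0.2, "burst_count": 10.0})
    assert rc_message.per_second == 0.2
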
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 8dce75c56a..1b8968608e 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 # Copyright 2017-2018 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,6 +18,8 @@
 import logging
 import os.path
 
+from netaddr import IPSet
+
 from synapse.http.endpoint import parse_and_validate_server_name
 from synapse.python_dependencies import DependencyException, check_requirements
 
@@ -137,6 +140,24 @@ class ServerConfig(Config):
             for domain in federation_domain_whitelist:
                 self.federation_domain_whitelist[domain] = True
 
+        self.federation_ip_range_blacklist = config.get(
+            "federation_ip_range_blacklist", [],
+        )
+
+        # Attempt to create an IPSet from the given ranges
+        try:
+            self.federation_ip_range_blacklist = IPSet(
+                self.federation_ip_range_blacklist
+            )
+
+            # Always blacklist 0.0.0.0, ::
+            self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
+        except Exception as e:
+            raise ConfigError(
+                "Invalid range(s) provided in "
+                "federation_ip_range_blacklist: %s" % e
+            )
+
         if self.public_baseurl is not None:
             if self.public_baseurl[-1] != '/':
                 self.public_baseurl += '/'
@@ -153,6 +174,10 @@ class ServerConfig(Config):
             "require_membership_for_aliases", True,
         )
 
+        # Whether to allow per-room membership profiles through the sending of membership
+        # events with profile information that differs from the target's global profile.
+        self.allow_per_room_profiles = config.get("allow_per_room_profiles", True)
+
         self.listeners = []
         for listener in config.get("listeners", []):
             if not isinstance(listener.get("port", None), int):
@@ -386,6 +411,24 @@ class ServerConfig(Config):
         #  - nyc.example.com
         #  - syd.example.com
 
+        # Prevent federation requests from being sent to the following
+        # blacklisted IP address CIDR ranges. If this option is not specified, or
+        # specified with an empty list, no IP range blacklist will be enforced.
+        #
+        # (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
+        # listed here, since they correspond to unroutable addresses.)
+        #
+        federation_ip_range_blacklist:
+          - '127.0.0.0/8'
+          - '10.0.0.0/8'
+          - '172.16.0.0/12'
+          - '192.168.0.0/16'
+          - '100.64.0.0/10'
+          - '169.254.0.0/16'
+          - '::1/128'
+          - 'fe80::/64'
+          - 'fc00::/7'
+
         # List of ports that Synapse should listen on, their purpose and their
         # configuration.
         #
@@ -528,6 +571,12 @@ class ServerConfig(Config):
         # Defaults to 'true'.
         #
         #require_membership_for_aliases: false
+
+        # Whether to allow per-room membership profiles through the sending of membership
+        # events with profile information that differs from the target's global profile.
+        # Defaults to 'true'.
+        #
+        #allow_per_room_profiles: false
         """ % locals()
 
     def read_arguments(self, args):
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index 07fccdd8f9..a5454556cc 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -19,7 +19,10 @@ from six import string_types
 
 from frozendict import frozendict
 
+from twisted.internet import defer
+
 from synapse.api.constants import EventTypes
+from synapse.util.async_helpers import yieldable_gather_results
 
 from . import EventBase
 
@@ -311,3 +314,44 @@ def serialize_event(e, time_now_ms, as_client_event=True,
         d = only_fields(d, only_event_fields)
 
     return d
+
+
+class EventClientSerializer(object):
+    """Serializes events that are to be sent to clients.
+
+    This is used for bundling extra information with any events to be sent to
+    clients.
+    """
+
+    def __init__(self, hs):
+        pass
+
+    def serialize_event(self, event, time_now, **kwargs):
+        """Serializes a single event.
+
+        Args:
+            event (EventBase)
+            time_now (int): The current time in milliseconds
+            **kwargs: Arguments to pass to `serialize_event`
+
+        Returns:
+            Deferred[dict]: The serialized event
+        """
+        event = serialize_event(event, time_now, **kwargs)
+        return defer.succeed(event)
+
+    def serialize_events(self, events, time_now, **kwargs):
+        """Serializes multiple events.
+
+        Args:
+            events (iter[EventBase])
+            time_now (int): The current time in milliseconds
+            **kwargs: Arguments to pass to `serialize_event`
+
+        Returns:
+            Deferred[list[dict]]: The list of serialized events
+        """
+        return yieldable_gather_results(
+            self.serialize_event, events,
+            time_now=time_now, **kwargs
+        )
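
The handler changes that follow all switch from calling serialize_event() directly to
yielding on this serializer, which is what makes the serialization path async-capable
(e.g. so later work can bundle in extra data that needs database lookups). A minimal
sketch of the calling pattern as it appears in the handlers below; the homeserver object
and the events are assumed to come from elsewhere:

    from twisted.internet import defer

    class ExampleHandler(object):
        def __init__(self, hs):
            self.clock = hs.get_clock()
            # Obtained from the HomeServer, as in the handlers below.
            self._event_serializer = hs.get_event_client_serializer()

        @defer.inlineCallbacks
        def get_serialized_events(self, events):
            time_now = self.clock.time_msec()
            # serialize_events returns a Deferred, so it is yielded on rather
            # than used in a plain list comprehension as before.
            chunk = yield self._event_serializer.serialize_events(
                events, time_now, as_client_event=True,
            )
            defer.returnValue(chunk)
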
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 9030eb18c5..385eda2dca 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -63,11 +63,7 @@ class TransportLayerServer(JsonResource):
         self.authenticator = Authenticator(hs)
         self.ratelimiter = FederationRateLimiter(
             self.clock,
-            window_size=hs.config.federation_rc_window_size,
-            sleep_limit=hs.config.federation_rc_sleep_limit,
-            sleep_msec=hs.config.federation_rc_sleep_delay,
-            reject_limit=hs.config.federation_rc_reject_limit,
-            concurrent_requests=hs.config.federation_rc_concurrent,
+            config=hs.config.rc_federation,
         )
 
         self.register_servlets()
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index ac09d03ba9..dca337ec61 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -90,8 +90,8 @@ class BaseHandler(object):
             messages_per_second = override.messages_per_second
             burst_count = override.burst_count
         else:
-            messages_per_second = self.hs.config.rc_messages_per_second
-            burst_count = self.hs.config.rc_message_burst_count
+            messages_per_second = self.hs.config.rc_message.per_second
+            burst_count = self.hs.config.rc_message.burst_count
 
         allowed, time_allowed = self.ratelimiter.can_do_action(
             user_id, time_now,
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index 1b4d8c74ae..6003ad9cca 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -21,7 +21,6 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError, SynapseError
 from synapse.events import EventBase
-from synapse.events.utils import serialize_event
 from synapse.types import UserID
 from synapse.util.logutils import log_function
 from synapse.visibility import filter_events_for_client
@@ -50,6 +49,7 @@ class EventStreamHandler(BaseHandler):
         self.notifier = hs.get_notifier()
         self.state = hs.get_state_handler()
         self._server_notices_sender = hs.get_server_notices_sender()
+        self._event_serializer = hs.get_event_client_serializer()
 
     @defer.inlineCallbacks
     @log_function
@@ -120,9 +120,9 @@ class EventStreamHandler(BaseHandler):
 
             time_now = self.clock.time_msec()
 
-            chunks = [
-                serialize_event(e, time_now, as_client_event) for e in events
-            ]
+            chunks = yield self._event_serializer.serialize_events(
+                events, time_now, as_client_event=as_client_event,
+            )
 
             chunk = {
                 "chunk": chunks,
diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py
index 7dfae78db0..aaee5db0b7 100644
--- a/synapse/handlers/initial_sync.py
+++ b/synapse/handlers/initial_sync.py
@@ -19,7 +19,6 @@ from twisted.internet import defer
 
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError, Codes, SynapseError
-from synapse.events.utils import serialize_event
 from synapse.events.validator import EventValidator
 from synapse.handlers.presence import format_user_presence_state
 from synapse.streams.config import PaginationConfig
@@ -43,6 +42,7 @@ class InitialSyncHandler(BaseHandler):
         self.clock = hs.get_clock()
         self.validator = EventValidator()
         self.snapshot_cache = SnapshotCache()
+        self._event_serializer = hs.get_event_client_serializer()
 
     def snapshot_all_rooms(self, user_id=None, pagin_config=None,
                            as_client_event=True, include_archived=False):
@@ -138,7 +138,9 @@ class InitialSyncHandler(BaseHandler):
                 d["inviter"] = event.sender
 
                 invite_event = yield self.store.get_event(event.event_id)
-                d["invite"] = serialize_event(invite_event, time_now, as_client_event)
+                d["invite"] = yield self._event_serializer.serialize_event(
+                    invite_event, time_now, as_client_event,
+                )
 
             rooms_ret.append(d)
 
@@ -185,18 +187,21 @@ class InitialSyncHandler(BaseHandler):
                 time_now = self.clock.time_msec()
 
                 d["messages"] = {
-                    "chunk": [
-                        serialize_event(m, time_now, as_client_event)
-                        for m in messages
-                    ],
+                    "chunk": (
+                        yield self._event_serializer.serialize_events(
+                            messages, time_now=time_now,
+                            as_client_event=as_client_event,
+                        )
+                    ),
                     "start": start_token.to_string(),
                     "end": end_token.to_string(),
                 }
 
-                d["state"] = [
-                    serialize_event(c, time_now, as_client_event)
-                    for c in current_state.values()
-                ]
+                d["state"] = yield self._event_serializer.serialize_events(
+                    current_state.values(),
+                    time_now=time_now,
+                    as_client_event=as_client_event
+                )
 
                 account_data_events = []
                 tags = tags_by_room.get(event.room_id)
@@ -337,11 +342,15 @@ class InitialSyncHandler(BaseHandler):
             "membership": membership,
             "room_id": room_id,
             "messages": {
-                "chunk": [serialize_event(m, time_now) for m in messages],
+                "chunk": (yield self._event_serializer.serialize_events(
+                    messages, time_now,
+                )),
                 "start": start_token.to_string(),
                 "end": end_token.to_string(),
             },
-            "state": [serialize_event(s, time_now) for s in room_state.values()],
+            "state": (yield self._event_serializer.serialize_events(
+                room_state.values(), time_now,
+            )),
             "presence": [],
             "receipts": [],
         })
@@ -355,10 +364,9 @@ class InitialSyncHandler(BaseHandler):
 
         # TODO: These concurrently
         time_now = self.clock.time_msec()
-        state = [
-            serialize_event(x, time_now)
-            for x in current_state.values()
-        ]
+        state = yield self._event_serializer.serialize_events(
+            current_state.values(), time_now,
+        )
 
         now_token = yield self.hs.get_event_sources().get_current_token()
 
@@ -425,7 +433,9 @@ class InitialSyncHandler(BaseHandler):
         ret = {
             "room_id": room_id,
             "messages": {
-                "chunk": [serialize_event(m, time_now) for m in messages],
+                "chunk": (yield self._event_serializer.serialize_events(
+                    messages, time_now,
+                )),
                 "start": start_token.to_string(),
                 "end": end_token.to_string(),
             },
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index e5afeadf68..7b2c33a922 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -32,7 +32,6 @@ from synapse.api.errors import (
 )
 from synapse.api.room_versions import RoomVersions
 from synapse.api.urls import ConsentURIBuilder
-from synapse.events.utils import serialize_event
 from synapse.events.validator import EventValidator
 from synapse.replication.http.send_event import ReplicationSendEventRestServlet
 from synapse.storage.state import StateFilter
@@ -57,6 +56,7 @@ class MessageHandler(object):
         self.clock = hs.get_clock()
         self.state = hs.get_state_handler()
         self.store = hs.get_datastore()
+        self._event_serializer = hs.get_event_client_serializer()
 
     @defer.inlineCallbacks
     def get_room_data(self, user_id=None, room_id=None,
@@ -164,9 +164,10 @@ class MessageHandler(object):
                 room_state = room_state[membership_event_id]
 
         now = self.clock.time_msec()
-        defer.returnValue(
-            [serialize_event(c, now) for c in room_state.values()]
+        events = yield self._event_serializer.serialize_events(
+            room_state.values(), now,
         )
+        defer.returnValue(events)
 
     @defer.inlineCallbacks
     def get_joined_members(self, requester, room_id):
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index e4fdae9266..8f811e24fe 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -20,7 +20,6 @@ from twisted.python.failure import Failure
 
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import SynapseError
-from synapse.events.utils import serialize_event
 from synapse.storage.state import StateFilter
 from synapse.types import RoomStreamToken
 from synapse.util.async_helpers import ReadWriteLock
@@ -78,6 +77,7 @@ class PaginationHandler(object):
         self._purges_in_progress_by_room = set()
         # map from purge id to PurgeStatus
         self._purges_by_id = {}
+        self._event_serializer = hs.get_event_client_serializer()
 
     def start_purge_history(self, room_id, token,
                             delete_local_events=False):
@@ -278,18 +278,22 @@ class PaginationHandler(object):
         time_now = self.clock.time_msec()
 
         chunk = {
-            "chunk": [
-                serialize_event(e, time_now, as_client_event)
-                for e in events
-            ],
+            "chunk": (
+                yield self._event_serializer.serialize_events(
+                    events, time_now,
+                    as_client_event=as_client_event,
+                )
+            ),
             "start": pagin_config.from_token.to_string(),
             "end": next_token.to_string(),
         }
 
         if state:
-            chunk["state"] = [
-                serialize_event(e, time_now, as_client_event)
-                for e in state
-            ]
+            chunk["state"] = (
+                yield self._event_serializer.serialize_events(
+                    state, time_now,
+                    as_client_event=as_client_event,
+                )
+            )
 
         defer.returnValue(chunk)
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 3e86b9c690..ffc588d454 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Copyright 2016 OpenMarket Ltd
 # Copyright 2018 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -73,6 +74,7 @@ class RoomMemberHandler(object):
         self.spam_checker = hs.get_spam_checker()
         self._server_notices_mxid = self.config.server_notices_mxid
         self._enable_lookup = hs.config.enable_3pid_lookup
+        self.allow_per_room_profiles = self.config.allow_per_room_profiles
 
         # This is only used to get at ratelimit function, and
         # maybe_kick_guest_users. It's fine there are multiple of these as
@@ -357,6 +359,13 @@ class RoomMemberHandler(object):
             # later on.
             content = dict(content)
 
+        if not self.allow_per_room_profiles:
+            # Strip profile data, knowing that new profile data will be added to the
+            # event's content in event_creation_handler.create_event() using the target's
+            # global profile.
+            content.pop("displayname", None)
+            content.pop("avatar_url", None)
+
         effective_membership_state = action
         if action in ["kick", "unban"]:
             effective_membership_state = "leave"
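
When allow_per_room_profiles is disabled, any per-room display name or avatar supplied by
the client is dropped from the membership content before the event is created; the
target's global profile is filled back in later by the event creation handler, as the
comment above notes. A small illustration of the effect (the content dict is made up for
the example):

    allow_per_room_profiles = False

    content = {
        "membership": "join",
        "displayname": "Alice (work)",          # per-room override from the client
        "avatar_url": "mxc://example.org/abc",  # per-room override from the client
    }

    if not allow_per_room_profiles:
        # Same stripping as in the RoomMemberHandler hunk above.
        content.pop("displayname", None)
        content.pop("avatar_url", None)

    print(content)  # {'membership': 'join'} - global profile data is re-added later
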
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index 49c439313e..9bba74d6c9 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -23,7 +23,6 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import SynapseError
 from synapse.api.filtering import Filter
-from synapse.events.utils import serialize_event
 from synapse.storage.state import StateFilter
 from synapse.visibility import filter_events_for_client
 
@@ -36,6 +35,7 @@ class SearchHandler(BaseHandler):
 
     def __init__(self, hs):
         super(SearchHandler, self).__init__(hs)
+        self._event_serializer = hs.get_event_client_serializer()
 
     @defer.inlineCallbacks
     def get_old_rooms_from_upgraded_room(self, room_id):
@@ -401,14 +401,16 @@ class SearchHandler(BaseHandler):
         time_now = self.clock.time_msec()
 
         for context in contexts.values():
-            context["events_before"] = [
-                serialize_event(e, time_now)
-                for e in context["events_before"]
-            ]
-            context["events_after"] = [
-                serialize_event(e, time_now)
-                for e in context["events_after"]
-            ]
+            context["events_before"] = (
+                yield self._event_serializer.serialize_events(
+                    context["events_before"], time_now,
+                )
+            )
+            context["events_after"] = (
+                yield self._event_serializer.serialize_events(
+                    context["events_after"], time_now,
+                )
+            )
 
         state_results = {}
         if include_state:
@@ -422,14 +424,13 @@ class SearchHandler(BaseHandler):
         # We're now about to serialize the events. We should not make any
         # blocking calls after this. Otherwise the 'age' will be wrong
 
-        results = [
-            {
+        results = []
+        for e in allowed_events:
+            results.append({
                 "rank": rank_map[e.event_id],
-                "result": serialize_event(e, time_now),
+                "result": (yield self._event_serializer.serialize_event(e, time_now)),
                 "context": contexts.get(e.event_id, {}),
-            }
-            for e in allowed_events
-        ]
+            })
 
         rooms_cat_res = {
             "results": results,
@@ -438,10 +439,13 @@ class SearchHandler(BaseHandler):
         }
 
         if state_results:
-            rooms_cat_res["state"] = {
-                room_id: [serialize_event(e, time_now) for e in state]
-                for room_id, state in state_results.items()
-            }
+            s = {}
+            for room_id, state in state_results.items():
+                s[room_id] = yield self._event_serializer.serialize_events(
+                    state, time_now,
+                )
+
+            rooms_cat_res["state"] = s
 
         if room_groups and "room_id" in group_keys:
             rooms_cat_res.setdefault("groups", {})["room_id"] = room_groups
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 153312e39f..1ee9a6e313 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -934,7 +934,7 @@ class SyncHandler(object):
         res = yield self._generate_sync_entry_for_rooms(
             sync_result_builder, account_data_by_room
         )
-        newly_joined_rooms, newly_joined_users, _, _ = res
+        newly_joined_rooms, newly_joined_or_invited_users, _, _ = res
         _, _, newly_left_rooms, newly_left_users = res
 
         block_all_presence_data = (
@@ -943,7 +943,7 @@ class SyncHandler(object):
         )
         if self.hs_config.use_presence and not block_all_presence_data:
             yield self._generate_sync_entry_for_presence(
-                sync_result_builder, newly_joined_rooms, newly_joined_users
+                sync_result_builder, newly_joined_rooms, newly_joined_or_invited_users
             )
 
         yield self._generate_sync_entry_for_to_device(sync_result_builder)
@@ -951,7 +951,7 @@ class SyncHandler(object):
         device_lists = yield self._generate_sync_entry_for_device_list(
             sync_result_builder,
             newly_joined_rooms=newly_joined_rooms,
-            newly_joined_users=newly_joined_users,
+            newly_joined_or_invited_users=newly_joined_or_invited_users,
             newly_left_rooms=newly_left_rooms,
             newly_left_users=newly_left_users,
         )
@@ -1036,7 +1036,8 @@ class SyncHandler(object):
     @measure_func("_generate_sync_entry_for_device_list")
     @defer.inlineCallbacks
     def _generate_sync_entry_for_device_list(self, sync_result_builder,
-                                             newly_joined_rooms, newly_joined_users,
+                                             newly_joined_rooms,
+                                             newly_joined_or_invited_users,
                                              newly_left_rooms, newly_left_users):
         user_id = sync_result_builder.sync_config.user.to_string()
         since_token = sync_result_builder.since_token
@@ -1050,7 +1051,7 @@ class SyncHandler(object):
             # share a room with?
             for room_id in newly_joined_rooms:
                 joined_users = yield self.state.get_current_users_in_room(room_id)
-                newly_joined_users.update(joined_users)
+                newly_joined_or_invited_users.update(joined_users)
 
             for room_id in newly_left_rooms:
                 left_users = yield self.state.get_current_users_in_room(room_id)
@@ -1058,7 +1059,7 @@ class SyncHandler(object):
 
             # TODO: Check that these users are actually new, i.e. either they
             # weren't in the previous sync *or* they left and rejoined.
-            changed.update(newly_joined_users)
+            changed.update(newly_joined_or_invited_users)
 
             if not changed and not newly_left_users:
                 defer.returnValue(DeviceLists(
@@ -1176,7 +1177,7 @@ class SyncHandler(object):
 
     @defer.inlineCallbacks
     def _generate_sync_entry_for_presence(self, sync_result_builder, newly_joined_rooms,
-                                          newly_joined_users):
+                                          newly_joined_or_invited_users):
         """Generates the presence portion of the sync response. Populates the
         `sync_result_builder` with the result.
 
@@ -1184,8 +1185,9 @@ class SyncHandler(object):
             sync_result_builder(SyncResultBuilder)
             newly_joined_rooms(list): List of rooms that the user has joined
                 since the last sync (or empty if an initial sync)
-            newly_joined_users(list): List of users that have joined rooms
-                since the last sync (or empty if an initial sync)
+            newly_joined_or_invited_users(list): List of users that have joined
+                or been invited to rooms since the last sync (or empty if an initial
+                sync)
         """
         now_token = sync_result_builder.now_token
         sync_config = sync_result_builder.sync_config
@@ -1211,7 +1213,7 @@ class SyncHandler(object):
             "presence_key", presence_key
         )
 
-        extra_users_ids = set(newly_joined_users)
+        extra_users_ids = set(newly_joined_or_invited_users)
         for room_id in newly_joined_rooms:
             users = yield self.state.get_current_users_in_room(room_id)
             extra_users_ids.update(users)
@@ -1243,7 +1245,8 @@ class SyncHandler(object):
 
         Returns:
             Deferred(tuple): Returns a 4-tuple of
-            `(newly_joined_rooms, newly_joined_users, newly_left_rooms, newly_left_users)`
+            `(newly_joined_rooms, newly_joined_or_invited_users,
+            newly_left_rooms, newly_left_users)`
         """
         user_id = sync_result_builder.sync_config.user.to_string()
         block_all_room_ephemeral = (
@@ -1314,8 +1317,8 @@ class SyncHandler(object):
 
         sync_result_builder.invited.extend(invited)
 
-        # Now we want to get any newly joined users
-        newly_joined_users = set()
+        # Now we want to get any newly joined or invited users
+        newly_joined_or_invited_users = set()
         newly_left_users = set()
         if since_token:
             for joined_sync in sync_result_builder.joined:
@@ -1324,19 +1327,22 @@ class SyncHandler(object):
                 )
                 for event in it:
                     if event.type == EventTypes.Member:
-                        if event.membership == Membership.JOIN:
-                            newly_joined_users.add(event.state_key)
+                        if (
+                            event.membership == Membership.JOIN or
+                            event.membership == Membership.INVITE
+                        ):
+                            newly_joined_or_invited_users.add(event.state_key)
                         else:
                             prev_content = event.unsigned.get("prev_content", {})
                             prev_membership = prev_content.get("membership", None)
                             if prev_membership == Membership.JOIN:
                                 newly_left_users.add(event.state_key)
 
-        newly_left_users -= newly_joined_users
+        newly_left_users -= newly_joined_or_invited_users
 
         defer.returnValue((
             newly_joined_rooms,
-            newly_joined_users,
+            newly_joined_or_invited_users,
             newly_left_rooms,
             newly_left_users,
         ))
@@ -1381,7 +1387,7 @@ class SyncHandler(object):
             where:
                 room_entries is a list [RoomSyncResultBuilder]
                 invited_rooms is a list [InvitedSyncResult]
-                newly_joined rooms is a list[str] of room ids
+                newly_joined_rooms is a list[str] of room ids
                 newly_left_rooms is a list[str] of room ids
         """
         user_id = sync_result_builder.sync_config.user.to_string()
@@ -1422,7 +1428,7 @@ class SyncHandler(object):
             if room_id in sync_result_builder.joined_room_ids and non_joins:
                 # Always include if the user (re)joined the room, especially
                 # important so that device list changes are calculated correctly.
-                # If there are non join member events, but we are still in the room,
+                # If there are non-join member events, but we are still in the room,
                 # then the user must have left and joined
                 newly_joined_rooms.append(room_id)
 
diff --git a/synapse/http/client.py b/synapse/http/client.py
index ddbfb72228..77fe68818b 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -165,7 +165,8 @@ class BlacklistingAgentWrapper(Agent):
                 ip_address, self._ip_whitelist, self._ip_blacklist
             ):
                 logger.info(
-                    "Blocking access to %s because of blacklist" % (ip_address,)
+                    "Blocking access to %s due to blacklist" %
+                    (ip_address,)
                 )
                 e = SynapseError(403, "IP address blocked by IP blacklist entry")
                 return defer.fail(Failure(e))
@@ -263,9 +264,6 @@ class SimpleHttpClient(object):
             uri (str): URI to query.
             data (bytes): Data to send in the request body, if applicable.
             headers (t.w.http_headers.Headers): Request headers.
-
-        Raises:
-            SynapseError: If the IP is blacklisted.
         """
         # A small wrapper around self.agent.request() so we can easily attach
         # counters to it
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index ff63d0b2a8..7eefc7b1fc 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -27,9 +27,11 @@ import treq
 from canonicaljson import encode_canonical_json
 from prometheus_client import Counter
 from signedjson.sign import sign_json
+from zope.interface import implementer
 
 from twisted.internet import defer, protocol
 from twisted.internet.error import DNSLookupError
+from twisted.internet.interfaces import IReactorPluggableNameResolver
 from twisted.internet.task import _EPSILON, Cooperator
 from twisted.web._newclient import ResponseDone
 from twisted.web.http_headers import Headers
@@ -44,6 +46,7 @@ from synapse.api.errors import (
     SynapseError,
 )
 from synapse.http import QuieterFileBodyProducer
+from synapse.http.client import BlacklistingAgentWrapper, IPBlacklistingResolver
 from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
 from synapse.util.async_helpers import timeout_deferred
 from synapse.util.logcontext import make_deferred_yieldable
@@ -172,19 +175,44 @@ class MatrixFederationHttpClient(object):
         self.hs = hs
         self.signing_key = hs.config.signing_key[0]
         self.server_name = hs.hostname
-        reactor = hs.get_reactor()
+
+        real_reactor = hs.get_reactor()
+
+        # We need to use a DNS resolver which filters out blacklisted IP
+        # addresses, to prevent DNS rebinding.
+        nameResolver = IPBlacklistingResolver(
+            real_reactor, None, hs.config.federation_ip_range_blacklist,
+        )
+
+        @implementer(IReactorPluggableNameResolver)
+        class Reactor(object):
+            def __getattr__(_self, attr):
+                if attr == "nameResolver":
+                    return nameResolver
+                else:
+                    return getattr(real_reactor, attr)
+
+        self.reactor = Reactor()
 
         self.agent = MatrixFederationAgent(
-            hs.get_reactor(),
+            self.reactor,
             tls_client_options_factory,
         )
+
+        # Use a BlacklistingAgentWrapper to prevent circumventing the IP
+        # blacklist via IP literals in server names
+        self.agent = BlacklistingAgentWrapper(
+            self.agent, self.reactor,
+            ip_blacklist=hs.config.federation_ip_range_blacklist,
+        )
+
         self.clock = hs.get_clock()
         self._store = hs.get_datastore()
         self.version_string_bytes = hs.version_string.encode('ascii')
         self.default_timeout = 60
 
         def schedule(x):
-            reactor.callLater(_EPSILON, x)
+            self.reactor.callLater(_EPSILON, x)
 
         self._cooperator = Cooperator(scheduler=schedule)
 
@@ -370,7 +398,7 @@ class MatrixFederationHttpClient(object):
                             request_deferred = timeout_deferred(
                                 request_deferred,
                                 timeout=_sec_timeout,
-                                reactor=self.hs.get_reactor(),
+                                reactor=self.reactor,
                             )
 
                             response = yield request_deferred
@@ -397,7 +425,7 @@ class MatrixFederationHttpClient(object):
                         d = timeout_deferred(
                             d,
                             timeout=_sec_timeout,
-                            reactor=self.hs.get_reactor(),
+                            reactor=self.reactor,
                         )
 
                         try:
@@ -586,7 +614,7 @@ class MatrixFederationHttpClient(object):
         )
 
         body = yield _handle_json_response(
-            self.hs.get_reactor(), self.default_timeout, request, response,
+            self.reactor, self.default_timeout, request, response,
         )
 
         defer.returnValue(body)
@@ -645,7 +673,7 @@ class MatrixFederationHttpClient(object):
             _sec_timeout = self.default_timeout
 
         body = yield _handle_json_response(
-            self.hs.get_reactor(), _sec_timeout, request, response,
+            self.reactor, _sec_timeout, request, response,
         )
         defer.returnValue(body)
 
@@ -704,7 +732,7 @@ class MatrixFederationHttpClient(object):
         )
 
         body = yield _handle_json_response(
-            self.hs.get_reactor(), self.default_timeout, request, response,
+            self.reactor, self.default_timeout, request, response,
         )
 
         defer.returnValue(body)
@@ -753,7 +781,7 @@ class MatrixFederationHttpClient(object):
         )
 
         body = yield _handle_json_response(
-            self.hs.get_reactor(), self.default_timeout, request, response,
+            self.reactor, self.default_timeout, request, response,
         )
         defer.returnValue(body)
 
@@ -801,7 +829,7 @@ class MatrixFederationHttpClient(object):
 
         try:
             d = _readBodyToFile(response, output_stream, max_size)
-            d.addTimeout(self.default_timeout, self.hs.get_reactor())
+            d.addTimeout(self.default_timeout, self.reactor)
             length = yield make_deferred_yieldable(d)
         except Exception as e:
             logger.warn(
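
The Reactor class defined above is a thin proxy: every attribute lookup is delegated to
the real reactor except nameResolver, which is replaced by the blacklisting resolver so
that DNS results pointing at blacklisted ranges are rejected before any connection is
made. A stripped-down sketch of the same __getattr__ delegation pattern (the names here
are illustrative, not Synapse APIs):

    class RealReactor(object):
        name_resolver = "real resolver"

        def call_later(self, delay, fn):
            return "scheduled %r after %ss" % (fn, delay)

    class BlacklistingProxy(object):
        """Delegate everything to the wrapped object, overriding one attribute."""

        def __init__(self, wrapped, resolver):
            self._wrapped = wrapped
            self._resolver = resolver

        def __getattr__(self, attr):
            # Only called for attributes not found on the proxy itself, so
            # everything except name_resolver falls through to the wrapped object.
            if attr == "name_resolver":
                return self._resolver
            return getattr(self._wrapped, attr)

    proxy = BlacklistingProxy(RealReactor(), "blacklisting resolver")
    print(proxy.call_later(5, print))   # delegated to the real reactor
    print(proxy.name_resolver)          # overridden by the proxy
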
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index 2708f5e820..fdcfb90a7e 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -53,7 +53,7 @@ REQUIREMENTS = [
     "pyasn1-modules>=0.0.7",
     "daemonize>=2.3.1",
     "bcrypt>=3.1.0",
-    "pillow>=3.1.2",
+    "pillow>=4.3.0",
     "sortedcontainers>=1.4.4",
     "psutil>=2.0.0",
     "pymacaroons>=0.13.0",
diff --git a/synapse/rest/client/v1/base.py b/synapse/rest/client/v1/base.py
index c77d7aba68..dc63b661c0 100644
--- a/synapse/rest/client/v1/base.py
+++ b/synapse/rest/client/v1/base.py
@@ -19,7 +19,7 @@
 import logging
 import re
 
-from synapse.api.urls import CLIENT_PREFIX
+from synapse.api.urls import CLIENT_API_PREFIX
 from synapse.http.servlet import RestServlet
 from synapse.rest.client.transactions import HttpTransactionCache
 
@@ -36,12 +36,12 @@ def client_path_patterns(path_regex, releases=(0,), include_in_unstable=True):
     Returns:
         SRE_Pattern
     """
-    patterns = [re.compile("^" + CLIENT_PREFIX + path_regex)]
+    patterns = [re.compile("^" + CLIENT_API_PREFIX + "/api/v1" + path_regex)]
     if include_in_unstable:
-        unstable_prefix = CLIENT_PREFIX.replace("/api/v1", "/unstable")
+        unstable_prefix = CLIENT_API_PREFIX + "/unstable"
         patterns.append(re.compile("^" + unstable_prefix + path_regex))
     for release in releases:
-        new_prefix = CLIENT_PREFIX.replace("/api/v1", "/r%d" % release)
+        new_prefix = CLIENT_API_PREFIX + "/r%d" % (release,)
         patterns.append(re.compile("^" + new_prefix + path_regex))
     return patterns
 
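
With the single CLIENT_API_PREFIX, each prefix is now built by appending the version
rather than string-replacing parts of a hard-coded /api/v1 path. A worked example of what
the function above produces for a servlet registered with client_path_patterns("/login$"):

    import re

    CLIENT_API_PREFIX = "/_matrix/client"

    def client_path_patterns(path_regex, releases=(0,), include_in_unstable=True):
        # Same construction as synapse/rest/client/v1/base.py above.
        patterns = [re.compile("^" + CLIENT_API_PREFIX + "/api/v1" + path_regex)]
        if include_in_unstable:
            patterns.append(re.compile("^" + CLIENT_API_PREFIX + "/unstable" + path_regex))
        for release in releases:
            patterns.append(re.compile("^" + CLIENT_API_PREFIX + "/r%d" % (release,) + path_regex))
        return patterns

    for p in client_path_patterns("/login$"):
        print(p.pattern)
    # ^/_matrix/client/api/v1/login$
    # ^/_matrix/client/unstable/login$
    # ^/_matrix/client/r0/login$

The v2_alpha helper changed below builds its patterns the same way (minus the /api/v1
form), and no longer registers the old /_matrix/client/v2_alpha prefix at all.
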
diff --git a/synapse/rest/client/v1/events.py b/synapse/rest/client/v1/events.py
index cd9b3bdbd1..c3b0a39ab7 100644
--- a/synapse/rest/client/v1/events.py
+++ b/synapse/rest/client/v1/events.py
@@ -19,7 +19,6 @@ import logging
 from twisted.internet import defer
 
 from synapse.api.errors import SynapseError
-from synapse.events.utils import serialize_event
 from synapse.streams.config import PaginationConfig
 
 from .base import ClientV1RestServlet, client_path_patterns
@@ -84,6 +83,7 @@ class EventRestServlet(ClientV1RestServlet):
         super(EventRestServlet, self).__init__(hs)
         self.clock = hs.get_clock()
         self.event_handler = hs.get_event_handler()
+        self._event_serializer = hs.get_event_client_serializer()
 
     @defer.inlineCallbacks
     def on_GET(self, request, event_id):
@@ -92,7 +92,8 @@ class EventRestServlet(ClientV1RestServlet):
 
         time_now = self.clock.time_msec()
         if event:
-            defer.returnValue((200, serialize_event(event, time_now)))
+            event = yield self._event_serializer.serialize_event(event, time_now)
+            defer.returnValue((200, event))
         else:
             defer.returnValue((404, "Event not found."))
 
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index fab04965cb..255a85c588 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -26,7 +26,7 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError, Codes, SynapseError
 from synapse.api.filtering import Filter
-from synapse.events.utils import format_event_for_client_v2, serialize_event
+from synapse.events.utils import format_event_for_client_v2
 from synapse.http.servlet import (
     assert_params_in_dict,
     parse_integer,
@@ -537,6 +537,7 @@ class RoomEventServlet(ClientV1RestServlet):
         super(RoomEventServlet, self).__init__(hs)
         self.clock = hs.get_clock()
         self.event_handler = hs.get_event_handler()
+        self._event_serializer = hs.get_event_client_serializer()
 
     @defer.inlineCallbacks
     def on_GET(self, request, room_id, event_id):
@@ -545,7 +546,8 @@ class RoomEventServlet(ClientV1RestServlet):
 
         time_now = self.clock.time_msec()
         if event:
-            defer.returnValue((200, serialize_event(event, time_now)))
+            event = yield self._event_serializer.serialize_event(event, time_now)
+            defer.returnValue((200, event))
         else:
             defer.returnValue((404, "Event not found."))
 
@@ -559,6 +561,7 @@ class RoomEventContextServlet(ClientV1RestServlet):
         super(RoomEventContextServlet, self).__init__(hs)
         self.clock = hs.get_clock()
         self.room_context_handler = hs.get_room_context_handler()
+        self._event_serializer = hs.get_event_client_serializer()
 
     @defer.inlineCallbacks
     def on_GET(self, request, room_id, event_id):
@@ -588,16 +591,18 @@ class RoomEventContextServlet(ClientV1RestServlet):
             )
 
         time_now = self.clock.time_msec()
-        results["events_before"] = [
-            serialize_event(event, time_now) for event in results["events_before"]
-        ]
-        results["event"] = serialize_event(results["event"], time_now)
-        results["events_after"] = [
-            serialize_event(event, time_now) for event in results["events_after"]
-        ]
-        results["state"] = [
-            serialize_event(event, time_now) for event in results["state"]
-        ]
+        results["events_before"] = yield self._event_serializer.serialize_events(
+            results["events_before"], time_now,
+        )
+        results["event"] = yield self._event_serializer.serialize_event(
+            results["event"], time_now,
+        )
+        results["events_after"] = yield self._event_serializer.serialize_events(
+            results["events_after"], time_now,
+        )
+        results["state"] = yield self._event_serializer.serialize_events(
+            results["state"], time_now,
+        )
 
         defer.returnValue((200, results))
 
diff --git a/synapse/rest/client/v2_alpha/_base.py b/synapse/rest/client/v2_alpha/_base.py
index 77434937ff..24ac26bf03 100644
--- a/synapse/rest/client/v2_alpha/_base.py
+++ b/synapse/rest/client/v2_alpha/_base.py
@@ -21,13 +21,12 @@ import re
 from twisted.internet import defer
 
 from synapse.api.errors import InteractiveAuthIncompleteError
-from synapse.api.urls import CLIENT_V2_ALPHA_PREFIX
+from synapse.api.urls import CLIENT_API_PREFIX
 
 logger = logging.getLogger(__name__)
 
 
 def client_v2_patterns(path_regex, releases=(0,),
-                       v2_alpha=True,
                        unstable=True):
     """Creates a regex compiled client path with the correct client path
     prefix.
@@ -39,13 +38,11 @@ def client_v2_patterns(path_regex, releases=(0,),
         SRE_Pattern
     """
     patterns = []
-    if v2_alpha:
-        patterns.append(re.compile("^" + CLIENT_V2_ALPHA_PREFIX + path_regex))
     if unstable:
-        unstable_prefix = CLIENT_V2_ALPHA_PREFIX.replace("/v2_alpha", "/unstable")
+        unstable_prefix = CLIENT_API_PREFIX + "/unstable"
         patterns.append(re.compile("^" + unstable_prefix + path_regex))
     for release in releases:
-        new_prefix = CLIENT_V2_ALPHA_PREFIX.replace("/v2_alpha", "/r%d" % release)
+        new_prefix = CLIENT_API_PREFIX + "/r%d" % (release,)
         patterns.append(re.compile("^" + new_prefix + path_regex))
     return patterns
 
diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py
index ac035c7735..4c380ab84d 100644
--- a/synapse/rest/client/v2_alpha/auth.py
+++ b/synapse/rest/client/v2_alpha/auth.py
@@ -19,7 +19,7 @@ from twisted.internet import defer
 
 from synapse.api.constants import LoginType
 from synapse.api.errors import SynapseError
-from synapse.api.urls import CLIENT_V2_ALPHA_PREFIX
+from synapse.api.urls import CLIENT_API_PREFIX
 from synapse.http.server import finish_request
 from synapse.http.servlet import RestServlet, parse_string
 
@@ -139,8 +139,8 @@ class AuthRestServlet(RestServlet):
         if stagetype == LoginType.RECAPTCHA:
             html = RECAPTCHA_TEMPLATE % {
                 'session': session,
-                'myurl': "%s/auth/%s/fallback/web" % (
-                    CLIENT_V2_ALPHA_PREFIX, LoginType.RECAPTCHA
+                'myurl': "%s/r0/auth/%s/fallback/web" % (
+                    CLIENT_API_PREFIX, LoginType.RECAPTCHA
                 ),
                 'sitekey': self.hs.config.recaptcha_public_key,
             }
@@ -159,8 +159,8 @@ class AuthRestServlet(RestServlet):
                     self.hs.config.public_baseurl,
                     self.hs.config.user_consent_version,
                 ),
-                'myurl': "%s/auth/%s/fallback/web" % (
-                    CLIENT_V2_ALPHA_PREFIX, LoginType.TERMS
+                'myurl': "%s/r0/auth/%s/fallback/web" % (
+                    CLIENT_API_PREFIX, LoginType.TERMS
                 ),
             }
             html_bytes = html.encode("utf8")
@@ -203,8 +203,8 @@ class AuthRestServlet(RestServlet):
             else:
                 html = RECAPTCHA_TEMPLATE % {
                     'session': session,
-                    'myurl': "%s/auth/%s/fallback/web" % (
-                        CLIENT_V2_ALPHA_PREFIX, LoginType.RECAPTCHA
+                    'myurl': "%s/r0/auth/%s/fallback/web" % (
+                        CLIENT_API_PREFIX, LoginType.RECAPTCHA
                     ),
                     'sitekey': self.hs.config.recaptcha_public_key,
                 }
@@ -240,8 +240,8 @@ class AuthRestServlet(RestServlet):
                         self.hs.config.public_baseurl,
                         self.hs.config.user_consent_version,
                     ),
-                    'myurl': "%s/auth/%s/fallback/web" % (
-                        CLIENT_V2_ALPHA_PREFIX, LoginType.TERMS
+                    'myurl': "%s/r0/auth/%s/fallback/web" % (
+                        CLIENT_API_PREFIX, LoginType.TERMS
                     ),
                 }
             html_bytes = html.encode("utf8")
diff --git a/synapse/rest/client/v2_alpha/devices.py b/synapse/rest/client/v2_alpha/devices.py
index 9b75bb1377..5a5be7c390 100644
--- a/synapse/rest/client/v2_alpha/devices.py
+++ b/synapse/rest/client/v2_alpha/devices.py
@@ -30,7 +30,7 @@ logger = logging.getLogger(__name__)
 
 
 class DevicesRestServlet(RestServlet):
-    PATTERNS = client_v2_patterns("/devices$", v2_alpha=False)
+    PATTERNS = client_v2_patterns("/devices$")
 
     def __init__(self, hs):
         """
@@ -56,7 +56,7 @@ class DeleteDevicesRestServlet(RestServlet):
     API for bulk deletion of devices. Accepts a JSON object with a devices
     key which lists the device_ids to delete. Requires user interactive auth.
     """
-    PATTERNS = client_v2_patterns("/delete_devices", v2_alpha=False)
+    PATTERNS = client_v2_patterns("/delete_devices")
 
     def __init__(self, hs):
         super(DeleteDevicesRestServlet, self).__init__()
@@ -95,7 +95,7 @@ class DeleteDevicesRestServlet(RestServlet):
 
 
 class DeviceRestServlet(RestServlet):
-    PATTERNS = client_v2_patterns("/devices/(?P<device_id>[^/]*)$", v2_alpha=False)
+    PATTERNS = client_v2_patterns("/devices/(?P<device_id>[^/]*)$")
 
     def __init__(self, hs):
         """
diff --git a/synapse/rest/client/v2_alpha/notifications.py b/synapse/rest/client/v2_alpha/notifications.py
index 2a6ea3df5f..0a1eb0ae45 100644
--- a/synapse/rest/client/v2_alpha/notifications.py
+++ b/synapse/rest/client/v2_alpha/notifications.py
@@ -17,10 +17,7 @@ import logging
 
 from twisted.internet import defer
 
-from synapse.events.utils import (
-    format_event_for_client_v2_without_room_id,
-    serialize_event,
-)
+from synapse.events.utils import format_event_for_client_v2_without_room_id
 from synapse.http.servlet import RestServlet, parse_integer, parse_string
 
 from ._base import client_v2_patterns
@@ -36,6 +33,7 @@ class NotificationsServlet(RestServlet):
         self.store = hs.get_datastore()
         self.auth = hs.get_auth()
         self.clock = hs.get_clock()
+        self._event_serializer = hs.get_event_client_serializer()
 
     @defer.inlineCallbacks
     def on_GET(self, request):
@@ -69,11 +67,11 @@ class NotificationsServlet(RestServlet):
                 "profile_tag": pa["profile_tag"],
                 "actions": pa["actions"],
                 "ts": pa["received_ts"],
-                "event": serialize_event(
+                "event": (yield self._event_serializer.serialize_event(
                     notif_events[pa["event_id"]],
                     self.clock.time_msec(),
                     event_format=format_event_for_client_v2_without_room_id,
-                ),
+                )),
             }
 
             if pa["room_id"] not in receipts_by_room:
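
(A small aside on the syntax above; this is a general Python rule rather than anything specific to this patch: a yield expression used as a sub-expression, such as a dict value, must be parenthesised, which is why the serialize_event call gains the extra brackets.)

    # Sketch only, outside the patch:
    def example():
        value = {"event": (yield 1)}   # fine
        # value = {"event": yield 1}   # SyntaxError without the parentheses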
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py
index dc3e265bcd..042f636135 100644
--- a/synapse/rest/client/v2_alpha/register.py
+++ b/synapse/rest/client/v2_alpha/register.py
@@ -31,6 +31,7 @@ from synapse.api.errors import (
     SynapseError,
     UnrecognizedRequestError,
 )
+from synapse.config.ratelimiting import FederationRateLimitConfig
 from synapse.config.server import is_threepid_reserved
 from synapse.http.servlet import (
     RestServlet,
@@ -153,16 +154,18 @@ class UsernameAvailabilityRestServlet(RestServlet):
         self.registration_handler = hs.get_registration_handler()
         self.ratelimiter = FederationRateLimiter(
             hs.get_clock(),
-            # Time window of 2s
-            window_size=2000,
-            # Artificially delay requests if rate > sleep_limit/window_size
-            sleep_limit=1,
-            # Amount of artificial delay to apply
-            sleep_msec=1000,
-            # Error with 429 if more than reject_limit requests are queued
-            reject_limit=1,
-            # Allow 1 request at a time
-            concurrent_requests=1,
+            FederationRateLimitConfig(
+                # Time window of 2s
+                window_size=2000,
+                # Artificially delay requests if rate > sleep_limit/window_size
+                sleep_limit=1,
+                # Amount of artificial delay to apply
+                sleep_msec=1000,
+                # Error with 429 if more than reject_limit requests are queued
+                reject_limit=1,
+                # Allow 1 request at a time
+                concurrent_requests=1,
+            )
         )
 
     @defer.inlineCallbacks
@@ -345,18 +348,22 @@ class RegisterRestServlet(RestServlet):
         if self.hs.config.enable_registration_captcha:
             # only support 3PIDless registration if no 3PIDs are required
             if not require_email and not require_msisdn:
-                flows.extend([[LoginType.RECAPTCHA]])
+                # Also add a dummy flow here, otherwise if a client completes
+                # recaptcha first we'll assume they were going for this flow
+                # and complete the request, when they could have been trying to
+                # complete one of the flows with email/msisdn auth.
+                flows.extend([[LoginType.RECAPTCHA, LoginType.DUMMY]])
             # only support the email-only flow if we don't require MSISDN 3PIDs
             if not require_msisdn:
-                flows.extend([[LoginType.EMAIL_IDENTITY, LoginType.RECAPTCHA]])
+                flows.extend([[LoginType.RECAPTCHA, LoginType.EMAIL_IDENTITY]])
 
             if show_msisdn:
                 # only support the MSISDN-only flow if we don't require email 3PIDs
                 if not require_email:
-                    flows.extend([[LoginType.MSISDN, LoginType.RECAPTCHA]])
+                    flows.extend([[LoginType.RECAPTCHA, LoginType.MSISDN]])
                 # always let users provide both MSISDN & email
                 flows.extend([
-                    [LoginType.MSISDN, LoginType.EMAIL_IDENTITY, LoginType.RECAPTCHA],
+                    [LoginType.RECAPTCHA, LoginType.MSISDN, LoginType.EMAIL_IDENTITY],
                 ])
         else:
             # only support 3PIDless registration if no 3PIDs are required
@@ -379,7 +386,15 @@ class RegisterRestServlet(RestServlet):
         if self.hs.config.user_consent_at_registration:
             new_flows = []
             for flow in flows:
-                flow.append(LoginType.TERMS)
+                inserted = False
+                # m.login.terms should go near the end but before msisdn or email auth
+                for i, stage in enumerate(flow):
+                    if stage == LoginType.EMAIL_IDENTITY or stage == LoginType.MSISDN:
+                        flow.insert(i, LoginType.TERMS)
+                        inserted = True
+                        break
+                if not inserted:
+                    flow.append(LoginType.TERMS)
             flows.extend(new_flows)
 
         auth_result, params, session_id = yield self.auth_handler.check_auth(
@@ -391,13 +406,6 @@ class RegisterRestServlet(RestServlet):
         # the user-facing checks will probably already have happened in
         # /register/email/requestToken when we requested a 3pid, but that's not
         # guaranteed.
-        #
-        # Also check that we're not trying to register a 3pid that's already
-        # been registered.
-        #
-        # This has probably happened in /register/email/requestToken as well,
-        # but if a user hits this endpoint twice then clicks on each link from
-        # the two activation emails, they would register the same 3pid twice.
 
         if auth_result:
             for login_type in [LoginType.EMAIL_IDENTITY, LoginType.MSISDN]:
@@ -413,17 +421,6 @@ class RegisterRestServlet(RestServlet):
                             Codes.THREEPID_DENIED,
                         )
 
-                    existingUid = yield self.store.get_user_id_by_threepid(
-                        medium, address,
-                    )
-
-                    if existingUid is not None:
-                        raise SynapseError(
-                            400,
-                            "%s is already in use" % medium,
-                            Codes.THREEPID_IN_USE,
-                        )
-
         if registered_user_id is not None:
             logger.info(
                 "Already registered user ID %r for this session",
@@ -446,6 +443,28 @@ class RegisterRestServlet(RestServlet):
             if auth_result:
                 threepid = auth_result.get(LoginType.EMAIL_IDENTITY)
 
+                # Also check that we're not trying to register a 3pid that's already
+                # been registered.
+                #
+                # This has probably happened in /register/email/requestToken as well,
+                # but if a user hits this endpoint twice then clicks on each link from
+                # the two activation emails, they would register the same 3pid twice.
+                for login_type in [LoginType.EMAIL_IDENTITY, LoginType.MSISDN]:
+                    if login_type in auth_result:
+                        medium = auth_result[login_type]['medium']
+                        address = auth_result[login_type]['address']
+
+                        existingUid = yield self.store.get_user_id_by_threepid(
+                            medium, address,
+                        )
+
+                        if existingUid is not None:
+                            raise SynapseError(
+                                400,
+                                "%s is already in use" % medium,
+                                Codes.THREEPID_IN_USE,
+                            )
+
             (registered_user_id, _) = yield self.registration_handler.register(
                 localpart=desired_username,
                 password=new_password,
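
(For illustration only, not part of the patch: on a homeserver with enable_registration_captcha=True, show_msisdn=True, no required 3pids and user_consent_at_registration=True, the advertised flows would now come out roughly as below, with m.login.terms slotted in just before the first email/msisdn stage, or appended when a flow has neither.)

    flows = [
        ["m.login.recaptcha", "m.login.dummy", "m.login.terms"],
        ["m.login.recaptcha", "m.login.terms", "m.login.email.identity"],
        ["m.login.recaptcha", "m.login.terms", "m.login.msisdn"],
        ["m.login.recaptcha", "m.login.terms", "m.login.msisdn", "m.login.email.identity"],
    ]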
diff --git a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py
index 3db7ff8d1b..62b8de71fa 100644
--- a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py
+++ b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py
@@ -50,7 +50,6 @@ class RoomUpgradeRestServlet(RestServlet):
     PATTERNS = client_v2_patterns(
         # /rooms/$roomid/upgrade
         "/rooms/(?P<room_id>[^/]*)/upgrade$",
-        v2_alpha=False,
     )
 
     def __init__(self, hs):
diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py b/synapse/rest/client/v2_alpha/sendtodevice.py
index a9e9a47a0b..21e9cef2d0 100644
--- a/synapse/rest/client/v2_alpha/sendtodevice.py
+++ b/synapse/rest/client/v2_alpha/sendtodevice.py
@@ -29,7 +29,6 @@ logger = logging.getLogger(__name__)
 class SendToDeviceRestServlet(servlet.RestServlet):
     PATTERNS = client_v2_patterns(
         "/sendToDevice/(?P<message_type>[^/]*)/(?P<txn_id>[^/]*)$",
-        v2_alpha=False
     )
 
     def __init__(self, hs):
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 39d157a44b..c701e534e7 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -26,7 +26,6 @@ from synapse.api.filtering import DEFAULT_FILTER_COLLECTION, FilterCollection
 from synapse.events.utils import (
     format_event_for_client_v2_without_room_id,
     format_event_raw,
-    serialize_event,
 )
 from synapse.handlers.presence import format_user_presence_state
 from synapse.handlers.sync import SyncConfig
@@ -86,6 +85,7 @@ class SyncRestServlet(RestServlet):
         self.filtering = hs.get_filtering()
         self.presence_handler = hs.get_presence_handler()
         self._server_notices_sender = hs.get_server_notices_sender()
+        self._event_serializer = hs.get_event_client_serializer()
 
     @defer.inlineCallbacks
     def on_GET(self, request):
@@ -168,14 +168,14 @@ class SyncRestServlet(RestServlet):
             )
 
         time_now = self.clock.time_msec()
-        response_content = self.encode_response(
+        response_content = yield self.encode_response(
             time_now, sync_result, requester.access_token_id, filter
         )
 
         defer.returnValue((200, response_content))
 
-    @staticmethod
-    def encode_response(time_now, sync_result, access_token_id, filter):
+    @defer.inlineCallbacks
+    def encode_response(self, time_now, sync_result, access_token_id, filter):
         if filter.event_format == 'client':
             event_formatter = format_event_for_client_v2_without_room_id
         elif filter.event_format == 'federation':
@@ -183,24 +183,24 @@ class SyncRestServlet(RestServlet):
         else:
             raise Exception("Unknown event format %s" % (filter.event_format, ))
 
-        joined = SyncRestServlet.encode_joined(
+        joined = yield self.encode_joined(
             sync_result.joined, time_now, access_token_id,
             filter.event_fields,
             event_formatter,
         )
 
-        invited = SyncRestServlet.encode_invited(
+        invited = yield self.encode_invited(
             sync_result.invited, time_now, access_token_id,
             event_formatter,
         )
 
-        archived = SyncRestServlet.encode_archived(
+        archived = yield self.encode_archived(
             sync_result.archived, time_now, access_token_id,
             filter.event_fields,
             event_formatter,
         )
 
-        return {
+        defer.returnValue({
             "account_data": {"events": sync_result.account_data},
             "to_device": {"events": sync_result.to_device},
             "device_lists": {
@@ -222,7 +222,7 @@ class SyncRestServlet(RestServlet):
             },
             "device_one_time_keys_count": sync_result.device_one_time_keys_count,
             "next_batch": sync_result.next_batch.to_string(),
-        }
+        })
 
     @staticmethod
     def encode_presence(events, time_now):
@@ -239,8 +239,8 @@ class SyncRestServlet(RestServlet):
             ]
         }
 
-    @staticmethod
-    def encode_joined(rooms, time_now, token_id, event_fields, event_formatter):
+    @defer.inlineCallbacks
+    def encode_joined(self, rooms, time_now, token_id, event_fields, event_formatter):
         """
         Encode the joined rooms in a sync result
 
@@ -261,15 +261,15 @@ class SyncRestServlet(RestServlet):
         """
         joined = {}
         for room in rooms:
-            joined[room.room_id] = SyncRestServlet.encode_room(
+            joined[room.room_id] = yield self.encode_room(
                 room, time_now, token_id, joined=True, only_fields=event_fields,
                 event_formatter=event_formatter,
             )
 
-        return joined
+        defer.returnValue(joined)
 
-    @staticmethod
-    def encode_invited(rooms, time_now, token_id, event_formatter):
+    @defer.inlineCallbacks
+    def encode_invited(self, rooms, time_now, token_id, event_formatter):
         """
         Encode the invited rooms in a sync result
 
@@ -289,7 +289,7 @@ class SyncRestServlet(RestServlet):
         """
         invited = {}
         for room in rooms:
-            invite = serialize_event(
+            invite = yield self._event_serializer.serialize_event(
                 room.invite, time_now, token_id=token_id,
                 event_format=event_formatter,
                 is_invite=True,
@@ -302,10 +302,10 @@ class SyncRestServlet(RestServlet):
                 "invite_state": {"events": invited_state}
             }
 
-        return invited
+        defer.returnValue(invited)
 
-    @staticmethod
-    def encode_archived(rooms, time_now, token_id, event_fields, event_formatter):
+    @defer.inlineCallbacks
+    def encode_archived(self, rooms, time_now, token_id, event_fields, event_formatter):
         """
         Encode the archived rooms in a sync result
 
@@ -326,17 +326,17 @@ class SyncRestServlet(RestServlet):
         """
         joined = {}
         for room in rooms:
-            joined[room.room_id] = SyncRestServlet.encode_room(
+            joined[room.room_id] = yield self.encode_room(
                 room, time_now, token_id, joined=False,
                 only_fields=event_fields,
                 event_formatter=event_formatter,
             )
 
-        return joined
+        defer.returnValue(joined)
 
-    @staticmethod
+    @defer.inlineCallbacks
     def encode_room(
-            room, time_now, token_id, joined,
+            self, room, time_now, token_id, joined,
             only_fields, event_formatter,
     ):
         """
@@ -355,9 +355,10 @@ class SyncRestServlet(RestServlet):
         Returns:
             dict[str, object]: the room, encoded in our response format
         """
-        def serialize(event):
-            return serialize_event(
-                event, time_now, token_id=token_id,
+        def serialize(events):
+            return self._event_serializer.serialize_events(
+                events, time_now=time_now,
+                token_id=token_id,
                 event_format=event_formatter,
                 only_event_fields=only_fields,
             )
@@ -376,8 +377,8 @@ class SyncRestServlet(RestServlet):
                     event.event_id, room.room_id, event.room_id,
                 )
 
-        serialized_state = [serialize(e) for e in state_events]
-        serialized_timeline = [serialize(e) for e in timeline_events]
+        serialized_state = yield serialize(state_events)
+        serialized_timeline = yield serialize(timeline_events)
 
         account_data = room.account_data
 
@@ -397,7 +398,7 @@ class SyncRestServlet(RestServlet):
             result["unread_notifications"] = room.unread_notifications
             result["summary"] = room.summary
 
-        return result
+        defer.returnValue(result)
 
 
 def register_servlets(hs, http_server):
diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py
index bdffa97805..8569677355 100644
--- a/synapse/rest/media/v1/media_repository.py
+++ b/synapse/rest/media/v1/media_repository.py
@@ -444,6 +444,9 @@ class MediaRepository(object):
             )
             return
 
+        if thumbnailer.transpose_method is not None:
+            m_width, m_height = thumbnailer.transpose()
+
         if t_method == "crop":
             t_byte_source = thumbnailer.crop(t_width, t_height, t_type)
         elif t_method == "scale":
@@ -578,6 +581,12 @@ class MediaRepository(object):
             )
             return
 
+        if thumbnailer.transpose_method is not None:
+            m_width, m_height = yield logcontext.defer_to_thread(
+                self.hs.get_reactor(),
+                thumbnailer.transpose
+            )
+
         # We deduplicate the thumbnail sizes by ignoring the cropped versions if
         # they have the same dimensions of a scaled one.
         thumbnails = {}
diff --git a/synapse/rest/media/v1/storage_provider.py b/synapse/rest/media/v1/storage_provider.py
index 5aa03031f6..d90cbfb56a 100644
--- a/synapse/rest/media/v1/storage_provider.py
+++ b/synapse/rest/media/v1/storage_provider.py
@@ -108,6 +108,7 @@ class FileStorageProviderBackend(StorageProvider):
     """
 
     def __init__(self, hs, config):
+        self.hs = hs
         self.cache_directory = hs.config.media_store_path
         self.base_directory = config
 
diff --git a/synapse/rest/media/v1/thumbnailer.py b/synapse/rest/media/v1/thumbnailer.py
index a4b26c2587..3efd0d80fc 100644
--- a/synapse/rest/media/v1/thumbnailer.py
+++ b/synapse/rest/media/v1/thumbnailer.py
@@ -20,6 +20,17 @@ import PIL.Image as Image
 
 logger = logging.getLogger(__name__)
 
+EXIF_ORIENTATION_TAG = 0x0112
+EXIF_TRANSPOSE_MAPPINGS = {
+    2: Image.FLIP_LEFT_RIGHT,
+    3: Image.ROTATE_180,
+    4: Image.FLIP_TOP_BOTTOM,
+    5: Image.TRANSPOSE,
+    6: Image.ROTATE_270,
+    7: Image.TRANSVERSE,
+    8: Image.ROTATE_90
+}
+
 
 class Thumbnailer(object):
 
@@ -31,6 +42,30 @@ class Thumbnailer(object):
     def __init__(self, input_path):
         self.image = Image.open(input_path)
         self.width, self.height = self.image.size
+        self.transpose_method = None
+        try:
+            # We don't use ImageOps.exif_transpose since it crashes with big EXIF data
+            image_exif = self.image._getexif()
+            if image_exif is not None:
+                image_orientation = image_exif.get(EXIF_ORIENTATION_TAG)
+                self.transpose_method = EXIF_TRANSPOSE_MAPPINGS.get(image_orientation)
+        except Exception as e:
+            # A lot of parsing errors can happen when parsing EXIF
+            logger.info("Error parsing image EXIF information: %s", e)
+
+    def transpose(self):
+        """Transpose the image using its EXIF Orientation tag
+
+        Returns:
+            Tuple[int, int]: (width, height) containing the new image size in pixels.
+        """
+        if self.transpose_method is not None:
+            self.image = self.image.transpose(self.transpose_method)
+            self.width, self.height = self.image.size
+            self.transpose_method = None
+            # We don't need EXIF any more
+            self.image.info["exif"] = None
+        return self.image.size
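
(A minimal standalone sketch of the same EXIF handling, assuming Pillow is available; "photo.jpg" is a hypothetical input file, and the real Thumbnailer additionally wraps the EXIF read in a try/except as shown above.)

    import PIL.Image as Image

    EXIF_ORIENTATION_TAG = 0x0112

    image = Image.open("photo.jpg")
    exif = image._getexif() or {}
    method = {
        2: Image.FLIP_LEFT_RIGHT,
        3: Image.ROTATE_180,
        4: Image.FLIP_TOP_BOTTOM,
        5: Image.TRANSPOSE,
        6: Image.ROTATE_270,  # the common "phone held portrait" case
        7: Image.TRANSVERSE,
        8: Image.ROTATE_90,
    }.get(exif.get(EXIF_ORIENTATION_TAG))
    if method is not None:
        image = image.transpose(method)
    print(image.size)  # width/height after any rotation or flip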
 
     def aspect(self, max_width, max_height):
         """Calculate the largest size that preserves aspect ratio which
diff --git a/synapse/server.py b/synapse/server.py
index 8c30ac2fa5..80d40b9272 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -35,6 +35,7 @@ from synapse.crypto import context_factory
 from synapse.crypto.keyring import Keyring
 from synapse.events.builder import EventBuilderFactory
 from synapse.events.spamcheck import SpamChecker
+from synapse.events.utils import EventClientSerializer
 from synapse.federation.federation_client import FederationClient
 from synapse.federation.federation_server import (
     FederationHandlerRegistry,
@@ -185,6 +186,7 @@ class HomeServer(object):
         'sendmail',
         'registration_handler',
         'account_validity_handler',
+        'event_client_serializer',
     ]
 
     REQUIRED_ON_MASTER_STARTUP = [
@@ -511,6 +513,9 @@ class HomeServer(object):
     def build_account_validity_handler(self):
         return AccountValidityHandler(self)
 
+    def build_event_client_serializer(self):
+        return EventClientSerializer(self)
+
     def remove_pusher(self, app_id, push_key, user_id):
         return self.get_pusherpool().remove_pusher(app_id, push_key, user_id)
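
(A minimal sketch of the call shape this enables in servlets, mirroring the hunks above; `event` here stands for a FrozenEvent already fetched by the handler, inside an @defer.inlineCallbacks method.)

    serializer = hs.get_event_client_serializer()
    time_now = hs.get_clock().time_msec()
    serialized = yield serializer.serialize_event(event, time_now)
    defer.returnValue((200, serialized))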
 
diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py
index 6092f600ba..eb329ebd8b 100644
--- a/synapse/storage/appservice.py
+++ b/synapse/storage/appservice.py
@@ -302,7 +302,7 @@ class ApplicationServiceTransactionWorkerStore(
 
         event_ids = json.loads(entry["event_ids"])
 
-        events = yield self._get_events(event_ids)
+        events = yield self.get_events_as_list(event_ids)
 
         defer.returnValue(
             AppServiceTransaction(service=service, id=entry["txn_id"], events=events)
@@ -358,7 +358,7 @@ class ApplicationServiceTransactionWorkerStore(
             "get_new_events_for_appservice", get_new_events_for_appservice_txn
         )
 
-        events = yield self._get_events(event_ids)
+        events = yield self.get_events_as_list(event_ids)
 
         defer.returnValue((upper_bound, events))
 
diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py
index 956f876572..09e39c2c28 100644
--- a/synapse/storage/event_federation.py
+++ b/synapse/storage/event_federation.py
@@ -45,7 +45,7 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
         """
         return self.get_auth_chain_ids(
             event_ids, include_given=include_given
-        ).addCallback(self._get_events)
+        ).addCallback(self.get_events_as_list)
 
     def get_auth_chain_ids(self, event_ids, include_given=False):
         """Get auth events for given event_ids. The events *must* be state events.
@@ -316,7 +316,7 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
                 event_list,
                 limit,
             )
-            .addCallback(self._get_events)
+            .addCallback(self.get_events_as_list)
             .addCallback(lambda l: sorted(l, key=lambda e: -e.depth))
         )
 
@@ -382,7 +382,7 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
             latest_events,
             limit,
         )
-        events = yield self._get_events(ids)
+        events = yield self.get_events_as_list(ids)
         defer.returnValue(events)
 
     def _get_missing_events(self, txn, room_id, earliest_events, latest_events, limit):
diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py
index 663991a9b6..adc6cf26b5 100644
--- a/synapse/storage/events_worker.py
+++ b/synapse/storage/events_worker.py
@@ -103,7 +103,7 @@ class EventsWorkerStore(SQLBaseStore):
         Returns:
             Deferred : A FrozenEvent.
         """
-        events = yield self._get_events(
+        events = yield self.get_events_as_list(
             [event_id],
             check_redacted=check_redacted,
             get_prev_content=get_prev_content,
@@ -142,7 +142,7 @@ class EventsWorkerStore(SQLBaseStore):
         Returns:
             Deferred : Dict from event_id to event.
         """
-        events = yield self._get_events(
+        events = yield self.get_events_as_list(
             event_ids,
             check_redacted=check_redacted,
             get_prev_content=get_prev_content,
@@ -152,13 +152,32 @@ class EventsWorkerStore(SQLBaseStore):
         defer.returnValue({e.event_id: e for e in events})
 
     @defer.inlineCallbacks
-    def _get_events(
+    def get_events_as_list(
         self,
         event_ids,
         check_redacted=True,
         get_prev_content=False,
         allow_rejected=False,
     ):
+        """Get events from the database and return them as a list in the same
+        order as given by the `event_ids` arg.
+
+        Args:
+            event_ids (list): The event_ids of the events to fetch
+            check_redacted (bool): If True, check if event has been redacted
+                and redact it.
+            get_prev_content (bool): If True and event is a state event,
+                include the previous states content in the unsigned field.
+            allow_rejected (bool): If True return rejected events.
+
+        Returns:
+            Deferred[list[EventBase]]: List of events fetched from the database. The
+            events are in the same order as `event_ids` arg.
+
+            Note that the returned list may be smaller than the list of event
+            IDs if not all events could be fetched.
+        """
+
         if not event_ids:
             defer.returnValue([])
 
@@ -202,21 +221,22 @@ class EventsWorkerStore(SQLBaseStore):
                     #
                     # The problem is that we end up at this point when an event
                     # which has been redacted is pulled out of the database by
-                    # _enqueue_events, because _enqueue_events needs to check the
-                    # redaction before it can cache the redacted event. So obviously,
-                    # calling get_event to get the redacted event out of the database
-                    # gives us an infinite loop.
+                    # _enqueue_events, because _enqueue_events needs to check
+                    # the redaction before it can cache the redacted event. So
+                    # obviously, calling get_event to get the redacted event out
+                    # of the database gives us an infinite loop.
                     #
-                    # For now (quick hack to fix during 0.99 release cycle), we just
-                    # go and fetch the relevant row from the db, but it would be nice
-                    # to think about how we can cache this rather than hit the db
-                    # every time we access a redaction event.
+                    # For now (quick hack to fix during 0.99 release cycle), we
+                    # just go and fetch the relevant row from the db, but it
+                    # would be nice to think about how we can cache this rather
+                    # than hit the db every time we access a redaction event.
                     #
                     # One thought on how to do this:
-                    #  1. split _get_events up so that it is divided into (a) get the
-                    #     rawish event from the db/cache, (b) do the redaction/rejection
-                    #     filtering
-                    #  2. have _get_event_from_row just call the first half of that
+                    #  1. split get_events_as_list up so that it is divided into
+                    #     (a) get the rawish event from the db/cache, (b) do the
+                    #     redaction/rejection filtering
+                    #  2. have _get_event_from_row just call the first half of
+                    #     that
 
                     orig_sender = yield self._simple_select_one_onecol(
                         table="events",
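
(A short usage sketch for the newly-public method; `store` and `event_ids` are stand-ins, and the call is made from an @defer.inlineCallbacks function.)

    events = yield store.get_events_as_list(event_ids, get_prev_content=True)
    # Order matches event_ids; the list may be shorter if some events could
    # not be fetched.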
diff --git a/synapse/storage/search.py b/synapse/storage/search.py
index 226f8f1b7e..ff49eaae02 100644
--- a/synapse/storage/search.py
+++ b/synapse/storage/search.py
@@ -460,7 +460,7 @@ class SearchStore(BackgroundUpdateStore):
 
         results = list(filter(lambda row: row["room_id"] in room_ids, results))
 
-        events = yield self._get_events([r["event_id"] for r in results])
+        events = yield self.get_events_as_list([r["event_id"] for r in results])
 
         event_map = {ev.event_id: ev for ev in events}
 
@@ -605,7 +605,7 @@ class SearchStore(BackgroundUpdateStore):
 
         results = list(filter(lambda row: row["room_id"] in room_ids, results))
 
-        events = yield self._get_events([r["event_id"] for r in results])
+        events = yield self.get_events_as_list([r["event_id"] for r in results])
 
         event_map = {ev.event_id: ev for ev in events}
 
diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py
index 9cd1e0f9fe..d105b6b17d 100644
--- a/synapse/storage/stream.py
+++ b/synapse/storage/stream.py
@@ -319,7 +319,9 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
 
         rows = yield self.runInteraction("get_room_events_stream_for_room", f)
 
-        ret = yield self._get_events([r.event_id for r in rows], get_prev_content=True)
+        ret = yield self.get_events_as_list(
+            [r.event_id for r in rows], get_prev_content=True,
+        )
 
         self._set_before_and_after(ret, rows, topo_order=from_id is None)
 
@@ -367,7 +369,9 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
 
         rows = yield self.runInteraction("get_membership_changes_for_user", f)
 
-        ret = yield self._get_events([r.event_id for r in rows], get_prev_content=True)
+        ret = yield self.get_events_as_list(
+            [r.event_id for r in rows], get_prev_content=True,
+        )
 
         self._set_before_and_after(ret, rows, topo_order=False)
 
@@ -394,7 +398,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
         )
 
         logger.debug("stream before")
-        events = yield self._get_events(
+        events = yield self.get_events_as_list(
             [r.event_id for r in rows], get_prev_content=True
         )
         logger.debug("stream after")
@@ -580,11 +584,11 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
             event_filter,
         )
 
-        events_before = yield self._get_events(
+        events_before = yield self.get_events_as_list(
             [e for e in results["before"]["event_ids"]], get_prev_content=True
         )
 
-        events_after = yield self._get_events(
+        events_after = yield self.get_events_as_list(
             [e for e in results["after"]["event_ids"]], get_prev_content=True
         )
 
@@ -697,7 +701,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
             "get_all_new_events_stream", get_all_new_events_stream_txn
         )
 
-        events = yield self._get_events(event_ids)
+        events = yield self.get_events_as_list(event_ids)
 
         defer.returnValue((upper_bound, events))
 
@@ -849,7 +853,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
             event_filter,
         )
 
-        events = yield self._get_events(
+        events = yield self.get_events_as_list(
             [r.event_id for r in rows], get_prev_content=True
         )
 
diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py
index 2f16f23d91..7253ba120f 100644
--- a/synapse/util/async_helpers.py
+++ b/synapse/util/async_helpers.py
@@ -156,6 +156,25 @@ def concurrently_execute(func, args, limit):
     ], consumeErrors=True)).addErrback(unwrapFirstError)
 
 
+def yieldable_gather_results(func, iter, *args, **kwargs):
+    """Executes the function with each argument concurrently.
+
+    Args:
+        func (func): Function to execute that returns a Deferred
+        iter (iter): An iterable that yields items that get passed as the first
+            argument to the function
+        *args: Positional arguments passed to each call to func
+        **kwargs: Keyword arguments passed to each call to func
+
+    Returns:
+        Deferred[list]: Resolved when all calls have completed; errors if any call fails.
+    """
+    return logcontext.make_deferred_yieldable(defer.gatherResults([
+        run_in_background(func, item, *args, **kwargs)
+        for item in iter
+    ], consumeErrors=True)).addErrback(unwrapFirstError)
+
+
 class Linearizer(object):
     """Limits concurrent access to resources based on a key. Useful to ensure
     only a few things happen at a time on a given resource.
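
(A rough usage sketch for the new helper; assumptions: it is called from an @defer.inlineCallbacks function, and serialize_event is some Deferred-returning per-item function. Neither name comes from this hunk.)

    results = yield yieldable_gather_results(
        serialize_event,  # invoked as serialize_event(item, *args, **kwargs)
        events,           # iterable supplying the first argument of each call
        time_now,         # extra positional args forwarded to every call
    )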
diff --git a/synapse/util/distributor.py b/synapse/util/distributor.py
index 194da87639..e14c8bdfda 100644
--- a/synapse/util/distributor.py
+++ b/synapse/util/distributor.py
@@ -27,6 +27,7 @@ def user_left_room(distributor, user, room_id):
     distributor.fire("user_left_room", user=user, room_id=room_id)
 
 
+# XXX: this is no longer used. We should probably kill it.
 def user_joined_room(distributor, user, room_id):
     distributor.fire("user_joined_room", user=user, room_id=room_id)
 
diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py
index 7deb38f2a7..b146d137f4 100644
--- a/synapse/util/ratelimitutils.py
+++ b/synapse/util/ratelimitutils.py
@@ -30,31 +30,14 @@ logger = logging.getLogger(__name__)
 
 
 class FederationRateLimiter(object):
-    def __init__(self, clock, window_size, sleep_limit, sleep_msec,
-                 reject_limit, concurrent_requests):
+    def __init__(self, clock, config):
         """
         Args:
             clock (Clock)
-            window_size (int): The window size in milliseconds.
-            sleep_limit (int): The number of requests received in the last
-                `window_size` milliseconds before we artificially start
-                delaying processing of requests.
-            sleep_msec (int): The number of milliseconds to delay processing
-                of incoming requests by.
-            reject_limit (int): The maximum number of requests that are can be
-                queued for processing before we start rejecting requests with
-                a 429 Too Many Requests response.
-            concurrent_requests (int): The number of concurrent requests to
-                process.
+            config (FederationRateLimitConfig)
         """
         self.clock = clock
-
-        self.window_size = window_size
-        self.sleep_limit = sleep_limit
-        self.sleep_msec = sleep_msec
-        self.reject_limit = reject_limit
-        self.concurrent_requests = concurrent_requests
-
+        self._config = config
         self.ratelimiters = {}
 
     def ratelimit(self, host):
@@ -76,25 +59,25 @@ class FederationRateLimiter(object):
             host,
             _PerHostRatelimiter(
                 clock=self.clock,
-                window_size=self.window_size,
-                sleep_limit=self.sleep_limit,
-                sleep_msec=self.sleep_msec,
-                reject_limit=self.reject_limit,
-                concurrent_requests=self.concurrent_requests,
+                config=self._config,
             )
         ).ratelimit()
 
 
 class _PerHostRatelimiter(object):
-    def __init__(self, clock, window_size, sleep_limit, sleep_msec,
-                 reject_limit, concurrent_requests):
+    def __init__(self, clock, config):
+        """
+        Args:
+            clock (Clock)
+            config (FederationRateLimitConfig)
+        """
         self.clock = clock
 
-        self.window_size = window_size
-        self.sleep_limit = sleep_limit
-        self.sleep_sec = sleep_msec / 1000.0
-        self.reject_limit = reject_limit
-        self.concurrent_requests = concurrent_requests
+        self.window_size = config.window_size
+        self.sleep_limit = config.sleep_limit
+        self.sleep_sec = config.sleep_delay / 1000.0
+        self.reject_limit = config.reject_limit
+        self.concurrent_requests = config.concurrent
 
         # request_id objects for requests which have been slept
         self.sleeping_requests = set()
diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py
index 017ea0385e..1c253d0579 100644
--- a/tests/handlers/test_register.py
+++ b/tests/handlers/test_register.py
@@ -37,8 +37,12 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
         hs_config = self.default_config("test")
 
         # some of the tests rely on us having a user consent version
-        hs_config.user_consent_version = "test_consent_version"
-        hs_config.max_mau_value = 50
+        hs_config["user_consent"] = {
+            "version": "test_consent_version",
+            "template_dir": ".",
+        }
+        hs_config["max_mau_value"] = 50
+        hs_config["limit_usage_by_mau"] = True
 
         hs = self.setup_test_homeserver(config=hs_config, expire_access_token=True)
         return hs
diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py
index 44468f5382..9021e647fe 100644
--- a/tests/handlers/test_user_directory.py
+++ b/tests/handlers/test_user_directory.py
@@ -37,7 +37,7 @@ class UserDirectoryTestCase(unittest.HomeserverTestCase):
     def make_homeserver(self, reactor, clock):
 
         config = self.default_config()
-        config.update_user_directory = True
+        config["update_user_directory"] = True
         return self.setup_test_homeserver(config=config)
 
     def prepare(self, reactor, clock, hs):
@@ -333,7 +333,7 @@ class TestUserDirSearchDisabled(unittest.HomeserverTestCase):
 
     def make_homeserver(self, reactor, clock):
         config = self.default_config()
-        config.update_user_directory = True
+        config["update_user_directory"] = True
         hs = self.setup_test_homeserver(config=config)
 
         self.config = hs.config
diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py
index 7036615041..ed0ca079d9 100644
--- a/tests/http/federation/test_matrix_federation_agent.py
+++ b/tests/http/federation/test_matrix_federation_agent.py
@@ -54,7 +54,9 @@ class MatrixFederationAgentTests(TestCase):
 
         self.agent = MatrixFederationAgent(
             reactor=self.reactor,
-            tls_client_options_factory=ClientTLSOptionsFactory(default_config("test")),
+            tls_client_options_factory=ClientTLSOptionsFactory(
+                default_config("test", parse=True)
+            ),
             _well_known_tls_policy=TrustingTLSPolicyForHTTPS(),
             _srv_resolver=self.mock_resolver,
             _well_known_cache=self.well_known_cache,
diff --git a/tests/http/test_fedclient.py b/tests/http/test_fedclient.py
index 279e456614..ee767f3a5a 100644
--- a/tests/http/test_fedclient.py
+++ b/tests/http/test_fedclient.py
@@ -15,6 +15,8 @@
 
 from mock import Mock
 
+from netaddr import IPSet
+
 from twisted.internet import defer
 from twisted.internet.defer import TimeoutError
 from twisted.internet.error import ConnectingCancelledError, DNSLookupError
@@ -209,6 +211,75 @@ class FederationClientTests(HomeserverTestCase):
         self.assertIsInstance(f.value, RequestSendFailed)
         self.assertIsInstance(f.value.inner_exception, ResponseNeverReceived)
 
+    def test_client_ip_range_blacklist(self):
+        """Ensure that Synapse does not try to connect to blacklisted IPs"""
+
+        # Set up the ip_range blacklist
+        self.hs.config.federation_ip_range_blacklist = IPSet([
+            "127.0.0.0/8",
+            "fe80::/64",
+        ])
+        self.reactor.lookups["internal"] = "127.0.0.1"
+        self.reactor.lookups["internalv6"] = "fe80:0:0:0:0:8a2e:370:7337"
+        self.reactor.lookups["fine"] = "10.20.30.40"
+        cl = MatrixFederationHttpClient(self.hs, None)
+
+        # Try making a GET request to a blacklisted IPv4 address
+        # ------------------------------------------------------
+        # Make the request
+        d = cl.get_json("internal:8008", "foo/bar", timeout=10000)
+
+        # Nothing happened yet
+        self.assertNoResult(d)
+
+        self.pump(1)
+
+        # Check that it was unable to resolve the address
+        clients = self.reactor.tcpClients
+        self.assertEqual(len(clients), 0)
+
+        f = self.failureResultOf(d)
+        self.assertIsInstance(f.value, RequestSendFailed)
+        self.assertIsInstance(f.value.inner_exception, DNSLookupError)
+
+        # Try making a POST request to a blacklisted IPv6 address
+        # -------------------------------------------------------
+        # Make the request
+        d = cl.post_json("internalv6:8008", "foo/bar", timeout=10000)
+
+        # Nothing has happened yet
+        self.assertNoResult(d)
+
+        # Move the reactor forwards
+        self.pump(1)
+
+        # Check that it was unable to resolve the address
+        clients = self.reactor.tcpClients
+        self.assertEqual(len(clients), 0)
+
+        # Check that it was due to a blacklisted DNS lookup
+        f = self.failureResultOf(d, RequestSendFailed)
+        self.assertIsInstance(f.value.inner_exception, DNSLookupError)
+
+        # Try making a POST request to a non-blacklisted IPv4 address
+        # ----------------------------------------------------------
+        # Make the request
+        d = cl.post_json("fine:8008", "foo/bar", timeout=10000)
+
+        # Nothing has happened yet
+        self.assertNoResult(d)
+
+        # Move the reactor forwards
+        self.pump(1)
+
+        # Check that it was able to resolve the address
+        clients = self.reactor.tcpClients
+        self.assertNotEqual(len(clients), 0)
+
+        # Connection will still fail, as nothing is listening on this address
+        f = self.failureResultOf(d, RequestSendFailed)
+        self.assertIsInstance(f.value.inner_exception, ConnectingCancelledError)
+
     def test_client_gets_headers(self):
         """
         Once the client gets the headers, _request returns successfully.
diff --git a/tests/push/test_email.py b/tests/push/test_email.py
index 325ea449ae..9cdde1a9bd 100644
--- a/tests/push/test_email.py
+++ b/tests/push/test_email.py
@@ -52,22 +52,26 @@ class EmailPusherTests(HomeserverTestCase):
             return d
 
         config = self.default_config()
-        config.email_enable_notifs = True
-        config.start_pushers = True
-
-        config.email_template_dir = os.path.abspath(
-            pkg_resources.resource_filename('synapse', 'res/templates')
-        )
-        config.email_notif_template_html = "notif_mail.html"
-        config.email_notif_template_text = "notif_mail.txt"
-        config.email_smtp_host = "127.0.0.1"
-        config.email_smtp_port = 20
-        config.require_transport_security = False
-        config.email_smtp_user = None
-        config.email_smtp_pass = None
-        config.email_app_name = "Matrix"
-        config.email_notif_from = "test@example.com"
-        config.email_riot_base_url = None
+        config["email"] = {
+            "enable_notifs": True,
+            "template_dir": os.path.abspath(
+                pkg_resources.resource_filename('synapse', 'res/templates')
+            ),
+            "expiry_template_html": "notice_expiry.html",
+            "expiry_template_text": "notice_expiry.txt",
+            "notif_template_html": "notif_mail.html",
+            "notif_template_text": "notif_mail.txt",
+            "smtp_host": "127.0.0.1",
+            "smtp_port": 20,
+            "require_transport_security": False,
+            "smtp_user": None,
+            "smtp_pass": None,
+            "app_name": "Matrix",
+            "notif_from": "test@example.com",
+            "riot_base_url": None,
+        }
+        config["public_baseurl"] = "aaa"
+        config["start_pushers"] = True
 
         hs = self.setup_test_homeserver(config=config, sendmail=sendmail)
 
diff --git a/tests/push/test_http.py b/tests/push/test_http.py
index 13bd2c8688..aba618b2be 100644
--- a/tests/push/test_http.py
+++ b/tests/push/test_http.py
@@ -54,7 +54,7 @@ class HTTPPusherTests(HomeserverTestCase):
         m.post_json_get_json = post_json_get_json
 
         config = self.default_config()
-        config.start_pushers = True
+        config["start_pushers"] = True
 
         hs = self.setup_test_homeserver(config=config, simple_http_client=m)
 
diff --git a/tests/rest/client/test_consent.py b/tests/rest/client/test_consent.py
index 5528971190..88f8f1abdc 100644
--- a/tests/rest/client/test_consent.py
+++ b/tests/rest/client/test_consent.py
@@ -42,15 +42,18 @@ class ConsentResourceTestCase(unittest.HomeserverTestCase):
     def make_homeserver(self, reactor, clock):
 
         config = self.default_config()
-        config.user_consent_version = "1"
-        config.public_baseurl = ""
-        config.form_secret = "123abc"
+        config["public_baseurl"] = "aaaa"
+        config["form_secret"] = "123abc"
 
         # Make some temporary templates...
         temp_consent_path = self.mktemp()
         os.mkdir(temp_consent_path)
         os.mkdir(os.path.join(temp_consent_path, 'en'))
-        config.user_consent_template_dir = os.path.abspath(temp_consent_path)
+
+        config["user_consent"] = {
+            "version": "1",
+            "template_dir": os.path.abspath(temp_consent_path),
+        }
 
         with open(os.path.join(temp_consent_path, "en/1.html"), 'w') as f:
             f.write("{{version}},{{has_consented}}")
diff --git a/tests/rest/client/test_identity.py b/tests/rest/client/test_identity.py
index 1a714ff58a..68949307d9 100644
--- a/tests/rest/client/test_identity.py
+++ b/tests/rest/client/test_identity.py
@@ -32,7 +32,7 @@ class IdentityTestCase(unittest.HomeserverTestCase):
     def make_homeserver(self, reactor, clock):
 
         config = self.default_config()
-        config.enable_3pid_lookup = False
+        config["enable_3pid_lookup"] = False
         self.hs = self.setup_test_homeserver(config=config)
 
         return self.hs
diff --git a/tests/rest/client/v1/test_directory.py b/tests/rest/client/v1/test_directory.py
index 73c5b44b46..633b7dbda0 100644
--- a/tests/rest/client/v1/test_directory.py
+++ b/tests/rest/client/v1/test_directory.py
@@ -34,7 +34,7 @@ class DirectoryTestCase(unittest.HomeserverTestCase):
 
     def make_homeserver(self, reactor, clock):
         config = self.default_config()
-        config.require_membership_for_aliases = True
+        config["require_membership_for_aliases"] = True
 
         self.hs = self.setup_test_homeserver(config=config)
 
diff --git a/tests/rest/client/v1/test_events.py b/tests/rest/client/v1/test_events.py
index 8a9a55a527..f340b7e851 100644
--- a/tests/rest/client/v1/test_events.py
+++ b/tests/rest/client/v1/test_events.py
@@ -36,9 +36,9 @@ class EventStreamPermissionsTestCase(unittest.HomeserverTestCase):
     def make_homeserver(self, reactor, clock):
 
         config = self.default_config()
-        config.enable_registration_captcha = False
-        config.enable_registration = True
-        config.auto_join_rooms = []
+        config["enable_registration_captcha"] = False
+        config["enable_registration"] = True
+        config["auto_join_rooms"] = []
 
         hs = self.setup_test_homeserver(
             config=config, ratelimiter=NonCallableMock(spec_set=["can_do_action"])
diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py
index ed034879cf..769c37ce52 100644
--- a/tests/rest/client/v1/test_profile.py
+++ b/tests/rest/client/v1/test_profile.py
@@ -171,7 +171,7 @@ class ProfilesRestrictedTestCase(unittest.HomeserverTestCase):
     def make_homeserver(self, reactor, clock):
 
         config = self.default_config()
-        config.require_auth_for_profile_requests = True
+        config["require_auth_for_profile_requests"] = True
         self.hs = self.setup_test_homeserver(config=config)
 
         return self.hs
diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py
index 9b191436cc..5f75ad7579 100644
--- a/tests/rest/client/v1/test_rooms.py
+++ b/tests/rest/client/v1/test_rooms.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -24,7 +25,7 @@ from twisted.internet import defer
 
 import synapse.rest.admin
 from synapse.api.constants import Membership
-from synapse.rest.client.v1 import login, room
+from synapse.rest.client.v1 import login, profile, room
 
 from tests import unittest
 
@@ -919,7 +920,7 @@ class PublicRoomsRestrictedTestCase(unittest.HomeserverTestCase):
         self.url = b"/_matrix/client/r0/publicRooms"
 
         config = self.default_config()
-        config.restrict_public_rooms_to_local_users = True
+        config["restrict_public_rooms_to_local_users"] = True
         self.hs = self.setup_test_homeserver(config=config)
 
         return self.hs
@@ -936,3 +937,70 @@ class PublicRoomsRestrictedTestCase(unittest.HomeserverTestCase):
         request, channel = self.make_request("GET", self.url, access_token=tok)
         self.render(request)
         self.assertEqual(channel.code, 200, channel.result)
+
+
+class PerRoomProfilesForbiddenTestCase(unittest.HomeserverTestCase):
+
+    servlets = [
+        synapse.rest.admin.register_servlets_for_client_rest_resource,
+        room.register_servlets,
+        login.register_servlets,
+        profile.register_servlets,
+    ]
+
+    def make_homeserver(self, reactor, clock):
+        config = self.default_config()
+        config["allow_per_room_profiles"] = False
+        self.hs = self.setup_test_homeserver(config=config)
+
+        return self.hs
+
+    def prepare(self, reactor, clock, homeserver):
+        self.user_id = self.register_user("test", "test")
+        self.tok = self.login("test", "test")
+
+        # Set a profile for the test user
+        self.displayname = "test user"
+        data = {
+            "displayname": self.displayname,
+        }
+        request_data = json.dumps(data)
+        request, channel = self.make_request(
+            "PUT",
+            "/_matrix/client/r0/profile/%s/displayname" % (self.user_id,),
+            request_data,
+            access_token=self.tok,
+        )
+        self.render(request)
+        self.assertEqual(channel.code, 200, channel.result)
+
+        self.room_id = self.helper.create_room_as(self.user_id, tok=self.tok)
+
+    def test_per_room_profile_forbidden(self):
+        data = {
+            "membership": "join",
+            "displayname": "other test user"
+        }
+        request_data = json.dumps(data)
+        request, channel = self.make_request(
+            "PUT",
+            "/_matrix/client/r0/rooms/%s/state/m.room.member/%s" % (
+                self.room_id, self.user_id,
+            ),
+            request_data,
+            access_token=self.tok,
+        )
+        self.render(request)
+        self.assertEqual(channel.code, 200, channel.result)
+        event_id = channel.json_body["event_id"]
+
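+        # Fetch the event we just sent. With per-room profiles disabled, the
+        # displayname in the member event should be the user's global profile
+        # name, not the per-room one we tried to set above.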
+        request, channel = self.make_request(
+            "GET",
+            "/_matrix/client/r0/rooms/%s/event/%s" % (self.room_id, event_id),
+            access_token=self.tok,
+        )
+        self.render(request)
+        self.assertEqual(channel.code, 200, channel.result)
+
+        res_displayname = channel.json_body["content"]["displayname"]
+        self.assertEqual(res_displayname, self.displayname, channel.result)
diff --git a/tests/rest/client/v2_alpha/test_auth.py b/tests/rest/client/v2_alpha/test_auth.py
index 0ca3c4657b..b9ef46e8fb 100644
--- a/tests/rest/client/v2_alpha/test_auth.py
+++ b/tests/rest/client/v2_alpha/test_auth.py
@@ -36,9 +36,9 @@ class FallbackAuthTests(unittest.HomeserverTestCase):
 
         config = self.default_config()
 
-        config.enable_registration_captcha = True
-        config.recaptcha_public_key = "brokencake"
-        config.registrations_require_3pid = []
+        config["enable_registration_captcha"] = True
+        config["recaptcha_public_key"] = "brokencake"
+        config["registrations_require_3pid"] = []
 
         hs = self.setup_test_homeserver(config=config)
         return hs
@@ -92,7 +92,14 @@ class FallbackAuthTests(unittest.HomeserverTestCase):
         self.assertEqual(len(self.recaptcha_attempts), 1)
         self.assertEqual(self.recaptcha_attempts[0][0]["response"], "a")
 
-        # Now we have fufilled the recaptcha fallback step, we can then send a
+        # also complete the dummy auth
+        request, channel = self.make_request(
+            "POST", "register", {"auth": {"session": session, "type": "m.login.dummy"}}
+        )
+        self.render(request)
+
+        # Now we should have fulfilled a complete auth flow, including
+        # the recaptcha fallback step, so we can send a
         # request to the register API with the session in the authdict.
         request, channel = self.make_request(
             "POST", "register", {"auth": {"session": session}}
diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py
index be95dc592d..65685883db 100644
--- a/tests/rest/client/v2_alpha/test_register.py
+++ b/tests/rest/client/v2_alpha/test_register.py
@@ -201,9 +201,11 @@ class AccountValidityTestCase(unittest.HomeserverTestCase):
     def make_homeserver(self, reactor, clock):
         config = self.default_config()
         # Test for account expiring after a week.
-        config.enable_registration = True
-        config.account_validity.enabled = True
-        config.account_validity.period = 604800000  # Time in ms for 1 week
+        config["enable_registration"] = True
+        config["account_validity"] = {
+            "enabled": True,
+            "period": 604800000,  # Time in ms for 1 week
+        }
         self.hs = self.setup_test_homeserver(config=config)
 
         return self.hs
@@ -299,14 +301,17 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase):
 
     def make_homeserver(self, reactor, clock):
         config = self.default_config()
+
         # Test for account expiring after a week and renewal emails being sent 2
         # days before expiry.
-        config.enable_registration = True
-        config.account_validity.enabled = True
-        config.account_validity.renew_by_email_enabled = True
-        config.account_validity.period = 604800000  # Time in ms for 1 week
-        config.account_validity.renew_at = 172800000  # Time in ms for 2 days
-        config.account_validity.renew_email_subject = "Renew your account"
+        config["enable_registration"] = True
+        config["account_validity"] = {
+            "enabled": True,
+            "period": 604800000,  # Time in ms for 1 week
+            "renew_at": 172800000,  # Time in ms for 2 days
+            "renew_by_email_enabled": True,
+            "renew_email_subject": "Renew your account",
+        }
 
         # Email config.
         self.email_attempts = []
@@ -315,17 +320,23 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase):
             self.email_attempts.append((args, kwargs))
             return
 
-        config.email_template_dir = os.path.abspath(
-            pkg_resources.resource_filename('synapse', 'res/templates')
-        )
-        config.email_expiry_template_html = "notice_expiry.html"
-        config.email_expiry_template_text = "notice_expiry.txt"
-        config.email_smtp_host = "127.0.0.1"
-        config.email_smtp_port = 20
-        config.require_transport_security = False
-        config.email_smtp_user = None
-        config.email_smtp_pass = None
-        config.email_notif_from = "test@example.com"
+        config["email"] = {
+            "enable_notifs": True,
+            "template_dir": os.path.abspath(
+                pkg_resources.resource_filename('synapse', 'res/templates')
+            ),
+            "expiry_template_html": "notice_expiry.html",
+            "expiry_template_text": "notice_expiry.txt",
+            "notif_template_html": "notif_mail.html",
+            "notif_template_text": "notif_mail.txt",
+            "smtp_host": "127.0.0.1",
+            "smtp_port": 20,
+            "require_transport_security": False,
+            "smtp_user": None,
+            "smtp_pass": None,
+            "notif_from": "test@example.com",
+        }
+        config["public_baseurl"] = "aaa"
 
         self.hs = self.setup_test_homeserver(config=config, sendmail=sendmail)
 
diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py
index ad5e9a612f..1069a44145 100644
--- a/tests/rest/media/v1/test_media_storage.py
+++ b/tests/rest/media/v1/test_media_storage.py
@@ -25,13 +25,11 @@ from six.moves.urllib import parse
 from twisted.internet import defer, reactor
 from twisted.internet.defer import Deferred
 
-from synapse.config.repository import MediaStorageProviderConfig
 from synapse.rest.media.v1._base import FileInfo
 from synapse.rest.media.v1.filepath import MediaFilePaths
 from synapse.rest.media.v1.media_storage import MediaStorage
 from synapse.rest.media.v1.storage_provider import FileStorageProviderBackend
 from synapse.util.logcontext import make_deferred_yieldable
-from synapse.util.module_loader import load_module
 
 from tests import unittest
 
@@ -120,12 +118,14 @@ class MediaRepoTests(unittest.HomeserverTestCase):
         client.get_file = get_file
 
         self.storage_path = self.mktemp()
+        self.media_store_path = self.mktemp()
         os.mkdir(self.storage_path)
+        os.mkdir(self.media_store_path)
 
         config = self.default_config()
-        config.media_store_path = self.storage_path
-        config.thumbnail_requirements = {}
-        config.max_image_pixels = 2000000
+        config["media_store_path"] = self.media_store_path
+        config["thumbnail_requirements"] = {}
+        config["max_image_pixels"] = 2000000
 
         provider_config = {
             "module": "synapse.rest.media.v1.storage_provider.FileStorageProviderBackend",
@@ -134,12 +134,7 @@ class MediaRepoTests(unittest.HomeserverTestCase):
             "store_remote": True,
             "config": {"directory": self.storage_path},
         }
-
-        loaded = list(load_module(provider_config)) + [
-            MediaStorageProviderConfig(False, False, False)
-        ]
-
-        config.media_storage_providers = [loaded]
+        config["media_storage_providers"] = [provider_config]
 
         hs = self.setup_test_homeserver(config=config, http_client=client)
 
diff --git a/tests/rest/media/v1/test_url_preview.py b/tests/rest/media/v1/test_url_preview.py
index f696395f3c..1ab0f7293a 100644
--- a/tests/rest/media/v1/test_url_preview.py
+++ b/tests/rest/media/v1/test_url_preview.py
@@ -16,7 +16,6 @@
 import os
 
 import attr
-from netaddr import IPSet
 
 from twisted.internet._resolver import HostResolution
 from twisted.internet.address import IPv4Address, IPv6Address
@@ -25,9 +24,6 @@ from twisted.python.failure import Failure
 from twisted.test.proto_helpers import AccumulatingProtocol
 from twisted.web._newclient import ResponseDone
 
-from synapse.config.repository import MediaStorageProviderConfig
-from synapse.util.module_loader import load_module
-
 from tests import unittest
 from tests.server import FakeTransport
 
@@ -67,23 +63,23 @@ class URLPreviewTests(unittest.HomeserverTestCase):
 
     def make_homeserver(self, reactor, clock):
 
-        self.storage_path = self.mktemp()
-        os.mkdir(self.storage_path)
-
         config = self.default_config()
-        config.url_preview_enabled = True
-        config.max_spider_size = 9999999
-        config.url_preview_ip_range_blacklist = IPSet(
-            (
-                "192.168.1.1",
-                "1.0.0.0/8",
-                "3fff:ffff:ffff:ffff:ffff:ffff:ffff:ffff",
-                "2001:800::/21",
-            )
+        config["url_preview_enabled"] = True
+        config["max_spider_size"] = 9999999
+        config["url_preview_ip_range_blacklist"] = (
+            "192.168.1.1",
+            "1.0.0.0/8",
+            "3fff:ffff:ffff:ffff:ffff:ffff:ffff:ffff",
+            "2001:800::/21",
         )
-        config.url_preview_ip_range_whitelist = IPSet(("1.1.1.1",))
-        config.url_preview_url_blacklist = []
-        config.media_store_path = self.storage_path
+        config["url_preview_ip_range_whitelist"] = ("1.1.1.1",)
+        config["url_preview_url_blacklist"] = []
+
+        self.storage_path = self.mktemp()
+        self.media_store_path = self.mktemp()
+        os.mkdir(self.storage_path)
+        os.mkdir(self.media_store_path)
+        config["media_store_path"] = self.media_store_path
 
         provider_config = {
             "module": "synapse.rest.media.v1.storage_provider.FileStorageProviderBackend",
@@ -93,11 +89,7 @@ class URLPreviewTests(unittest.HomeserverTestCase):
             "config": {"directory": self.storage_path},
         }
 
-        loaded = list(load_module(provider_config)) + [
-            MediaStorageProviderConfig(False, False, False)
-        ]
-
-        config.media_storage_providers = [loaded]
+        config["media_storage_providers"] = [provider_config]
 
         hs = self.setup_test_homeserver(config=config)
 
diff --git a/tests/server.py b/tests/server.py
index fc41345488..c15a47f2a4 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -227,6 +227,8 @@ class ThreadedMemoryReactorClock(MemoryReactorClock):
     """
 
     def __init__(self):
+        self.threadpool = ThreadPool(self)
+
         self._udp = []
         lookups = self.lookups = {}
 
@@ -255,6 +257,37 @@ class ThreadedMemoryReactorClock(MemoryReactorClock):
         self.callLater(0, d.callback, True)
         return d
 
+    def getThreadPool(self):
+        return self.threadpool
+
+
+class ThreadPool:
+    """
+    Threadless thread pool.
+    """
+
+    def __init__(self, reactor):
+        self._reactor = reactor
+
+    def start(self):
+        pass
+
+    def stop(self):
+        pass
+
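+    # "Run in a thread" by scheduling the function on the fake reactor at the
+    # next tick; the result (or Failure) is reported back through onResult.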
+    def callInThreadWithCallback(self, onResult, function, *args, **kwargs):
+        def _(res):
+            if isinstance(res, Failure):
+                onResult(False, res)
+            else:
+                onResult(True, res)
+
+        d = Deferred()
+        d.addCallback(lambda x: function(*args, **kwargs))
+        d.addBoth(_)
+        self._reactor.callLater(0, d.callback, True)
+        return d
+
 
 def setup_test_homeserver(cleanup_func, *args, **kwargs):
     """
@@ -290,36 +323,10 @@ def setup_test_homeserver(cleanup_func, *args, **kwargs):
             **kwargs
         )
 
-    class ThreadPool:
-        """
-        Threadless thread pool.
-        """
-
-        def start(self):
-            pass
-
-        def stop(self):
-            pass
-
-        def callInThreadWithCallback(self, onResult, function, *args, **kwargs):
-            def _(res):
-                if isinstance(res, Failure):
-                    onResult(False, res)
-                else:
-                    onResult(True, res)
-
-            d = Deferred()
-            d.addCallback(lambda x: function(*args, **kwargs))
-            d.addBoth(_)
-            clock._reactor.callLater(0, d.callback, True)
-            return d
-
-    clock.threadpool = ThreadPool()
-
     if pool:
         pool.runWithConnection = runWithConnection
         pool.runInteraction = runInteraction
-        pool.threadpool = ThreadPool()
+        pool.threadpool = ThreadPool(clock._reactor)
         pool.running = True
     return d
 
diff --git a/tests/server_notices/test_consent.py b/tests/server_notices/test_consent.py
index e0b4e0eb63..872039c8f1 100644
--- a/tests/server_notices/test_consent.py
+++ b/tests/server_notices/test_consent.py
@@ -12,6 +12,9 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
+import os
+
 import synapse.rest.admin
 from synapse.rest.client.v1 import login, room
 from synapse.rest.client.v2_alpha import sync
@@ -30,20 +33,27 @@ class ConsentNoticesTests(unittest.HomeserverTestCase):
 
     def make_homeserver(self, reactor, clock):
 
+        tmpdir = self.mktemp()
+        os.mkdir(tmpdir)
         self.consent_notice_message = "consent %(consent_uri)s"
         config = self.default_config()
-        config.user_consent_version = "1"
-        config.user_consent_server_notice_content = {
-            "msgtype": "m.text",
-            "body": self.consent_notice_message,
+        config["user_consent"] = {
+            "version": "1",
+            "template_dir": tmpdir,
+            "server_notice_content": {
+                "msgtype": "m.text",
+                "body": self.consent_notice_message,
+            },
+        }
+        config["public_baseurl"] = "https://example.com/"
+        config["form_secret"] = "123abc"
+
+        config["server_notices"] = {
+            "system_mxid_localpart": "notices",
+            "system_mxid_display_name": "test display name",
+            "system_mxid_avatar_url": None,
+            "room_name": "Server Notices",
         }
-        config.public_baseurl = "https://example.com/"
-        config.form_secret = "123abc"
-
-        config.server_notices_mxid = "@notices:test"
-        config.server_notices_mxid_display_name = "test display name"
-        config.server_notices_mxid_avatar_url = None
-        config.server_notices_room_name = "Server Notices"
 
         hs = self.setup_test_homeserver(config=config)
 
diff --git a/tests/server_notices/test_resource_limits_server_notices.py b/tests/server_notices/test_resource_limits_server_notices.py
index a490b81ed4..739ee59ce4 100644
--- a/tests/server_notices/test_resource_limits_server_notices.py
+++ b/tests/server_notices/test_resource_limits_server_notices.py
@@ -29,7 +29,12 @@ from tests import unittest
 class TestResourceLimitsServerNotices(unittest.HomeserverTestCase):
     def make_homeserver(self, reactor, clock):
         hs_config = self.default_config("test")
-        hs_config.server_notices_mxid = "@server:test"
+        hs_config["server_notices"] = {
+            "system_mxid_localpart": "server",
+            "system_mxid_display_name": "test display name",
+            "system_mxid_avatar_url": None,
+            "room_name": "Server Notices",
+        }
 
         hs = self.setup_test_homeserver(config=hs_config, expire_access_token=True)
         return hs
@@ -79,7 +84,7 @@ class TestResourceLimitsServerNotices(unittest.HomeserverTestCase):
         self._send_notice.assert_not_called()
         # Test when mau limiting disabled
         self.hs.config.hs_disabled = False
-        self.hs.limit_usage_by_mau = False
+        self.hs.config.limit_usage_by_mau = False
         self.get_success(self._rlsn.maybe_send_server_notice_to_user(self.user_id))
 
         self._send_notice.assert_not_called()
diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py
index 3f0083831b..25a6c89ef5 100644
--- a/tests/storage/test_appservice.py
+++ b/tests/storage/test_appservice.py
@@ -340,7 +340,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
         other_events = [Mock(event_id="e5"), Mock(event_id="e6")]
 
         # we aren't testing store._base stuff here, so mock this out
-        self.store._get_events = Mock(return_value=events)
+        self.store.get_events_as_list = Mock(return_value=events)
 
         yield self._insert_txn(self.as_list[1]["id"], 9, other_events)
         yield self._insert_txn(service.id, 10, events)
diff --git a/tests/test_state.py b/tests/test_state.py
index 5bcc6aaa18..6491a7105a 100644
--- a/tests/test_state.py
+++ b/tests/test_state.py
@@ -168,7 +168,7 @@ class StateTestCase(unittest.TestCase):
                 "get_state_resolution_handler",
             ]
         )
-        hs.config = default_config("tesths")
+        hs.config = default_config("tesths", True)
         hs.get_datastore.return_value = self.store
         hs.get_state_handler.return_value = None
         hs.get_clock.return_value = MockClock()
diff --git a/tests/test_terms_auth.py b/tests/test_terms_auth.py
index f412985d2c..52739fbabc 100644
--- a/tests/test_terms_auth.py
+++ b/tests/test_terms_auth.py
@@ -59,7 +59,7 @@ class TermsTestCase(unittest.HomeserverTestCase):
         for flow in channel.json_body["flows"]:
             self.assertIsInstance(flow["stages"], list)
             self.assertTrue(len(flow["stages"]) > 0)
-            self.assertEquals(flow["stages"][-1], "m.login.terms")
+            self.assertTrue("m.login.terms" in flow["stages"])
 
         expected_params = {
             "m.login.terms": {
diff --git a/tests/unittest.py b/tests/unittest.py
index 94df8cf47e..26204470b1 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -27,6 +27,7 @@ import twisted.logger
 from twisted.internet.defer import Deferred
 from twisted.trial import unittest
 
+from synapse.config.homeserver import HomeServerConfig
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseRequest
 from synapse.server import HomeServer
@@ -245,7 +246,7 @@ class HomeserverTestCase(TestCase):
 
     def default_config(self, name="test"):
         """
-        Get a default HomeServer config object.
+        Get a default HomeServer config dict.
 
         Args:
             name (str): The homeserver name/domain.
@@ -335,7 +336,14 @@ class HomeserverTestCase(TestCase):
         kwargs.update(self._hs_args)
         if "config" not in kwargs:
             config = self.default_config()
-            kwargs["config"] = config
+        else:
+            config = kwargs["config"]
+
+        # Parse the config from a config dict into a HomeServerConfig
+        config_obj = HomeServerConfig()
+        config_obj.parse_config_dict(config)
+        kwargs["config"] = config_obj
+
         hs = setup_test_homeserver(self.addCleanup, *args, **kwargs)
         stor = hs.get_datastore()
 
diff --git a/tests/utils.py b/tests/utils.py
index c2ef4b0bb5..200c1ceabe 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -110,7 +110,7 @@ def setupdb():
         atexit.register(_cleanup)
 
 
-def default_config(name):
+def default_config(name, parse=False):
     """
     Create a reasonable test config.
     """
@@ -121,75 +121,71 @@ def default_config(name):
         # the test signing key is just an arbitrary ed25519 key to keep the config
         # parser happy
         "signing_key": "ed25519 a_lPym qvioDNmfExFBRPgdTU+wtFYKq4JfwFRv7sYVgWvmgJg",
+        "event_cache_size": 1,
+        "enable_registration": True,
+        "enable_registration_captcha": False,
+        "macaroon_secret_key": "not even a little secret",
+        "expire_access_token": False,
+        "trusted_third_party_id_servers": [],
+        "room_invite_state_types": [],
+        "password_providers": [],
+        "worker_replication_url": "",
+        "worker_app": None,
+        "email_enable_notifs": False,
+        "block_non_admin_invites": False,
+        "federation_domain_whitelist": None,
+        "filter_timeline_limit": 5000,
+        "user_directory_search_all_users": False,
+        "user_consent_server_notice_content": None,
+        "block_events_without_consent_error": None,
+        "user_consent_at_registration": False,
+        "user_consent_policy_name": "Privacy Policy",
+        "media_storage_providers": [],
+        "autocreate_auto_join_rooms": True,
+        "auto_join_rooms": [],
+        "limit_usage_by_mau": False,
+        "hs_disabled": False,
+        "hs_disabled_message": "",
+        "hs_disabled_limit_type": "",
+        "max_mau_value": 50,
+        "mau_trial_days": 0,
+        "mau_stats_only": False,
+        "mau_limits_reserved_threepids": [],
+        "admin_contact": None,
+        "rc_federation": {
+            "reject_limit": 10,
+            "sleep_limit": 10,
+            "sleep_delay": 10,
+            "concurrent": 10,
+        },
+        "rc_message": {"per_second": 10000, "burst_count": 10000},
+        "rc_registration": {"per_second": 10000, "burst_count": 10000},
+        "rc_login": {
+            "address": {"per_second": 10000, "burst_count": 10000},
+            "account": {"per_second": 10000, "burst_count": 10000},
+            "failed_attempts": {"per_second": 10000, "burst_count": 10000},
+        },
+        "saml2_enabled": False,
+        "public_baseurl": None,
+        "default_identity_server": None,
+        "key_refresh_interval": 24 * 60 * 60 * 1000,
+        "old_signing_keys": {},
+        "tls_fingerprints": [],
+        "use_frozen_dicts": False,
+        # We need a sane default_room_version, otherwise attempts to create
+        # rooms will fail.
+        "default_room_version": "1",
+        # disable user directory updates, because they get done in the
+        # background, which upsets the test runner.
+        "update_user_directory": False,
     }
 
-    config = HomeServerConfig()
-    config.parse_config_dict(config_dict)
-
-    # TODO: move this stuff into config_dict or get rid of it
-    config.event_cache_size = 1
-    config.enable_registration = True
-    config.enable_registration_captcha = False
-    config.macaroon_secret_key = "not even a little secret"
-    config.expire_access_token = False
-    config.trusted_third_party_id_servers = []
-    config.room_invite_state_types = []
-    config.password_providers = []
-    config.worker_replication_url = ""
-    config.worker_app = None
-    config.email_enable_notifs = False
-    config.block_non_admin_invites = False
-    config.federation_domain_whitelist = None
-    config.federation_rc_reject_limit = 10
-    config.federation_rc_sleep_limit = 10
-    config.federation_rc_sleep_delay = 100
-    config.federation_rc_concurrent = 10
-    config.filter_timeline_limit = 5000
-    config.user_directory_search_all_users = False
-    config.user_consent_server_notice_content = None
-    config.block_events_without_consent_error = None
-    config.user_consent_at_registration = False
-    config.user_consent_policy_name = "Privacy Policy"
-    config.media_storage_providers = []
-    config.autocreate_auto_join_rooms = True
-    config.auto_join_rooms = []
-    config.limit_usage_by_mau = False
-    config.hs_disabled = False
-    config.hs_disabled_message = ""
-    config.hs_disabled_limit_type = ""
-    config.max_mau_value = 50
-    config.mau_trial_days = 0
-    config.mau_stats_only = False
-    config.mau_limits_reserved_threepids = []
-    config.admin_contact = None
-    config.rc_messages_per_second = 10000
-    config.rc_message_burst_count = 10000
-    config.rc_registration.per_second = 10000
-    config.rc_registration.burst_count = 10000
-    config.rc_login_address.per_second = 10000
-    config.rc_login_address.burst_count = 10000
-    config.rc_login_account.per_second = 10000
-    config.rc_login_account.burst_count = 10000
-    config.rc_login_failed_attempts.per_second = 10000
-    config.rc_login_failed_attempts.burst_count = 10000
-    config.saml2_enabled = False
-    config.public_baseurl = None
-    config.default_identity_server = None
-    config.key_refresh_interval = 24 * 60 * 60 * 1000
-    config.old_signing_keys = {}
-    config.tls_fingerprints = []
-
-    config.use_frozen_dicts = False
-
-    # we need a sane default_room_version, otherwise attempts to create rooms will
-    # fail.
-    config.default_room_version = "1"
-
-    # disable user directory updates, because they get done in the
-    # background, which upsets the test runner.
-    config.update_user_directory = False
-
-    return config
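+    # Most tests consume the dict directly; pass parse=True to get a fully
+    # parsed HomeServerConfig (as setup_test_homeserver does below).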
+    if parse:
+        config = HomeServerConfig()
+        config.parse_config_dict(config_dict)
+        return config
+
+    return config_dict
 
 
 class TestHomeServer(HomeServer):
@@ -223,7 +219,7 @@ def setup_test_homeserver(
         from twisted.internet import reactor
 
     if config is None:
-        config = default_config(name)
+        config = default_config(name, parse=True)
 
     config.ldap_enabled = False
 
@@ -380,12 +376,7 @@ def register_federation_servlets(hs, resource):
         resource=resource,
         authenticator=federation_server.Authenticator(hs),
         ratelimiter=FederationRateLimiter(
-            hs.get_clock(),
-            window_size=hs.config.federation_rc_window_size,
-            sleep_limit=hs.config.federation_rc_sleep_limit,
-            sleep_msec=hs.config.federation_rc_sleep_delay,
-            reject_limit=hs.config.federation_rc_reject_limit,
-            concurrent_requests=hs.config.federation_rc_concurrent,
+            hs.get_clock(), config=hs.config.rc_federation
         ),
     )