-rw-r--r--  MANIFEST.in | 1
-rw-r--r--  changelog.d/4408.feature | 1
-rw-r--r--  changelog.d/4408.misc | 1
-rw-r--r--  changelog.d/4409.feature | 1
-rw-r--r--  changelog.d/4409.misc | 1
-rw-r--r--  changelog.d/4426.feature | 1
-rw-r--r--  changelog.d/4426.misc | 1
-rw-r--r--  changelog.d/4427.feature | 1
-rw-r--r--  changelog.d/4427.misc | 1
-rw-r--r--  changelog.d/4428.feature | 1
-rw-r--r--  changelog.d/4428.misc | 1
-rw-r--r--  changelog.d/4464.feature | 1
-rw-r--r--  changelog.d/4464.misc | 1
-rw-r--r--  changelog.d/4468.feature | 1
-rw-r--r--  changelog.d/4468.misc | 1
-rw-r--r--  changelog.d/4481.misc | 1
-rw-r--r--  changelog.d/4482.misc | 1
-rw-r--r--  changelog.d/4487.feature | 1
-rw-r--r--  changelog.d/4487.misc | 1
-rw-r--r--  changelog.d/4489.feature | 1
-rw-r--r--  changelog.d/4493.misc | 1
-rw-r--r--  changelog.d/4494.misc | 1
-rw-r--r--  changelog.d/4496.misc | 1
-rw-r--r--  changelog.d/4497.feature | 1
-rw-r--r--  changelog.d/4505.misc | 1
-rw-r--r--  changelog.d/4506.misc | 1
-rw-r--r--  changelog.d/4507.misc | 1
-rw-r--r--  changelog.d/4509.removal | 1
-rw-r--r--  changelog.d/4510.misc | 1
-rw-r--r--  synapse/api/auth.py | 27
-rw-r--r--  synapse/config/tls.py | 61
-rw-r--r--  synapse/crypto/event_signing.py | 109
-rw-r--r--  synapse/event_auth.py | 3
-rw-r--r--  synapse/events/builder.py | 282
-rw-r--r--  synapse/events/utils.py | 47
-rw-r--r--  synapse/events/validator.py | 60
-rw-r--r--  synapse/federation/federation_client.py | 84
-rw-r--r--  synapse/federation/federation_server.py | 2
-rw-r--r--  synapse/federation/transport/client.py | 39
-rw-r--r--  synapse/handlers/federation.py | 29
-rw-r--r--  synapse/handlers/message.py | 45
-rw-r--r--  synapse/handlers/room.py | 5
-rw-r--r--  synapse/http/federation/matrix_federation_agent.py | 114
-rw-r--r--  synapse/replication/http/federation.py | 2
-rw-r--r--  synapse/server.py | 5
-rw-r--r--  synapse/state/__init__.py | 2
-rw-r--r--  synapse/state/v1.py | 14
-rw-r--r--  synapse/state/v2.py | 14
-rw-r--r--  synapse/storage/_base.py | 2
-rw-r--r--  synapse/storage/event_federation.py | 23
-rw-r--r--  synapse/util/async_helpers.py | 10
-rw-r--r--  tests/config/test_generate.py | 2
-rw-r--r--  tests/config/test_tls.py | 75
-rw-r--r--  tests/crypto/test_event_signing.py | 56
-rw-r--r--  tests/http/__init__.py | 42
-rw-r--r--  tests/http/federation/test_matrix_federation_agent.py | 230
-rw-r--r--  tests/http/server.pem | 81
-rw-r--r--  tests/server.py | 54
-rw-r--r--  tests/state/test_v2.py | 4
-rw-r--r--  tests/test_event_auth.py | 13
-rw-r--r--  tests/test_utils/__init__.py | 18
-rw-r--r--  tests/test_utils/logging_setup.py | 54
-rw-r--r--  tests/test_visibility.py | 2
-rw-r--r--  tests/unittest.py | 37
64 files changed, 1269 insertions, 407 deletions
diff --git a/MANIFEST.in b/MANIFEST.in
index 7e4c031b79..eb2de60f72 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -15,6 +15,7 @@ recursive-include docs *
 recursive-include scripts *
 recursive-include scripts-dev *
 recursive-include synapse *.pyi
+recursive-include tests *.pem
 recursive-include tests *.py
 
 recursive-include synapse/res *
diff --git a/changelog.d/4408.feature b/changelog.d/4408.feature
new file mode 100644
index 0000000000..bda713adf9
--- /dev/null
+++ b/changelog.d/4408.feature
@@ -0,0 +1 @@
+Implement MSC1708 (.well-known routing for server-server federation)
\ No newline at end of file
diff --git a/changelog.d/4408.misc b/changelog.d/4408.misc
deleted file mode 100644
index 729bafd62e..0000000000
--- a/changelog.d/4408.misc
+++ /dev/null
@@ -1 +0,0 @@
-Refactor 'sign_request' as 'build_auth_headers'
\ No newline at end of file
diff --git a/changelog.d/4409.feature b/changelog.d/4409.feature
new file mode 100644
index 0000000000..bda713adf9
--- /dev/null
+++ b/changelog.d/4409.feature
@@ -0,0 +1 @@
+Implement MSC1708 (.well-known routing for server-server federation)
\ No newline at end of file
diff --git a/changelog.d/4409.misc b/changelog.d/4409.misc
deleted file mode 100644
index 9cf2adfbb1..0000000000
--- a/changelog.d/4409.misc
+++ /dev/null
@@ -1 +0,0 @@
-Remove redundant federation connection wrapping code
diff --git a/changelog.d/4426.feature b/changelog.d/4426.feature
new file mode 100644
index 0000000000..bda713adf9
--- /dev/null
+++ b/changelog.d/4426.feature
@@ -0,0 +1 @@
+Implement MSC1708 (.well-known routing for server-server federation)
\ No newline at end of file
diff --git a/changelog.d/4426.misc b/changelog.d/4426.misc
deleted file mode 100644
index cda50438e0..0000000000
--- a/changelog.d/4426.misc
+++ /dev/null
@@ -1 +0,0 @@
-Remove redundant SynapseKeyClientProtocol magic
\ No newline at end of file
diff --git a/changelog.d/4427.feature b/changelog.d/4427.feature
new file mode 100644
index 0000000000..bda713adf9
--- /dev/null
+++ b/changelog.d/4427.feature
@@ -0,0 +1 @@
+Implement MSC1708 (.well-known routing for server-server federation)
\ No newline at end of file
diff --git a/changelog.d/4427.misc b/changelog.d/4427.misc
deleted file mode 100644
index 75500bdbc2..0000000000
--- a/changelog.d/4427.misc
+++ /dev/null
@@ -1 +0,0 @@
-Refactor and cleanup for SRV record lookup
diff --git a/changelog.d/4428.feature b/changelog.d/4428.feature
new file mode 100644
index 0000000000..bda713adf9
--- /dev/null
+++ b/changelog.d/4428.feature
@@ -0,0 +1 @@
+Implement MSC1708 (.well-known routing for server-server federation)
\ No newline at end of file
diff --git a/changelog.d/4428.misc b/changelog.d/4428.misc
deleted file mode 100644
index 9a51434755..0000000000
--- a/changelog.d/4428.misc
+++ /dev/null
@@ -1 +0,0 @@
-Move SRV logic into the Agent layer
diff --git a/changelog.d/4464.feature b/changelog.d/4464.feature
new file mode 100644
index 0000000000..bda713adf9
--- /dev/null
+++ b/changelog.d/4464.feature
@@ -0,0 +1 @@
+Implement MSC1708 (.well-known routing for server-server federation)
\ No newline at end of file
diff --git a/changelog.d/4464.misc b/changelog.d/4464.misc
deleted file mode 100644
index 9a51434755..0000000000
--- a/changelog.d/4464.misc
+++ /dev/null
@@ -1 +0,0 @@
-Move SRV logic into the Agent layer
diff --git a/changelog.d/4468.feature b/changelog.d/4468.feature
new file mode 100644
index 0000000000..bda713adf9
--- /dev/null
+++ b/changelog.d/4468.feature
@@ -0,0 +1 @@
+Implement MSC1708 (.well-known routing for server-server federation)
\ No newline at end of file
diff --git a/changelog.d/4468.misc b/changelog.d/4468.misc
deleted file mode 100644
index 9a51434755..0000000000
--- a/changelog.d/4468.misc
+++ /dev/null
@@ -1 +0,0 @@
-Move SRV logic into the Agent layer
diff --git a/changelog.d/4481.misc b/changelog.d/4481.misc
new file mode 100644
index 0000000000..43f8963614
--- /dev/null
+++ b/changelog.d/4481.misc
@@ -0,0 +1 @@
+Add infrastructure to support different event formats
diff --git a/changelog.d/4482.misc b/changelog.d/4482.misc
new file mode 100644
index 0000000000..43f8963614
--- /dev/null
+++ b/changelog.d/4482.misc
@@ -0,0 +1 @@
+Add infrastructure to support different event formats
diff --git a/changelog.d/4487.feature b/changelog.d/4487.feature
new file mode 100644
index 0000000000..bda713adf9
--- /dev/null
+++ b/changelog.d/4487.feature
@@ -0,0 +1 @@
+Implement MSC1708 (.well-known routing for server-server federation)
\ No newline at end of file
diff --git a/changelog.d/4487.misc b/changelog.d/4487.misc
deleted file mode 100644
index 79de8eb3ad..0000000000
--- a/changelog.d/4487.misc
+++ /dev/null
@@ -1 +0,0 @@
-Fix idna and ipv6 literal handling in MatrixFederationAgent
diff --git a/changelog.d/4489.feature b/changelog.d/4489.feature
new file mode 100644
index 0000000000..bda713adf9
--- /dev/null
+++ b/changelog.d/4489.feature
@@ -0,0 +1 @@
+Implement MSC1708 (.well-known routing for server-server federation)
\ No newline at end of file
diff --git a/changelog.d/4493.misc b/changelog.d/4493.misc
new file mode 100644
index 0000000000..43f8963614
--- /dev/null
+++ b/changelog.d/4493.misc
@@ -0,0 +1 @@
+Add infrastructure to support different event formats
diff --git a/changelog.d/4494.misc b/changelog.d/4494.misc
new file mode 100644
index 0000000000..43f8963614
--- /dev/null
+++ b/changelog.d/4494.misc
@@ -0,0 +1 @@
+Add infrastructure to support different event formats
diff --git a/changelog.d/4496.misc b/changelog.d/4496.misc
new file mode 100644
index 0000000000..43f8963614
--- /dev/null
+++ b/changelog.d/4496.misc
@@ -0,0 +1 @@
+Add infrastructure to support different event formats
diff --git a/changelog.d/4497.feature b/changelog.d/4497.feature
new file mode 100644
index 0000000000..bda713adf9
--- /dev/null
+++ b/changelog.d/4497.feature
@@ -0,0 +1 @@
+Implement MSC1708 (.well-known routing for server-server federation)
\ No newline at end of file
diff --git a/changelog.d/4505.misc b/changelog.d/4505.misc
new file mode 100644
index 0000000000..994801fd1e
--- /dev/null
+++ b/changelog.d/4505.misc
@@ -0,0 +1 @@
+ Synapse will now take advantage of native UPSERT functionality in PostgreSQL 9.5+ and SQLite 3.24+.
diff --git a/changelog.d/4506.misc b/changelog.d/4506.misc
new file mode 100644
index 0000000000..ea0e7d9580
--- /dev/null
+++ b/changelog.d/4506.misc
@@ -0,0 +1 @@
+Make it possible to set the log level for tests via an environment variable
\ No newline at end of file
diff --git a/changelog.d/4507.misc b/changelog.d/4507.misc
new file mode 100644
index 0000000000..baf45b2bec
--- /dev/null
+++ b/changelog.d/4507.misc
@@ -0,0 +1 @@
+Reduce the log level of linearizer lock acquirement to DEBUG.
diff --git a/changelog.d/4509.removal b/changelog.d/4509.removal
new file mode 100644
index 0000000000..9165009813
--- /dev/null
+++ b/changelog.d/4509.removal
@@ -0,0 +1 @@
+Synapse no longer generates self-signed TLS certificates when generating a configuration file.
diff --git a/changelog.d/4510.misc b/changelog.d/4510.misc
new file mode 100644
index 0000000000..43f8963614
--- /dev/null
+++ b/changelog.d/4510.misc
@@ -0,0 +1 @@
+Add infrastructure to support different event formats
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index e37b807c94..2d78a257d3 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -65,7 +65,7 @@ class Auth(object):
         register_cache("cache", "token_cache", self.token_cache)
 
     @defer.inlineCallbacks
-    def check_from_context(self, event, context, do_sig_check=True):
+    def check_from_context(self, room_version, event, context, do_sig_check=True):
         prev_state_ids = yield context.get_prev_state_ids(self.store)
         auth_events_ids = yield self.compute_auth_events(
             event, prev_state_ids, for_verification=True,
@@ -74,12 +74,16 @@ class Auth(object):
         auth_events = {
             (e.type, e.state_key): e for e in itervalues(auth_events)
         }
-        self.check(event, auth_events=auth_events, do_sig_check=do_sig_check)
+        self.check(
+            room_version, event,
+            auth_events=auth_events, do_sig_check=do_sig_check,
+        )
 
-    def check(self, event, auth_events, do_sig_check=True):
+    def check(self, room_version, event, auth_events, do_sig_check=True):
         """ Checks if this event is correctly authed.
 
         Args:
+            room_version (str): version of the room
             event: the event being checked.
             auth_events (dict: event-key -> event): the existing room state.
 
@@ -88,7 +92,9 @@ class Auth(object):
             True if the auth checks pass.
         """
         with Measure(self.clock, "auth.check"):
-            event_auth.check(event, auth_events, do_sig_check=do_sig_check)
+            event_auth.check(
+                room_version, event, auth_events, do_sig_check=do_sig_check
+            )
 
     @defer.inlineCallbacks
     def check_joined_room(self, room_id, user_id, current_state=None):
@@ -545,17 +551,6 @@ class Auth(object):
         return self.store.is_server_admin(user)
 
     @defer.inlineCallbacks
-    def add_auth_events(self, builder, context):
-        prev_state_ids = yield context.get_prev_state_ids(self.store)
-        auth_ids = yield self.compute_auth_events(builder, prev_state_ids)
-
-        auth_events_entries = yield self.store.add_event_hashes(
-            auth_ids
-        )
-
-        builder.auth_events = auth_events_entries
-
-    @defer.inlineCallbacks
     def compute_auth_events(self, event, current_state_ids, for_verification=False):
         if event.type == EventTypes.Create:
             defer.returnValue([])
@@ -571,7 +566,7 @@ class Auth(object):
         key = (EventTypes.JoinRules, "", )
         join_rule_event_id = current_state_ids.get(key)
 
-        key = (EventTypes.Member, event.user_id, )
+        key = (EventTypes.Member, event.sender, )
         member_event_id = current_state_ids.get(key)
 
         key = (EventTypes.Create, "", )
diff --git a/synapse/config/tls.py b/synapse/config/tls.py
index a75e233aa0..734f612db7 100644
--- a/synapse/config/tls.py
+++ b/synapse/config/tls.py
@@ -15,6 +15,7 @@
 
 import logging
 import os
+import warnings
 from datetime import datetime
 from hashlib import sha256
 
@@ -39,8 +40,8 @@ class TlsConfig(Config):
         self.acme_bind_addresses = acme_config.get("bind_addresses", ["127.0.0.1"])
         self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30)
 
-        self.tls_certificate_file = os.path.abspath(config.get("tls_certificate_path"))
-        self.tls_private_key_file = os.path.abspath(config.get("tls_private_key_path"))
+        self.tls_certificate_file = self.abspath(config.get("tls_certificate_path"))
+        self.tls_private_key_file = self.abspath(config.get("tls_private_key_path"))
         self._original_tls_fingerprints = config["tls_fingerprints"]
         self.tls_fingerprints = list(self._original_tls_fingerprints)
         self.no_tls = config.get("no_tls", False)
@@ -94,6 +95,16 @@ class TlsConfig(Config):
         """
         self.tls_certificate = self.read_tls_certificate(self.tls_certificate_file)
 
+        # Check if it is self-signed, and issue a warning if so.
+        if self.tls_certificate.get_issuer() == self.tls_certificate.get_subject():
+            warnings.warn(
+                (
+                    "Self-signed TLS certificates will not be accepted by Synapse 1.0. "
+                    "Please either provide a valid certificate, or use Synapse's ACME "
+                    "support to provision one."
+                )
+            )
+
         if not self.no_tls:
             self.tls_private_key = self.read_tls_private_key(self.tls_private_key_file)
 
@@ -118,10 +129,11 @@ class TlsConfig(Config):
         return (
             """\
         # PEM encoded X509 certificate for TLS.
-        # You can replace the self-signed certificate that synapse
-        # autogenerates on launch with your own SSL certificate + key pair
-        # if you like.  Any required intermediary certificates can be
-        # appended after the primary certificate in hierarchical order.
+        # This certificate, as of Synapse 1.0, will need to be a valid
+        # and verifiable certificate, with a root that is available in
+        # the root store of other servers you wish to federate to. Any
+        # required intermediary certificates can be appended after the
+        # primary certificate in hierarchical order.
         tls_certificate_path: "%(tls_certificate_path)s"
 
         # PEM encoded private key for TLS
@@ -183,40 +195,3 @@ class TlsConfig(Config):
     def read_tls_private_key(self, private_key_path):
         private_key_pem = self.read_file(private_key_path, "tls_private_key")
         return crypto.load_privatekey(crypto.FILETYPE_PEM, private_key_pem)
-
-    def generate_files(self, config):
-        tls_certificate_path = config["tls_certificate_path"]
-        tls_private_key_path = config["tls_private_key_path"]
-
-        if not self.path_exists(tls_private_key_path):
-            with open(tls_private_key_path, "wb") as private_key_file:
-                tls_private_key = crypto.PKey()
-                tls_private_key.generate_key(crypto.TYPE_RSA, 2048)
-                private_key_pem = crypto.dump_privatekey(
-                    crypto.FILETYPE_PEM, tls_private_key
-                )
-                private_key_file.write(private_key_pem)
-        else:
-            with open(tls_private_key_path) as private_key_file:
-                private_key_pem = private_key_file.read()
-                tls_private_key = crypto.load_privatekey(
-                    crypto.FILETYPE_PEM, private_key_pem
-                )
-
-        if not self.path_exists(tls_certificate_path):
-            with open(tls_certificate_path, "wb") as certificate_file:
-                cert = crypto.X509()
-                subject = cert.get_subject()
-                subject.CN = config["server_name"]
-
-                cert.set_serial_number(1000)
-                cert.gmtime_adj_notBefore(0)
-                cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)
-                cert.set_issuer(cert.get_subject())
-                cert.set_pubkey(tls_private_key)
-
-                cert.sign(tls_private_key, 'sha256')
-
-                cert_pem = crypto.dump_certificate(crypto.FILETYPE_PEM, cert)
-
-                certificate_file.write(cert_pem)
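
For reference, the new self-signed warning added above simply compares the certificate's issuer and subject on the loaded pyOpenSSL object. A minimal stand-alone sketch of the same test (the PEM path is a placeholder, not from this patch):

    from OpenSSL import crypto

    # Placeholder path; any PEM-encoded X509 certificate will do.
    with open("/path/to/cert.pem") as f:
        cert = crypto.load_certificate(crypto.FILETYPE_PEM, f.read())

    # A certificate that issued itself has identical issuer and subject.
    if cert.get_issuer() == cert.get_subject():
        print("certificate appears to be self-signed")
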
diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py
index 8774b28967..1dfa727fcf 100644
--- a/synapse/crypto/event_signing.py
+++ b/synapse/crypto/event_signing.py
@@ -23,14 +23,14 @@ from signedjson.sign import sign_json
 from unpaddedbase64 import decode_base64, encode_base64
 
 from synapse.api.errors import Codes, SynapseError
-from synapse.events.utils import prune_event
+from synapse.events.utils import prune_event, prune_event_dict
 
 logger = logging.getLogger(__name__)
 
 
 def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
     """Check whether the hash for this PDU matches the contents"""
-    name, expected_hash = compute_content_hash(event, hash_algorithm)
+    name, expected_hash = compute_content_hash(event.get_pdu_json(), hash_algorithm)
     logger.debug("Expecting hash: %s", encode_base64(expected_hash))
 
     # some malformed events lack a 'hashes'. Protect against it being missing
@@ -59,35 +59,70 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
     return message_hash_bytes == expected_hash
 
 
-def compute_content_hash(event, hash_algorithm):
-    event_json = event.get_pdu_json()
-    event_json.pop("age_ts", None)
-    event_json.pop("unsigned", None)
-    event_json.pop("signatures", None)
-    event_json.pop("hashes", None)
-    event_json.pop("outlier", None)
-    event_json.pop("destinations", None)
+def compute_content_hash(event_dict, hash_algorithm):
+    """Compute the content hash of an event, which is the hash of the
+    unredacted event.
 
-    event_json_bytes = encode_canonical_json(event_json)
+    Args:
+        event_dict (dict): The unredacted event as a dict
+        hash_algorithm: A hasher from `hashlib`, e.g. hashlib.sha256, to use
+            to hash the event
+
+    Returns:
+        tuple[str, bytes]: A tuple of the name of hash and the hash as raw
+        bytes.
+    """
+    event_dict = dict(event_dict)
+    event_dict.pop("age_ts", None)
+    event_dict.pop("unsigned", None)
+    event_dict.pop("signatures", None)
+    event_dict.pop("hashes", None)
+    event_dict.pop("outlier", None)
+    event_dict.pop("destinations", None)
+
+    event_json_bytes = encode_canonical_json(event_dict)
 
     hashed = hash_algorithm(event_json_bytes)
     return (hashed.name, hashed.digest())
 
 
 def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):
+    """Computes the event reference hash. This is the hash of the redacted
+    event.
+
+    Args:
+        event (FrozenEvent)
+        hash_algorithm: A hasher from `hashlib`, e.g. hashlib.sha256, to use
+            to hash the event
+
+    Returns:
+        tuple[str, bytes]: A tuple of the name of hash and the hash as raw
+        bytes.
+    """
     tmp_event = prune_event(event)
-    event_json = tmp_event.get_pdu_json()
-    event_json.pop("signatures", None)
-    event_json.pop("age_ts", None)
-    event_json.pop("unsigned", None)
-    event_json_bytes = encode_canonical_json(event_json)
+    event_dict = tmp_event.get_pdu_json()
+    event_dict.pop("signatures", None)
+    event_dict.pop("age_ts", None)
+    event_dict.pop("unsigned", None)
+    event_json_bytes = encode_canonical_json(event_dict)
     hashed = hash_algorithm(event_json_bytes)
     return (hashed.name, hashed.digest())
 
 
-def compute_event_signature(event, signature_name, signing_key):
-    tmp_event = prune_event(event)
-    redact_json = tmp_event.get_pdu_json()
+def compute_event_signature(event_dict, signature_name, signing_key):
+    """Compute the signature of the event for the given name and key.
+
+    Args:
+        event_dict (dict): The event as a dict
+        signature_name (str): The name of the entity signing the event
+            (typically the server's hostname).
+        signing_key (syutil.crypto.SigningKey): The key to sign with
+
+    Returns:
+        dict[str, dict[str, str]]: Returns a dictionary in the same format of
+        an event's signatures field.
+    """
+    redact_json = prune_event_dict(event_dict)
     redact_json.pop("age_ts", None)
     redact_json.pop("unsigned", None)
     logger.debug("Signing event: %s", encode_canonical_json(redact_json))
@@ -96,25 +131,25 @@ def compute_event_signature(event, signature_name, signing_key):
     return redact_json["signatures"]
 
 
-def add_hashes_and_signatures(event, signature_name, signing_key,
+def add_hashes_and_signatures(event_dict, signature_name, signing_key,
                               hash_algorithm=hashlib.sha256):
-    # if hasattr(event, "old_state_events"):
-    #     state_json_bytes = encode_canonical_json(
-    #         [e.event_id for e in event.old_state_events.values()]
-    #     )
-    #     hashed = hash_algorithm(state_json_bytes)
-    #     event.state_hash = {
-    #         hashed.name: encode_base64(hashed.digest())
-    #     }
-
-    name, digest = compute_content_hash(event, hash_algorithm=hash_algorithm)
-
-    if not hasattr(event, "hashes"):
-        event.hashes = {}
-    event.hashes[name] = encode_base64(digest)
-
-    event.signatures = compute_event_signature(
-        event,
+    """Add content hash and sign the event
+
+    Args:
+        event_dict (dict): The event to add hashes to and sign
+        signature_name (str): The name of the entity signing the event
+            (typically the server's hostname).
+        signing_key (syutil.crypto.SigningKey): The key to sign with
+        hash_algorithm: A hasher from `hashlib`, e.g. hashlib.sha256, to use
+            to hash the event
+    """
+
+    name, digest = compute_content_hash(event_dict, hash_algorithm=hash_algorithm)
+
+    event_dict.setdefault("hashes", {})[name] = encode_base64(digest)
+
+    event_dict["signatures"] = compute_event_signature(
+        event_dict,
         signature_name=signature_name,
         signing_key=signing_key,
     )
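
With this refactor the signing helpers operate on plain dicts rather than event objects. A rough sketch of exercising the new add_hashes_and_signatures signature, assuming a key generated via signedjson (the event fields and key version below are illustrative, not taken from this patch):

    from signedjson.key import generate_signing_key

    from synapse.crypto.event_signing import add_hashes_and_signatures

    signing_key = generate_signing_key("key1")  # illustrative key version
    event_dict = {
        "type": "m.room.message",
        "room_id": "!somewhere:example.com",
        "sender": "@alice:example.com",
        "content": {"msgtype": "m.text", "body": "hello"},
    }

    # Mutates event_dict in place, adding "hashes" and "signatures" entries.
    add_hashes_and_signatures(event_dict, "example.com", signing_key)
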
diff --git a/synapse/event_auth.py b/synapse/event_auth.py
index c81d8e6729..9adedbbb02 100644
--- a/synapse/event_auth.py
+++ b/synapse/event_auth.py
@@ -27,10 +27,11 @@ from synapse.types import UserID, get_domain_from_id
 logger = logging.getLogger(__name__)
 
 
-def check(event, auth_events, do_sig_check=True, do_size_check=True):
+def check(room_version, event, auth_events, do_sig_check=True, do_size_check=True):
     """ Checks if this event is correctly authed.
 
     Args:
+        room_version (str): the version of the room
         event: the event being checked.
         auth_events (dict: event-key -> event): the existing room state.
 
diff --git a/synapse/events/builder.py b/synapse/events/builder.py
index 7e63371095..fb0683cea8 100644
--- a/synapse/events/builder.py
+++ b/synapse/events/builder.py
@@ -13,79 +13,156 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import copy
+import attr
 
-from synapse.api.constants import RoomVersions
+from twisted.internet import defer
+
+from synapse.api.constants import (
+    KNOWN_EVENT_FORMAT_VERSIONS,
+    KNOWN_ROOM_VERSIONS,
+    MAX_DEPTH,
+)
+from synapse.crypto.event_signing import add_hashes_and_signatures
 from synapse.types import EventID
 from synapse.util.stringutils import random_string
 
-from . import EventBase, FrozenEvent, _event_dict_property
+from . import (
+    _EventInternalMetadata,
+    event_type_from_format_version,
+    room_version_to_event_format,
+)
 
 
-def get_event_builder(room_version, key_values={}, internal_metadata_dict={}):
-    """Generate an event builder appropriate for the given room version
+@attr.s(slots=True, cmp=False, frozen=True)
+class EventBuilder(object):
+    """A format independent event builder used to build up the event content
+    before signing the event.
 
-    Args:
-        room_version (str): Version of the room that we're creating an
-            event builder for
-        key_values (dict): Fields used as the basis of the new event
-        internal_metadata_dict (dict): Used to create the `_EventInternalMetadata`
-            object.
+    (Note that while objects of this class are frozen, the
+    content/unsigned/internal_metadata fields are still mutable)
 
-    Returns:
-        EventBuilder
+    Attributes:
+        format_version (int): Event format version
+        room_id (str)
+        type (str)
+        sender (str)
+        content (dict)
+        unsigned (dict)
+        internal_metadata (_EventInternalMetadata)
+
+        _state (StateHandler)
+        _auth (synapse.api.Auth)
+        _store (DataStore)
+        _clock (Clock)
+        _hostname (str): The hostname of the server creating the event
+        _signing_key: The signing key to use to sign the event as the server
     """
-    if room_version in {
-        RoomVersions.V1,
-        RoomVersions.V2,
-        RoomVersions.VDH_TEST,
-        RoomVersions.STATE_V2_TEST,
-    }:
-        return EventBuilder(key_values, internal_metadata_dict)
-    else:
-        raise Exception(
-            "No event format defined for version %r" % (room_version,)
-        )
 
+    _state = attr.ib()
+    _auth = attr.ib()
+    _store = attr.ib()
+    _clock = attr.ib()
+    _hostname = attr.ib()
+    _signing_key = attr.ib()
+
+    format_version = attr.ib()
+
+    room_id = attr.ib()
+    type = attr.ib()
+    sender = attr.ib()
+
+    content = attr.ib(default=attr.Factory(dict))
+    unsigned = attr.ib(default=attr.Factory(dict))
+
+    # These only exist on a subset of events, so they raise AttributeError if
+    # someone tries to get them when they don't exist.
+    _state_key = attr.ib(default=None)
+    _redacts = attr.ib(default=None)
+
+    internal_metadata = attr.ib(default=attr.Factory(lambda: _EventInternalMetadata({})))
+
+    @property
+    def state_key(self):
+        if self._state_key is not None:
+            return self._state_key
+
+        raise AttributeError("state_key")
+
+    def is_state(self):
+        return self._state_key is not None
 
-class EventBuilder(EventBase):
-    def __init__(self, key_values={}, internal_metadata_dict={}):
-        signatures = copy.deepcopy(key_values.pop("signatures", {}))
-        unsigned = copy.deepcopy(key_values.pop("unsigned", {}))
+    @defer.inlineCallbacks
+    def build(self, prev_event_ids):
+        """Transform into a fully signed and hashed event
 
-        super(EventBuilder, self).__init__(
-            key_values,
-            signatures=signatures,
-            unsigned=unsigned,
-            internal_metadata_dict=internal_metadata_dict,
+        Args:
+            prev_event_ids (list[str]): The event IDs to use as the prev events
+
+        Returns:
+            Deferred[FrozenEvent]
+        """
+
+        state_ids = yield self._state.get_current_state_ids(
+            self.room_id, prev_event_ids,
+        )
+        auth_ids = yield self._auth.compute_auth_events(
+            self, state_ids,
         )
 
-    event_id = _event_dict_property("event_id")
-    state_key = _event_dict_property("state_key")
-    type = _event_dict_property("type")
+        auth_events = yield self._store.add_event_hashes(auth_ids)
+        prev_events = yield self._store.add_event_hashes(prev_event_ids)
 
-    def build(self):
-        return FrozenEvent.from_event(self)
+        old_depth = yield self._store.get_max_depth_of(
+            prev_event_ids,
+        )
+        depth = old_depth + 1
 
+        # we cap depth of generated events, to ensure that they are not
+        # rejected by other servers (and so that they can be persisted in
+        # the db)
+        depth = min(depth, MAX_DEPTH)
 
-class EventBuilderFactory(object):
-    def __init__(self, clock, hostname):
-        self.clock = clock
-        self.hostname = hostname
+        event_dict = {
+            "auth_events": auth_events,
+            "prev_events": prev_events,
+            "type": self.type,
+            "room_id": self.room_id,
+            "sender": self.sender,
+            "content": self.content,
+            "unsigned": self.unsigned,
+            "depth": depth,
+            "prev_state": [],
+        }
+
+        if self.is_state():
+            event_dict["state_key"] = self._state_key
 
-        self.event_id_count = 0
+        if self._redacts is not None:
+            event_dict["redacts"] = self._redacts
 
-    def create_event_id(self):
-        i = str(self.event_id_count)
-        self.event_id_count += 1
+        defer.returnValue(
+            create_local_event_from_event_dict(
+                clock=self._clock,
+                hostname=self._hostname,
+                signing_key=self._signing_key,
+                format_version=self.format_version,
+                event_dict=event_dict,
+                internal_metadata_dict=self.internal_metadata.get_dict(),
+            )
+        )
 
-        local_part = str(int(self.clock.time())) + i + random_string(5)
 
-        e_id = EventID(local_part, self.hostname)
+class EventBuilderFactory(object):
+    def __init__(self, hs):
+        self.clock = hs.get_clock()
+        self.hostname = hs.hostname
+        self.signing_key = hs.config.signing_key[0]
 
-        return e_id.to_string()
+        self.store = hs.get_datastore()
+        self.state = hs.get_state_handler()
+        self.auth = hs.get_auth()
 
-    def new(self, room_version, key_values={}):
+    def new(self, room_version, key_values):
         """Generate an event builder appropriate for the given room version
 
         Args:
@@ -98,27 +175,102 @@ class EventBuilderFactory(object):
         """
 
         # There's currently only the one event version defined
-        if room_version not in {
-            RoomVersions.V1,
-            RoomVersions.V2,
-            RoomVersions.VDH_TEST,
-            RoomVersions.STATE_V2_TEST,
-        }:
+        if room_version not in KNOWN_ROOM_VERSIONS:
             raise Exception(
                 "No event format defined for version %r" % (room_version,)
             )
 
-        key_values["event_id"] = self.create_event_id()
+        return EventBuilder(
+            store=self.store,
+            state=self.state,
+            auth=self.auth,
+            clock=self.clock,
+            hostname=self.hostname,
+            signing_key=self.signing_key,
+            format_version=room_version_to_event_format(room_version),
+            type=key_values["type"],
+            state_key=key_values.get("state_key"),
+            room_id=key_values["room_id"],
+            sender=key_values["sender"],
+            content=key_values.get("content", {}),
+            unsigned=key_values.get("unsigned", {}),
+            redacts=key_values.get("redacts", None),
+        )
+
+
+def create_local_event_from_event_dict(clock, hostname, signing_key,
+                                       format_version, event_dict,
+                                       internal_metadata_dict=None):
+    """Takes a fully formed event dict, ensuring that fields like `origin`
+    and `origin_server_ts` have correct values for a locally produced event,
+    then signs and hashes it.
+
+    Args:
+        clock (Clock)
+        hostname (str)
+        signing_key
+        format_version (int)
+        event_dict (dict)
+        internal_metadata_dict (dict|None)
+
+    Returns:
+        FrozenEvent
+    """
+
+    # There's currently only the one event version defined
+    if format_version not in KNOWN_EVENT_FORMAT_VERSIONS:
+        raise Exception(
+            "No event format defined for version %r" % (format_version,)
+        )
+
+    if internal_metadata_dict is None:
+        internal_metadata_dict = {}
+
+    time_now = int(clock.time_msec())
+
+    event_dict["event_id"] = _create_event_id(clock, hostname)
+
+    event_dict["origin"] = hostname
+    event_dict["origin_server_ts"] = time_now
+
+    event_dict.setdefault("unsigned", {})
+    age = event_dict["unsigned"].pop("age", 0)
+    event_dict["unsigned"].setdefault("age_ts", time_now - age)
+
+    event_dict.setdefault("signatures", {})
+
+    add_hashes_and_signatures(
+        event_dict,
+        hostname,
+        signing_key,
+    )
+    return event_type_from_format_version(format_version)(
+        event_dict, internal_metadata_dict=internal_metadata_dict,
+    )
+
+
+# A counter used when generating new event IDs
+_event_id_counter = 0
+
+
+def _create_event_id(clock, hostname):
+    """Create a new event ID
+
+    Args:
+        clock (Clock)
+        hostname (str): The server name for the event ID
+
+    Returns:
+        str
+    """
 
-        time_now = int(self.clock.time_msec())
+    global _event_id_counter
 
-        key_values.setdefault("origin", self.hostname)
-        key_values.setdefault("origin_server_ts", time_now)
+    i = str(_event_id_counter)
+    _event_id_counter += 1
 
-        key_values.setdefault("unsigned", {})
-        age = key_values["unsigned"].pop("age", 0)
-        key_values["unsigned"].setdefault("age_ts", time_now - age)
+    local_part = str(int(clock.time())) + i + random_string(5)
 
-        key_values["signatures"] = {}
+    e_id = EventID(local_part, hostname)
 
-        return EventBuilder(key_values=key_values,)
+    return e_id.to_string()
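
Under the new builder, an event is described by a plain key_values dict and only becomes a signed FrozenEvent once build() is called with the prev events. A sketch of the intended call pattern (hs, the IDs and the prev-event list are placeholders, not part of this patch):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def build_message_event(hs, room_id, user_id, prev_event_ids):
        builder = hs.get_event_builder_factory().new(
            "1",  # room version; "1" is used purely as an example here
            {
                "type": "m.room.message",
                "room_id": room_id,
                "sender": user_id,
                "content": {"msgtype": "m.text", "body": "hello"},
            },
        )
        # build() resolves auth events from the current state, computes the
        # depth, then hashes and signs, yielding a FrozenEvent.
        event = yield builder.build(prev_event_ids=prev_event_ids)
        defer.returnValue(event)
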
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index 652941ca0d..63f693f259 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -38,8 +38,31 @@ def prune_event(event):
     This is used when we "redact" an event. We want to remove all fields that
     the user has specified, but we do want to keep necessary information like
     type, state_key etc.
+
+    Args:
+        event (FrozenEvent)
+
+    Returns:
+        FrozenEvent
+    """
+    pruned_event_dict = prune_event_dict(event.get_dict())
+
+    from . import event_type_from_format_version
+    return event_type_from_format_version(event.format_version)(
+        pruned_event_dict, event.internal_metadata.get_dict()
+    )
+
+
+def prune_event_dict(event_dict):
+    """Redacts the event_dict in the same way as `prune_event`, except it
+    operates on dicts rather than event objects
+
+    Args:
+        event_dict (dict)
+
+    Returns:
+        dict: A copy of the pruned event dict
     """
-    event_type = event.type
 
     allowed_keys = [
         "event_id",
@@ -59,13 +82,13 @@ def prune_event(event):
         "membership",
     ]
 
-    event_dict = event.get_dict()
+    event_type = event_dict["type"]
 
     new_content = {}
 
     def add_fields(*fields):
         for field in fields:
-            if field in event.content:
+            if field in event_dict["content"]:
                 new_content[field] = event_dict["content"][field]
 
     if event_type == EventTypes.Member:
@@ -98,17 +121,17 @@ def prune_event(event):
 
     allowed_fields["content"] = new_content
 
-    allowed_fields["unsigned"] = {}
+    unsigned = {}
+    allowed_fields["unsigned"] = unsigned
 
-    if "age_ts" in event.unsigned:
-        allowed_fields["unsigned"]["age_ts"] = event.unsigned["age_ts"]
-    if "replaces_state" in event.unsigned:
-        allowed_fields["unsigned"]["replaces_state"] = event.unsigned["replaces_state"]
+    event_unsigned = event_dict.get("unsigned", {})
 
-    return type(event)(
-        allowed_fields,
-        internal_metadata_dict=event.internal_metadata.get_dict()
-    )
+    if "age_ts" in event_unsigned:
+        unsigned["age_ts"] = event_unsigned["age_ts"]
+    if "replaces_state" in event_unsigned:
+        unsigned["replaces_state"] = event_unsigned["replaces_state"]
+
+    return allowed_fields
 
 
 def _copy_field(src, dst, field):
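
prune_event_dict redacts a plain dict the same way prune_event redacts an event object, returning a pruned copy. A small illustrative run (the event dict is made up for this sketch):

    from synapse.events.utils import prune_event_dict

    event_dict = {
        "type": "m.room.message",
        "room_id": "!somewhere:example.com",
        "sender": "@alice:example.com",
        "content": {"msgtype": "m.text", "body": "secret"},
        "unsigned": {"age_ts": 1000, "transaction_id": "txn1"},
    }

    pruned = prune_event_dict(event_dict)
    # For a message event all content fields are stripped, and only
    # whitelisted unsigned keys such as "age_ts" survive.
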
diff --git a/synapse/events/validator.py b/synapse/events/validator.py
index cf184748a1..c53bf44e51 100644
--- a/synapse/events/validator.py
+++ b/synapse/events/validator.py
@@ -21,17 +21,22 @@ from synapse.types import EventID, RoomID, UserID
 
 
 class EventValidator(object):
+    def validate_new(self, event):
+        """Validates the event has roughly the right format
+
+        Args:
+            event (FrozenEvent)
+        """
+        self.validate_builder(event)
 
-    def validate(self, event):
         EventID.from_string(event.event_id)
-        RoomID.from_string(event.room_id)
 
         required = [
-            # "auth_events",
+            "auth_events",
             "content",
-            # "hashes",
+            "hashes",
             "origin",
-            # "prev_events",
+            "prev_events",
             "sender",
             "type",
         ]
@@ -41,8 +46,25 @@ class EventValidator(object):
                 raise SynapseError(400, "Event does not have key %s" % (k,))
 
         # Check that the following keys have string values
-        strings = [
+        event_strings = [
             "origin",
+        ]
+
+        for s in event_strings:
+            if not isinstance(getattr(event, s), string_types):
+                raise SynapseError(400, "'%s' not a string type" % (s,))
+
+    def validate_builder(self, event):
+        """Validates that the builder/event has roughly the right format. Only
+        checks values that we expect a proto event to have, rather than all the
+        fields an event would have
+
+        Args:
+            event (EventBuilder|FrozenEvent)
+        """
+
+        strings = [
+            "room_id",
             "sender",
             "type",
         ]
@@ -54,22 +76,7 @@ class EventValidator(object):
             if not isinstance(getattr(event, s), string_types):
                 raise SynapseError(400, "Not '%s' a string type" % (s,))
 
-        if event.type == EventTypes.Member:
-            if "membership" not in event.content:
-                raise SynapseError(400, "Content has not membership key")
-
-            if event.content["membership"] not in Membership.LIST:
-                raise SynapseError(400, "Invalid membership key")
-
-        # Check that the following keys have dictionary values
-        # TODO
-
-        # Check that the following keys have the correct format for DAGs
-        # TODO
-
-    def validate_new(self, event):
-        self.validate(event)
-
+        RoomID.from_string(event.room_id)
         UserID.from_string(event.sender)
 
         if event.type == EventTypes.Message:
@@ -86,9 +93,16 @@ class EventValidator(object):
         elif event.type == EventTypes.Name:
             self._ensure_strings(event.content, ["name"])
 
+        elif event.type == EventTypes.Member:
+            if "membership" not in event.content:
+                raise SynapseError(400, "Content has not membership key")
+
+            if event.content["membership"] not in Membership.LIST:
+                raise SynapseError(400, "Invalid membership key")
+
     def _ensure_strings(self, d, keys):
         for s in keys:
             if s not in d:
                 raise SynapseError(400, "'%s' not in content" % (s,))
             if not isinstance(d[s], string_types):
-                raise SynapseError(400, "Not '%s' a string type" % (s,))
+                raise SynapseError(400, "'%s' not a string type" % (s,))
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index 71809893c5..9b4acd2ed7 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -37,8 +37,7 @@ from synapse.api.errors import (
     HttpResponseException,
     SynapseError,
 )
-from synapse.crypto.event_signing import add_hashes_and_signatures
-from synapse.events import room_version_to_event_format
+from synapse.events import builder, room_version_to_event_format
 from synapse.federation.federation_base import FederationBase, event_from_pdu_json
 from synapse.util import logcontext, unwrapFirstError
 from synapse.util.caches.expiringcache import ExpiringCache
@@ -72,7 +71,8 @@ class FederationClient(FederationBase):
         self.state = hs.get_state_handler()
         self.transport_layer = hs.get_federation_transport_client()
 
-        self.event_builder_factory = hs.get_event_builder_factory()
+        self.hostname = hs.hostname
+        self.signing_key = hs.config.signing_key[0]
 
         self._get_pdu_cache = ExpiringCache(
             cache_name="get_pdu_cache",
@@ -608,18 +608,10 @@ class FederationClient(FederationBase):
             if "prev_state" not in pdu_dict:
                 pdu_dict["prev_state"] = []
 
-            # Strip off the fields that we want to clobber.
-            pdu_dict.pop("origin", None)
-            pdu_dict.pop("origin_server_ts", None)
-            pdu_dict.pop("unsigned", None)
-
-            builder = self.event_builder_factory.new(room_version, pdu_dict)
-            add_hashes_and_signatures(
-                builder,
-                self.hs.hostname,
-                self.hs.config.signing_key[0]
+            ev = builder.create_local_event_from_event_dict(
+                self._clock, self.hostname, self.signing_key,
+                format_version=event_format, event_dict=pdu_dict,
             )
-            ev = builder.build()
 
             defer.returnValue(
                 (destination, ev, event_format)
@@ -751,18 +743,9 @@ class FederationClient(FederationBase):
 
     @defer.inlineCallbacks
     def send_invite(self, destination, room_id, event_id, pdu):
-        time_now = self._clock.time_msec()
-        try:
-            code, content = yield self.transport_layer.send_invite(
-                destination=destination,
-                room_id=room_id,
-                event_id=event_id,
-                content=pdu.get_pdu_json(time_now),
-            )
-        except HttpResponseException as e:
-            if e.code == 403:
-                raise e.to_synapse_error()
-            raise
+        room_version = yield self.store.get_room_version(room_id)
+
+        content = yield self._do_send_invite(destination, pdu, room_version)
 
         pdu_dict = content["event"]
 
@@ -780,6 +763,55 @@ class FederationClient(FederationBase):
 
         defer.returnValue(pdu)
 
+    @defer.inlineCallbacks
+    def _do_send_invite(self, destination, pdu, room_version):
+        """Actually sends the invite, first trying v2 API and falling back to
+        v1 API if necessary.
+
+        Args:
+            destination (str): Target server
+            pdu (FrozenEvent)
+            room_version (str)
+
+        Returns:
+            dict: The event as a dict as returned by the remote server
+        """
+        time_now = self._clock.time_msec()
+
+        try:
+            content = yield self.transport_layer.send_invite_v2(
+                destination=destination,
+                room_id=pdu.room_id,
+                event_id=pdu.event_id,
+                content={
+                    "event": pdu.get_pdu_json(time_now),
+                    "room_version": room_version,
+                    "invite_room_state": pdu.unsigned.get("invite_room_state", []),
+                },
+            )
+            defer.returnValue(content)
+        except HttpResponseException as e:
+            if e.code in [400, 404]:
+                if room_version in (RoomVersions.V1, RoomVersions.V2):
+                    pass  # We'll fall through
+                else:
+                    raise Exception("Remote server is too old")
+            elif e.code == 403:
+                raise e.to_synapse_error()
+            else:
+                raise
+
+        # Didn't work, try v1 API.
+        # Note the v1 API returns a tuple of `(200, content)`
+
+        _, content = yield self.transport_layer.send_invite_v1(
+            destination=destination,
+            room_id=pdu.room_id,
+            event_id=pdu.event_id,
+            content=pdu.get_pdu_json(time_now),
+        )
+        defer.returnValue(content)
+
     def send_leave(self, destinations, pdu):
         """Sends a leave event to one of a list of homeservers.
 
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index 4aa04b9588..6681614232 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -322,7 +322,7 @@ class FederationServer(FederationBase):
             if self.hs.is_mine_id(event.event_id):
                 event.signatures.update(
                     compute_event_signature(
-                        event,
+                        event.get_pdu_json(),
                         self.hs.hostname,
                         self.hs.config.signing_key[0]
                     )
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index 260178c47b..8e2be218e2 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -21,7 +21,7 @@ from six.moves import urllib
 from twisted.internet import defer
 
 from synapse.api.constants import Membership
-from synapse.api.urls import FEDERATION_V1_PREFIX
+from synapse.api.urls import FEDERATION_V1_PREFIX, FEDERATION_V2_PREFIX
 from synapse.util.logutils import log_function
 
 logger = logging.getLogger(__name__)
@@ -289,7 +289,7 @@ class TransportLayerClient(object):
 
     @defer.inlineCallbacks
     @log_function
-    def send_invite(self, destination, room_id, event_id, content):
+    def send_invite_v1(self, destination, room_id, event_id, content):
         path = _create_v1_path("/invite/%s/%s", room_id, event_id)
 
         response = yield self.client.put_json(
@@ -303,6 +303,20 @@ class TransportLayerClient(object):
 
     @defer.inlineCallbacks
     @log_function
+    def send_invite_v2(self, destination, room_id, event_id, content):
+        path = _create_v2_path("/invite/%s/%s", room_id, event_id)
+
+        response = yield self.client.put_json(
+            destination=destination,
+            path=path,
+            data=content,
+            ignore_backoff=True,
+        )
+
+        defer.returnValue(response)
+
+    @defer.inlineCallbacks
+    @log_function
     def get_public_rooms(self, remote_server, limit, since_token,
                          search_filter=None, include_all_networks=False,
                          third_party_instance_id=None):
@@ -958,3 +972,24 @@ def _create_v1_path(path, *args):
         FEDERATION_V1_PREFIX
         + path % tuple(urllib.parse.quote(arg, "") for arg in args)
     )
+
+
+def _create_v2_path(path, *args):
+    """Creates a path against V2 federation API from the path template and
+    args. Ensures that all args are url encoded.
+
+    Example:
+
+        _create_v2_path("/event/%s/", event_id)
+
+    Args:
+        path (str): String template for the path
+        args: ([str]): Args to insert into path. Each arg will be url encoded
+
+    Returns:
+        str
+    """
+    return (
+        FEDERATION_V2_PREFIX
+        + path % tuple(urllib.parse.quote(arg, "") for arg in args)
+    )
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index a4b771049c..fcaf7530b0 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -1189,7 +1189,9 @@ class FederationHandler(BaseHandler):
 
         # The remote hasn't signed it yet, obviously. We'll do the full checks
         # when we get the event back in `on_send_join_request`
-        yield self.auth.check_from_context(event, context, do_sig_check=False)
+        yield self.auth.check_from_context(
+            room_version, event, context, do_sig_check=False,
+        )
 
         defer.returnValue(event)
 
@@ -1298,7 +1300,7 @@ class FederationHandler(BaseHandler):
 
         event.signatures.update(
             compute_event_signature(
-                event,
+                event.get_pdu_json(),
                 self.hs.hostname,
                 self.hs.config.signing_key[0]
             )
@@ -1388,7 +1390,9 @@ class FederationHandler(BaseHandler):
         try:
             # The remote hasn't signed it yet, obviously. We'll do the full checks
             # when we get the event back in `on_send_leave_request`
-            yield self.auth.check_from_context(event, context, do_sig_check=False)
+            yield self.auth.check_from_context(
+                room_version, event, context, do_sig_check=False,
+            )
         except AuthError as e:
             logger.warn("Failed to create new leave %r because %s", event, e)
             raise e
@@ -1683,7 +1687,7 @@ class FederationHandler(BaseHandler):
                 auth_for_e[(EventTypes.Create, "")] = create_event
 
             try:
-                self.auth.check(e, auth_events=auth_for_e)
+                self.auth.check(room_version, e, auth_events=auth_for_e)
             except SynapseError as err:
                 # we may get SynapseErrors here as well as AuthErrors. For
                 # instance, there are a couple of (ancient) events in some
@@ -1927,6 +1931,8 @@ class FederationHandler(BaseHandler):
         current_state = set(e.event_id for e in auth_events.values())
         different_auth = event_auth_events - current_state
 
+        room_version = yield self.store.get_room_version(event.room_id)
+
         if different_auth and not event.internal_metadata.is_outlier():
             # Do auth conflict res.
             logger.info("Different auth: %s", different_auth)
@@ -1951,8 +1957,6 @@ class FederationHandler(BaseHandler):
                     (d.type, d.state_key): d for d in different_events if d
                 })
 
-                room_version = yield self.store.get_room_version(event.room_id)
-
                 new_state = yield self.state_handler.resolve_events(
                     room_version,
                     [list(local_view.values()), list(remote_view.values())],
@@ -2052,7 +2056,7 @@ class FederationHandler(BaseHandler):
                 )
 
         try:
-            self.auth.check(event, auth_events=auth_events)
+            self.auth.check(room_version, event, auth_events=auth_events)
         except AuthError as e:
             logger.warn("Failed auth resolution for %r because %s", event, e)
             raise e
@@ -2278,7 +2282,7 @@ class FederationHandler(BaseHandler):
             room_version = yield self.store.get_room_version(room_id)
             builder = self.event_builder_factory.new(room_version, event_dict)
 
-            EventValidator().validate_new(builder)
+            EventValidator().validate_builder(builder)
             event, context = yield self.event_creation_handler.create_new_client_event(
                 builder=builder
             )
@@ -2287,8 +2291,10 @@ class FederationHandler(BaseHandler):
                 room_version, event_dict, event, context
             )
 
+            EventValidator().validate_new(event)
+
             try:
-                yield self.auth.check_from_context(event, context)
+                yield self.auth.check_from_context(room_version, event, context)
             except AuthError as e:
                 logger.warn("Denying new third party invite %r because %s", event, e)
                 raise e
@@ -2330,7 +2336,7 @@ class FederationHandler(BaseHandler):
         )
 
         try:
-            self.auth.check_from_context(event, context)
+            self.auth.check_from_context(room_version, event, context)
         except AuthError as e:
             logger.warn("Denying third party invite %r because %s", event, e)
             raise e
@@ -2372,10 +2378,11 @@ class FederationHandler(BaseHandler):
             # auth check code will explode appropriately.
 
         builder = self.event_builder_factory.new(room_version, event_dict)
-        EventValidator().validate_new(builder)
+        EventValidator().validate_builder(builder)
         event, context = yield self.event_creation_handler.create_new_client_event(
             builder=builder,
         )
+        EventValidator().validate_new(event)
         defer.returnValue((event, context))
 
     @defer.inlineCallbacks
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 7aaa4fba33..37a7dca794 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -22,7 +22,7 @@ from canonicaljson import encode_canonical_json, json
 from twisted.internet import defer
 from twisted.internet.defer import succeed
 
-from synapse.api.constants import MAX_DEPTH, EventTypes, Membership
+from synapse.api.constants import EventTypes, Membership, RoomVersions
 from synapse.api.errors import (
     AuthError,
     Codes,
@@ -31,7 +31,6 @@ from synapse.api.errors import (
     SynapseError,
 )
 from synapse.api.urls import ConsentURIBuilder
-from synapse.crypto.event_signing import add_hashes_and_signatures
 from synapse.events.utils import serialize_event
 from synapse.events.validator import EventValidator
 from synapse.replication.http.send_event import ReplicationSendEventRestServlet
@@ -288,7 +287,7 @@ class EventCreationHandler(object):
 
         builder = self.event_builder_factory.new(room_version, event_dict)
 
-        self.validator.validate_new(builder)
+        self.validator.validate_builder(builder)
 
         if builder.type == EventTypes.Member:
             membership = builder.content.get("membership", None)
@@ -326,6 +325,8 @@ class EventCreationHandler(object):
             prev_events_and_hashes=prev_events_and_hashes,
         )
 
+        self.validator.validate_new(event)
+
         defer.returnValue((event, context))
 
     def _is_exempt_from_privacy_policy(self, builder, requester):
@@ -543,40 +544,19 @@ class EventCreationHandler(object):
             prev_events_and_hashes = \
                 yield self.store.get_prev_events_for_room(builder.room_id)
 
-        if prev_events_and_hashes:
-            depth = max([d for _, _, d in prev_events_and_hashes]) + 1
-            # we cap depth of generated events, to ensure that they are not
-            # rejected by other servers (and so that they can be persisted in
-            # the db)
-            depth = min(depth, MAX_DEPTH)
-        else:
-            depth = 1
-
         prev_events = [
             (event_id, prev_hashes)
             for event_id, prev_hashes, _ in prev_events_and_hashes
         ]
 
-        builder.prev_events = prev_events
-        builder.depth = depth
-
-        context = yield self.state.compute_event_context(builder)
+        event = yield builder.build(
+            prev_event_ids=[p for p, _ in prev_events],
+        )
+        context = yield self.state.compute_event_context(event)
         if requester:
             context.app_service = requester.app_service
 
-        if builder.is_state():
-            builder.prev_state = yield self.store.add_event_hashes(
-                context.prev_state_events
-            )
-
-        yield self.auth.add_auth_events(builder, context)
-
-        signing_key = self.hs.config.signing_key[0]
-        add_hashes_and_signatures(
-            builder, self.server_name, signing_key
-        )
-
-        event = builder.build()
+        self.validator.validate_new(event)
 
         logger.debug(
             "Created event %s",
@@ -611,8 +591,13 @@ class EventCreationHandler(object):
             extra_users (list(UserID)): Any extra users to notify about event
         """
 
+        if event.is_state() and (event.type, event.state_key) == (EventTypes.Create, ""):
+            room_version = event.content.get("room_version", RoomVersions.V1)
+        else:
+            room_version = yield self.store.get_room_version(event.room_id)
+
         try:
-            yield self.auth.check_from_context(event, context)
+            yield self.auth.check_from_context(room_version, event, context)
         except AuthError as err:
             logger.warn("Denying new event %r because %s", event, err)
             raise err
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 388302de09..13ba9291b0 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -123,7 +123,10 @@ class RoomCreationHandler(BaseHandler):
                     token_id=requester.access_token_id,
                 )
             )
-            yield self.auth.check_from_context(tombstone_event, tombstone_context)
+            old_room_version = yield self.store.get_room_version(old_room_id)
+            yield self.auth.check_from_context(
+                old_room_version, tombstone_event, tombstone_context,
+            )
 
             yield self.clone_existing_room(
                 requester,
diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py
index 4a6f634c8b..07c72c9351 100644
--- a/synapse/http/federation/matrix_federation_agent.py
+++ b/synapse/http/federation/matrix_federation_agent.py
@@ -12,6 +12,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import cgi
+import json
 import logging
 
 import attr
@@ -20,7 +22,7 @@ from zope.interface import implementer
 
 from twisted.internet import defer
 from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS
-from twisted.web.client import URI, Agent, HTTPConnectionPool
+from twisted.web.client import URI, Agent, HTTPConnectionPool, readBody
 from twisted.web.http_headers import Headers
 from twisted.web.iweb import IAgent
 
@@ -43,13 +45,19 @@ class MatrixFederationAgent(object):
         tls_client_options_factory (ClientTLSOptionsFactory|None):
             factory to use for fetching client tls options, or none to disable TLS.
 
+        _well_known_tls_policy (IPolicyForHTTPS|None):
+            TLS policy to use for fetching .well-known files. None to use a default
+            (browser-like) implementation.
+
         srv_resolver (SrvResolver|None):
             SRVResolver impl to use for looking up SRV records. None to use a default
             implementation.
     """
 
     def __init__(
-        self, reactor, tls_client_options_factory, _srv_resolver=None,
+        self, reactor, tls_client_options_factory,
+        _well_known_tls_policy=None,
+        _srv_resolver=None,
     ):
         self._reactor = reactor
         self._tls_client_options_factory = tls_client_options_factory
@@ -62,6 +70,14 @@ class MatrixFederationAgent(object):
         self._pool.maxPersistentPerHost = 5
         self._pool.cachedConnectionTimeout = 2 * 60
 
+        agent_args = {}
+        if _well_known_tls_policy is not None:
+            # the param is called 'contextFactory', but passing an actual
+            # context factory is deprecated; it expects an IPolicyForHTTPS.
+            agent_args['contextFactory'] = _well_known_tls_policy
+        _well_known_agent = Agent(self._reactor, pool=self._pool, **agent_args)
+        self._well_known_agent = _well_known_agent
+
     @defer.inlineCallbacks
     def request(self, method, uri, headers=None, bodyProducer=None):
         """
@@ -114,7 +130,11 @@ class MatrixFederationAgent(object):
         class EndpointFactory(object):
             @staticmethod
             def endpointForURI(_uri):
-                logger.info("Connecting to %s:%s", res.target_host, res.target_port)
+                logger.info(
+                    "Connecting to %s:%i",
+                    res.target_host.decode("ascii"),
+                    res.target_port,
+                )
                 ep = HostnameEndpoint(self._reactor, res.target_host, res.target_port)
                 if tls_options is not None:
                     ep = wrapClientTLS(tls_options, ep)
@@ -127,7 +147,7 @@ class MatrixFederationAgent(object):
         defer.returnValue(res)
 
     @defer.inlineCallbacks
-    def _route_matrix_uri(self, parsed_uri):
+    def _route_matrix_uri(self, parsed_uri, lookup_well_known=True):
         """Helper for `request`: determine the routing for a Matrix URI
 
         Args:
@@ -135,6 +155,9 @@ class MatrixFederationAgent(object):
                 parsed with URI.fromBytes(uri, defaultPort=-1) to set the `port` to -1
                 if there is no explicit port given.
 
+            lookup_well_known (bool): True if we should look up the .well-known file if
+                there is no SRV record.
+
         Returns:
             Deferred[_RoutingResult]
         """
@@ -169,6 +192,42 @@ class MatrixFederationAgent(object):
         service_name = b"_matrix._tcp.%s" % (parsed_uri.host,)
         server_list = yield self._srv_resolver.resolve_service(service_name)
 
+        if not server_list and lookup_well_known:
+            # try a .well-known lookup
+            well_known_server = yield self._get_well_known(parsed_uri.host)
+
+            if well_known_server:
+                # if we found a .well-known, start again, but don't do another
+                # .well-known lookup.
+
+                # parse the server name in the .well-known response into host/port.
+                # (This code is lifted from twisted.web.client.URI.fromBytes).
+                if b':' in well_known_server:
+                    well_known_host, well_known_port = well_known_server.rsplit(b':', 1)
+                    try:
+                        well_known_port = int(well_known_port)
+                    except ValueError:
+                        # the part after the colon could not be parsed as an int
+                        # - we assume it is an IPv6 literal with no port (the closing
+                        # ']' stops it being parsed as an int)
+                        well_known_host, well_known_port = well_known_server, -1
+                else:
+                    well_known_host, well_known_port = well_known_server, -1
+
+                new_uri = URI(
+                    scheme=parsed_uri.scheme,
+                    netloc=well_known_server,
+                    host=well_known_host,
+                    port=well_known_port,
+                    path=parsed_uri.path,
+                    params=parsed_uri.params,
+                    query=parsed_uri.query,
+                    fragment=parsed_uri.fragment,
+                )
+
+                res = yield self._route_matrix_uri(new_uri, lookup_well_known=False)
+                defer.returnValue(res)
+
         if not server_list:
             target_host = parsed_uri.host
             port = 8448
@@ -190,6 +249,53 @@ class MatrixFederationAgent(object):
             target_port=port,
         ))
 
+    @defer.inlineCallbacks
+    def _get_well_known(self, server_name):
+        """Attempt to fetch and parse a .well-known file for the given server
+
+        Args:
+            server_name (bytes): name of the server, from the requested url
+
+        Returns:
+            Deferred[bytes|None]: either the new server name, from the .well-known, or
+                None if there was no .well-known file.
+        """
+        # FIXME: add a cache
+
+        uri = b"https://%s/.well-known/matrix/server" % (server_name, )
+        logger.info("Fetching %s", uri.decode("ascii"))
+        try:
+            response = yield make_deferred_yieldable(
+                self._well_known_agent.request(b"GET", uri),
+            )
+        except Exception as e:
+            logger.info(
+                "Connection error fetching %s: %s",
+                uri.decode("ascii"), e,
+            )
+            defer.returnValue(None)
+
+        body = yield make_deferred_yieldable(readBody(response))
+
+        if response.code != 200:
+            logger.info(
+                "Error response %i from %s: %s",
+                response.code, uri.decode("ascii"), body,
+            )
+            defer.returnValue(None)
+
+        content_types = response.headers.getRawHeaders(u'content-type')
+        if content_types is None:
+            raise Exception("no content-type header on .well-known response")
+        content_type, _opts = cgi.parse_header(content_types[-1])
+        if content_type != 'application/json':
+            raise Exception("content-type not application/json on .well-known response")
+        parsed_body = json.loads(body.decode('utf-8'))
+        logger.info("Response from .well-known: %s", parsed_body)
+        if not isinstance(parsed_body, dict) or "m.server" not in parsed_body:
+            raise Exception("invalid .well-known response")
+        defer.returnValue(parsed_body["m.server"].encode("ascii"))
+
 
 @attr.s
 class _RoutingResult(object):
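
The host/port split in `_route_matrix_uri` above follows `twisted.web.client.URI.fromBytes`. A minimal standalone sketch of the same parsing (the helper name is purely illustrative, not part of this change):

    def parse_well_known_server(well_known_server):
        # split b"host:port"; bare hosts and IPv6 literals fall back to port -1
        if b':' in well_known_server:
            host, _, port = well_known_server.rpartition(b':')
            try:
                return host, int(port)
            except ValueError:
                # no parseable port (e.g. an IPv6 literal): use the whole value
                return well_known_server, -1
        return well_known_server, -1

    # parse_well_known_server(b"example.com:8448") == (b"example.com", 8448)
    # parse_well_known_server(b"[1::f]") == (b"[1::f]", -1)
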
diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py
index 2e16c69666..0f0a07c422 100644
--- a/synapse/replication/http/federation.py
+++ b/synapse/replication/http/federation.py
@@ -95,7 +95,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
             event_and_contexts = []
             for event_payload in event_payloads:
                 event_dict = event_payload["event"]
-                format_ver = content["event_format_version"]
+                format_ver = event_payload["event_format_version"]
                 internal_metadata = event_payload["internal_metadata"]
                 rejected_reason = event_payload["rejected_reason"]
 
diff --git a/synapse/server.py b/synapse/server.py
index c8914302cf..6c52101616 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -355,10 +355,7 @@ class HomeServer(object):
         return Keyring(self)
 
     def build_event_builder_factory(self):
-        return EventBuilderFactory(
-            clock=self.get_clock(),
-            hostname=self.hostname,
-        )
+        return EventBuilderFactory(self)
 
     def build_filtering(self):
         return Filtering(self)
diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py
index e9ecb00277..2fca51d0b2 100644
--- a/synapse/state/__init__.py
+++ b/synapse/state/__init__.py
@@ -611,7 +611,7 @@ def resolve_events_with_store(room_version, state_sets, event_map, state_res_sto
         RoomVersions.VDH_TEST, RoomVersions.STATE_V2_TEST, RoomVersions.V2,
     ):
         return v2.resolve_events_with_store(
-            state_sets, event_map, state_res_store,
+            room_version, state_sets, event_map, state_res_store,
         )
     else:
         # This should only happen if we added a version but forgot to add it to
diff --git a/synapse/state/v1.py b/synapse/state/v1.py
index 19e091ce3b..6d3afcae7c 100644
--- a/synapse/state/v1.py
+++ b/synapse/state/v1.py
@@ -21,7 +21,7 @@ from six import iteritems, iterkeys, itervalues
 from twisted.internet import defer
 
 from synapse import event_auth
-from synapse.api.constants import EventTypes
+from synapse.api.constants import EventTypes, RoomVersions
 from synapse.api.errors import AuthError
 
 logger = logging.getLogger(__name__)
@@ -274,7 +274,11 @@ def _resolve_auth_events(events, auth_events):
         auth_events[(prev_event.type, prev_event.state_key)] = prev_event
         try:
             # The signatures have already been checked at this point
-            event_auth.check(event, auth_events, do_sig_check=False, do_size_check=False)
+            event_auth.check(
+                RoomVersions.V1, event, auth_events,
+                do_sig_check=False,
+                do_size_check=False,
+            )
             prev_event = event
         except AuthError:
             return prev_event
@@ -286,7 +290,11 @@ def _resolve_normal_events(events, auth_events):
     for event in _ordered_events(events):
         try:
             # The signatures have already been checked at this point
-            event_auth.check(event, auth_events, do_sig_check=False, do_size_check=False)
+            event_auth.check(
+                RoomVersions.V1, event, auth_events,
+                do_sig_check=False,
+                do_size_check=False,
+            )
             return event
         except AuthError:
             pass
diff --git a/synapse/state/v2.py b/synapse/state/v2.py
index 3573bb0028..650995c92c 100644
--- a/synapse/state/v2.py
+++ b/synapse/state/v2.py
@@ -29,10 +29,12 @@ logger = logging.getLogger(__name__)
 
 
 @defer.inlineCallbacks
-def resolve_events_with_store(state_sets, event_map, state_res_store):
+def resolve_events_with_store(room_version, state_sets, event_map, state_res_store):
     """Resolves the state using the v2 state resolution algorithm
 
     Args:
+        room_version (str): The room version
+
         state_sets(list): List of dicts of (type, state_key) -> event_id,
             which are the different state groups to resolve.
 
@@ -104,7 +106,7 @@ def resolve_events_with_store(state_sets, event_map, state_res_store):
 
     # Now sequentially auth each one
     resolved_state = yield _iterative_auth_checks(
-        sorted_power_events, unconflicted_state, event_map,
+        room_version, sorted_power_events, unconflicted_state, event_map,
         state_res_store,
     )
 
@@ -129,7 +131,7 @@ def resolve_events_with_store(state_sets, event_map, state_res_store):
     logger.debug("resolving remaining events")
 
     resolved_state = yield _iterative_auth_checks(
-        leftover_events, resolved_state, event_map,
+        room_version, leftover_events, resolved_state, event_map,
         state_res_store,
     )
 
@@ -350,11 +352,13 @@ def _reverse_topological_power_sort(event_ids, event_map, state_res_store, auth_
 
 
 @defer.inlineCallbacks
-def _iterative_auth_checks(event_ids, base_state, event_map, state_res_store):
+def _iterative_auth_checks(room_version, event_ids, base_state, event_map,
+                           state_res_store):
     """Sequentially apply auth checks to each event in given list, updating the
     state as it goes along.
 
     Args:
+        room_version (str)
         event_ids (list[str]): Ordered list of events to apply auth checks to
         base_state (dict[tuple[str, str], str]): The set of state to start with
         event_map (dict[str,FrozenEvent])
@@ -385,7 +389,7 @@ def _iterative_auth_checks(event_ids, base_state, event_map, state_res_store):
 
         try:
             event_auth.check(
-                event, auth_events,
+                room_version, event, auth_events,
                 do_sig_check=False,
                 do_size_check=False
             )
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 5109bc3e2e..4872ff55b6 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -236,7 +236,7 @@ class SQLBaseStore(object):
             self._unsafe_to_upsert_tables.discard("user_ips")
 
         # If there's any tables left to check, reschedule to run.
-        if self.updates:
+        if updates:
             self._clock.call_later(
                 15.0,
                 run_as_background_process,
diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py
index d3b9dea1d6..38809ed0fc 100644
--- a/synapse/storage/event_federation.py
+++ b/synapse/storage/event_federation.py
@@ -125,6 +125,29 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore,
 
         return dict(txn)
 
+    @defer.inlineCallbacks
+    def get_max_depth_of(self, event_ids):
+        """Returns the max depth of a set of event IDs
+
+        Args:
+            event_ids (list[str])
+
+        Returns:
+            Deferred[int]
+        """
+        rows = yield self._simple_select_many_batch(
+            table="events",
+            column="event_id",
+            iterable=event_ids,
+            retcols=("depth",),
+            desc="get_max_depth_of",
+        )
+
+        if not rows:
+            defer.returnValue(0)
+        else:
+            defer.returnValue(max(row["depth"] for row in rows))
+
     def _get_oldest_events_in_room_txn(self, txn, room_id):
         return self._simple_select_onecol_txn(
             txn,
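
A hedged sketch of how `get_max_depth_of` can feed the depth calculation that the message handler previously did inline (the helper and its caller are assumptions for illustration, assuming MAX_DEPTH still lives in synapse.api.constants):

    from twisted.internet import defer

    from synapse.api.constants import MAX_DEPTH


    @defer.inlineCallbacks
    def compute_depth(store, prev_event_ids):
        # depth is one more than the deepest prev event, capped at MAX_DEPTH so
        # that other servers do not reject the event; 0 + 1 = 1 for a new room
        max_depth = yield store.get_max_depth_of(prev_event_ids)
        defer.returnValue(min(max_depth + 1, MAX_DEPTH))
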
diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py
index 430bb15f51..f0e4a0e10c 100644
--- a/synapse/util/async_helpers.py
+++ b/synapse/util/async_helpers.py
@@ -201,7 +201,7 @@ class Linearizer(object):
         if entry[0] >= self.max_count:
             res = self._await_lock(key)
         else:
-            logger.info(
+            logger.debug(
                 "Acquired uncontended linearizer lock %r for key %r", self.name, key,
             )
             entry[0] += 1
@@ -215,7 +215,7 @@ class Linearizer(object):
             try:
                 yield
             finally:
-                logger.info("Releasing linearizer lock %r for key %r", self.name, key)
+                logger.debug("Releasing linearizer lock %r for key %r", self.name, key)
 
                 # We've finished executing so check if there are any things
                 # blocked waiting to execute and start one of them
@@ -247,7 +247,7 @@ class Linearizer(object):
         """
         entry = self.key_to_defer[key]
 
-        logger.info(
+        logger.debug(
             "Waiting to acquire linearizer lock %r for key %r", self.name, key,
         )
 
@@ -255,7 +255,7 @@ class Linearizer(object):
         entry[1][new_defer] = 1
 
         def cb(_r):
-            logger.info("Acquired linearizer lock %r for key %r", self.name, key)
+            logger.debug("Acquired linearizer lock %r for key %r", self.name, key)
             entry[0] += 1
 
             # if the code holding the lock completes synchronously, then it
@@ -273,7 +273,7 @@ class Linearizer(object):
         def eb(e):
             logger.info("defer %r got err %r", new_defer, e)
             if isinstance(e, CancelledError):
-                logger.info(
+                logger.debug(
                     "Cancelling wait for linearizer lock %r for key %r",
                     self.name, key,
                 )
diff --git a/tests/config/test_generate.py b/tests/config/test_generate.py
index b5ad99348d..795b4c298d 100644
--- a/tests/config/test_generate.py
+++ b/tests/config/test_generate.py
@@ -50,8 +50,6 @@ class ConfigGenerationTestCase(unittest.TestCase):
                     "homeserver.yaml",
                     "lemurs.win.log.config",
                     "lemurs.win.signing.key",
-                    "lemurs.win.tls.crt",
-                    "lemurs.win.tls.key",
                 ]
             ),
             set(os.listdir(self.dir)),
diff --git a/tests/config/test_tls.py b/tests/config/test_tls.py
new file mode 100644
index 0000000000..4ccaf35603
--- /dev/null
+++ b/tests/config/test_tls.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from synapse.config.tls import TlsConfig
+
+from tests.unittest import TestCase
+
+
+class TLSConfigTests(TestCase):
+
+    def test_warn_self_signed(self):
+        """
+        Synapse will give a warning when it loads a self-signed certificate.
+        """
+        config_dir = self.mktemp()
+        os.mkdir(config_dir)
+        with open(os.path.join(config_dir, "cert.pem"), 'w') as f:
+            f.write("""-----BEGIN CERTIFICATE-----
+MIID6DCCAtACAws9CjANBgkqhkiG9w0BAQUFADCBtzELMAkGA1UEBhMCVFIxDzAN
+BgNVBAgMBsOHb3J1bTEUMBIGA1UEBwwLQmHFn21ha8OnxLExEjAQBgNVBAMMCWxv
+Y2FsaG9zdDEcMBoGA1UECgwTVHdpc3RlZCBNYXRyaXggTGFiczEkMCIGA1UECwwb
+QXV0b21hdGVkIFRlc3RpbmcgQXV0aG9yaXR5MSkwJwYJKoZIhvcNAQkBFhpzZWN1
+cml0eUB0d2lzdGVkbWF0cml4LmNvbTAgFw0xNzA3MTIxNDAxNTNaGA8yMTE3MDYx
+ODE0MDE1M1owgbcxCzAJBgNVBAYTAlRSMQ8wDQYDVQQIDAbDh29ydW0xFDASBgNV
+BAcMC0JhxZ9tYWvDp8SxMRIwEAYDVQQDDAlsb2NhbGhvc3QxHDAaBgNVBAoME1R3
+aXN0ZWQgTWF0cml4IExhYnMxJDAiBgNVBAsMG0F1dG9tYXRlZCBUZXN0aW5nIEF1
+dGhvcml0eTEpMCcGCSqGSIb3DQEJARYac2VjdXJpdHlAdHdpc3RlZG1hdHJpeC5j
+b20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDwT6kbqtMUI0sMkx4h
+I+L780dA59KfksZCqJGmOsMD6hte9EguasfkZzvCF3dk3NhwCjFSOvKx6rCwiteo
+WtYkVfo+rSuVNmt7bEsOUDtuTcaxTzIFB+yHOYwAaoz3zQkyVW0c4pzioiLCGCmf
+FLdiDBQGGp74tb+7a0V6kC3vMLFoM3L6QWq5uYRB5+xLzlPJ734ltyvfZHL3Us6p
+cUbK+3WTWvb4ER0W2RqArAj6Bc/ERQKIAPFEiZi9bIYTwvBH27OKHRz+KoY/G8zY
++l+WZoJqDhupRAQAuh7O7V/y6bSP+KNxJRie9QkZvw1PSaGSXtGJI3WWdO12/Ulg
+epJpAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAJXEq5P9xwvP9aDkXIqzcD0L8sf8
+ewlhlxTQdeqt2Nace0Yk18lIo2oj1t86Y8jNbpAnZJeI813Rr5M7FbHCXoRc/SZG
+I8OtG1xGwcok53lyDuuUUDexnK4O5BkjKiVlNPg4HPim5Kuj2hRNFfNt/F2BVIlj
+iZupikC5MT1LQaRwidkSNxCku1TfAyueiBwhLnFwTmIGNnhuDCutEVAD9kFmcJN2
+SznugAcPk4doX2+rL+ila+ThqgPzIkwTUHtnmjI0TI6xsDUlXz5S3UyudrE2Qsfz
+s4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg=
+-----END CERTIFICATE-----""")
+
+        config = {
+            "tls_certificate_path": os.path.join(config_dir, "cert.pem"),
+            "no_tls": True,
+            "tls_fingerprints": []
+        }
+
+        t = TlsConfig()
+        t.read_config(config)
+        t.read_certificate_from_disk()
+
+        warnings = self.flushWarnings()
+        self.assertEqual(len(warnings), 1)
+        self.assertEqual(
+            warnings[0]["message"],
+            (
+                "Self-signed TLS certificates will not be accepted by "
+                "Synapse 1.0. Please either provide a valid certificate, "
+                "or use Synapse's ACME support to provision one."
+            )
+        )
diff --git a/tests/crypto/test_event_signing.py b/tests/crypto/test_event_signing.py
index b2536c1e69..71aa731439 100644
--- a/tests/crypto/test_event_signing.py
+++ b/tests/crypto/test_event_signing.py
@@ -18,7 +18,7 @@ import nacl.signing
 from unpaddedbase64 import decode_base64
 
 from synapse.crypto.event_signing import add_hashes_and_signatures
-from synapse.events.builder import EventBuilder
+from synapse.events import FrozenEvent
 
 from tests import unittest
 
@@ -40,20 +40,18 @@ class EventSigningTestCase(unittest.TestCase):
         self.signing_key.version = KEY_VER
 
     def test_sign_minimal(self):
-        builder = EventBuilder(
-            {
-                'event_id': "$0:domain",
-                'origin': "domain",
-                'origin_server_ts': 1000000,
-                'signatures': {},
-                'type': "X",
-                'unsigned': {'age_ts': 1000000},
-            }
-        )
+        event_dict = {
+            'event_id': "$0:domain",
+            'origin': "domain",
+            'origin_server_ts': 1000000,
+            'signatures': {},
+            'type': "X",
+            'unsigned': {'age_ts': 1000000},
+        }
 
-        add_hashes_and_signatures(builder, HOSTNAME, self.signing_key)
+        add_hashes_and_signatures(event_dict, HOSTNAME, self.signing_key)
 
-        event = builder.build()
+        event = FrozenEvent(event_dict)
 
         self.assertTrue(hasattr(event, 'hashes'))
         self.assertIn('sha256', event.hashes)
@@ -71,23 +69,21 @@ class EventSigningTestCase(unittest.TestCase):
         )
 
     def test_sign_message(self):
-        builder = EventBuilder(
-            {
-                'content': {'body': "Here is the message content"},
-                'event_id': "$0:domain",
-                'origin': "domain",
-                'origin_server_ts': 1000000,
-                'type': "m.room.message",
-                'room_id': "!r:domain",
-                'sender': "@u:domain",
-                'signatures': {},
-                'unsigned': {'age_ts': 1000000},
-            }
-        )
-
-        add_hashes_and_signatures(builder, HOSTNAME, self.signing_key)
-
-        event = builder.build()
+        event_dict = {
+            'content': {'body': "Here is the message content"},
+            'event_id': "$0:domain",
+            'origin': "domain",
+            'origin_server_ts': 1000000,
+            'type': "m.room.message",
+            'room_id': "!r:domain",
+            'sender': "@u:domain",
+            'signatures': {},
+            'unsigned': {'age_ts': 1000000},
+        }
+
+        add_hashes_and_signatures(event_dict, HOSTNAME, self.signing_key)
+
+        event = FrozenEvent(event_dict)
 
         self.assertTrue(hasattr(event, 'hashes'))
         self.assertIn('sha256', event.hashes)
diff --git a/tests/http/__init__.py b/tests/http/__init__.py
index e69de29bb2..ee8010f598 100644
--- a/tests/http/__init__.py
+++ b/tests/http/__init__.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os.path
+
+from OpenSSL import SSL
+
+
+def get_test_cert_file():
+    """get the path to the test cert"""
+
+    # the cert file itself is made with:
+    #
+    # openssl req -x509 -newkey rsa:4096 -keyout server.pem  -out server.pem -days 36500 \
+    #     -nodes -subj '/CN=testserv'
+    return os.path.join(
+        os.path.dirname(__file__),
+        'server.pem',
+    )
+
+
+class ServerTLSContext(object):
+    """A TLS Context which presents our test cert."""
+    def __init__(self):
+        self.filename = get_test_cert_file()
+
+    def getContext(self):
+        ctx = SSL.Context(SSL.TLSv1_METHOD)
+        ctx.use_certificate_file(self.filename)
+        ctx.use_privatekey_file(self.filename)
+        return ctx
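
The agent tests wire this context up to a TLS test server roughly as in the following sketch (a simplified take on `_build_test_server` in the test file below; details such as setting `Factory.log` are omitted):

    from twisted.internet.protocol import Factory
    from twisted.protocols.tls import TLSMemoryBIOFactory
    from twisted.web.http import HTTPChannel

    from tests.http import ServerTLSContext


    def build_test_tls_server():
        # wrap a plain HTTPChannel factory in TLS using the self-signed test cert
        server_factory = Factory.forProtocol(HTTPChannel)
        tls_factory = TLSMemoryBIOFactory(
            ServerTLSContext(), isClient=False, wrappedFactory=server_factory,
        )
        return tls_factory.buildProtocol(None)
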
diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py
index 8257594fb8..bd80dd0cb6 100644
--- a/tests/http/federation/test_matrix_federation_agent.py
+++ b/tests/http/federation/test_matrix_federation_agent.py
@@ -17,18 +17,21 @@ import logging
 from mock import Mock
 
 import treq
+from zope.interface import implementer
 
 from twisted.internet import defer
+from twisted.internet._sslverify import ClientTLSOptions, OpenSSLCertificateOptions
 from twisted.internet.protocol import Factory
 from twisted.protocols.tls import TLSMemoryBIOFactory
-from twisted.test.ssl_helpers import ServerTLSContext
 from twisted.web.http import HTTPChannel
+from twisted.web.iweb import IPolicyForHTTPS
 
 from synapse.crypto.context_factory import ClientTLSOptionsFactory
 from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
 from synapse.http.federation.srv_resolver import Server
 from synapse.util.logcontext import LoggingContext
 
+from tests.http import ServerTLSContext
 from tests.server import FakeTransport, ThreadedMemoryReactorClock
 from tests.unittest import TestCase
 
@@ -44,6 +47,7 @@ class MatrixFederationAgentTests(TestCase):
         self.agent = MatrixFederationAgent(
             reactor=self.reactor,
             tls_client_options_factory=ClientTLSOptionsFactory(None),
+            _well_known_tls_policy=TrustingTLSPolicyForHTTPS(),
             _srv_resolver=self.mock_resolver,
         )
 
@@ -65,10 +69,14 @@ class MatrixFederationAgentTests(TestCase):
         # Normally this would be done by the TCP socket code in Twisted, but we are
         # stubbing that out here.
         client_protocol = client_factory.buildProtocol(None)
-        client_protocol.makeConnection(FakeTransport(server_tls_protocol, self.reactor))
+        client_protocol.makeConnection(
+            FakeTransport(server_tls_protocol, self.reactor, client_protocol),
+        )
 
         # tell the server tls protocol to send its stuff back to the client, too
-        server_tls_protocol.makeConnection(FakeTransport(client_protocol, self.reactor))
+        server_tls_protocol.makeConnection(
+            FakeTransport(client_protocol, self.reactor, server_tls_protocol),
+        )
 
         # give the reactor a pump to get the TLS juices flowing.
         self.reactor.pump((0.1,))
@@ -101,9 +109,49 @@ class MatrixFederationAgentTests(TestCase):
             try:
                 fetch_res = yield fetch_d
                 defer.returnValue(fetch_res)
+            except Exception as e:
+                logger.info("Fetch of %s failed: %s", uri.decode("ascii"), e)
+                raise
             finally:
                 _check_logcontext(context)
 
+    def _handle_well_known_connection(self, client_factory, expected_sni, target_server):
+        """Handle an outgoing HTTPs connection: wire it up to a server, check that the
+        request is for a .well-known, and send the response.
+
+        Args:
+            client_factory (IProtocolFactory): outgoing connection
+            expected_sni (bytes): SNI that we expect the outgoing connection to send
+            target_server (bytes): target server that we should redirect to in the
+                .well-known response.
+        """
+        # make the connection for .well-known
+        well_known_server = self._make_connection(
+            client_factory,
+            expected_sni=expected_sni,
+        )
+        # check the .well-known request and send a response
+        self.assertEqual(len(well_known_server.requests), 1)
+        request = well_known_server.requests[0]
+        self._send_well_known_response(request, target_server)
+
+    def _send_well_known_response(self, request, target_server):
+        """Check that an incoming request looks like a valid .well-known request, and
+        send back the response.
+        """
+        self.assertEqual(request.method, b'GET')
+        self.assertEqual(request.path, b'/.well-known/matrix/server')
+        self.assertEqual(
+            request.requestHeaders.getRawHeaders(b'host'),
+            [b'testserv'],
+        )
+        # send back a response
+        request.responseHeaders.setRawHeaders(b'Content-Type', [b'application/json'])
+        request.write(b'{ "m.server": "%s" }' % (target_server,))
+        request.finish()
+
+        self.reactor.pump((0.1, ))
+
     def test_get(self):
         """
         happy-path test of a GET request with an explicit port
@@ -283,9 +331,9 @@ class MatrixFederationAgentTests(TestCase):
         self.reactor.pump((0.1,))
         self.successResultOf(test_d)
 
-    def test_get_hostname_no_srv(self):
+    def test_get_no_srv_no_well_known(self):
         """
-        Test the behaviour when the server name has no port, and no SRV record
+        Test the behaviour when the server name has no port, no SRV, and no well-known
         """
 
         self.mock_resolver.resolve_service.side_effect = lambda _: []
@@ -300,11 +348,24 @@ class MatrixFederationAgentTests(TestCase):
             b"_matrix._tcp.testserv",
         )
 
-        # Make sure treq is trying to connect
+        # there should be an attempt to connect on port 443 for the .well-known
         clients = self.reactor.tcpClients
         self.assertEqual(len(clients), 1)
         (host, port, client_factory, _timeout, _bindAddress) = clients[0]
         self.assertEqual(host, '1.2.3.4')
+        self.assertEqual(port, 443)
+
+        # fail the connection
+        client_factory.clientConnectionFailed(None, Exception("nope"))
+
+        # attemptDelay on the HostnameEndpoint is 0.3, so it takes that long before the
+        # .well-known request fails.
+        self.reactor.pump((0.4,))
+
+        # we should fall back to a direct connection
+        self.assertEqual(len(clients), 2)
+        (host, port, client_factory, _timeout, _bindAddress) = clients[1]
+        self.assertEqual(host, '1.2.3.4')
         self.assertEqual(port, 8448)
 
         # make a test server, and wire up the client
@@ -327,6 +388,67 @@ class MatrixFederationAgentTests(TestCase):
         self.reactor.pump((0.1,))
         self.successResultOf(test_d)
 
+    def test_get_well_known(self):
+        """Test the behaviour when the server name has no port and no SRV record, but
+        the .well-known redirects elsewhere
+        """
+
+        self.mock_resolver.resolve_service.side_effect = lambda _: []
+        self.reactor.lookups["testserv"] = "1.2.3.4"
+        self.reactor.lookups["target-server"] = "1::f"
+
+        test_d = self._make_get_request(b"matrix://testserv/foo/bar")
+
+        # Nothing happened yet
+        self.assertNoResult(test_d)
+
+        self.mock_resolver.resolve_service.assert_called_once_with(
+            b"_matrix._tcp.testserv",
+        )
+        self.mock_resolver.resolve_service.reset_mock()
+
+        # there should be an attempt to connect on port 443 for the .well-known
+        clients = self.reactor.tcpClients
+        self.assertEqual(len(clients), 1)
+        (host, port, client_factory, _timeout, _bindAddress) = clients[0]
+        self.assertEqual(host, '1.2.3.4')
+        self.assertEqual(port, 443)
+
+        self._handle_well_known_connection(
+            client_factory, expected_sni=b"testserv", target_server=b"target-server",
+        )
+
+        # there should be another SRV lookup
+        self.mock_resolver.resolve_service.assert_called_once_with(
+            b"_matrix._tcp.target-server",
+        )
+
+        # now we should get a connection to the target server
+        self.assertEqual(len(clients), 2)
+        (host, port, client_factory, _timeout, _bindAddress) = clients[1]
+        self.assertEqual(host, '1::f')
+        self.assertEqual(port, 8448)
+
+        # make a test server, and wire up the client
+        http_server = self._make_connection(
+            client_factory,
+            expected_sni=b'target-server',
+        )
+
+        self.assertEqual(len(http_server.requests), 1)
+        request = http_server.requests[0]
+        self.assertEqual(request.method, b'GET')
+        self.assertEqual(request.path, b'/foo/bar')
+        self.assertEqual(
+            request.requestHeaders.getRawHeaders(b'host'),
+            [b'target-server'],
+        )
+
+        # finish the request
+        request.finish()
+        self.reactor.pump((0.1,))
+        self.successResultOf(test_d)
+
     def test_get_hostname_srv(self):
         """
         Test the behaviour when there is a single SRV record
@@ -372,14 +494,78 @@ class MatrixFederationAgentTests(TestCase):
         self.reactor.pump((0.1,))
         self.successResultOf(test_d)
 
+    def test_get_well_known_srv(self):
+        """Test the behaviour when the server name has no port and no SRV record, but
+        the .well-known redirects to a place where there is a SRV.
+        """
+
+        self.mock_resolver.resolve_service.side_effect = lambda _: []
+        self.reactor.lookups["testserv"] = "1.2.3.4"
+        self.reactor.lookups["srvtarget"] = "5.6.7.8"
+
+        test_d = self._make_get_request(b"matrix://testserv/foo/bar")
+
+        # Nothing happened yet
+        self.assertNoResult(test_d)
+
+        self.mock_resolver.resolve_service.assert_called_once_with(
+            b"_matrix._tcp.testserv",
+        )
+        self.mock_resolver.resolve_service.reset_mock()
+
+        # there should be an attempt to connect on port 443 for the .well-known
+        clients = self.reactor.tcpClients
+        self.assertEqual(len(clients), 1)
+        (host, port, client_factory, _timeout, _bindAddress) = clients[0]
+        self.assertEqual(host, '1.2.3.4')
+        self.assertEqual(port, 443)
+
+        self.mock_resolver.resolve_service.side_effect = lambda _: [
+            Server(host=b"srvtarget", port=8443),
+        ]
+
+        self._handle_well_known_connection(
+            client_factory, expected_sni=b"testserv", target_server=b"target-server",
+        )
+
+        # there should be another SRV lookup
+        self.mock_resolver.resolve_service.assert_called_once_with(
+            b"_matrix._tcp.target-server",
+        )
+
+        # now we should get a connection to the target of the SRV record
+        self.assertEqual(len(clients), 2)
+        (host, port, client_factory, _timeout, _bindAddress) = clients[1]
+        self.assertEqual(host, '5.6.7.8')
+        self.assertEqual(port, 8443)
+
+        # make a test server, and wire up the client
+        http_server = self._make_connection(
+            client_factory,
+            expected_sni=b'target-server',
+        )
+
+        self.assertEqual(len(http_server.requests), 1)
+        request = http_server.requests[0]
+        self.assertEqual(request.method, b'GET')
+        self.assertEqual(request.path, b'/foo/bar')
+        self.assertEqual(
+            request.requestHeaders.getRawHeaders(b'host'),
+            [b'target-server'],
+        )
+
+        # finish the request
+        request.finish()
+        self.reactor.pump((0.1,))
+        self.successResultOf(test_d)
+
     def test_idna_servername(self):
         """test the behaviour when the server name has idna chars in"""
 
         self.mock_resolver.resolve_service.side_effect = lambda _: []
 
-        # hostnameendpoint does the lookup on the unicode value (getaddrinfo encodes
-        # it back to idna)
-        self.reactor.lookups[u"bücher.com"] = "1.2.3.4"
+        # the resolver is always called with the IDNA hostname as a native string.
+        self.reactor.lookups["xn--bcher-kva.com"] = "1.2.3.4"
 
         # this is idna for bücher.com
         test_d = self._make_get_request(b"matrix://xn--bcher-kva.com/foo/bar")
@@ -391,11 +577,25 @@ class MatrixFederationAgentTests(TestCase):
             b"_matrix._tcp.xn--bcher-kva.com",
         )
 
-        # Make sure treq is trying to connect
+        # there should be an attempt to connect on port 443 for the .well-known
         clients = self.reactor.tcpClients
         self.assertEqual(len(clients), 1)
         (host, port, client_factory, _timeout, _bindAddress) = clients[0]
         self.assertEqual(host, '1.2.3.4')
+        self.assertEqual(port, 443)
+
+        # fail the connection
+        client_factory.clientConnectionFailed(None, Exception("nope"))
+
+        # attemptDelay on the HostnameEndpoint is 0.3, so it takes that long before the
+        # .well-known request fails.
+        self.reactor.pump((0.4,))
+
+        # We should fall back to port 8448
+        clients = self.reactor.tcpClients
+        self.assertEqual(len(clients), 2)
+        (host, port, client_factory, _timeout, _bindAddress) = clients[1]
+        self.assertEqual(host, '1.2.3.4')
         self.assertEqual(port, 8448)
 
         # make a test server, and wire up the client
@@ -424,7 +624,7 @@ class MatrixFederationAgentTests(TestCase):
         self.mock_resolver.resolve_service.side_effect = lambda _: [
             Server(host=b"xn--trget-3qa.com", port=8443)  # târget.com
         ]
-        self.reactor.lookups[u"târget.com"] = "1.2.3.4"
+        self.reactor.lookups["xn--trget-3qa.com"] = "1.2.3.4"
 
         test_d = self._make_get_request(b"matrix://xn--bcher-kva.com/foo/bar")
 
@@ -493,3 +693,11 @@ def _build_test_server():
 def _log_request(request):
     """Implements Factory.log, which is expected by Request.finish"""
     logger.info("Completed request %s", request)
+
+
+@implementer(IPolicyForHTTPS)
+class TrustingTLSPolicyForHTTPS(object):
+    """An IPolicyForHTTPS which doesn't do any certificate verification"""
+    def creatorForNetloc(self, hostname, port):
+        certificateOptions = OpenSSLCertificateOptions()
+        return ClientTLSOptions(hostname, certificateOptions.getContext())
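
For reference, the .well-known document these tests exercise, and which `_get_well_known` accepts, is a JSON object served as application/json with an "m.server" key; a sketch of the body sent by `_send_well_known_response` above:

    import json

    # response body for GET https://testserv/.well-known/matrix/server; the agent
    # requires Content-Type: application/json and an "m.server" key (a port may be
    # appended, e.g. "target-server:8448", per the parsing in the agent)
    well_known_body = json.dumps({"m.server": "target-server"}).encode("utf-8")
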
diff --git a/tests/http/server.pem b/tests/http/server.pem
new file mode 100644
index 0000000000..0584cf1a80
--- /dev/null
+++ b/tests/http/server.pem
@@ -0,0 +1,81 @@
+-----BEGIN PRIVATE KEY-----
+MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCgF43/3lAgJ+p0
+x7Rn8UcL8a4fctvdkikvZrCngw96LkB34Evfq8YGWlOVjU+f9naUJLAKMatmAfEN
+r+rMX4VOXmpTwuu6iLtqwreUrRFMESyrmvQxa15p+y85gkY0CFmXMblv6ORbxHTG
+ncBGwST4WK4Poewcgt6jcISFCESTUKu1zc3cw1ANIDRyDLB5K44KwIe36dcKckyN
+Kdtv4BJ+3fcIZIkPJH62zqCypgFF1oiFt40uJzClxgHdJZlKYpgkfnDTckw4Y/Mx
+9k8BbE310KAzUNMV9H7I1eEolzrNr66FQj1eN64X/dqO8lTbwCqAd4diCT4sIUk0
+0SVsAUjNd3g8j651hx+Qb1t8fuOjrny8dmeMxtUgIBHoQcpcj76R55Fs7KZ9uar0
+8OFTyGIze51W1jG2K/7/5M1zxIqrA+7lsXu5OR81s7I+Ng/UUAhiHA/z+42/aiNa
+qEuk6tqj3rHfLctnCbtZ+JrRNqSSwEi8F0lMA021ivEd2eJV+284OyJjhXOmKHrX
+QADHrmS7Sh4syTZvRNm9n+qWID0KdDr2Sji/KnS3Enp44HDQ4xriT6/xhwEGsyuX
+oH5aAkdLznulbWkHBbyx1SUQSTLpOqzaioF9m1vRrLsFvrkrY3D253mPJ5eU9HM/
+dilduFcUgj4rz+6cdXUAh+KK/v95zwIDAQABAoICAFG5tJPaOa0ws0/KYx5s3YgL
+aIhFalhCNSQtmCDrlwsYcXDA3/rfBchYdDL0YKGYgBBAal3J3WXFt/j0xThvyu2m
+5UC9UPl4s7RckrsjXqEmY1d3UxGnbhtMT19cUdpeKN42VCP9EBaIw9Rg07dLAkSF
+gNYaIx6q8F0fI4eGIPvTQtUcqur4CfWpaxyNvckdovV6M85/YXfDwbCOnacPDGIX
+jfSK3i0MxGMuOHr6o8uzKR6aBUh6WStHWcw7VXXTvzdiFNbckmx3Gb93rf1b/LBw
+QFfx+tBKcC62gKroCOzXso/0sL9YTVeSD/DJZOiJwSiz3Dj/3u1IUMbVvfTU8wSi
+CYS7Z+jHxwSOCSSNTXm1wO/MtDsNKbI1+R0cohr/J9pOMQvrVh1+2zSDOFvXAQ1S
+yvjn+uqdmijRoV2VEGVHd+34C+ci7eJGAhL/f92PohuuFR2shUETgGWzpACZSJwg
+j1d90Hs81hj07vWRb+xCeDh00vimQngz9AD8vYvv/S4mqRGQ6TZdfjLoUwSTg0JD
+6sQgRXX026gQhLhn687vLKZfHwzQPZkpQdxOR0dTZ/ho/RyGGRJXH4kN4cA2tPr+
+AKYQ29YXGlEzGG7OqikaZcprNWG6UFgEpuXyBxCgp9r4ladZo3J+1Rhgus8ZYatd
+uO98q3WEBmP6CZ2n32mBAoIBAQDS/c/ybFTos0YpGHakwdmSfj5OOQJto2y8ywfG
+qDHwO0ebcpNnS1+MA+7XbKUQb/3Iq7iJljkkzJG2DIJ6rpKynYts1ViYpM7M/t0T
+W3V1gvUcUL62iqkgws4pnpWmubFkqV31cPSHcfIIclnzeQ1aOEGsGHNAvhty0ciC
+DnkJACbqApvopFLOR5f6UFTtKExE+hDH0WqgpsCAKJ1L4g6pBzZatI32/CN9JEVU
+tDbxLV75hHlFFjUrG7nT1rPyr/gI8Ceh9/2xeXPfjJUR0PrG3U1nwLqUCZkvFzO6
+XpN2+A+/v4v5xqMjKDKDFy1oq6SCMomwv/viw6wl/84TMbolAoIBAQDCPiMecnR8
+REik6tqVzQO/uSe9ZHjz6J15t5xdwaI6HpSwLlIkQPkLTjyXtFpemK5DOYRxrJvQ
+remfrZrN2qtLlb/DKpuGPWRsPOvWCrSuNEp48ivUehtclljrzxAFfy0sM+fWeJ48
+nTnR+td9KNhjNtZixzWdAy/mE+jdaMsXVnk66L73Uz+2WsnvVMW2R6cpCR0F2eP/
+B4zDWRqlT2w47sePAB81mFYSQLvPC6Xcgg1OqMubfiizJI49c8DO6Jt+FFYdsxhd
+kG52Eqa/Net6rN3ueiS6yXL5TU3Y6g96bPA2KyNCypucGcddcBfqaiVx/o4AH6yT
+NrdsrYtyvk/jAoIBAQDHUwKVeeRJJbvdbQAArCV4MI155n+1xhMe1AuXkCQFWGtQ
+nlBE4D72jmyf1UKnIbW2Uwv15xY6/ouVWYIWlj9+QDmMaozVP7Uiko+WDuwLRNl8
+k4dn+dzHV2HejbPBG2JLv3lFOx23q1zEwArcaXrExaq9Ayg2fKJ/uVHcFAIiD6Oz
+pR1XDY4w1A/uaN+iYFSVQUyDCQLbnEz1hej73CaPZoHh9Pq83vxD5/UbjVjuRTeZ
+L55FNzKpc/r89rNvTPBcuUwnxplDhYKDKVNWzn9rSXwrzTY2Tk8J3rh+k4RqevSd
+6D47jH1n5Dy7/TRn0ueKHGZZtTUnyEUkbOJo3ayFAoIBAHKDyZaQqaX9Z8p6fwWj
+yVsFoK0ih8BcWkLBAdmwZ6DWGJjJpjmjaG/G3ygc9s4gO1R8m12dAnuDnGE8KzDD
+gwtbrKM2Alyg4wyA2hTlWOH/CAzH0RlCJ9Fs/d1/xJVJBeuyajLiB3/6vXTS6qnq
+I7BSSxAPG8eGcn21LSsjNeB7ZZtaTgNnu/8ZBUYo9yrgkWc67TZe3/ChldYxOOlO
+qqHh/BqNWtjxB4VZTp/g4RbgQVInZ2ozdXEv0v/dt0UEk29ANAjsZif7F3RayJ2f
+/0TilzCaJ/9K9pKNhaClVRy7Dt8QjYg6BIWCGSw4ApF7pLnQ9gySn95mersCkVzD
+YDsCggEAb0E/TORjQhKfNQvahyLfQFm151e+HIoqBqa4WFyfFxe/IJUaLH/JSSFw
+VohbQqPdCmaAeuQ8ERL564DdkcY5BgKcax79fLLCOYP5bT11aQx6uFpfl2Dcm6Z9
+QdCRI4jzPftsd5fxLNH1XtGyC4t6vTic4Pji2O71WgWzx0j5v4aeDY4sZQeFxqCV
+/q7Ee8hem1Rn5RFHu14FV45RS4LAWl6wvf5pQtneSKzx8YL0GZIRRytOzdEfnGKr
+FeUlAj5uL+5/p0ZEgM7gPsEBwdm8scF79qSUn8UWSoXNeIauF9D4BDg8RZcFFxka
+KILVFsq3cQC+bEnoM4eVbjEQkGs1RQ==
+-----END PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIE/jCCAuagAwIBAgIJANFtVaGvJWZlMA0GCSqGSIb3DQEBCwUAMBMxETAPBgNV
+BAMMCHRlc3RzZXJ2MCAXDTE5MDEyNzIyMDIzNloYDzIxMTkwMTAzMjIwMjM2WjAT
+MREwDwYDVQQDDAh0ZXN0c2VydjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoC
+ggIBAKAXjf/eUCAn6nTHtGfxRwvxrh9y292SKS9msKeDD3ouQHfgS9+rxgZaU5WN
+T5/2dpQksAoxq2YB8Q2v6sxfhU5ealPC67qIu2rCt5StEUwRLKua9DFrXmn7LzmC
+RjQIWZcxuW/o5FvEdMadwEbBJPhYrg+h7ByC3qNwhIUIRJNQq7XNzdzDUA0gNHIM
+sHkrjgrAh7fp1wpyTI0p22/gEn7d9whkiQ8kfrbOoLKmAUXWiIW3jS4nMKXGAd0l
+mUpimCR+cNNyTDhj8zH2TwFsTfXQoDNQ0xX0fsjV4SiXOs2vroVCPV43rhf92o7y
+VNvAKoB3h2IJPiwhSTTRJWwBSM13eDyPrnWHH5BvW3x+46OufLx2Z4zG1SAgEehB
+ylyPvpHnkWzspn25qvTw4VPIYjN7nVbWMbYr/v/kzXPEiqsD7uWxe7k5HzWzsj42
+D9RQCGIcD/P7jb9qI1qoS6Tq2qPesd8ty2cJu1n4mtE2pJLASLwXSUwDTbWK8R3Z
+4lX7bzg7ImOFc6YoetdAAMeuZLtKHizJNm9E2b2f6pYgPQp0OvZKOL8qdLcSenjg
+cNDjGuJPr/GHAQazK5egfloCR0vOe6VtaQcFvLHVJRBJMuk6rNqKgX2bW9GsuwW+
+uStjcPbneY8nl5T0cz92KV24VxSCPivP7px1dQCH4or+/3nPAgMBAAGjUzBRMB0G
+A1UdDgQWBBQcQZpzLzTk5KdS/Iz7sGCV7gTd/zAfBgNVHSMEGDAWgBQcQZpzLzTk
+5KdS/Iz7sGCV7gTd/zAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IC
+AQAr/Pgha57jqYsDDX1LyRrVdqoVBpLBeB7x/p9dKYm7S6tBTDFNMZ0SZyQP8VEG
+7UoC9/OQ9nCdEMoR7ZKpQsmipwcIqpXHS6l4YOkf5EEq5jpMgvlEesHmBJJeJew/
+FEPDl1bl8d0tSrmWaL3qepmwzA+2lwAAouWk2n+rLiP8CZ3jZeoTXFqYYrUlEqO9
+fHMvuWqTV4KCSyNY+GWCrnHetulgKHlg+W2J1mZnrCKcBhWf9C2DesTJO+JldIeM
+ornTFquSt21hZi+k3aySuMn2N3MWiNL8XsZVsAnPSs0zA+2fxjJkShls8Gc7cCvd
+a6XrNC+PY6pONguo7rEU4HiwbvnawSTngFFglmH/ImdA/HkaAekW6o82aI8/UxFx
+V9fFMO3iKDQdOrg77hI1bx9RlzKNZZinE2/Pu26fWd5d2zqDWCjl8ykGQRAfXgYN
+H3BjgyXLl+ao5/pOUYYtzm3ruTXTgRcy5hhL6hVTYhSrf9vYh4LNIeXNKnZ78tyG
+TX77/kU2qXhBGCFEUUMqUNV/+ITir2lmoxVjknt19M07aGr8C7SgYt6Rs+qDpMiy
+JurgvRh8LpVq4pHx1efxzxCFmo58DMrG40I0+CF3y/niNpOb1gp2wAqByRiORkds
+f0ytW6qZ0TpHbD6gOtQLYDnhx3ISuX+QYSekVwQUpffeWQ==
+-----END CERTIFICATE-----
diff --git a/tests/server.py b/tests/server.py
index ed2a046ae6..3d7ae9875c 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -8,11 +8,10 @@ import attr
 from zope.interface import implementer
 
 from twisted.internet import address, threads, udp
-from twisted.internet._resolver import HostResolution
-from twisted.internet.address import IPv4Address
-from twisted.internet.defer import Deferred
+from twisted.internet._resolver import SimpleResolverComplexifier
+from twisted.internet.defer import Deferred, fail, succeed
 from twisted.internet.error import DNSLookupError
-from twisted.internet.interfaces import IReactorPluggableNameResolver
+from twisted.internet.interfaces import IReactorPluggableNameResolver, IResolverSimple
 from twisted.python.failure import Failure
 from twisted.test.proto_helpers import MemoryReactorClock
 from twisted.web.http import unquote
@@ -227,30 +226,16 @@ class ThreadedMemoryReactorClock(MemoryReactorClock):
 
     def __init__(self):
         self._udp = []
-        self.lookups = {}
-
-        class Resolver(object):
-            def resolveHostName(
-                _self,
-                resolutionReceiver,
-                hostName,
-                portNumber=0,
-                addressTypes=None,
-                transportSemantics='TCP',
-            ):
-
-                resolution = HostResolution(hostName)
-                resolutionReceiver.resolutionBegan(resolution)
-                if hostName not in self.lookups:
-                    raise DNSLookupError("OH NO")
-
-                resolutionReceiver.addressResolved(
-                    IPv4Address('TCP', self.lookups[hostName], portNumber)
-                )
-                resolutionReceiver.resolutionComplete()
-                return resolution
-
-        self.nameResolver = Resolver()
+        lookups = self.lookups = {}
+
+        @implementer(IResolverSimple)
+        class FakeResolver(object):
+            def getHostByName(self, name, timeout=None):
+                if name not in lookups:
+                    return fail(DNSLookupError("OH NO: unknown %s" % (name, )))
+                return succeed(lookups[name])
+
+        self.nameResolver = SimpleResolverComplexifier(FakeResolver())
         super(ThreadedMemoryReactorClock, self).__init__()
 
     def listenUDP(self, port, protocol, interface='', maxPacketSize=8196):
@@ -369,6 +354,11 @@ class FakeTransport(object):
     :type: twisted.internet.interfaces.IReactorTime
     """
 
+    _protocol = attr.ib(default=None)
+    """The Protocol which is producing data for this transport. Optional, but if set
+    will get called back for connectionLost() notifications etc.
+    """
+
     disconnecting = False
     buffer = attr.ib(default=b'')
     producer = attr.ib(default=None)
@@ -379,8 +369,12 @@ class FakeTransport(object):
     def getHost(self):
         return None
 
-    def loseConnection(self):
-        self.disconnecting = True
+    def loseConnection(self, reason=None):
+        logger.info("FakeTransport: loseConnection(%s)", reason)
+        if not self.disconnecting:
+            self.disconnecting = True
+            if self._protocol:
+                self._protocol.connectionLost(reason)
 
     def abortConnection(self):
         self.disconnecting = True
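
With the simple resolver stub above, tests control DNS entirely through the `lookups` dict; a minimal usage sketch mirroring the agent tests:

    from tests.server import ThreadedMemoryReactorClock

    reactor = ThreadedMemoryReactorClock()
    reactor.lookups["testserv"] = "1.2.3.4"  # names in `lookups` resolve to this IP
    # any other name fails with DNSLookupError("OH NO: unknown <name>")
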
diff --git a/tests/state/test_v2.py b/tests/state/test_v2.py
index 2e073a3afc..9a5c816927 100644
--- a/tests/state/test_v2.py
+++ b/tests/state/test_v2.py
@@ -19,7 +19,7 @@ from six.moves import zip
 
 import attr
 
-from synapse.api.constants import EventTypes, JoinRules, Membership
+from synapse.api.constants import EventTypes, JoinRules, Membership, RoomVersions
 from synapse.event_auth import auth_types_for_event
 from synapse.events import FrozenEvent
 from synapse.state.v2 import lexicographical_topological_sort, resolve_events_with_store
@@ -539,6 +539,7 @@ class StateTestCase(unittest.TestCase):
                 state_before = dict(state_at_event[prev_events[0]])
             else:
                 state_d = resolve_events_with_store(
+                    RoomVersions.V2,
                     [state_at_event[n] for n in prev_events],
                     event_map=event_map,
                     state_res_store=TestStateResolutionStore(event_map),
@@ -685,6 +686,7 @@ class SimpleParamStateTestCase(unittest.TestCase):
         # Test that we correctly handle passing `None` as the event_map
 
         state_d = resolve_events_with_store(
+            RoomVersions.V2,
             [self.state_at_bob, self.state_at_charlie],
             event_map=None,
             state_res_store=TestStateResolutionStore(self.event_map),
diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py
index 411b4a9f86..7ee318e4e8 100644
--- a/tests/test_event_auth.py
+++ b/tests/test_event_auth.py
@@ -16,6 +16,7 @@
 import unittest
 
 from synapse import event_auth
+from synapse.api.constants import RoomVersions
 from synapse.api.errors import AuthError
 from synapse.events import FrozenEvent
 
@@ -35,12 +36,16 @@ class EventAuthTestCase(unittest.TestCase):
         }
 
         # creator should be able to send state
-        event_auth.check(_random_state_event(creator), auth_events, do_sig_check=False)
+        event_auth.check(
+            RoomVersions.V1, _random_state_event(creator), auth_events,
+            do_sig_check=False,
+        )
 
         # joiner should not be able to send state
         self.assertRaises(
             AuthError,
             event_auth.check,
+            RoomVersions.V1,
             _random_state_event(joiner),
             auth_events,
             do_sig_check=False,
@@ -69,13 +74,17 @@ class EventAuthTestCase(unittest.TestCase):
         self.assertRaises(
             AuthError,
             event_auth.check,
+            RoomVersions.V1,
             _random_state_event(pleb),
             auth_events,
             do_sig_check=False,
         ),
 
         # king should be able to send state
-        event_auth.check(_random_state_event(king), auth_events, do_sig_check=False)
+        event_auth.check(
+            RoomVersions.V1, _random_state_event(king), auth_events,
+            do_sig_check=False,
+        )
 
 
 # helpers for making events
diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py
new file mode 100644
index 0000000000..a7310cf12a
--- /dev/null
+++ b/tests/test_utils/__init__.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Utilities for running the unit tests
+"""
diff --git a/tests/test_utils/logging_setup.py b/tests/test_utils/logging_setup.py
new file mode 100644
index 0000000000..d0bc8e2112
--- /dev/null
+++ b/tests/test_utils/logging_setup.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+import os
+
+import twisted.logger
+
+from synapse.util.logcontext import LoggingContextFilter
+
+
+class ToTwistedHandler(logging.Handler):
+    """logging handler which sends the logs to the twisted log"""
+    tx_log = twisted.logger.Logger()
+
+    def emit(self, record):
+        log_entry = self.format(record)
+        log_level = record.levelname.lower().replace('warning', 'warn')
+        self.tx_log.emit(
+            twisted.logger.LogLevel.levelWithName(log_level),
+            log_entry.replace("{", r"(").replace("}", r")"),
+        )
+
+
+def setup_logging():
+    """Configure the python logging appropriately for the tests.
+
+    (Logs will end up in _trial_temp.)
+    """
+    root_logger = logging.getLogger()
+
+    log_format = (
+        "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s"
+    )
+
+    handler = ToTwistedHandler()
+    formatter = logging.Formatter(log_format)
+    handler.setFormatter(formatter)
+    handler.addFilter(LoggingContextFilter(request=""))
+    root_logger.addHandler(handler)
+
+    log_level = os.environ.get("SYNAPSE_TEST_LOG_LEVEL", "ERROR")
+    root_logger.setLevel(log_level)
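
With logging routed through setup_logging(), the default test log level comes from the SYNAPSE_TEST_LOG_LEVEL environment variable (ERROR if unset), and individual tests can still opt into more detail via the `loglevel` attribute handled in tests/unittest.py. An illustrative example:

    import logging

    from tests.unittest import TestCase


    class NoisyTestCase(TestCase):
        # picked up per-test by TestCase.__init__ in tests/unittest.py; for a global
        # override, run with e.g. SYNAPSE_TEST_LOG_LEVEL=DEBUG trial tests
        loglevel = logging.DEBUG
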
diff --git a/tests/test_visibility.py b/tests/test_visibility.py
index 82d63ce00e..455db9f276 100644
--- a/tests/test_visibility.py
+++ b/tests/test_visibility.py
@@ -166,7 +166,7 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase):
     @defer.inlineCallbacks
     def inject_message(self, user_id, content=None):
         if content is None:
-            content = {"body": "testytest"}
+            content = {"body": "testytest", "msgtype": "m.text"}
         builder = self.event_builder_factory.new(
             RoomVersions.V1,
             {
diff --git a/tests/unittest.py b/tests/unittest.py
index cda549c783..fac254ff10 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -31,38 +31,14 @@ from synapse.http.server import JsonResource
 from synapse.http.site import SynapseRequest
 from synapse.server import HomeServer
 from synapse.types import UserID, create_requester
-from synapse.util.logcontext import LoggingContext, LoggingContextFilter
+from synapse.util.logcontext import LoggingContext
 
 from tests.server import get_clock, make_request, render, setup_test_homeserver
+from tests.test_utils.logging_setup import setup_logging
 from tests.utils import default_config, setupdb
 
 setupdb()
-
-# Set up putting Synapse's logs into Trial's.
-rootLogger = logging.getLogger()
-
-log_format = (
-    "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s"
-)
-
-
-class ToTwistedHandler(logging.Handler):
-    tx_log = twisted.logger.Logger()
-
-    def emit(self, record):
-        log_entry = self.format(record)
-        log_level = record.levelname.lower().replace('warning', 'warn')
-        self.tx_log.emit(
-            twisted.logger.LogLevel.levelWithName(log_level),
-            log_entry.replace("{", r"(").replace("}", r")"),
-        )
-
-
-handler = ToTwistedHandler()
-formatter = logging.Formatter(log_format)
-handler.setFormatter(formatter)
-handler.addFilter(LoggingContextFilter(request=""))
-rootLogger.addHandler(handler)
+setup_logging()
 
 
 def around(target):
@@ -96,7 +72,7 @@ class TestCase(unittest.TestCase):
 
         method = getattr(self, methodName)
 
-        level = getattr(method, "loglevel", getattr(self, "loglevel", logging.WARNING))
+        level = getattr(method, "loglevel", getattr(self, "loglevel", None))
 
         @around(self)
         def setUp(orig):
@@ -114,7 +90,7 @@ class TestCase(unittest.TestCase):
                 )
 
             old_level = logging.getLogger().level
-            if old_level != level:
+            if level is not None and old_level != level:
 
                 @around(self)
                 def tearDown(orig):
@@ -122,7 +98,8 @@ class TestCase(unittest.TestCase):
                     logging.getLogger().setLevel(old_level)
                     return ret
 
-            logging.getLogger().setLevel(level)
+                logging.getLogger().setLevel(level)
+
             return orig()
 
         @around(self)