diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 963e0e7d60..5992d30623 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -551,17 +551,6 @@ class Auth(object):
return self.store.is_server_admin(user)
@defer.inlineCallbacks
- def add_auth_events(self, builder, context):
- prev_state_ids = yield context.get_prev_state_ids(self.store)
- auth_ids = yield self.compute_auth_events(builder, prev_state_ids)
-
- auth_events_entries = yield self.store.add_event_hashes(
- auth_ids
- )
-
- builder.auth_events = auth_events_entries
-
- @defer.inlineCallbacks
def compute_auth_events(self, event, current_state_ids, for_verification=False):
if event.type == EventTypes.Create:
defer.returnValue([])
@@ -577,7 +566,7 @@ class Auth(object):
key = (EventTypes.JoinRules, "", )
join_rule_event_id = current_state_ids.get(key)
- key = (EventTypes.Member, event.user_id, )
+ key = (EventTypes.Member, event.sender, )
member_event_id = current_state_ids.get(key)
key = (EventTypes.Create, "", )
diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index 302e1e2f1f..4912a55785 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -125,10 +125,12 @@ class EventFormatVersions(object):
independently from the room version.
"""
V1 = 1
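+    # Event format in which the event ID is derived from the event's
+    # reference hash (see FrozenEventV2), rather than being assigned by
+    # the origin server.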
+ V2 = 2
KNOWN_EVENT_FORMAT_VERSIONS = {
EventFormatVersions.V1,
+ EventFormatVersions.V2,
}
diff --git a/synapse/config/consent_config.py b/synapse/config/consent_config.py
index f193a090ae..9f2e85342f 100644
--- a/synapse/config/consent_config.py
+++ b/synapse/config/consent_config.py
@@ -13,6 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from os import path
+
+from synapse.config import ConfigError
+
from ._base import Config
DEFAULT_CONFIG = """\
@@ -85,7 +89,15 @@ class ConsentConfig(Config):
if consent_config is None:
return
self.user_consent_version = str(consent_config["version"])
- self.user_consent_template_dir = consent_config["template_dir"]
+ self.user_consent_template_dir = self.abspath(
+ consent_config["template_dir"]
+ )
+ if not path.isdir(self.user_consent_template_dir):
+ raise ConfigError(
+ "Could not find template directory '%s'" % (
+ self.user_consent_template_dir,
+ ),
+ )
self.user_consent_server_notice_content = consent_config.get(
"server_notice_content",
)
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 22dcc87d8a..268a43ff00 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -261,7 +261,7 @@ class ServerConfig(Config):
# enter into the 'custom HS URL' field on their client. If you
# use synapse with a reverse proxy, this should be the URL to reach
# synapse via the proxy.
- # public_baseurl: https://example.com:8448/
+ # public_baseurl: https://example.com/
# Set the soft limit on the number of file descriptors synapse can use
# Zero is used to indicate synapse should set the soft limit to the
diff --git a/synapse/config/tls.py b/synapse/config/tls.py
index a75e233aa0..734f612db7 100644
--- a/synapse/config/tls.py
+++ b/synapse/config/tls.py
@@ -15,6 +15,7 @@
import logging
import os
+import warnings
from datetime import datetime
from hashlib import sha256
@@ -39,8 +40,8 @@ class TlsConfig(Config):
self.acme_bind_addresses = acme_config.get("bind_addresses", ["127.0.0.1"])
self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30)
- self.tls_certificate_file = os.path.abspath(config.get("tls_certificate_path"))
- self.tls_private_key_file = os.path.abspath(config.get("tls_private_key_path"))
+ self.tls_certificate_file = self.abspath(config.get("tls_certificate_path"))
+ self.tls_private_key_file = self.abspath(config.get("tls_private_key_path"))
self._original_tls_fingerprints = config["tls_fingerprints"]
self.tls_fingerprints = list(self._original_tls_fingerprints)
self.no_tls = config.get("no_tls", False)
@@ -94,6 +95,16 @@ class TlsConfig(Config):
"""
self.tls_certificate = self.read_tls_certificate(self.tls_certificate_file)
+ # Check if it is self-signed, and issue a warning if so.
+ if self.tls_certificate.get_issuer() == self.tls_certificate.get_subject():
+ warnings.warn(
+ (
+ "Self-signed TLS certificates will not be accepted by Synapse 1.0. "
+ "Please either provide a valid certificate, or use Synapse's ACME "
+ "support to provision one."
+ )
+ )
+
if not self.no_tls:
self.tls_private_key = self.read_tls_private_key(self.tls_private_key_file)
@@ -118,10 +129,11 @@ class TlsConfig(Config):
return (
"""\
# PEM encoded X509 certificate for TLS.
- # You can replace the self-signed certificate that synapse
- # autogenerates on launch with your own SSL certificate + key pair
- # if you like. Any required intermediary certificates can be
- # appended after the primary certificate in hierarchical order.
+        # This certificate, as of Synapse 1.0, will need to be a valid and
+        # verifiable certificate, with a root that is available in the root
+        # stores of the other servers you wish to federate with. Any required
+        # intermediary certificates can be appended after the primary
+        # certificate in hierarchical order.
tls_certificate_path: "%(tls_certificate_path)s"
# PEM encoded private key for TLS
@@ -183,40 +195,3 @@ class TlsConfig(Config):
def read_tls_private_key(self, private_key_path):
private_key_pem = self.read_file(private_key_path, "tls_private_key")
return crypto.load_privatekey(crypto.FILETYPE_PEM, private_key_pem)
-
- def generate_files(self, config):
- tls_certificate_path = config["tls_certificate_path"]
- tls_private_key_path = config["tls_private_key_path"]
-
- if not self.path_exists(tls_private_key_path):
- with open(tls_private_key_path, "wb") as private_key_file:
- tls_private_key = crypto.PKey()
- tls_private_key.generate_key(crypto.TYPE_RSA, 2048)
- private_key_pem = crypto.dump_privatekey(
- crypto.FILETYPE_PEM, tls_private_key
- )
- private_key_file.write(private_key_pem)
- else:
- with open(tls_private_key_path) as private_key_file:
- private_key_pem = private_key_file.read()
- tls_private_key = crypto.load_privatekey(
- crypto.FILETYPE_PEM, private_key_pem
- )
-
- if not self.path_exists(tls_certificate_path):
- with open(tls_certificate_path, "wb") as certificate_file:
- cert = crypto.X509()
- subject = cert.get_subject()
- subject.CN = config["server_name"]
-
- cert.set_serial_number(1000)
- cert.gmtime_adj_notBefore(0)
- cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)
- cert.set_issuer(cert.get_subject())
- cert.set_pubkey(tls_private_key)
-
- cert.sign(tls_private_key, 'sha256')
-
- cert_pem = crypto.dump_certificate(crypto.FILETYPE_PEM, cert)
-
- certificate_file.write(cert_pem)
diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py
index d01ac5075c..1dfa727fcf 100644
--- a/synapse/crypto/event_signing.py
+++ b/synapse/crypto/event_signing.py
@@ -131,12 +131,12 @@ def compute_event_signature(event_dict, signature_name, signing_key):
return redact_json["signatures"]
-def add_hashes_and_signatures(event, signature_name, signing_key,
+def add_hashes_and_signatures(event_dict, signature_name, signing_key,
hash_algorithm=hashlib.sha256):
"""Add content hash and sign the event
Args:
- event_dict (EventBuilder): The event to add hashes to and sign
+ event_dict (dict): The event to add hashes to and sign
signature_name (str): The name of the entity signing the event
(typically the server's hostname).
signing_key (syutil.crypto.SigningKey): The key to sign with
@@ -144,16 +144,12 @@ def add_hashes_and_signatures(event, signature_name, signing_key,
to hash the event
"""
- name, digest = compute_content_hash(
- event.get_pdu_json(), hash_algorithm=hash_algorithm,
- )
+ name, digest = compute_content_hash(event_dict, hash_algorithm=hash_algorithm)
- if not hasattr(event, "hashes"):
- event.hashes = {}
- event.hashes[name] = encode_base64(digest)
+ event_dict.setdefault("hashes", {})[name] = encode_base64(digest)
- event.signatures = compute_event_signature(
- event.get_pdu_json(),
+ event_dict["signatures"] = compute_event_signature(
+ event_dict,
signature_name=signature_name,
signing_key=signing_key,
)
diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py
index 527aec8c69..44b2b41f18 100644
--- a/synapse/events/__init__.py
+++ b/synapse/events/__init__.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -18,11 +19,9 @@ from distutils.util import strtobool
import six
-from synapse.api.constants import (
- KNOWN_EVENT_FORMAT_VERSIONS,
- KNOWN_ROOM_VERSIONS,
- EventFormatVersions,
-)
+from unpaddedbase64 import encode_base64
+
+from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventFormatVersions, RoomVersions
from synapse.util.caches import intern_dict
from synapse.util.frozenutils import freeze
@@ -240,22 +239,91 @@ class FrozenEvent(EventBase):
rejected_reason=rejected_reason,
)
- @staticmethod
- def from_event(event):
- e = FrozenEvent(
- event.get_pdu_json()
+ def __str__(self):
+ return self.__repr__()
+
+ def __repr__(self):
+ return "<FrozenEvent event_id='%s', type='%s', state_key='%s'>" % (
+ self.get("event_id", None),
+ self.get("type", None),
+ self.get("state_key", None),
)
- e.internal_metadata = event.internal_metadata
- return e
+class FrozenEventV2(EventBase):
+ format_version = EventFormatVersions.V2 # All events of this type are V2
+
+ def __init__(self, event_dict, internal_metadata_dict={}, rejected_reason=None):
+ event_dict = dict(event_dict)
+
+ # Signatures is a dict of dicts, and this is faster than doing a
+ # copy.deepcopy
+ signatures = {
+ name: {sig_id: sig for sig_id, sig in sigs.items()}
+ for name, sigs in event_dict.pop("signatures", {}).items()
+ }
+
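+        # V2 events never carry an explicit event_id in the event dict: the ID
+        # is derived from the event's reference hash (see the event_id property
+        # below).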
+ assert "event_id" not in event_dict
+
+ unsigned = dict(event_dict.pop("unsigned", {}))
+
+ # We intern these strings because they turn up a lot (especially when
+ # caching).
+ event_dict = intern_dict(event_dict)
+
+ if USE_FROZEN_DICTS:
+ frozen_dict = freeze(event_dict)
+ else:
+ frozen_dict = event_dict
+
+ self._event_id = None
+ self.type = event_dict["type"]
+ if "state_key" in event_dict:
+ self.state_key = event_dict["state_key"]
+
+ super(FrozenEventV2, self).__init__(
+ frozen_dict,
+ signatures=signatures,
+ unsigned=unsigned,
+ internal_metadata_dict=internal_metadata_dict,
+ rejected_reason=rejected_reason,
+ )
+
+ @property
+ def event_id(self):
+ # We have to import this here as otherwise we get an import loop which
+ # is hard to break.
+ from synapse.crypto.event_signing import compute_event_reference_hash
+
+ if self._event_id:
+ return self._event_id
+ self._event_id = "$" + encode_base64(compute_event_reference_hash(self)[1])
+ return self._event_id
+
+ def prev_event_ids(self):
+ """Returns the list of prev event IDs. The order matches the order
+ specified in the event, though there is no meaning to it.
+
+ Returns:
+ list[str]: The list of event IDs of this event's prev_events
+ """
+ return self.prev_events
+
+ def auth_event_ids(self):
+ """Returns the list of auth event IDs. The order matches the order
+ specified in the event, though there is no meaning to it.
+
+ Returns:
+ list[str]: The list of event IDs of this event's auth_events
+ """
+ return self.auth_events
def __str__(self):
return self.__repr__()
def __repr__(self):
- return "<FrozenEvent event_id='%s', type='%s', state_key='%s'>" % (
- self.get("event_id", None),
+ return "<FrozenEventV2 event_id='%s', type='%s', state_key='%s'>" % (
+ self.event_id,
self.get("type", None),
self.get("state_key", None),
)
@@ -274,7 +342,13 @@ def room_version_to_event_format(room_version):
# We should have already checked version, so this should not happen
raise RuntimeError("Unrecognized room version %s" % (room_version,))
- return EventFormatVersions.V1
+ if room_version in (
+ RoomVersions.V1, RoomVersions.V2, RoomVersions.VDH_TEST,
+ RoomVersions.STATE_V2_TEST,
+ ):
+ return EventFormatVersions.V1
+ else:
+ raise RuntimeError("Unrecognized room version %s" % (room_version,))
def event_type_from_format_version(format_version):
@@ -288,8 +362,12 @@ def event_type_from_format_version(format_version):
type: A type that can be initialized as per the initializer of
`FrozenEvent`
"""
- if format_version not in KNOWN_EVENT_FORMAT_VERSIONS:
+
+ if format_version == EventFormatVersions.V1:
+ return FrozenEvent
+ elif format_version == EventFormatVersions.V2:
+ return FrozenEventV2
+ else:
raise Exception(
"No event format %r" % (format_version,)
)
- return FrozenEvent
diff --git a/synapse/events/builder.py b/synapse/events/builder.py
index 9ca405b56b..06e01be918 100644
--- a/synapse/events/builder.py
+++ b/synapse/events/builder.py
@@ -13,78 +13,161 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import copy
+import attr
-from synapse.api.constants import RoomVersions
+from twisted.internet import defer
+
+from synapse.api.constants import (
+ KNOWN_EVENT_FORMAT_VERSIONS,
+ KNOWN_ROOM_VERSIONS,
+ MAX_DEPTH,
+ EventFormatVersions,
+)
+from synapse.crypto.event_signing import add_hashes_and_signatures
from synapse.types import EventID
from synapse.util.stringutils import random_string
-from . import EventBase, FrozenEvent, _event_dict_property
+from . import (
+ _EventInternalMetadata,
+ event_type_from_format_version,
+ room_version_to_event_format,
+)
-def get_event_builder(room_version, key_values={}, internal_metadata_dict={}):
- """Generate an event builder appropriate for the given room version
+@attr.s(slots=True, cmp=False, frozen=True)
+class EventBuilder(object):
+    """A format-independent event builder used to build up the event content
+ before signing the event.
- Args:
- room_version (str): Version of the room that we're creating an
- event builder for
- key_values (dict): Fields used as the basis of the new event
- internal_metadata_dict (dict): Used to create the `_EventInternalMetadata`
- object.
+ (Note that while objects of this class are frozen, the
+ content/unsigned/internal_metadata fields are still mutable)
- Returns:
- EventBuilder
+ Attributes:
+ format_version (int): Event format version
+ room_id (str)
+ type (str)
+ sender (str)
+ content (dict)
+ unsigned (dict)
+ internal_metadata (_EventInternalMetadata)
+
+ _state (StateHandler)
+ _auth (synapse.api.Auth)
+ _store (DataStore)
+ _clock (Clock)
+ _hostname (str): The hostname of the server creating the event
+ _signing_key: The signing key to use to sign the event as the server
"""
- if room_version in {
- RoomVersions.V1,
- RoomVersions.V2,
- RoomVersions.STATE_V2_TEST,
- }:
- return EventBuilder(key_values, internal_metadata_dict)
- else:
- raise Exception(
- "No event format defined for version %r" % (room_version,)
- )
+ _state = attr.ib()
+ _auth = attr.ib()
+ _store = attr.ib()
+ _clock = attr.ib()
+ _hostname = attr.ib()
+ _signing_key = attr.ib()
+
+ format_version = attr.ib()
+
+ room_id = attr.ib()
+ type = attr.ib()
+ sender = attr.ib()
+
+ content = attr.ib(default=attr.Factory(dict))
+ unsigned = attr.ib(default=attr.Factory(dict))
+
+ # These only exist on a subset of events, so they raise AttributeError if
+ # someone tries to get them when they don't exist.
+ _state_key = attr.ib(default=None)
+ _redacts = attr.ib(default=None)
+
+ internal_metadata = attr.ib(default=attr.Factory(lambda: _EventInternalMetadata({})))
+
+ @property
+ def state_key(self):
+ if self._state_key is not None:
+ return self._state_key
+
+ raise AttributeError("state_key")
+
+ def is_state(self):
+ return self._state_key is not None
-class EventBuilder(EventBase):
- def __init__(self, key_values={}, internal_metadata_dict={}):
- signatures = copy.deepcopy(key_values.pop("signatures", {}))
- unsigned = copy.deepcopy(key_values.pop("unsigned", {}))
+ @defer.inlineCallbacks
+ def build(self, prev_event_ids):
+ """Transform into a fully signed and hashed event
- super(EventBuilder, self).__init__(
- key_values,
- signatures=signatures,
- unsigned=unsigned,
- internal_metadata_dict=internal_metadata_dict,
+ Args:
+ prev_event_ids (list[str]): The event IDs to use as the prev events
+
+ Returns:
+ Deferred[FrozenEvent]
+ """
+
+ state_ids = yield self._state.get_current_state_ids(
+ self.room_id, prev_event_ids,
+ )
+ auth_ids = yield self._auth.compute_auth_events(
+ self, state_ids,
)
- event_id = _event_dict_property("event_id")
- state_key = _event_dict_property("state_key")
- type = _event_dict_property("type")
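+        # In the V1 event format, prev_events and auth_events are lists of
+        # (event_id, hash) pairs; in later formats they are plain lists of
+        # event IDs.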
+ if self.format_version == EventFormatVersions.V1:
+ auth_events = yield self._store.add_event_hashes(auth_ids)
+ prev_events = yield self._store.add_event_hashes(prev_event_ids)
+ else:
+ auth_events = auth_ids
+ prev_events = prev_event_ids
- def build(self):
- return FrozenEvent.from_event(self)
+ old_depth = yield self._store.get_max_depth_of(
+ prev_event_ids,
+ )
+ depth = old_depth + 1
+ # we cap depth of generated events, to ensure that they are not
+ # rejected by other servers (and so that they can be persisted in
+ # the db)
+ depth = min(depth, MAX_DEPTH)
-class EventBuilderFactory(object):
- def __init__(self, clock, hostname):
- self.clock = clock
- self.hostname = hostname
+ event_dict = {
+ "auth_events": auth_events,
+ "prev_events": prev_events,
+ "type": self.type,
+ "room_id": self.room_id,
+ "sender": self.sender,
+ "content": self.content,
+ "unsigned": self.unsigned,
+ "depth": depth,
+ "prev_state": [],
+ }
+
+ if self.is_state():
+ event_dict["state_key"] = self._state_key
- self.event_id_count = 0
+ if self._redacts is not None:
+ event_dict["redacts"] = self._redacts
- def create_event_id(self):
- i = str(self.event_id_count)
- self.event_id_count += 1
+ defer.returnValue(
+ create_local_event_from_event_dict(
+ clock=self._clock,
+ hostname=self._hostname,
+ signing_key=self._signing_key,
+ format_version=self.format_version,
+ event_dict=event_dict,
+ internal_metadata_dict=self.internal_metadata.get_dict(),
+ )
+ )
- local_part = str(int(self.clock.time())) + i + random_string(5)
- e_id = EventID(local_part, self.hostname)
+class EventBuilderFactory(object):
+ def __init__(self, hs):
+ self.clock = hs.get_clock()
+ self.hostname = hs.hostname
+ self.signing_key = hs.config.signing_key[0]
- return e_id.to_string()
+ self.store = hs.get_datastore()
+ self.state = hs.get_state_handler()
+ self.auth = hs.get_auth()
- def new(self, room_version, key_values={}):
+ def new(self, room_version, key_values):
"""Generate an event builder appropriate for the given room version
Args:
@@ -97,26 +180,103 @@ class EventBuilderFactory(object):
"""
-        # There's currently only the one event version defined
+        # Check that the room version is known before building an event for it.
- if room_version not in {
- RoomVersions.V1,
- RoomVersions.V2,
- RoomVersions.STATE_V2_TEST,
- }:
+ if room_version not in KNOWN_ROOM_VERSIONS:
raise Exception(
"No event format defined for version %r" % (room_version,)
)
- key_values["event_id"] = self.create_event_id()
+ return EventBuilder(
+ store=self.store,
+ state=self.state,
+ auth=self.auth,
+ clock=self.clock,
+ hostname=self.hostname,
+ signing_key=self.signing_key,
+ format_version=room_version_to_event_format(room_version),
+ type=key_values["type"],
+ state_key=key_values.get("state_key"),
+ room_id=key_values["room_id"],
+ sender=key_values["sender"],
+ content=key_values.get("content", {}),
+ unsigned=key_values.get("unsigned", {}),
+ redacts=key_values.get("redacts", None),
+ )
+
+
+def create_local_event_from_event_dict(clock, hostname, signing_key,
+ format_version, event_dict,
+ internal_metadata_dict=None):
+ """Takes a fully formed event dict, ensuring that fields like `origin`
+ and `origin_server_ts` have correct values for a locally produced event,
+ then signs and hashes it.
+
+ Args:
+ clock (Clock)
+ hostname (str)
+ signing_key
+ format_version (int)
+ event_dict (dict)
+ internal_metadata_dict (dict|None)
+
+ Returns:
+ FrozenEvent
+ """
+
+    # Check that we know how to handle this event format version.
+ if format_version not in KNOWN_EVENT_FORMAT_VERSIONS:
+ raise Exception(
+ "No event format defined for version %r" % (format_version,)
+ )
+
+ if internal_metadata_dict is None:
+ internal_metadata_dict = {}
+
+ time_now = int(clock.time_msec())
+
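+    # Only V1 events are given a server-generated event_id; later formats
+    # derive the event ID from the event's reference hash.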
+ if format_version == EventFormatVersions.V1:
+ event_dict["event_id"] = _create_event_id(clock, hostname)
+
+ event_dict["origin"] = hostname
+ event_dict["origin_server_ts"] = time_now
+
+ event_dict.setdefault("unsigned", {})
+ age = event_dict["unsigned"].pop("age", 0)
+ event_dict["unsigned"].setdefault("age_ts", time_now - age)
+
+ event_dict.setdefault("signatures", {})
+
+ add_hashes_and_signatures(
+ event_dict,
+ hostname,
+ signing_key,
+ )
+ return event_type_from_format_version(format_version)(
+ event_dict, internal_metadata_dict=internal_metadata_dict,
+ )
+
+
+# A counter used when generating new event IDs
+_event_id_counter = 0
+
+
+def _create_event_id(clock, hostname):
+ """Create a new event ID
+
+ Args:
+ clock (Clock)
+ hostname (str): The server name for the event ID
+
+ Returns:
+ str
+ """
- time_now = int(self.clock.time_msec())
+ global _event_id_counter
- key_values.setdefault("origin", self.hostname)
- key_values.setdefault("origin_server_ts", time_now)
+ i = str(_event_id_counter)
+ _event_id_counter += 1
- key_values.setdefault("unsigned", {})
- age = key_values["unsigned"].pop("age", 0)
- key_values["unsigned"].setdefault("age_ts", time_now - age)
+ local_part = str(int(clock.time())) + i + random_string(5)
- key_values["signatures"] = {}
+ e_id = EventID(local_part, hostname)
- return EventBuilder(key_values=key_values,)
+ return e_id.to_string()
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index 63f693f259..07fccdd8f9 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -267,6 +267,7 @@ def serialize_event(e, time_now_ms, as_client_event=True,
Returns:
dict
"""
+
# FIXME(erikj): To handle the case of presence events and the like
if not isinstance(e, EventBase):
return e
@@ -276,6 +277,8 @@ def serialize_event(e, time_now_ms, as_client_event=True,
# Should this strip out None's?
d = {k: v for k, v in e.get_dict().items()}
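+    # `event_id` is no longer necessarily stored in the event dict (for newer
+    # event formats it is computed from the reference hash), so add it back
+    # explicitly.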
+ d["event_id"] = e.event_id
+
if "age_ts" in d["unsigned"]:
d["unsigned"]["age"] = time_now_ms - d["unsigned"]["age_ts"]
del d["unsigned"]["age_ts"]
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index cacb1c8aaf..9b4acd2ed7 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -37,8 +37,7 @@ from synapse.api.errors import (
HttpResponseException,
SynapseError,
)
-from synapse.crypto.event_signing import add_hashes_and_signatures
-from synapse.events import room_version_to_event_format
+from synapse.events import builder, room_version_to_event_format
from synapse.federation.federation_base import FederationBase, event_from_pdu_json
from synapse.util import logcontext, unwrapFirstError
from synapse.util.caches.expiringcache import ExpiringCache
@@ -72,7 +71,8 @@ class FederationClient(FederationBase):
self.state = hs.get_state_handler()
self.transport_layer = hs.get_federation_transport_client()
- self.event_builder_factory = hs.get_event_builder_factory()
+ self.hostname = hs.hostname
+ self.signing_key = hs.config.signing_key[0]
self._get_pdu_cache = ExpiringCache(
cache_name="get_pdu_cache",
@@ -608,18 +608,10 @@ class FederationClient(FederationBase):
if "prev_state" not in pdu_dict:
pdu_dict["prev_state"] = []
- # Strip off the fields that we want to clobber.
- pdu_dict.pop("origin", None)
- pdu_dict.pop("origin_server_ts", None)
- pdu_dict.pop("unsigned", None)
-
- builder = self.event_builder_factory.new(room_version, pdu_dict)
- add_hashes_and_signatures(
- builder,
- self.hs.hostname,
- self.hs.config.signing_key[0]
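+        # `create_local_event_from_event_dict` clobbers `origin` and
+        # `origin_server_ts`, and adds the content hash and signatures for us.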
+ ev = builder.create_local_event_from_event_dict(
+ self._clock, self.hostname, self.signing_key,
+ format_version=event_format, event_dict=pdu_dict,
)
- ev = builder.build()
defer.returnValue(
(destination, ev, event_format)
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index 4aa04b9588..6681614232 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -322,7 +322,7 @@ class FederationServer(FederationBase):
if self.hs.is_mine_id(event.event_id):
event.signatures.update(
compute_event_signature(
- event,
+ event.get_pdu_json(),
self.hs.hostname,
self.hs.config.signing_key[0]
)
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index adf59db7a8..fcaf7530b0 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -1300,7 +1300,7 @@ class FederationHandler(BaseHandler):
event.signatures.update(
compute_event_signature(
- event,
+ event.get_pdu_json(),
self.hs.hostname,
self.hs.config.signing_key[0]
)
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 0cfced43d5..3981fe69ce 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -22,7 +22,7 @@ from canonicaljson import encode_canonical_json, json
from twisted.internet import defer
from twisted.internet.defer import succeed
-from synapse.api.constants import MAX_DEPTH, EventTypes, Membership, RoomVersions
+from synapse.api.constants import EventTypes, Membership, RoomVersions
from synapse.api.errors import (
AuthError,
Codes,
@@ -31,7 +31,6 @@ from synapse.api.errors import (
SynapseError,
)
from synapse.api.urls import ConsentURIBuilder
-from synapse.crypto.event_signing import add_hashes_and_signatures
from synapse.events.utils import serialize_event
from synapse.events.validator import EventValidator
from synapse.replication.http.send_event import ReplicationSendEventRestServlet
@@ -545,40 +544,19 @@ class EventCreationHandler(object):
prev_events_and_hashes = \
yield self.store.get_prev_events_for_room(builder.room_id)
- if prev_events_and_hashes:
- depth = max([d for _, _, d in prev_events_and_hashes]) + 1
- # we cap depth of generated events, to ensure that they are not
- # rejected by other servers (and so that they can be persisted in
- # the db)
- depth = min(depth, MAX_DEPTH)
- else:
- depth = 1
-
prev_events = [
(event_id, prev_hashes)
for event_id, prev_hashes, _ in prev_events_and_hashes
]
- builder.prev_events = prev_events
- builder.depth = depth
-
- context = yield self.state.compute_event_context(builder)
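+        # The event builder now computes the depth and auth events itself, and
+        # signs and hashes the resulting event.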
+ event = yield builder.build(
+ prev_event_ids=[p for p, _ in prev_events],
+ )
+ context = yield self.state.compute_event_context(event)
if requester:
context.app_service = requester.app_service
- if builder.is_state():
- builder.prev_state = yield self.store.add_event_hashes(
- context.prev_state_events
- )
-
- yield self.auth.add_auth_events(builder, context)
-
- signing_key = self.hs.config.signing_key[0]
- add_hashes_and_signatures(
- builder, self.server_name, signing_key
- )
-
- event = builder.build()
+ self.validator.validate_new(event)
logger.debug(
"Created event %s",
diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py
index 4a6f634c8b..f81fcd4301 100644
--- a/synapse/http/federation/matrix_federation_agent.py
+++ b/synapse/http/federation/matrix_federation_agent.py
@@ -12,6 +12,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+import json
import logging
import attr
@@ -20,7 +21,7 @@ from zope.interface import implementer
from twisted.internet import defer
from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS
-from twisted.web.client import URI, Agent, HTTPConnectionPool
+from twisted.web.client import URI, Agent, HTTPConnectionPool, readBody
from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent
@@ -43,13 +44,19 @@ class MatrixFederationAgent(object):
tls_client_options_factory (ClientTLSOptionsFactory|None):
factory to use for fetching client tls options, or none to disable TLS.
+ _well_known_tls_policy (IPolicyForHTTPS|None):
+ TLS policy to use for fetching .well-known files. None to use a default
+ (browser-like) implementation.
+
srv_resolver (SrvResolver|None):
SRVResolver impl to use for looking up SRV records. None to use a default
implementation.
"""
def __init__(
- self, reactor, tls_client_options_factory, _srv_resolver=None,
+ self, reactor, tls_client_options_factory,
+ _well_known_tls_policy=None,
+ _srv_resolver=None,
):
self._reactor = reactor
self._tls_client_options_factory = tls_client_options_factory
@@ -62,6 +69,14 @@ class MatrixFederationAgent(object):
self._pool.maxPersistentPerHost = 5
self._pool.cachedConnectionTimeout = 2 * 60
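+        # .well-known lookups are made with a plain twisted Agent, so that the
+        # TLS policy used for them can be overridden (by default it is a
+        # normal, browser-like one).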
+ agent_args = {}
+ if _well_known_tls_policy is not None:
+ # the param is called 'contextFactory', but actually passing a
+ # contextfactory is deprecated, and it expects an IPolicyForHTTPS.
+ agent_args['contextFactory'] = _well_known_tls_policy
+ _well_known_agent = Agent(self._reactor, pool=self._pool, **agent_args)
+ self._well_known_agent = _well_known_agent
+
@defer.inlineCallbacks
def request(self, method, uri, headers=None, bodyProducer=None):
"""
@@ -114,7 +129,11 @@ class MatrixFederationAgent(object):
class EndpointFactory(object):
@staticmethod
def endpointForURI(_uri):
- logger.info("Connecting to %s:%s", res.target_host, res.target_port)
+ logger.info(
+ "Connecting to %s:%i",
+ res.target_host.decode("ascii"),
+ res.target_port,
+ )
ep = HostnameEndpoint(self._reactor, res.target_host, res.target_port)
if tls_options is not None:
ep = wrapClientTLS(tls_options, ep)
@@ -127,7 +146,7 @@ class MatrixFederationAgent(object):
defer.returnValue(res)
@defer.inlineCallbacks
- def _route_matrix_uri(self, parsed_uri):
+ def _route_matrix_uri(self, parsed_uri, lookup_well_known=True):
"""Helper for `request`: determine the routing for a Matrix URI
Args:
@@ -135,6 +154,9 @@ class MatrixFederationAgent(object):
parsed with URI.fromBytes(uri, defaultPort=-1) to set the `port` to -1
if there is no explicit port given.
+ lookup_well_known (bool): True if we should look up the .well-known file if
+ there is no SRV record.
+
Returns:
Deferred[_RoutingResult]
"""
@@ -169,6 +191,42 @@ class MatrixFederationAgent(object):
service_name = b"_matrix._tcp.%s" % (parsed_uri.host,)
server_list = yield self._srv_resolver.resolve_service(service_name)
+ if not server_list and lookup_well_known:
+ # try a .well-known lookup
+ well_known_server = yield self._get_well_known(parsed_uri.host)
+
+ if well_known_server:
+ # if we found a .well-known, start again, but don't do another
+ # .well-known lookup.
+
+ # parse the server name in the .well-known response into host/port.
+ # (This code is lifted from twisted.web.client.URI.fromBytes).
+ if b':' in well_known_server:
+ well_known_host, well_known_port = well_known_server.rsplit(b':', 1)
+ try:
+ well_known_port = int(well_known_port)
+ except ValueError:
+ # the part after the colon could not be parsed as an int
+ # - we assume it is an IPv6 literal with no port (the closing
+ # ']' stops it being parsed as an int)
+ well_known_host, well_known_port = well_known_server, -1
+ else:
+ well_known_host, well_known_port = well_known_server, -1
+
+ new_uri = URI(
+ scheme=parsed_uri.scheme,
+ netloc=well_known_server,
+ host=well_known_host,
+ port=well_known_port,
+ path=parsed_uri.path,
+ params=parsed_uri.params,
+ query=parsed_uri.query,
+ fragment=parsed_uri.fragment,
+ )
+
+ res = yield self._route_matrix_uri(new_uri, lookup_well_known=False)
+ defer.returnValue(res)
+
if not server_list:
target_host = parsed_uri.host
port = 8448
@@ -190,6 +248,47 @@ class MatrixFederationAgent(object):
target_port=port,
))
+ @defer.inlineCallbacks
+ def _get_well_known(self, server_name):
+ """Attempt to fetch and parse a .well-known file for the given server
+
+ Args:
+ server_name (bytes): name of the server, from the requested url
+
+ Returns:
+ Deferred[bytes|None]: either the new server name, from the .well-known, or
+ None if there was no .well-known file.
+ """
+ # FIXME: add a cache
+
+ uri = b"https://%s/.well-known/matrix/server" % (server_name, )
+ uri_str = uri.decode("ascii")
+ logger.info("Fetching %s", uri_str)
+ try:
+ response = yield make_deferred_yieldable(
+ self._well_known_agent.request(b"GET", uri),
+ )
+ except Exception as e:
+ logger.info("Connection error fetching %s: %s", uri_str, e)
+ defer.returnValue(None)
+
+ body = yield make_deferred_yieldable(readBody(response))
+
+ if response.code != 200:
+ logger.info("Error response %i from %s", response.code, uri_str)
+ defer.returnValue(None)
+
+ try:
+ parsed_body = json.loads(body.decode('utf-8'))
+ logger.info("Response from .well-known: %s", parsed_body)
+ if not isinstance(parsed_body, dict):
+ raise Exception("not a dict")
+ if "m.server" not in parsed_body:
+ raise Exception("Missing key 'm.server'")
+ except Exception as e:
+ raise Exception("invalid .well-known response from %s: %s" % (uri_str, e,))
+ defer.returnValue(parsed_body["m.server"].encode("ascii"))
+
@attr.s
class _RoutingResult(object):
diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py
index 80611cfe84..008d4edae5 100644
--- a/synapse/rest/consent/consent_resource.py
+++ b/synapse/rest/consent/consent_resource.py
@@ -101,16 +101,7 @@ class ConsentResource(Resource):
"missing in config file.",
)
- # daemonize changes the cwd to /, so make the path absolute now.
- consent_template_directory = path.abspath(
- hs.config.user_consent_template_dir,
- )
- if not path.isdir(consent_template_directory):
- raise ConfigError(
- "Could not find template directory '%s'" % (
- consent_template_directory,
- ),
- )
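+        # ConsentConfig has already checked that the directory exists and made
+        # the path absolute.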
+ consent_template_directory = hs.config.user_consent_template_dir
loader = jinja2.FileSystemLoader(consent_template_directory)
self._jinja_env = jinja2.Environment(
diff --git a/synapse/server.py b/synapse/server.py
index c8914302cf..6c52101616 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -355,10 +355,7 @@ class HomeServer(object):
return Keyring(self)
def build_event_builder_factory(self):
- return EventBuilderFactory(
- clock=self.get_clock(),
- hostname=self.hostname,
- )
+ return EventBuilderFactory(self)
def build_filtering(self):
return Filtering(self)
diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py
index d3b9dea1d6..38809ed0fc 100644
--- a/synapse/storage/event_federation.py
+++ b/synapse/storage/event_federation.py
@@ -125,6 +125,29 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore,
return dict(txn)
+ @defer.inlineCallbacks
+ def get_max_depth_of(self, event_ids):
+ """Returns the max depth of a set of event IDs
+
+ Args:
+ event_ids (list[str])
+
+        Returns:
+ Deferred[int]
+ """
+ rows = yield self._simple_select_many_batch(
+ table="events",
+ column="event_id",
+ iterable=event_ids,
+ retcols=("depth",),
+ desc="get_max_depth_of",
+ )
+
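+        # If none of the given events are known to us, fall back to a depth
+        # of 0.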
+ if not rows:
+ defer.returnValue(0)
+ else:
+ defer.returnValue(max(row["depth"] for row in rows))
+
def _get_oldest_events_in_room_txn(self, txn, room_id):
return self._simple_select_onecol_txn(
txn,