28 files changed, 245 insertions(+), 152 deletions(-)
diff --git a/changelog.d/4482.misc b/changelog.d/4482.misc
new file mode 100644
index 0000000000..43f8963614
--- /dev/null
+++ b/changelog.d/4482.misc
@@ -0,0 +1 @@
+Add infrastructure to support different event formats
diff --git a/changelog.d/4492.feature b/changelog.d/4492.feature
new file mode 100644
index 0000000000..c7f595cec2
--- /dev/null
+++ b/changelog.d/4492.feature
@@ -0,0 +1 @@
+Synapse can now automatically provision TLS certificates via ACME (the protocol used by CAs like Let's Encrypt).
diff --git a/changelog.d/4493.misc b/changelog.d/4493.misc
new file mode 100644
index 0000000000..43f8963614
--- /dev/null
+++ b/changelog.d/4493.misc
@@ -0,0 +1 @@
+Add infrastructure to support different event formats
diff --git a/changelog.d/4497.feature b/changelog.d/4497.feature
new file mode 100644
index 0000000000..bda713adf9
--- /dev/null
+++ b/changelog.d/4497.feature
@@ -0,0 +1 @@
+Implement MSC1708 (.well-known routing for server-server federation)
\ No newline at end of file
diff --git a/changelog.d/4505.misc b/changelog.d/4505.misc
new file mode 100644
index 0000000000..994801fd1e
--- /dev/null
+++ b/changelog.d/4505.misc
@@ -0,0 +1 @@
+Synapse will now take advantage of native UPSERT functionality in PostgreSQL 9.5+ and SQLite 3.24+.
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index e37b807c94..7b213e54c8 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -65,7 +65,7 @@ class Auth(object):
         register_cache("cache", "token_cache", self.token_cache)
 
     @defer.inlineCallbacks
-    def check_from_context(self, event, context, do_sig_check=True):
+    def check_from_context(self, room_version, event, context, do_sig_check=True):
         prev_state_ids = yield context.get_prev_state_ids(self.store)
         auth_events_ids = yield self.compute_auth_events(
             event, prev_state_ids, for_verification=True,
@@ -74,12 +74,16 @@ class Auth(object):
         auth_events = {
             (e.type, e.state_key): e for e in itervalues(auth_events)
         }
-        self.check(event, auth_events=auth_events, do_sig_check=do_sig_check)
+        self.check(
+            room_version, event,
+            auth_events=auth_events, do_sig_check=do_sig_check,
+        )
 
-    def check(self, event, auth_events, do_sig_check=True):
+    def check(self, room_version, event, auth_events, do_sig_check=True):
         """ Checks if this event is correctly authed.
 
         Args:
+            room_version (str): version of the room
             event: the event being checked.
             auth_events (dict: event-key -> event): the existing room
                 state.
@@ -88,7 +92,9 @@
             True if the auth checks pass.
         """
         with Measure(self.clock, "auth.check"):
-            event_auth.check(event, auth_events, do_sig_check=do_sig_check)
+            event_auth.check(
+                room_version, event, auth_events, do_sig_check=do_sig_check
+            )
 
     @defer.inlineCallbacks
     def check_joined_room(self, room_id, user_id, current_state=None):
diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py
index 76aed8c60a..f8a417cb60 100644
--- a/synapse/app/client_reader.py
+++ b/synapse/app/client_reader.py
@@ -164,23 +164,23 @@ def start(config_options):
 
     database_engine = create_engine(config.database_config)
 
-    tls_server_context_factory = context_factory.ServerContextFactory(config)
-    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
-
     ss = ClientReaderServer(
         config.server_name,
         db_config=config.database_config,
-        tls_server_context_factory=tls_server_context_factory,
-        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
     )
 
     ss.setup()
-    ss.start_listening(config.worker_listeners)
 
     def start():
+        ss.config.read_certificate_from_disk()
+        ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
+        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+            config
+        )
+        ss.start_listening(config.worker_listeners)
         ss.get_datastore().start_profiling()
 
     reactor.callWhenRunning(start)
diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py
index e4a68715aa..656e0edc0f 100644
--- a/synapse/app/event_creator.py
+++ b/synapse/app/event_creator.py
@@ -185,23 +185,23 @@ def start(config_options):
 
     database_engine = create_engine(config.database_config)
 
-    tls_server_context_factory = context_factory.ServerContextFactory(config)
-    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
-
     ss = EventCreatorServer(
         config.server_name,
         db_config=config.database_config,
-        tls_server_context_factory=tls_server_context_factory,
-        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
     )
 
     ss.setup()
-    ss.start_listening(config.worker_listeners)
 
     def start():
+        ss.config.read_certificate_from_disk()
+        ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
+        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+            config
+        )
+        ss.start_listening(config.worker_listeners)
         ss.get_datastore().start_profiling()
 
     reactor.callWhenRunning(start)
diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py
index 228a297fb8..3de2715132 100644
--- a/synapse/app/federation_reader.py
+++ b/synapse/app/federation_reader.py
@@ -151,23 +151,23 @@ def start(config_options):
 
     database_engine = create_engine(config.database_config)
 
-    tls_server_context_factory = context_factory.ServerContextFactory(config)
-    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
-
     ss = FederationReaderServer(
         config.server_name,
         db_config=config.database_config,
-        tls_server_context_factory=tls_server_context_factory,
-        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
     )
 
     ss.setup()
-    ss.start_listening(config.worker_listeners)
 
     def start():
+        ss.config.read_certificate_from_disk()
+        ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
+        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+            config
+        )
+        ss.start_listening(config.worker_listeners)
         ss.get_datastore().start_profiling()
 
     reactor.callWhenRunning(start)
diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py
index e9a99d76e1..d944e0517f 100644
--- a/synapse/app/federation_sender.py
+++ b/synapse/app/federation_sender.py
@@ -183,24 +183,24 @@ def start(config_options):
     # Force the pushers to start since they will be disabled in the main config
     config.send_federation = True
 
-    tls_server_context_factory = context_factory.ServerContextFactory(config)
-    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
-
-    ps = FederationSenderServer(
+    ss = FederationSenderServer(
         config.server_name,
         db_config=config.database_config,
-        tls_server_context_factory=tls_server_context_factory,
-        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
     )
 
-    ps.setup()
-    ps.start_listening(config.worker_listeners)
+    ss.setup()
 
     def start():
-        ps.get_datastore().start_profiling()
+        ss.config.read_certificate_from_disk()
+        ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
+        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+            config
+        )
+        ss.start_listening(config.worker_listeners)
+        ss.get_datastore().start_profiling()
 
     reactor.callWhenRunning(start)
 
     _base.start_worker_reactor("synapse-federation-sender", config)
diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py
index f5c61dec5b..d9ef6edc3c 100644
--- a/synapse/app/frontend_proxy.py
+++ b/synapse/app/frontend_proxy.py
@@ -241,23 +241,23 @@ def start(config_options):
 
     database_engine = create_engine(config.database_config)
 
-    tls_server_context_factory = context_factory.ServerContextFactory(config)
-    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
-
     ss = FrontendProxyServer(
         config.server_name,
         db_config=config.database_config,
-        tls_server_context_factory=tls_server_context_factory,
-        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
     )
 
     ss.setup()
-    ss.start_listening(config.worker_listeners)
 
     def start():
+        ss.config.read_certificate_from_disk()
+        ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
+        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+            config
+        )
+        ss.start_listening(config.worker_listeners)
         ss.get_datastore().start_profiling()
 
     reactor.callWhenRunning(start)
diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py
index acc0487adc..4ecf64031b 100644
--- a/synapse/app/media_repository.py
+++ b/synapse/app/media_repository.py
@@ -151,23 +151,23 @@ def start(config_options):
 
     database_engine = create_engine(config.database_config)
 
-    tls_server_context_factory = context_factory.ServerContextFactory(config)
-    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
-
     ss = MediaRepositoryServer(
         config.server_name,
         db_config=config.database_config,
-        tls_server_context_factory=tls_server_context_factory,
-        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
     )
 
     ss.setup()
-    ss.start_listening(config.worker_listeners)
 
     def start():
+        ss.config.read_certificate_from_disk()
+        ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
+        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+            config
+        )
+        ss.start_listening(config.worker_listeners)
         ss.get_datastore().start_profiling()
 
     reactor.callWhenRunning(start)
diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py
index 0a5f62b509..176d55a783 100644
--- a/synapse/app/user_dir.py
+++ b/synapse/app/user_dir.py
@@ -211,24 +211,24 @@ def start(config_options):
     # Force the pushers to start since they will be disabled in the main config
     config.update_user_directory = True
 
-    tls_server_context_factory = context_factory.ServerContextFactory(config)
-    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
-
-    ps = UserDirectoryServer(
+    ss = UserDirectoryServer(
         config.server_name,
         db_config=config.database_config,
-        tls_server_context_factory=tls_server_context_factory,
-        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
     )
 
-    ps.setup()
-    ps.start_listening(config.worker_listeners)
+    ss.setup()
 
     def start():
-        ps.get_datastore().start_profiling()
+        ss.config.read_certificate_from_disk()
+        ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
+        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+            config
+        )
+        ss.start_listening(config.worker_listeners)
+        ss.get_datastore().start_profiling()
 
     reactor.callWhenRunning(start)
diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py
index 8774b28967..d01ac5075c 100644
--- a/synapse/crypto/event_signing.py
+++ b/synapse/crypto/event_signing.py
@@ -23,14 +23,14 @@ from signedjson.sign import sign_json
 from unpaddedbase64 import decode_base64, encode_base64
 
 from synapse.api.errors import Codes, SynapseError
-from synapse.events.utils import prune_event
+from synapse.events.utils import prune_event, prune_event_dict
 
 logger = logging.getLogger(__name__)
 
 
 def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
     """Check whether the hash for this PDU matches the contents"""
-    name, expected_hash = compute_content_hash(event, hash_algorithm)
+    name, expected_hash = compute_content_hash(event.get_pdu_json(), hash_algorithm)
     logger.debug("Expecting hash: %s", encode_base64(expected_hash))
 
     # some malformed events lack a 'hashes'. Protect against it being missing
@@ -59,35 +59,70 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
     return message_hash_bytes == expected_hash
 
 
-def compute_content_hash(event, hash_algorithm):
-    event_json = event.get_pdu_json()
-    event_json.pop("age_ts", None)
-    event_json.pop("unsigned", None)
-    event_json.pop("signatures", None)
-    event_json.pop("hashes", None)
-    event_json.pop("outlier", None)
-    event_json.pop("destinations", None)
+def compute_content_hash(event_dict, hash_algorithm):
+    """Compute the content hash of an event, which is the hash of the
+    unredacted event.
 
-    event_json_bytes = encode_canonical_json(event_json)
+    Args:
+        event_dict (dict): The unredacted event as a dict
+        hash_algorithm: A hasher from `hashlib`, e.g. hashlib.sha256, to use
+            to hash the event
+
+    Returns:
+        tuple[str, bytes]: A tuple of the name of hash and the hash as raw
+            bytes.
+    """
+    event_dict = dict(event_dict)
+    event_dict.pop("age_ts", None)
+    event_dict.pop("unsigned", None)
+    event_dict.pop("signatures", None)
+    event_dict.pop("hashes", None)
+    event_dict.pop("outlier", None)
+    event_dict.pop("destinations", None)
+
+    event_json_bytes = encode_canonical_json(event_dict)
 
     hashed = hash_algorithm(event_json_bytes)
     return (hashed.name, hashed.digest())
 
 
 def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):
+    """Computes the event reference hash. This is the hash of the redacted
+    event.
+
+    Args:
+        event (FrozenEvent)
+        hash_algorithm: A hasher from `hashlib`, e.g. hashlib.sha256, to use
+            to hash the event
+
+    Returns:
+        tuple[str, bytes]: A tuple of the name of hash and the hash as raw
+            bytes.
+    """
     tmp_event = prune_event(event)
-    event_json = tmp_event.get_pdu_json()
-    event_json.pop("signatures", None)
-    event_json.pop("age_ts", None)
-    event_json.pop("unsigned", None)
-    event_json_bytes = encode_canonical_json(event_json)
+    event_dict = tmp_event.get_pdu_json()
+    event_dict.pop("signatures", None)
+    event_dict.pop("age_ts", None)
+    event_dict.pop("unsigned", None)
+    event_json_bytes = encode_canonical_json(event_dict)
     hashed = hash_algorithm(event_json_bytes)
     return (hashed.name, hashed.digest())
 
 
-def compute_event_signature(event, signature_name, signing_key):
-    tmp_event = prune_event(event)
-    redact_json = tmp_event.get_pdu_json()
+def compute_event_signature(event_dict, signature_name, signing_key):
+    """Compute the signature of the event for the given name and key.
+
+    Args:
+        event_dict (dict): The event as a dict
+        signature_name (str): The name of the entity signing the event
+            (typically the server's hostname).
+        signing_key (syutil.crypto.SigningKey): The key to sign with
+
+    Returns:
+        dict[str, dict[str, str]]: Returns a dictionary in the same format of
+            an event's signatures field.
+    """
+    redact_json = prune_event_dict(event_dict)
     redact_json.pop("age_ts", None)
     redact_json.pop("unsigned", None)
     logger.debug("Signing event: %s", encode_canonical_json(redact_json))
@@ -98,23 +133,27 @@ def compute_event_signature(event, signature_name, signing_key):
 
 def add_hashes_and_signatures(event, signature_name, signing_key,
                               hash_algorithm=hashlib.sha256):
-    # if hasattr(event, "old_state_events"):
-    #     state_json_bytes = encode_canonical_json(
-    #         [e.event_id for e in event.old_state_events.values()]
-    #     )
-    #     hashed = hash_algorithm(state_json_bytes)
-    #     event.state_hash = {
-    #         hashed.name: encode_base64(hashed.digest())
-    #     }
-
-    name, digest = compute_content_hash(event, hash_algorithm=hash_algorithm)
+    """Add content hash and sign the event
+
+    Args:
+        event_dict (EventBuilder): The event to add hashes to and sign
+        signature_name (str): The name of the entity signing the event
+            (typically the server's hostname).
+        signing_key (syutil.crypto.SigningKey): The key to sign with
+        hash_algorithm: A hasher from `hashlib`, e.g. hashlib.sha256, to use
+            to hash the event
+    """
+
+    name, digest = compute_content_hash(
+        event.get_pdu_json(), hash_algorithm=hash_algorithm,
+    )
 
     if not hasattr(event, "hashes"):
         event.hashes = {}
     event.hashes[name] = encode_base64(digest)
 
     event.signatures = compute_event_signature(
-        event,
+        event.get_pdu_json(),
         signature_name=signature_name,
         signing_key=signing_key,
     )
diff --git a/synapse/event_auth.py b/synapse/event_auth.py
index c81d8e6729..9adedbbb02 100644
--- a/synapse/event_auth.py
+++ b/synapse/event_auth.py
@@ -27,10 +27,11 @@ from synapse.types import UserID, get_domain_from_id
 logger = logging.getLogger(__name__)
 
 
-def check(event, auth_events, do_sig_check=True, do_size_check=True):
+def check(room_version, event, auth_events, do_sig_check=True, do_size_check=True):
     """ Checks if this event is correctly authed.
 
     Args:
+        room_version (str): the version of the room
         event: the event being checked.
         auth_events (dict: event-key -> event): the existing room state.
 
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index 652941ca0d..63f693f259 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -38,8 +38,31 @@ def prune_event(event):
     This is used when we "redact" an event. We want to remove all fields that
     the user has specified, but we do want to keep necessary information like
     type, state_key etc.
+
+    Args:
+        event (FrozenEvent)
+
+    Returns:
+        FrozenEvent
+    """
+    pruned_event_dict = prune_event_dict(event.get_dict())
+
+    from . import event_type_from_format_version
+    return event_type_from_format_version(event.format_version)(
+        pruned_event_dict, event.internal_metadata.get_dict()
+    )
+
+
+def prune_event_dict(event_dict):
+    """Redacts the event_dict in the same way as `prune_event`, except it
+    operates on dicts rather than event objects
+
+    Args:
+        event_dict (dict)
+
+    Returns:
+        dict: A copy of the pruned event dict
     """
-    event_type = event.type
 
     allowed_keys = [
         "event_id",
@@ -59,13 +82,13 @@ def prune_event(event):
         "membership",
     ]
 
-    event_dict = event.get_dict()
+    event_type = event_dict["type"]
 
     new_content = {}
 
     def add_fields(*fields):
         for field in fields:
-            if field in event.content:
+            if field in event_dict["content"]:
                 new_content[field] = event_dict["content"][field]
 
     if event_type == EventTypes.Member:
@@ -98,17 +121,17 @@ def prune_event(event):
 
     allowed_fields["content"] = new_content
 
-    allowed_fields["unsigned"] = {}
+    unsigned = {}
+    allowed_fields["unsigned"] = unsigned
 
-    if "age_ts" in event.unsigned:
-        allowed_fields["unsigned"]["age_ts"] = event.unsigned["age_ts"]
-    if "replaces_state" in event.unsigned:
-        allowed_fields["unsigned"]["replaces_state"] = event.unsigned["replaces_state"]
+    event_unsigned = event_dict.get("unsigned", {})
 
-    return type(event)(
-        allowed_fields,
-        internal_metadata_dict=event.internal_metadata.get_dict()
-    )
+    if "age_ts" in event_unsigned:
+        unsigned["age_ts"] = event_unsigned["age_ts"]
+    if "replaces_state" in event_unsigned:
+        unsigned["replaces_state"] = event_unsigned["replaces_state"]
+
+    return allowed_fields
 
 
 def _copy_field(src, dst, field):
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 13333818ae..adf59db7a8 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -1189,7 +1189,9 @@ class FederationHandler(BaseHandler):
 
         # The remote hasn't signed it yet, obviously. We'll do the full checks
         # when we get the event back in `on_send_join_request`
-        yield self.auth.check_from_context(event, context, do_sig_check=False)
+        yield self.auth.check_from_context(
+            room_version, event, context, do_sig_check=False,
+        )
 
         defer.returnValue(event)
 
@@ -1388,7 +1390,9 @@ class FederationHandler(BaseHandler):
         try:
             # The remote hasn't signed it yet, obviously. We'll do the full checks
             # when we get the event back in `on_send_leave_request`
-            yield self.auth.check_from_context(event, context, do_sig_check=False)
+            yield self.auth.check_from_context(
+                room_version, event, context, do_sig_check=False,
+            )
         except AuthError as e:
             logger.warn("Failed to create new leave %r because %s", event, e)
             raise e
@@ -1683,7 +1687,7 @@ class FederationHandler(BaseHandler):
                 auth_for_e[(EventTypes.Create, "")] = create_event
 
             try:
-                self.auth.check(e, auth_events=auth_for_e)
+                self.auth.check(room_version, e, auth_events=auth_for_e)
             except SynapseError as err:
                 # we may get SynapseErrors here as well as AuthErrors. For
                 # instance, there are a couple of (ancient) events in some
@@ -1927,6 +1931,8 @@ class FederationHandler(BaseHandler):
         current_state = set(e.event_id for e in auth_events.values())
         different_auth = event_auth_events - current_state
 
+        room_version = yield self.store.get_room_version(event.room_id)
+
         if different_auth and not event.internal_metadata.is_outlier():
             # Do auth conflict res.
             logger.info("Different auth: %s", different_auth)
@@ -1951,8 +1957,6 @@ class FederationHandler(BaseHandler):
                 (d.type, d.state_key): d for d in different_events if d
             })
 
-            room_version = yield self.store.get_room_version(event.room_id)
-
             new_state = yield self.state_handler.resolve_events(
                 room_version,
                 [list(local_view.values()), list(remote_view.values())],
@@ -2052,7 +2056,7 @@ class FederationHandler(BaseHandler):
         )
 
         try:
-            self.auth.check(event, auth_events=auth_events)
+            self.auth.check(room_version, event, auth_events=auth_events)
         except AuthError as e:
             logger.warn("Failed auth resolution for %r because %s", event, e)
             raise e
@@ -2290,7 +2294,7 @@ class FederationHandler(BaseHandler):
             EventValidator().validate_new(event)
 
             try:
-                yield self.auth.check_from_context(event, context)
+                yield self.auth.check_from_context(room_version, event, context)
             except AuthError as e:
                 logger.warn("Denying new third party invite %r because %s", event, e)
                 raise e
@@ -2332,7 +2336,7 @@ class FederationHandler(BaseHandler):
         )
 
         try:
-            self.auth.check_from_context(event, context)
+            self.auth.check_from_context(room_version, event, context)
         except AuthError as e:
             logger.warn("Denying third party invite %r because %s", event, e)
             raise e
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index d2aab25111..05d1370c18 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -22,7 +22,7 @@ from canonicaljson import encode_canonical_json, json
 from twisted.internet import defer
 from twisted.internet.defer import succeed
 
-from synapse.api.constants import MAX_DEPTH, EventTypes, Membership
+from synapse.api.constants import MAX_DEPTH, EventTypes, Membership, RoomVersions
 from synapse.api.errors import (
     AuthError,
     Codes,
@@ -613,8 +613,13 @@ class EventCreationHandler(object):
             extra_users (list(UserID)): Any extra users to notify about event
         """
 
+        if event.is_state() and (event.type, event.state_key) == (EventTypes.Create, ""):
+            room_version = event.content.get("room_version", RoomVersions.V1)
+        else:
+            room_version = yield self.store.get_room_version(event.room_id)
+
         try:
-            yield self.auth.check_from_context(event, context)
+            yield self.auth.check_from_context(room_version, event, context)
         except AuthError as err:
             logger.warn("Denying new event %r because %s", event, err)
             raise err
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 388302de09..13ba9291b0 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -123,7 +123,10 @@ class RoomCreationHandler(BaseHandler):
                 token_id=requester.access_token_id,
             )
         )
-        yield self.auth.check_from_context(tombstone_event, tombstone_context)
+        old_room_version = yield self.store.get_room_version(old_room_id)
+        yield self.auth.check_from_context(
+            old_room_version, tombstone_event, tombstone_context,
+        )
 
         yield self.clone_existing_room(
             requester,
diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py
index 2e16c69666..0f0a07c422 100644
--- a/synapse/replication/http/federation.py
+++ b/synapse/replication/http/federation.py
@@ -95,7 +95,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
         event_and_contexts = []
         for event_payload in event_payloads:
             event_dict = event_payload["event"]
-            format_ver = content["event_format_version"]
+            format_ver = event_payload["event_format_version"]
             internal_metadata = event_payload["internal_metadata"]
             rejected_reason = event_payload["rejected_reason"]
 
diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py
index e9ecb00277..2fca51d0b2 100644
--- a/synapse/state/__init__.py
+++ b/synapse/state/__init__.py
@@ -611,7 +611,7 @@ def resolve_events_with_store(room_version, state_sets, event_map, state_res_sto
         RoomVersions.VDH_TEST, RoomVersions.STATE_V2_TEST, RoomVersions.V2,
     ):
         return v2.resolve_events_with_store(
-            state_sets, event_map, state_res_store,
+            room_version, state_sets, event_map, state_res_store,
         )
     else:
         # This should only happen if we added a version but forgot to add it to
diff --git a/synapse/state/v1.py b/synapse/state/v1.py
index 19e091ce3b..6d3afcae7c 100644
--- a/synapse/state/v1.py
+++ b/synapse/state/v1.py
@@ -21,7 +21,7 @@ from six import iteritems, iterkeys, itervalues
 from twisted.internet import defer
 
 from synapse import event_auth
-from synapse.api.constants import EventTypes
+from synapse.api.constants import EventTypes, RoomVersions
 from synapse.api.errors import AuthError
 
 logger = logging.getLogger(__name__)
@@ -274,7 +274,11 @@ def _resolve_auth_events(events, auth_events):
         auth_events[(prev_event.type, prev_event.state_key)] = prev_event
         try:
             # The signatures have already been checked at this point
-            event_auth.check(event, auth_events, do_sig_check=False, do_size_check=False)
+            event_auth.check(
+                RoomVersions.V1, event, auth_events,
+                do_sig_check=False,
+                do_size_check=False,
+            )
             prev_event = event
         except AuthError:
             return prev_event
@@ -286,7 +290,11 @@ def _resolve_normal_events(events, auth_events):
     for event in _ordered_events(events):
         try:
             # The signatures have already been checked at this point
-            event_auth.check(event, auth_events, do_sig_check=False, do_size_check=False)
+            event_auth.check(
+                RoomVersions.V1, event, auth_events,
+                do_sig_check=False,
+                do_size_check=False,
+            )
             return event
         except AuthError:
             pass
diff --git a/synapse/state/v2.py b/synapse/state/v2.py
index 3573bb0028..650995c92c 100644
--- a/synapse/state/v2.py
+++ b/synapse/state/v2.py
@@ -29,10 +29,12 @@ logger = logging.getLogger(__name__)
 
 
 @defer.inlineCallbacks
-def resolve_events_with_store(state_sets, event_map, state_res_store):
+def resolve_events_with_store(room_version, state_sets, event_map, state_res_store):
     """Resolves the state using the v2 state resolution algorithm
 
     Args:
+        room_version (str): The room version
+
         state_sets(list): List of dicts of (type, state_key) -> event_id,
             which are the different state groups to resolve.
@@ -104,7 +106,7 @@ def resolve_events_with_store(state_sets, event_map, state_res_store):
 
     # Now sequentially auth each one
     resolved_state = yield _iterative_auth_checks(
-        sorted_power_events, unconflicted_state, event_map,
+        room_version, sorted_power_events, unconflicted_state, event_map,
         state_res_store,
     )
 
@@ -129,7 +131,7 @@ def resolve_events_with_store(state_sets, event_map, state_res_store):
     logger.debug("resolving remaining events")
 
     resolved_state = yield _iterative_auth_checks(
-        leftover_events, resolved_state, event_map,
+        room_version, leftover_events, resolved_state, event_map,
         state_res_store,
     )
 
@@ -350,11 +352,13 @@ def _reverse_topological_power_sort(event_ids, event_map, state_res_store, auth_
 
 
 @defer.inlineCallbacks
-def _iterative_auth_checks(event_ids, base_state, event_map, state_res_store):
+def _iterative_auth_checks(room_version, event_ids, base_state, event_map,
+                           state_res_store):
     """Sequentially apply auth checks to each event in given list, updating the
     state as it goes along.
 
     Args:
+        room_version (str)
         event_ids (list[str]): Ordered list of events to apply auth checks to
         base_state (dict[tuple[str, str], str]): The set of state to start with
         event_map (dict[str,FrozenEvent])
@@ -385,7 +389,7 @@ def _iterative_auth_checks(event_ids, base_state, event_map, state_res_store):
 
         try:
             event_auth.check(
-                event, auth_events,
+                room_version, event, auth_events,
                 do_sig_check=False,
                 do_size_check=False
             )
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 5109bc3e2e..4872ff55b6 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -236,7 +236,7 @@ class SQLBaseStore(object):
                 self._unsafe_to_upsert_tables.discard("user_ips")
 
         # If there's any tables left to check, reschedule to run.
-        if self.updates:
+        if updates:
             self._clock.call_later(
                 15.0,
                 run_as_background_process,
diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py
index 8257594fb8..53b52ace59 100644
--- a/tests/http/federation/test_matrix_federation_agent.py
+++ b/tests/http/federation/test_matrix_federation_agent.py
@@ -377,9 +377,8 @@ class MatrixFederationAgentTests(TestCase):
 
         self.mock_resolver.resolve_service.side_effect = lambda _: []
 
-        # hostnameendpoint does the lookup on the unicode value (getaddrinfo encodes
-        # it back to idna)
-        self.reactor.lookups[u"bücher.com"] = "1.2.3.4"
+        # the resolver is always called with the IDNA hostname as a native string.
+        self.reactor.lookups["xn--bcher-kva.com"] = "1.2.3.4"  # this is idna for bücher.com
 
         test_d = self._make_get_request(b"matrix://xn--bcher-kva.com/foo/bar")
 
@@ -424,7 +423,7 @@ class MatrixFederationAgentTests(TestCase):
         self.mock_resolver.resolve_service.side_effect = lambda _: [
             Server(host=b"xn--trget-3qa.com", port=8443)  # târget.com
         ]
-        self.reactor.lookups[u"târget.com"] = "1.2.3.4"
+        self.reactor.lookups["xn--trget-3qa.com"] = "1.2.3.4"
 
         test_d = self._make_get_request(b"matrix://xn--bcher-kva.com/foo/bar")
 
diff --git a/tests/server.py b/tests/server.py
index ed2a046ae6..6adcc73f91 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -8,11 +8,10 @@ import attr
 from zope.interface import implementer
 
 from twisted.internet import address, threads, udp
-from twisted.internet._resolver import HostResolution
-from twisted.internet.address import IPv4Address
-from twisted.internet.defer import Deferred
+from twisted.internet._resolver import SimpleResolverComplexifier
+from twisted.internet.defer import Deferred, fail, succeed
 from twisted.internet.error import DNSLookupError
-from twisted.internet.interfaces import IReactorPluggableNameResolver
+from twisted.internet.interfaces import IReactorPluggableNameResolver, IResolverSimple
 from twisted.python.failure import Failure
 from twisted.test.proto_helpers import MemoryReactorClock
 from twisted.web.http import unquote
@@ -227,30 +226,16 @@ class ThreadedMemoryReactorClock(MemoryReactorClock):
 
     def __init__(self):
         self._udp = []
-        self.lookups = {}
-
-        class Resolver(object):
-            def resolveHostName(
-                _self,
-                resolutionReceiver,
-                hostName,
-                portNumber=0,
-                addressTypes=None,
-                transportSemantics='TCP',
-            ):
-
-                resolution = HostResolution(hostName)
-                resolutionReceiver.resolutionBegan(resolution)
-                if hostName not in self.lookups:
-                    raise DNSLookupError("OH NO")
-
-                resolutionReceiver.addressResolved(
-                    IPv4Address('TCP', self.lookups[hostName], portNumber)
-                )
-                resolutionReceiver.resolutionComplete()
-                return resolution
-
-        self.nameResolver = Resolver()
+        lookups = self.lookups = {}
+
+        @implementer(IResolverSimple)
+        class FakeResolver(object):
+            def getHostByName(self, name, timeout=None):
+                if name not in lookups:
+                    return fail(DNSLookupError("OH NO: unknown %s" % (name, )))
+                return succeed(lookups[name])
+
+        self.nameResolver = SimpleResolverComplexifier(FakeResolver())
 
         super(ThreadedMemoryReactorClock, self).__init__()
 
     def listenUDP(self, port, protocol, interface='', maxPacketSize=8196):
diff --git a/tests/state/test_v2.py b/tests/state/test_v2.py
index 2e073a3afc..9a5c816927 100644
--- a/tests/state/test_v2.py
+++ b/tests/state/test_v2.py
@@ -19,7 +19,7 @@ from six.moves import zip
 
 import attr
 
-from synapse.api.constants import EventTypes, JoinRules, Membership
+from synapse.api.constants import EventTypes, JoinRules, Membership, RoomVersions
 from synapse.event_auth import auth_types_for_event
 from synapse.events import FrozenEvent
 from synapse.state.v2 import lexicographical_topological_sort, resolve_events_with_store
@@ -539,6 +539,7 @@ class StateTestCase(unittest.TestCase):
             state_before = dict(state_at_event[prev_events[0]])
         else:
             state_d = resolve_events_with_store(
+                RoomVersions.V2,
                 [state_at_event[n] for n in prev_events],
                 event_map=event_map,
                 state_res_store=TestStateResolutionStore(event_map),
@@ -685,6 +686,7 @@ class SimpleParamStateTestCase(unittest.TestCase):
         # Test that we correctly handle passing `None` as the event_map
 
         state_d = resolve_events_with_store(
+            RoomVersions.V2,
             [self.state_at_bob, self.state_at_charlie],
             event_map=None,
             state_res_store=TestStateResolutionStore(self.event_map),
diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py
index 411b4a9f86..7ee318e4e8 100644
--- a/tests/test_event_auth.py
+++ b/tests/test_event_auth.py
@@ -16,6 +16,7 @@ import unittest
 
 from synapse import event_auth
+from synapse.api.constants import RoomVersions
 from synapse.api.errors import AuthError
 from synapse.events import FrozenEvent
 
@@ -35,12 +36,16 @@ class EventAuthTestCase(unittest.TestCase):
         }
 
         # creator should be able to send state
-        event_auth.check(_random_state_event(creator), auth_events, do_sig_check=False)
+        event_auth.check(
+            RoomVersions.V1, _random_state_event(creator), auth_events,
+            do_sig_check=False,
+        )
 
         # joiner should not be able to send state
         self.assertRaises(
             AuthError,
             event_auth.check,
+            RoomVersions.V1,
             _random_state_event(joiner),
             auth_events,
             do_sig_check=False,
@@ -69,13 +74,17 @@ class EventAuthTestCase(unittest.TestCase):
         self.assertRaises(
             AuthError,
             event_auth.check,
+            RoomVersions.V1,
             _random_state_event(pleb),
             auth_events,
             do_sig_check=False,
         ),
 
         # king should be able to send state
-        event_auth.check(_random_state_event(king), auth_events, do_sig_check=False)
+        event_auth.check(
+            RoomVersions.V1, _random_state_event(king), auth_events,
+            do_sig_check=False,
+        )
 
 
 # helpers for making events
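
Note on the UPSERT changelog entry (changelog.d/4505.misc) and the synapse/storage/_base.py hunk above: the "native UPSERT" being referred to is the INSERT ... ON CONFLICT ... DO UPDATE statement available in PostgreSQL 9.5+ and SQLite 3.24+. The following is a minimal sketch, not Synapse code; the table and column names are illustrative (loosely modelled on the user_ips table mentioned in the diff).

import sqlite3

# Native upsert requires SQLite 3.24+ (or PostgreSQL 9.5+ with the same syntax).
assert sqlite3.sqlite_version_info >= (3, 24, 0), "native upsert needs SQLite 3.24+"

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE user_ips (user_id TEXT PRIMARY KEY, last_seen INTEGER)")

def upsert_last_seen(user_id, ts):
    # Insert the row, or update it in place if the key already exists,
    # in a single statement instead of a lock-and-retry emulation.
    conn.execute(
        "INSERT INTO user_ips (user_id, last_seen) VALUES (?, ?) "
        "ON CONFLICT (user_id) DO UPDATE SET last_seen = excluded.last_seen",
        (user_id, ts),
    )

upsert_last_seen("@alice:example.com", 1000)
upsert_last_seen("@alice:example.com", 2000)  # second call updates rather than failing
print(conn.execute("SELECT last_seen FROM user_ips").fetchone())  # prints (2000,)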