Diffstat (limited to 'synapse')
-rw-r--r--  synapse/api/auth.py                      | 14
-rw-r--r--  synapse/app/client_reader.py             | 12
-rw-r--r--  synapse/app/event_creator.py             | 12
-rw-r--r--  synapse/app/federation_reader.py         | 12
-rw-r--r--  synapse/app/federation_sender.py         | 18
-rw-r--r--  synapse/app/frontend_proxy.py            | 12
-rw-r--r--  synapse/app/media_repository.py          | 12
-rw-r--r--  synapse/app/user_dir.py                  | 18
-rw-r--r--  synapse/crypto/event_signing.py          | 99
-rw-r--r--  synapse/event_auth.py                    |  3
-rw-r--r--  synapse/events/utils.py                  | 47
-rw-r--r--  synapse/handlers/federation.py           | 20
-rw-r--r--  synapse/handlers/message.py              |  9
-rw-r--r--  synapse/handlers/room.py                 |  5
-rw-r--r--  synapse/replication/http/federation.py  |  2
-rw-r--r--  synapse/state/__init__.py                |  2
-rw-r--r--  synapse/state/v1.py                      | 14
-rw-r--r--  synapse/state/v2.py                      | 14
-rw-r--r--  synapse/storage/_base.py                 |  2
19 files changed, 210 insertions, 117 deletions
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index e37b807c94..7b213e54c8 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -65,7 +65,7 @@ class Auth(object):
         register_cache("cache", "token_cache", self.token_cache)
 
     @defer.inlineCallbacks
-    def check_from_context(self, event, context, do_sig_check=True):
+    def check_from_context(self, room_version, event, context, do_sig_check=True):
         prev_state_ids = yield context.get_prev_state_ids(self.store)
         auth_events_ids = yield self.compute_auth_events(
             event, prev_state_ids, for_verification=True,
@@ -74,12 +74,16 @@ class Auth(object):
         auth_events = {
             (e.type, e.state_key): e for e in itervalues(auth_events)
         }
-        self.check(event, auth_events=auth_events, do_sig_check=do_sig_check)
+        self.check(
+            room_version, event,
+            auth_events=auth_events, do_sig_check=do_sig_check,
+        )
 
-    def check(self, event, auth_events, do_sig_check=True):
+    def check(self, room_version, event, auth_events, do_sig_check=True):
         """ Checks if this event is correctly authed.
 
         Args:
+            room_version (str): version of the room
             event: the event being checked.
             auth_events (dict: event-key -> event): the existing room state.
 
@@ -88,7 +92,9 @@ class Auth(object):
             True if the auth checks pass.
         """
         with Measure(self.clock, "auth.check"):
-            event_auth.check(event, auth_events, do_sig_check=do_sig_check)
+            event_auth.check(
+                room_version, event, auth_events, do_sig_check=do_sig_check
+            )
 
     @defer.inlineCallbacks
     def check_joined_room(self, room_id, user_id, current_state=None):
diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py
index 76aed8c60a..f8a417cb60 100644
--- a/synapse/app/client_reader.py
+++ b/synapse/app/client_reader.py
@@ -164,23 +164,23 @@ def start(config_options):
 
     database_engine = create_engine(config.database_config)
 
-    tls_server_context_factory = context_factory.ServerContextFactory(config)
-    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
-
     ss = ClientReaderServer(
         config.server_name,
         db_config=config.database_config,
-        tls_server_context_factory=tls_server_context_factory,
-        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
     )
 
     ss.setup()
-    ss.start_listening(config.worker_listeners)
 
     def start():
+        ss.config.read_certificate_from_disk()
+        ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
+        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+            config
+        )
+        ss.start_listening(config.worker_listeners)
         ss.get_datastore().start_profiling()
 
     reactor.callWhenRunning(start)
diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py
index e4a68715aa..656e0edc0f 100644
--- a/synapse/app/event_creator.py
+++ b/synapse/app/event_creator.py
@@ -185,23 +185,23 @@ def start(config_options):
 
     database_engine = create_engine(config.database_config)
 
-    tls_server_context_factory = context_factory.ServerContextFactory(config)
-    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
-
     ss = EventCreatorServer(
         config.server_name,
         db_config=config.database_config,
-        tls_server_context_factory=tls_server_context_factory,
-        tls_client_options_factory=tls_client_options_factory,
        config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
     )
 
     ss.setup()
-    ss.start_listening(config.worker_listeners)
 
     def start():
+        ss.config.read_certificate_from_disk()
+        ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
+        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+            config
+        )
+        ss.start_listening(config.worker_listeners)
         ss.get_datastore().start_profiling()
 
     reactor.callWhenRunning(start)
diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py
index 228a297fb8..3de2715132 100644
--- a/synapse/app/federation_reader.py
+++ b/synapse/app/federation_reader.py
@@ -151,23 +151,23 @@ def start(config_options):
 
     database_engine = create_engine(config.database_config)
 
-    tls_server_context_factory = context_factory.ServerContextFactory(config)
-    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
-
     ss = FederationReaderServer(
         config.server_name,
         db_config=config.database_config,
-        tls_server_context_factory=tls_server_context_factory,
-        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
     )
 
     ss.setup()
-    ss.start_listening(config.worker_listeners)
 
     def start():
+        ss.config.read_certificate_from_disk()
+        ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
+        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+            config
+        )
+        ss.start_listening(config.worker_listeners)
         ss.get_datastore().start_profiling()
 
     reactor.callWhenRunning(start)
diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py
index e9a99d76e1..d944e0517f 100644
--- a/synapse/app/federation_sender.py
+++ b/synapse/app/federation_sender.py
@@ -183,24 +183,24 @@ def start(config_options):
     # Force the pushers to start since they will be disabled in the main config
     config.send_federation = True
 
-    tls_server_context_factory = context_factory.ServerContextFactory(config)
-    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
-
-    ps = FederationSenderServer(
+    ss = FederationSenderServer(
         config.server_name,
         db_config=config.database_config,
-        tls_server_context_factory=tls_server_context_factory,
-        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
     )
 
-    ps.setup()
-    ps.start_listening(config.worker_listeners)
+    ss.setup()
 
     def start():
-        ps.get_datastore().start_profiling()
+        ss.config.read_certificate_from_disk()
+        ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
+        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+            config
+        )
+        ss.start_listening(config.worker_listeners)
+        ss.get_datastore().start_profiling()
 
     reactor.callWhenRunning(start)
 
     _base.start_worker_reactor("synapse-federation-sender", config)
diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py
index f5c61dec5b..d9ef6edc3c 100644
--- a/synapse/app/frontend_proxy.py
+++ b/synapse/app/frontend_proxy.py
@@ -241,23 +241,23 @@ def start(config_options):
 
     database_engine = create_engine(config.database_config)
 
-    tls_server_context_factory = context_factory.ServerContextFactory(config)
-    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
-
     ss = FrontendProxyServer(
         config.server_name,
         db_config=config.database_config,
-        tls_server_context_factory=tls_server_context_factory,
-        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
     )
 
     ss.setup()
-    ss.start_listening(config.worker_listeners)
 
     def start():
+        ss.config.read_certificate_from_disk()
+        ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
+        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+            config
+        )
+        ss.start_listening(config.worker_listeners)
         ss.get_datastore().start_profiling()
 
     reactor.callWhenRunning(start)
diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py
index acc0487adc..4ecf64031b 100644
--- a/synapse/app/media_repository.py
+++ b/synapse/app/media_repository.py
@@ -151,23 +151,23 @@ def start(config_options):
 
     database_engine = create_engine(config.database_config)
 
-    tls_server_context_factory = context_factory.ServerContextFactory(config)
-    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
-
     ss = MediaRepositoryServer(
         config.server_name,
         db_config=config.database_config,
-        tls_server_context_factory=tls_server_context_factory,
-        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
     )
 
     ss.setup()
-    ss.start_listening(config.worker_listeners)
 
     def start():
+        ss.config.read_certificate_from_disk()
+        ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
+        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+            config
+        )
+        ss.start_listening(config.worker_listeners)
         ss.get_datastore().start_profiling()
 
     reactor.callWhenRunning(start)
diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py
index 0a5f62b509..176d55a783 100644
--- a/synapse/app/user_dir.py
+++ b/synapse/app/user_dir.py
@@ -211,24 +211,24 @@ def start(config_options):
     # Force the pushers to start since they will be disabled in the main config
     config.update_user_directory = True
 
-    tls_server_context_factory = context_factory.ServerContextFactory(config)
-    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)
-
-    ps = UserDirectoryServer(
+    ss = UserDirectoryServer(
         config.server_name,
         db_config=config.database_config,
-        tls_server_context_factory=tls_server_context_factory,
-        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
     )
 
-    ps.setup()
-    ps.start_listening(config.worker_listeners)
+    ss.setup()
 
     def start():
-        ps.get_datastore().start_profiling()
+        ss.config.read_certificate_from_disk()
+        ss.tls_server_context_factory = context_factory.ServerContextFactory(config)
+        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
+            config
+        )
+        ss.start_listening(config.worker_listeners)
+        ss.get_datastore().start_profiling()
 
     reactor.callWhenRunning(start)
diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py
index 8774b28967..d01ac5075c 100644
--- a/synapse/crypto/event_signing.py
+++ b/synapse/crypto/event_signing.py
@@ -23,14 +23,14 @@ from signedjson.sign import sign_json
 from unpaddedbase64 import decode_base64, encode_base64
 
 from synapse.api.errors import Codes, SynapseError
-from synapse.events.utils import prune_event
+from synapse.events.utils import prune_event, prune_event_dict
 
 logger = logging.getLogger(__name__)
 
 
 def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
     """Check whether the hash for this PDU matches the contents"""
-    name, expected_hash = compute_content_hash(event, hash_algorithm)
+    name, expected_hash = compute_content_hash(event.get_pdu_json(), hash_algorithm)
     logger.debug("Expecting hash: %s", encode_base64(expected_hash))
 
     # some malformed events lack a 'hashes'. Protect against it being missing
@@ -59,35 +59,70 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
     return message_hash_bytes == expected_hash
 
 
-def compute_content_hash(event, hash_algorithm):
-    event_json = event.get_pdu_json()
-    event_json.pop("age_ts", None)
-    event_json.pop("unsigned", None)
-    event_json.pop("signatures", None)
-    event_json.pop("hashes", None)
-    event_json.pop("outlier", None)
-    event_json.pop("destinations", None)
+def compute_content_hash(event_dict, hash_algorithm):
+    """Compute the content hash of an event, which is the hash of the
+    unredacted event.
 
-    event_json_bytes = encode_canonical_json(event_json)
+    Args:
+        event_dict (dict): The unredacted event as a dict
+        hash_algorithm: A hasher from `hashlib`, e.g. hashlib.sha256, to use
+            to hash the event
+
+    Returns:
+        tuple[str, bytes]: A tuple of the name of hash and the hash as raw
+        bytes.
+    """
+    event_dict = dict(event_dict)
+    event_dict.pop("age_ts", None)
+    event_dict.pop("unsigned", None)
+    event_dict.pop("signatures", None)
+    event_dict.pop("hashes", None)
+    event_dict.pop("outlier", None)
+    event_dict.pop("destinations", None)
+
+    event_json_bytes = encode_canonical_json(event_dict)
 
     hashed = hash_algorithm(event_json_bytes)
     return (hashed.name, hashed.digest())
 
 
 def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):
+    """Computes the event reference hash. This is the hash of the redacted
+    event.
+
+    Args:
+        event (FrozenEvent)
+        hash_algorithm: A hasher from `hashlib`, e.g. hashlib.sha256, to use
+            to hash the event
+
+    Returns:
+        tuple[str, bytes]: A tuple of the name of hash and the hash as raw
+        bytes.
+    """
     tmp_event = prune_event(event)
-    event_json = tmp_event.get_pdu_json()
-    event_json.pop("signatures", None)
-    event_json.pop("age_ts", None)
-    event_json.pop("unsigned", None)
-    event_json_bytes = encode_canonical_json(event_json)
+    event_dict = tmp_event.get_pdu_json()
+    event_dict.pop("signatures", None)
+    event_dict.pop("age_ts", None)
+    event_dict.pop("unsigned", None)
+    event_json_bytes = encode_canonical_json(event_dict)
     hashed = hash_algorithm(event_json_bytes)
     return (hashed.name, hashed.digest())
 
 
-def compute_event_signature(event, signature_name, signing_key):
-    tmp_event = prune_event(event)
-    redact_json = tmp_event.get_pdu_json()
+def compute_event_signature(event_dict, signature_name, signing_key):
+    """Compute the signature of the event for the given name and key.
+
+    Args:
+        event_dict (dict): The event as a dict
+        signature_name (str): The name of the entity signing the event
+            (typically the server's hostname).
+        signing_key (syutil.crypto.SigningKey): The key to sign with
+
+    Returns:
+        dict[str, dict[str, str]]: Returns a dictionary in the same format of
+        an event's signatures field.
+    """
+    redact_json = prune_event_dict(event_dict)
     redact_json.pop("age_ts", None)
     redact_json.pop("unsigned", None)
     logger.debug("Signing event: %s", encode_canonical_json(redact_json))
@@ -98,23 +133,27 @@ def compute_event_signature(event, signature_name, signing_key):
 
 def add_hashes_and_signatures(event, signature_name, signing_key,
                               hash_algorithm=hashlib.sha256):
-    # if hasattr(event, "old_state_events"):
-    #     state_json_bytes = encode_canonical_json(
-    #         [e.event_id for e in event.old_state_events.values()]
-    #     )
-    #     hashed = hash_algorithm(state_json_bytes)
-    #     event.state_hash = {
-    #         hashed.name: encode_base64(hashed.digest())
-    #     }
-
-    name, digest = compute_content_hash(event, hash_algorithm=hash_algorithm)
+    """Add content hash and sign the event
+
+    Args:
+        event_dict (EventBuilder): The event to add hashes to and sign
+        signature_name (str): The name of the entity signing the event
+            (typically the server's hostname).
+        signing_key (syutil.crypto.SigningKey): The key to sign with
+        hash_algorithm: A hasher from `hashlib`, e.g. hashlib.sha256, to use
+            to hash the event
+    """
+
+    name, digest = compute_content_hash(
+        event.get_pdu_json(), hash_algorithm=hash_algorithm,
+    )
 
     if not hasattr(event, "hashes"):
         event.hashes = {}
     event.hashes[name] = encode_base64(digest)
 
     event.signatures = compute_event_signature(
-        event,
+        event.get_pdu_json(),
         signature_name=signature_name,
         signing_key=signing_key,
     )
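(Aside, not part of the patch: a minimal sketch of the new compute_content_hash calling convention, assuming the patched synapse.crypto.event_signing module is importable; the event dict below is made up.)

    import hashlib

    from unpaddedbase64 import encode_base64

    from synapse.crypto.event_signing import compute_content_hash

    # The function now takes the plain event dict (event.get_pdu_json() for a
    # real event object) rather than the event object itself.
    event_dict = {
        "type": "m.room.message",
        "room_id": "!room:example.com",
        "sender": "@alice:example.com",
        "content": {"msgtype": "m.text", "body": "hello"},
        "unsigned": {"age_ts": 1234},  # stripped before hashing, per the code above
    }

    name, digest = compute_content_hash(event_dict, hashlib.sha256)
    print(name, encode_base64(digest))  # e.g. "sha256" and a base64 digest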
+ """ + redact_json = prune_event_dict(event_dict) redact_json.pop("age_ts", None) redact_json.pop("unsigned", None) logger.debug("Signing event: %s", encode_canonical_json(redact_json)) @@ -98,23 +133,27 @@ def compute_event_signature(event, signature_name, signing_key): def add_hashes_and_signatures(event, signature_name, signing_key, hash_algorithm=hashlib.sha256): - # if hasattr(event, "old_state_events"): - # state_json_bytes = encode_canonical_json( - # [e.event_id for e in event.old_state_events.values()] - # ) - # hashed = hash_algorithm(state_json_bytes) - # event.state_hash = { - # hashed.name: encode_base64(hashed.digest()) - # } - - name, digest = compute_content_hash(event, hash_algorithm=hash_algorithm) + """Add content hash and sign the event + + Args: + event_dict (EventBuilder): The event to add hashes to and sign + signature_name (str): The name of the entity signing the event + (typically the server's hostname). + signing_key (syutil.crypto.SigningKey): The key to sign with + hash_algorithm: A hasher from `hashlib`, e.g. hashlib.sha256, to use + to hash the event + """ + + name, digest = compute_content_hash( + event.get_pdu_json(), hash_algorithm=hash_algorithm, + ) if not hasattr(event, "hashes"): event.hashes = {} event.hashes[name] = encode_base64(digest) event.signatures = compute_event_signature( - event, + event.get_pdu_json(), signature_name=signature_name, signing_key=signing_key, ) diff --git a/synapse/event_auth.py b/synapse/event_auth.py index c81d8e6729..9adedbbb02 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -27,10 +27,11 @@ from synapse.types import UserID, get_domain_from_id logger = logging.getLogger(__name__) -def check(event, auth_events, do_sig_check=True, do_size_check=True): +def check(room_version, event, auth_events, do_sig_check=True, do_size_check=True): """ Checks if this event is correctly authed. Args: + room_version (str): the version of the room event: the event being checked. auth_events (dict: event-key -> event): the existing room state. diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 652941ca0d..63f693f259 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -38,8 +38,31 @@ def prune_event(event): This is used when we "redact" an event. We want to remove all fields that the user has specified, but we do want to keep necessary information like type, state_key etc. + + Args: + event (FrozenEvent) + + Returns: + FrozenEvent + """ + pruned_event_dict = prune_event_dict(event.get_dict()) + + from . 
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index 652941ca0d..63f693f259 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -38,8 +38,31 @@ def prune_event(event):
     This is used when we "redact" an event. We want to remove all fields that
     the user has specified, but we do want to keep necessary information like
     type, state_key etc.
+
+    Args:
+        event (FrozenEvent)
+
+    Returns:
+        FrozenEvent
+    """
+    pruned_event_dict = prune_event_dict(event.get_dict())
+
+    from . import event_type_from_format_version
+    return event_type_from_format_version(event.format_version)(
+        pruned_event_dict, event.internal_metadata.get_dict()
+    )
+
+
+def prune_event_dict(event_dict):
+    """Redacts the event_dict in the same way as `prune_event`, except it
+    operates on dicts rather than event objects
+
+    Args:
+        event_dict (dict)
+
+    Returns:
+        dict: A copy of the pruned event dict
     """
-    event_type = event.type
 
     allowed_keys = [
         "event_id",
@@ -59,13 +82,13 @@ def prune_event(event):
         "membership",
     ]
 
-    event_dict = event.get_dict()
+    event_type = event_dict["type"]
 
     new_content = {}
 
     def add_fields(*fields):
         for field in fields:
-            if field in event.content:
+            if field in event_dict["content"]:
                 new_content[field] = event_dict["content"][field]
 
     if event_type == EventTypes.Member:
@@ -98,17 +121,17 @@ def prune_event(event):
 
     allowed_fields["content"] = new_content
 
-    allowed_fields["unsigned"] = {}
+    unsigned = {}
+    allowed_fields["unsigned"] = unsigned
 
-    if "age_ts" in event.unsigned:
-        allowed_fields["unsigned"]["age_ts"] = event.unsigned["age_ts"]
-    if "replaces_state" in event.unsigned:
-        allowed_fields["unsigned"]["replaces_state"] = event.unsigned["replaces_state"]
+    event_unsigned = event_dict.get("unsigned", {})
 
-    return type(event)(
-        allowed_fields,
-        internal_metadata_dict=event.internal_metadata.get_dict()
-    )
+    if "age_ts" in event_unsigned:
+        unsigned["age_ts"] = event_unsigned["age_ts"]
+    if "replaces_state" in event_unsigned:
+        unsigned["replaces_state"] = event_unsigned["replaces_state"]
+
+    return allowed_fields
 
 
 def _copy_field(src, dst, field):
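(Aside, not part of the patch: a sketch of calling the new prune_event_dict on a plain dict, assuming the patched synapse.events.utils module; the event dict is made up.)

    from synapse.events.utils import prune_event_dict

    event_dict = {
        "event_id": "$abc:example.com",
        "type": "m.room.message",
        "room_id": "!room:example.com",
        "sender": "@alice:example.com",
        "content": {"msgtype": "m.text", "body": "secret"},
        "unsigned": {"age_ts": 1234, "transaction_id": "txn1"},
    }

    redacted = prune_event_dict(event_dict)

    # Message content is not in any allowed-field list, so it is emptied, and
    # only "age_ts"/"replaces_state" survive in unsigned, per the code above.
    assert redacted["content"] == {}
    assert redacted["unsigned"] == {"age_ts": 1234}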
logger.info("Different auth: %s", different_auth) @@ -1951,8 +1957,6 @@ class FederationHandler(BaseHandler): (d.type, d.state_key): d for d in different_events if d }) - room_version = yield self.store.get_room_version(event.room_id) - new_state = yield self.state_handler.resolve_events( room_version, [list(local_view.values()), list(remote_view.values())], @@ -2052,7 +2056,7 @@ class FederationHandler(BaseHandler): ) try: - self.auth.check(event, auth_events=auth_events) + self.auth.check(room_version, event, auth_events=auth_events) except AuthError as e: logger.warn("Failed auth resolution for %r because %s", event, e) raise e @@ -2290,7 +2294,7 @@ class FederationHandler(BaseHandler): EventValidator().validate_new(event) try: - yield self.auth.check_from_context(event, context) + yield self.auth.check_from_context(room_version, event, context) except AuthError as e: logger.warn("Denying new third party invite %r because %s", event, e) raise e @@ -2332,7 +2336,7 @@ class FederationHandler(BaseHandler): ) try: - self.auth.check_from_context(event, context) + self.auth.check_from_context(room_version, event, context) except AuthError as e: logger.warn("Denying third party invite %r because %s", event, e) raise e diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index d2aab25111..05d1370c18 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -22,7 +22,7 @@ from canonicaljson import encode_canonical_json, json from twisted.internet import defer from twisted.internet.defer import succeed -from synapse.api.constants import MAX_DEPTH, EventTypes, Membership +from synapse.api.constants import MAX_DEPTH, EventTypes, Membership, RoomVersions from synapse.api.errors import ( AuthError, Codes, @@ -613,8 +613,13 @@ class EventCreationHandler(object): extra_users (list(UserID)): Any extra users to notify about event """ + if event.is_state() and (event.type, event.state_key) == (EventTypes.Create, ""): + room_version = event.content.get("room_version", RoomVersions.V1) + else: + room_version = yield self.store.get_room_version(event.room_id) + try: - yield self.auth.check_from_context(event, context) + yield self.auth.check_from_context(room_version, event, context) except AuthError as err: logger.warn("Denying new event %r because %s", event, err) raise err diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 388302de09..13ba9291b0 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -123,7 +123,10 @@ class RoomCreationHandler(BaseHandler): token_id=requester.access_token_id, ) ) - yield self.auth.check_from_context(tombstone_event, tombstone_context) + old_room_version = yield self.store.get_room_version(old_room_id) + yield self.auth.check_from_context( + old_room_version, tombstone_event, tombstone_context, + ) yield self.clone_existing_room( requester, diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py index 2e16c69666..0f0a07c422 100644 --- a/synapse/replication/http/federation.py +++ b/synapse/replication/http/federation.py @@ -95,7 +95,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint): event_and_contexts = [] for event_payload in event_payloads: event_dict = event_payload["event"] - format_ver = content["event_format_version"] + format_ver = event_payload["event_format_version"] internal_metadata = event_payload["internal_metadata"] rejected_reason = event_payload["rejected_reason"] diff --git a/synapse/state/__init__.py 
diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py
index 2e16c69666..0f0a07c422 100644
--- a/synapse/replication/http/federation.py
+++ b/synapse/replication/http/federation.py
@@ -95,7 +95,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
             event_and_contexts = []
             for event_payload in event_payloads:
                 event_dict = event_payload["event"]
-                format_ver = content["event_format_version"]
+                format_ver = event_payload["event_format_version"]
                 internal_metadata = event_payload["internal_metadata"]
                 rejected_reason = event_payload["rejected_reason"]
 
diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py
index e9ecb00277..2fca51d0b2 100644
--- a/synapse/state/__init__.py
+++ b/synapse/state/__init__.py
@@ -611,7 +611,7 @@ def resolve_events_with_store(room_version, state_sets, event_map, state_res_sto
         RoomVersions.VDH_TEST, RoomVersions.STATE_V2_TEST, RoomVersions.V2,
     ):
         return v2.resolve_events_with_store(
-            state_sets, event_map, state_res_store,
+            room_version, state_sets, event_map, state_res_store,
         )
     else:
         # This should only happen if we added a version but forgot to add it to
diff --git a/synapse/state/v1.py b/synapse/state/v1.py
index 19e091ce3b..6d3afcae7c 100644
--- a/synapse/state/v1.py
+++ b/synapse/state/v1.py
@@ -21,7 +21,7 @@ from six import iteritems, iterkeys, itervalues
 from twisted.internet import defer
 
 from synapse import event_auth
-from synapse.api.constants import EventTypes
+from synapse.api.constants import EventTypes, RoomVersions
 from synapse.api.errors import AuthError
 
 logger = logging.getLogger(__name__)
@@ -274,7 +274,11 @@ def _resolve_auth_events(events, auth_events):
         auth_events[(prev_event.type, prev_event.state_key)] = prev_event
         try:
             # The signatures have already been checked at this point
-            event_auth.check(event, auth_events, do_sig_check=False, do_size_check=False)
+            event_auth.check(
+                RoomVersions.V1, event, auth_events,
+                do_sig_check=False,
+                do_size_check=False,
+            )
             prev_event = event
         except AuthError:
             return prev_event
@@ -286,7 +290,11 @@ def _resolve_normal_events(events, auth_events):
     for event in _ordered_events(events):
         try:
             # The signatures have already been checked at this point
-            event_auth.check(event, auth_events, do_sig_check=False, do_size_check=False)
+            event_auth.check(
+                RoomVersions.V1, event, auth_events,
+                do_sig_check=False,
+                do_size_check=False,
+            )
             return event
         except AuthError:
             pass
diff --git a/synapse/state/v2.py b/synapse/state/v2.py
index 3573bb0028..650995c92c 100644
--- a/synapse/state/v2.py
+++ b/synapse/state/v2.py
@@ -29,10 +29,12 @@ logger = logging.getLogger(__name__)
 
 
 @defer.inlineCallbacks
-def resolve_events_with_store(state_sets, event_map, state_res_store):
+def resolve_events_with_store(room_version, state_sets, event_map, state_res_store):
     """Resolves the state using the v2 state resolution algorithm
 
     Args:
+        room_version (str): The room version
+
         state_sets(list): List of dicts of (type, state_key) -> event_id,
             which are the different state groups to resolve.
 
@@ -104,7 +106,7 @@ def resolve_events_with_store(state_sets, event_map, state_res_store):
 
     # Now sequentially auth each one
     resolved_state = yield _iterative_auth_checks(
-        sorted_power_events, unconflicted_state, event_map,
+        room_version, sorted_power_events, unconflicted_state, event_map,
         state_res_store,
     )
 
@@ -129,7 +131,7 @@ def resolve_events_with_store(state_sets, event_map, state_res_store):
     logger.debug("resolving remaining events")
 
     resolved_state = yield _iterative_auth_checks(
-        leftover_events, resolved_state, event_map,
+        room_version, leftover_events, resolved_state, event_map,
         state_res_store,
     )
 
@@ -350,11 +352,13 @@ def _reverse_topological_power_sort(event_ids, event_map, state_res_store, auth_
 
 
 @defer.inlineCallbacks
-def _iterative_auth_checks(event_ids, base_state, event_map, state_res_store):
+def _iterative_auth_checks(room_version, event_ids, base_state, event_map,
+                           state_res_store):
     """Sequentially apply auth checks to each event in given list, updating the
     state as it goes along.
 
     Args:
+        room_version (str)
         event_ids (list[str]): Ordered list of events to apply auth checks to
         base_state (dict[tuple[str, str], str]): The set of state to start with
         event_map (dict[str,FrozenEvent])
@@ -385,7 +389,7 @@ def _iterative_auth_checks(event_ids, base_state, event_map, state_res_store):
 
         try:
             event_auth.check(
-                event, auth_events,
+                room_version, event, auth_events,
                 do_sig_check=False,
                 do_size_check=False
             )
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 5109bc3e2e..4872ff55b6 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -236,7 +236,7 @@ class SQLBaseStore(object):
             self._unsafe_to_upsert_tables.discard("user_ips")
 
         # If there's any tables left to check, reschedule to run.
-        if self.updates:
+        if updates:
             self._clock.call_later(
                 15.0,
                 run_as_background_process,