diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index ed2e994437..d8ba870cca 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -114,40 +114,54 @@ class Keyring(object):
server_name. The deferreds run their callbacks in the sentinel
logcontext.
"""
+ # a list of VerifyKeyRequests
verify_requests = []
+ handle = preserve_fn(_handle_key_deferred)
- for server_name, json_object in server_and_json:
+ def process(server_name, json_object):
+ """Process an entry in the request list
+
+ Given a (server_name, json_object) pair from the request list,
+ adds a key request to verify_requests, and returns a deferred which will
+ complete or fail (in the sentinel context) when verification completes.
+ """
key_ids = signature_ids(json_object, server_name)
+
if not key_ids:
- logger.warn("Request from %s: no supported signature keys",
- server_name)
- deferred = defer.fail(SynapseError(
- 400,
- "Not signed with a supported algorithm",
- Codes.UNAUTHORIZED,
- ))
- else:
- deferred = defer.Deferred()
+ return defer.fail(
+ SynapseError(
+ 400,
+ "Not signed by %s" % (server_name,),
+ Codes.UNAUTHORIZED,
+ )
+ )
logger.debug("Verifying for %s with key_ids %s",
server_name, key_ids)
+ # add the key request to the queue, but don't start it off yet.
verify_request = VerifyKeyRequest(
- server_name, key_ids, json_object, deferred
+ server_name, key_ids, json_object, defer.Deferred(),
)
-
verify_requests.append(verify_request)
- run_in_background(self._start_key_lookups, verify_requests)
+ # now run _handle_key_deferred, which will wait for the key request
+ # to complete and then do the verification.
+ #
+ # We want _handle_key_deferred to log to the right context, so we
+ # wrap it with preserve_fn (aka run_in_background)
+ return handle(verify_request)
- # Pass those keys to handle_key_deferred so that the json object
- # signatures can be verified
- handle = preserve_fn(_handle_key_deferred)
- return [
- handle(rq) for rq in verify_requests
+ results = [
+ process(server_name, json_object)
+ for server_name, json_object in server_and_json
]
+ if verify_requests:
+ run_in_background(self._start_key_lookups, verify_requests)
+
+ return results
+
@defer.inlineCallbacks
def _start_key_lookups(self, verify_requests):
"""Sets off the key fetches for each verify request
diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py
index dfe6b4aa5c..cffa831d80 100644
--- a/synapse/federation/federation_base.py
+++ b/synapse/federation/federation_base.py
@@ -269,7 +269,18 @@ def _check_sigs_on_pdus(keyring, room_version, pdus):
for p in pdus_to_check_sender
])
+ def sender_err(e, pdu_to_check):
+ errmsg = "event id %s: unable to verify signature for sender %s: %s" % (
+ pdu_to_check.pdu.event_id,
+ pdu_to_check.sender_domain,
+ e.getErrorMessage(),
+ )
+ # XX not really sure if these are the right codes, but they are what
+ # we've done for ages
+ raise SynapseError(400, errmsg, Codes.UNAUTHORIZED)
+
for p, d in zip(pdus_to_check_sender, more_deferreds):
+ d.addErrback(sender_err, p)
p.deferreds.append(d)
# now let's look for events where the sender's domain is different to the
@@ -291,7 +302,18 @@ def _check_sigs_on_pdus(keyring, room_version, pdus):
for p in pdus_to_check_event_id
])
+ def event_err(e, pdu_to_check):
+ errmsg = (
+ "event id %s: unable to verify signature for event id domain: %s" % (
+ pdu_to_check.pdu.event_id,
+ e.getErrorMessage(),
+ )
+ )
+ # XX as above: not really sure if these are the right codes
+ raise SynapseError(400, errmsg, Codes.UNAUTHORIZED)
+
for p, d in zip(pdus_to_check_event_id, more_deferreds):
+ d.addErrback(event_err, p)
p.deferreds.append(d)
# replace lists of deferreds with single Deferreds
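The `d.addErrback(sender_err, p)` / `d.addErrback(event_err, p)` calls rely on Twisted passing the `Failure` as the first argument, followed by any extra positional arguments given to `addErrback`. A minimal sketch of that mechanism (the function name and messages are made up):

```python
from twisted.internet import defer


def to_friendly_error(failure, label):
    # Twisted calls the errback with the Failure first, then any extra
    # positional args passed to addErrback (here, `label`).
    raise RuntimeError("%s: %s" % (label, failure.getErrorMessage()))


d = defer.fail(ValueError("signature mismatch"))
d.addErrback(to_friendly_error, "event $abc")
# The re-raised error travels down the errback chain as a new Failure.
d.addErrback(lambda f: print(f.getErrorMessage()))
# prints: event $abc: signature mismatch
```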
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index bd1285b15c..59d53f1050 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -828,6 +828,11 @@ class PresenceHandler(object):
if typ != EventTypes.Member:
continue
+ if event_id is None:
+ # state has been deleted, so this is not a join. We only care about
+ # joins.
+ continue
+
event = yield self.store.get_event(event_id)
if event.content.get("membership") != Membership.JOIN:
# We only care about joins
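The new guard skips state rows whose entry has been deleted, i.e. rows where `get_current_state_deltas` (documented further down) reports `event_id` as `None`. A standalone sketch of the same filtering, with `get_event` as a hypothetical synchronous event fetcher:

```python
def joins_from_deltas(deltas, get_event):
    """Yield join events from state deltas, skipping deleted state rows."""
    for delta in deltas:
        if delta["type"] != "m.room.member":
            continue
        if delta["event_id"] is None:
            # The state entry was deleted, so there is no event to fetch
            # and it cannot be a join.
            continue
        event = get_event(delta["event_id"])
        if event["content"].get("membership") == "join":
            yield event
```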
diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py
index 8f0682c948..3523a40108 100644
--- a/synapse/push/baserules.py
+++ b/synapse/push/baserules.py
@@ -261,6 +261,23 @@ BASE_APPEND_OVERRIDE_RULES = [
'value': True,
}
]
+ },
+ {
+ 'rule_id': 'global/override/.m.rule.tombstone',
+ 'conditions': [
+ {
+ 'kind': 'event_match',
+ 'key': 'type',
+ 'pattern': 'm.room.tombstone',
+ '_id': '_tombstone',
+ }
+ ],
+ 'actions': [
+ 'notify', {
+ 'set_tweak': 'highlight',
+ 'value': True,
+ }
+ ]
}
]
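The new override rule follows the standard push-rule shape: `event_match` conditions plus a `notify` action carrying a `highlight` tweak. A deliberately simplified matcher, ignoring glob patterns, dotted key lookups and other condition kinds, shows how such a rule would fire for a tombstone event (the literals below are illustrative):

```python
def rule_matches(rule, event):
    """Return True if every event_match condition matches the event.

    Simplified: real push rule evaluation also handles glob patterns,
    dotted key lookups, and other condition kinds.
    """
    for cond in rule.get('conditions', []):
        if cond['kind'] != 'event_match':
            return False
        if event.get(cond['key']) != cond['pattern']:
            return False
    return True


tombstone_rule = {
    'conditions': [
        {'kind': 'event_match', 'key': 'type', 'pattern': 'm.room.tombstone'},
    ],
    'actions': ['notify', {'set_tweak': 'highlight', 'value': True}],
}

assert rule_matches(tombstone_rule, {'type': 'm.room.tombstone'})
assert not rule_matches(tombstone_rule, {'type': 'm.room.message'})
```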
diff --git a/synapse/storage/state_deltas.py b/synapse/storage/state_deltas.py
index 56e42f583d..31a0279b18 100644
--- a/synapse/storage/state_deltas.py
+++ b/synapse/storage/state_deltas.py
@@ -22,6 +22,24 @@ logger = logging.getLogger(__name__)
class StateDeltasStore(SQLBaseStore):
def get_current_state_deltas(self, prev_stream_id):
+ """Fetch a list of room state changes since the given stream id
+
+ Each entry in the result contains the following fields:
+ - stream_id (int)
+ - room_id (str)
+ - type (str): event type
+ - state_key (str): the state key of the event
+ - event_id (str|None): new event_id for this state key. None if the
+ state has been deleted.
+ - prev_event_id (str|None): previous event_id for this state key. None
+ if it's new state.
+
+ Args:
+ prev_stream_id (int): point to get changes since (exclusive)
+
+ Returns:
+ Deferred[list[dict]]: results
+ """
prev_stream_id = int(prev_stream_id)
if not self._curr_state_delta_stream_cache.has_any_entity_changed(
prev_stream_id
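A hypothetical caller, assuming the return shape documented above; the helper and its bookkeeping are illustrative and not part of the patch:

```python
from twisted.internet import defer


@defer.inlineCallbacks
def poll_state_deltas(store, prev_stream_id):
    """Fetch deltas since prev_stream_id and return the new high-water mark."""
    deltas = yield store.get_current_state_deltas(prev_stream_id)
    max_stream_id = prev_stream_id
    for delta in deltas:
        max_stream_id = max(max_stream_id, delta["stream_id"])
        if delta["event_id"] is None:
            # State was deleted for this (type, state_key) pair; there is
            # no new event to process.
            continue
        # process delta["room_id"], delta["type"], delta["state_key"],
        # delta["event_id"] here
    defer.returnValue(max_stream_id)
```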