From 7d09ab891528c16f66fc4adebbafb8134c51f484 Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Wed, 13 Jan 2016 13:19:47 +0000 Subject: Require AS users to be registered before use --- synapse/api/auth.py | 5 +++++ 1 file changed, 5 insertions(+) (limited to 'synapse') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 876869bb74..e36313e2fb 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -528,6 +528,11 @@ class Auth(object): 403, "Application service cannot masquerade as this user." ) + if not (yield self.store.get_user_by_id(user_id)): + raise AuthError( + 403, + "Application service has not registered this user" + ) if not user_id: raise KeyError -- cgit 1.4.1 From 9c1f853d58440d1f924fa55bc242b248c410dd7c Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Wed, 13 Jan 2016 13:08:59 +0000 Subject: Rename 'user_name' to 'user_id' in push to make it consistent with the rest of the code --- synapse/push/__init__.py | 36 ++++++++++++------------- synapse/push/baserules.py | 12 ++++----- synapse/push/httppusher.py | 6 ++--- synapse/push/push_rule_evaluator.py | 24 ++++++++--------- synapse/push/pusherpool.py | 30 ++++++++++----------- synapse/rest/client/v1/push_rule.py | 10 +++---- synapse/rest/client/v1/pusher.py | 4 +-- synapse/storage/push_rule.py | 54 ++++++++++++++++++------------------- synapse/storage/pusher.py | 22 +++++++-------- 9 files changed, 99 insertions(+), 99 deletions(-) (limited to 'synapse') diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py index c5ddfb564c..a5dc84160c 100644 --- a/synapse/push/__init__.py +++ b/synapse/push/__init__.py @@ -35,7 +35,7 @@ class Pusher(object): MAX_BACKOFF = 60 * 60 * 1000 GIVE_UP_AFTER = 24 * 60 * 60 * 1000 - def __init__(self, _hs, profile_tag, user_name, app_id, + def __init__(self, _hs, profile_tag, user_id, app_id, app_display_name, device_display_name, pushkey, pushkey_ts, data, last_token, last_success, failing_since): self.hs = _hs @@ -43,7 +43,7 @@ class Pusher(object): self.store = self.hs.get_datastore() self.clock = self.hs.get_clock() self.profile_tag = profile_tag - self.user_name = user_name + self.user_id = user_id self.app_id = app_id self.app_display_name = app_display_name self.device_display_name = device_display_name @@ -92,15 +92,15 @@ class Pusher(object): # we fail to dispatch the push) config = PaginationConfig(from_token=None, limit='1') chunk = yield self.evStreamHandler.get_stream( - self.user_name, config, timeout=0, affect_presence=False, + self.user_id, config, timeout=0, affect_presence=False, only_room_events=True ) self.last_token = chunk['end'] self.store.update_pusher_last_token( - self.app_id, self.pushkey, self.user_name, self.last_token + self.app_id, self.pushkey, self.user_id, self.last_token ) logger.info("Pusher %s for user %s starting from token %s", - self.pushkey, self.user_name, self.last_token) + self.pushkey, self.user_id, self.last_token) wait = 0 while self.alive: @@ -125,7 +125,7 @@ class Pusher(object): config = PaginationConfig(from_token=from_tok, limit='1') timeout = (300 + random.randint(-60, 60)) * 1000 chunk = yield self.evStreamHandler.get_stream( - self.user_name, config, timeout=timeout, affect_presence=False, + self.user_id, config, timeout=timeout, affect_presence=False, only_room_events=True ) @@ -142,7 +142,7 @@ class Pusher(object): yield self.store.update_pusher_last_token( self.app_id, self.pushkey, - self.user_name, + self.user_id, self.last_token ) return @@ -153,8 +153,8 @@ class Pusher(object): processed = False rule_evaluator 
= yield \ - push_rule_evaluator.evaluator_for_user_name_and_profile_tag( - self.user_name, self.profile_tag, single_event['room_id'], self.store + push_rule_evaluator.evaluator_for_user_id_and_profile_tag( + self.user_id, self.profile_tag, single_event['room_id'], self.store ) actions = yield rule_evaluator.actions_for_event(single_event) @@ -179,7 +179,7 @@ class Pusher(object): pk ) yield self.hs.get_pusherpool().remove_pusher( - self.app_id, pk, self.user_name + self.app_id, pk, self.user_id ) else: processed = True @@ -193,7 +193,7 @@ class Pusher(object): yield self.store.update_pusher_last_token_and_success( self.app_id, self.pushkey, - self.user_name, + self.user_id, self.last_token, self.clock.time_msec() ) @@ -202,7 +202,7 @@ class Pusher(object): yield self.store.update_pusher_failing_since( self.app_id, self.pushkey, - self.user_name, + self.user_id, self.failing_since) else: if not self.failing_since: @@ -210,7 +210,7 @@ class Pusher(object): yield self.store.update_pusher_failing_since( self.app_id, self.pushkey, - self.user_name, + self.user_id, self.failing_since ) @@ -222,13 +222,13 @@ class Pusher(object): # of old notifications. logger.warn("Giving up on a notification to user %s, " "pushkey %s", - self.user_name, self.pushkey) + self.user_id, self.pushkey) self.backoff_delay = Pusher.INITIAL_BACKOFF self.last_token = chunk['end'] yield self.store.update_pusher_last_token( self.app_id, self.pushkey, - self.user_name, + self.user_id, self.last_token ) @@ -236,14 +236,14 @@ class Pusher(object): yield self.store.update_pusher_failing_since( self.app_id, self.pushkey, - self.user_name, + self.user_id, self.failing_since ) else: logger.warn("Failed to dispatch push for user %s " "(failing for %dms)." "Trying again in %dms", - self.user_name, + self.user_id, self.clock.time_msec() - self.failing_since, self.backoff_delay) yield synapse.util.async.sleep(self.backoff_delay / 1000.0) @@ -280,7 +280,7 @@ class Pusher(object): if last_active > self.last_last_active_time: self.last_last_active_time = last_active if self.has_unread: - logger.info("Resetting badge count for %s", self.user_name) + logger.info("Resetting badge count for %s", self.user_id) self.reset_badge_count() self.has_unread = False diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 57de0e34b4..8bac7fd6af 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -15,27 +15,27 @@ from synapse.push.rulekinds import PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP -def list_with_base_rules(rawrules, user_name): +def list_with_base_rules(rawrules, user_id): ruleslist = [] # shove the server default rules for each kind onto the end of each current_prio_class = PRIORITY_CLASS_INVERSE_MAP.keys()[-1] ruleslist.extend(make_base_prepend_rules( - user_name, PRIORITY_CLASS_INVERSE_MAP[current_prio_class] + user_id, PRIORITY_CLASS_INVERSE_MAP[current_prio_class] )) for r in rawrules: if r['priority_class'] < current_prio_class: while r['priority_class'] < current_prio_class: ruleslist.extend(make_base_append_rules( - user_name, + user_id, PRIORITY_CLASS_INVERSE_MAP[current_prio_class] )) current_prio_class -= 1 if current_prio_class > 0: ruleslist.extend(make_base_prepend_rules( - user_name, + user_id, PRIORITY_CLASS_INVERSE_MAP[current_prio_class] )) @@ -43,13 +43,13 @@ def list_with_base_rules(rawrules, user_name): while current_prio_class > 0: ruleslist.extend(make_base_append_rules( - user_name, + user_id, PRIORITY_CLASS_INVERSE_MAP[current_prio_class] )) current_prio_class -= 1 if 
current_prio_class > 0: ruleslist.extend(make_base_prepend_rules( - user_name, + user_id, PRIORITY_CLASS_INVERSE_MAP[current_prio_class] )) diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py index 7866db6a24..28f1fab0e4 100644 --- a/synapse/push/httppusher.py +++ b/synapse/push/httppusher.py @@ -23,13 +23,13 @@ logger = logging.getLogger(__name__) class HttpPusher(Pusher): - def __init__(self, _hs, profile_tag, user_name, app_id, + def __init__(self, _hs, profile_tag, user_id, app_id, app_display_name, device_display_name, pushkey, pushkey_ts, data, last_token, last_success, failing_since): super(HttpPusher, self).__init__( _hs, profile_tag, - user_name, + user_id, app_id, app_display_name, device_display_name, @@ -87,7 +87,7 @@ class HttpPusher(Pusher): } if event['type'] == 'm.room.member': d['notification']['membership'] = event['content']['membership'] - d['notification']['user_is_target'] = event['state_key'] == self.user_name + d['notification']['user_is_target'] = event['state_key'] == self.user_id if 'content' in event: d['notification']['content'] = event['content'] diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index 705ab8c967..b0283743a2 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -27,17 +27,17 @@ logger = logging.getLogger(__name__) @defer.inlineCallbacks -def evaluator_for_user_name_and_profile_tag(user_name, profile_tag, room_id, store): - rawrules = yield store.get_push_rules_for_user(user_name) - enabled_map = yield store.get_push_rules_enabled_for_user(user_name) +def evaluator_for_user_id_and_profile_tag(user_id, profile_tag, room_id, store): + rawrules = yield store.get_push_rules_for_user(user_id) + enabled_map = yield store.get_push_rules_enabled_for_user(user_id) our_member_event = yield store.get_current_state( room_id=room_id, event_type='m.room.member', - state_key=user_name, + state_key=user_id, ) defer.returnValue(PushRuleEvaluator( - user_name, profile_tag, rawrules, enabled_map, + user_id, profile_tag, rawrules, enabled_map, room_id, our_member_event, store )) @@ -46,9 +46,9 @@ class PushRuleEvaluator: DEFAULT_ACTIONS = [] INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$") - def __init__(self, user_name, profile_tag, raw_rules, enabled_map, room_id, + def __init__(self, user_id, profile_tag, raw_rules, enabled_map, room_id, our_member_event, store): - self.user_name = user_name + self.user_id = user_id self.profile_tag = profile_tag self.room_id = room_id self.our_member_event = our_member_event @@ -61,7 +61,7 @@ class PushRuleEvaluator: rule['actions'] = json.loads(raw_rule['actions']) rules.append(rule) - user = UserID.from_string(self.user_name) + user = UserID.from_string(self.user_id) self.rules = baserules.list_with_base_rules(rules, user) self.enabled_map = enabled_map @@ -83,7 +83,7 @@ class PushRuleEvaluator: has configured both globally and per-room when we have the ability to do such things. 
""" - if ev['user_id'] == self.user_name: + if ev['user_id'] == self.user_id: # let's assume you probably know about messages you sent yourself defer.returnValue([]) @@ -124,13 +124,13 @@ class PushRuleEvaluator: if len(actions) == 0: logger.warn( "Ignoring rule id %s with no actions for user %s", - r['rule_id'], self.user_name + r['rule_id'], self.user_id ) continue if matches: logger.info( "%s matches for user %s, event %s", - r['rule_id'], self.user_name, ev['event_id'] + r['rule_id'], self.user_id, ev['event_id'] ) # filter out dont_notify as we treat an empty actions list @@ -141,7 +141,7 @@ class PushRuleEvaluator: logger.info( "No rules match for user %s, event %s", - self.user_name, ev['event_id'] + self.user_id, ev['event_id'] ) defer.returnValue(PushRuleEvaluator.DEFAULT_ACTIONS) diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index 4208e5c76c..12c4af14bd 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -38,12 +38,12 @@ class PusherPool: @defer.inlineCallbacks def user_presence_changed(self, user, state): - user_name = user.to_string() + user_id = user.to_string() # until we have read receipts, pushers use this to reset a user's # badge counters to zero for p in self.pushers.values(): - if p.user_name == user_name: + if p.user_id == user_id: yield p.presence_changed(state) @defer.inlineCallbacks @@ -52,14 +52,14 @@ class PusherPool: self._start_pushers(pushers) @defer.inlineCallbacks - def add_pusher(self, user_name, access_token, profile_tag, kind, app_id, + def add_pusher(self, user_id, access_token, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, lang, data): # we try to create the pusher just to validate the config: it # will then get pulled out of the database, # recreated, added and started: this means we have only one # code path adding pushers. 
self._create_pusher({ - "user_name": user_name, + "user_name": user_id, "kind": kind, "profile_tag": profile_tag, "app_id": app_id, @@ -74,7 +74,7 @@ class PusherPool: "failing_since": None }) yield self._add_pusher_to_store( - user_name, access_token, profile_tag, kind, app_id, + user_id, access_token, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, lang, data ) @@ -109,11 +109,11 @@ class PusherPool: self.remove_pusher(p['app_id'], p['pushkey'], p['user_name']) @defer.inlineCallbacks - def _add_pusher_to_store(self, user_name, access_token, profile_tag, kind, + def _add_pusher_to_store(self, user_id, access_token, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, lang, data): yield self.store.add_pusher( - user_name=user_name, + user_id=user_id, access_token=access_token, profile_tag=profile_tag, kind=kind, @@ -125,14 +125,14 @@ class PusherPool: lang=lang, data=data, ) - self._refresh_pusher(app_id, pushkey, user_name) + self._refresh_pusher(app_id, pushkey, user_id) def _create_pusher(self, pusherdict): if pusherdict['kind'] == 'http': return HttpPusher( self.hs, profile_tag=pusherdict['profile_tag'], - user_name=pusherdict['user_name'], + user_id=pusherdict['user_name'], app_id=pusherdict['app_id'], app_display_name=pusherdict['app_display_name'], device_display_name=pusherdict['device_display_name'], @@ -150,14 +150,14 @@ class PusherPool: ) @defer.inlineCallbacks - def _refresh_pusher(self, app_id, pushkey, user_name): + def _refresh_pusher(self, app_id, pushkey, user_id): resultlist = yield self.store.get_pushers_by_app_id_and_pushkey( app_id, pushkey ) p = None for r in resultlist: - if r['user_name'] == user_name: + if r['user_name'] == user_id: p = r if p: @@ -186,12 +186,12 @@ class PusherPool: logger.info("Started pushers") @defer.inlineCallbacks - def remove_pusher(self, app_id, pushkey, user_name): - fullid = "%s:%s:%s" % (app_id, pushkey, user_name) + def remove_pusher(self, app_id, pushkey, user_id): + fullid = "%s:%s:%s" % (app_id, pushkey, user_id) if fullid in self.pushers: logger.info("Stopping pusher %s", fullid) self.pushers[fullid].stop() del self.pushers[fullid] - yield self.store.delete_pusher_by_app_id_pushkey_user_name( - app_id, pushkey, user_name + yield self.store.delete_pusher_by_app_id_pushkey_user_id( + app_id, pushkey, user_id ) diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index c0a21c0c12..df53824d2d 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -73,7 +73,7 @@ class PushRuleRestServlet(ClientV1RestServlet): try: yield self.hs.get_datastore().add_push_rule( - user_name=requester.user.to_string(), + user_id=requester.user.to_string(), rule_id=_namespaced_rule_id_from_spec(spec), priority_class=priority_class, conditions=conditions, @@ -206,7 +206,7 @@ class PushRuleRestServlet(ClientV1RestServlet): def on_OPTIONS(self, _): return 200, {} - def set_rule_attr(self, user_name, spec, val): + def set_rule_attr(self, user_id, spec, val): if spec['attr'] == 'enabled': if isinstance(val, dict) and "enabled" in val: val = val["enabled"] @@ -217,15 +217,15 @@ class PushRuleRestServlet(ClientV1RestServlet): raise SynapseError(400, "Value for 'enabled' must be boolean") namespaced_rule_id = _namespaced_rule_id_from_spec(spec) self.hs.get_datastore().set_push_rule_enabled( - user_name, namespaced_rule_id, val + user_id, namespaced_rule_id, val ) else: raise UnrecognizedRequestError() - def get_rule_attr(self, user_name, 
namespaced_rule_id, attr): + def get_rule_attr(self, user_id, namespaced_rule_id, attr): if attr == 'enabled': return self.hs.get_datastore().get_push_rule_enabled_by_user_rule_id( - user_name, namespaced_rule_id + user_id, namespaced_rule_id ) else: raise UnrecognizedRequestError() diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py index b162b210bc..e218ed215c 100644 --- a/synapse/rest/client/v1/pusher.py +++ b/synapse/rest/client/v1/pusher.py @@ -41,7 +41,7 @@ class PusherRestServlet(ClientV1RestServlet): and 'kind' in content and content['kind'] is None): yield pusher_pool.remove_pusher( - content['app_id'], content['pushkey'], user_name=user.to_string() + content['app_id'], content['pushkey'], user_id=user.to_string() ) defer.returnValue((200, {})) @@ -71,7 +71,7 @@ class PusherRestServlet(ClientV1RestServlet): try: yield pusher_pool.add_pusher( - user_name=user.to_string(), + user_id=user.to_string(), access_token=requester.access_token_id, profile_tag=content['profile_tag'], kind=content['kind'], diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index 448009b4b6..2adfefd994 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -25,11 +25,11 @@ logger = logging.getLogger(__name__) class PushRuleStore(SQLBaseStore): @cachedInlineCallbacks() - def get_push_rules_for_user(self, user_name): + def get_push_rules_for_user(self, user_id): rows = yield self._simple_select_list( table="push_rules", keyvalues={ - "user_name": user_name, + "user_name": user_id, }, retcols=( "user_name", "rule_id", "priority_class", "priority", @@ -45,11 +45,11 @@ class PushRuleStore(SQLBaseStore): defer.returnValue(rows) @cachedInlineCallbacks() - def get_push_rules_enabled_for_user(self, user_name): + def get_push_rules_enabled_for_user(self, user_id): results = yield self._simple_select_list( table="push_rules_enable", keyvalues={ - 'user_name': user_name + 'user_name': user_id }, retcols=( "user_name", "rule_id", "enabled", @@ -122,7 +122,7 @@ class PushRuleStore(SQLBaseStore): ) defer.returnValue(ret) - def _add_push_rule_relative_txn(self, txn, user_name, **kwargs): + def _add_push_rule_relative_txn(self, txn, user_id, **kwargs): after = kwargs.pop("after", None) relative_to_rule = kwargs.pop("before", after) @@ -130,7 +130,7 @@ class PushRuleStore(SQLBaseStore): txn, table="push_rules", keyvalues={ - "user_name": user_name, + "user_name": user_id, "rule_id": relative_to_rule, }, retcols=["priority_class", "priority"], @@ -154,7 +154,7 @@ class PushRuleStore(SQLBaseStore): new_rule.pop("before", None) new_rule.pop("after", None) new_rule['priority_class'] = priority_class - new_rule['user_name'] = user_name + new_rule['user_name'] = user_id new_rule['id'] = self._push_rule_id_gen.get_next_txn(txn) # check if the priority before/after is free @@ -170,7 +170,7 @@ class PushRuleStore(SQLBaseStore): "SELECT COUNT(*) FROM push_rules" " WHERE user_name = ? AND priority_class = ? AND priority = ?" ) - txn.execute(sql, (user_name, priority_class, new_rule_priority)) + txn.execute(sql, (user_id, priority_class, new_rule_priority)) res = txn.fetchall() num_conflicting = res[0][0] @@ -187,14 +187,14 @@ class PushRuleStore(SQLBaseStore): else: sql += ">= ?" 
- txn.execute(sql, (user_name, priority_class, new_rule_priority)) + txn.execute(sql, (user_id, priority_class, new_rule_priority)) txn.call_after( - self.get_push_rules_for_user.invalidate, (user_name,) + self.get_push_rules_for_user.invalidate, (user_id,) ) txn.call_after( - self.get_push_rules_enabled_for_user.invalidate, (user_name,) + self.get_push_rules_enabled_for_user.invalidate, (user_id,) ) self._simple_insert_txn( @@ -203,14 +203,14 @@ class PushRuleStore(SQLBaseStore): values=new_rule, ) - def _add_push_rule_highest_priority_txn(self, txn, user_name, + def _add_push_rule_highest_priority_txn(self, txn, user_id, priority_class, **kwargs): # find the highest priority rule in that class sql = ( "SELECT COUNT(*), MAX(priority) FROM push_rules" " WHERE user_name = ? and priority_class = ?" ) - txn.execute(sql, (user_name, priority_class)) + txn.execute(sql, (user_id, priority_class)) res = txn.fetchall() (how_many, highest_prio) = res[0] @@ -221,15 +221,15 @@ class PushRuleStore(SQLBaseStore): # and insert the new rule new_rule = kwargs new_rule['id'] = self._push_rule_id_gen.get_next_txn(txn) - new_rule['user_name'] = user_name + new_rule['user_name'] = user_id new_rule['priority_class'] = priority_class new_rule['priority'] = new_prio txn.call_after( - self.get_push_rules_for_user.invalidate, (user_name,) + self.get_push_rules_for_user.invalidate, (user_id,) ) txn.call_after( - self.get_push_rules_enabled_for_user.invalidate, (user_name,) + self.get_push_rules_enabled_for_user.invalidate, (user_id,) ) self._simple_insert_txn( @@ -239,48 +239,48 @@ class PushRuleStore(SQLBaseStore): ) @defer.inlineCallbacks - def delete_push_rule(self, user_name, rule_id): + def delete_push_rule(self, user_id, rule_id): """ Delete a push rule. Args specify the row to be deleted and can be any of the columns in the push_rule table, but below are the standard ones Args: - user_name (str): The matrix ID of the push rule owner + user_id (str): The matrix ID of the push rule owner rule_id (str): The rule_id of the rule to be deleted """ yield self._simple_delete_one( "push_rules", - {'user_name': user_name, 'rule_id': rule_id}, + {'user_name': user_id, 'rule_id': rule_id}, desc="delete_push_rule", ) - self.get_push_rules_for_user.invalidate((user_name,)) - self.get_push_rules_enabled_for_user.invalidate((user_name,)) + self.get_push_rules_for_user.invalidate((user_id,)) + self.get_push_rules_enabled_for_user.invalidate((user_id,)) @defer.inlineCallbacks - def set_push_rule_enabled(self, user_name, rule_id, enabled): + def set_push_rule_enabled(self, user_id, rule_id, enabled): ret = yield self.runInteraction( "_set_push_rule_enabled_txn", self._set_push_rule_enabled_txn, - user_name, rule_id, enabled + user_id, rule_id, enabled ) defer.returnValue(ret) - def _set_push_rule_enabled_txn(self, txn, user_name, rule_id, enabled): + def _set_push_rule_enabled_txn(self, txn, user_id, rule_id, enabled): new_id = self._push_rules_enable_id_gen.get_next_txn(txn) self._simple_upsert_txn( txn, "push_rules_enable", - {'user_name': user_name, 'rule_id': rule_id}, + {'user_name': user_id, 'rule_id': rule_id}, {'enabled': 1 if enabled else 0}, {'id': new_id}, ) txn.call_after( - self.get_push_rules_for_user.invalidate, (user_name,) + self.get_push_rules_for_user.invalidate, (user_id,) ) txn.call_after( - self.get_push_rules_enabled_for_user.invalidate, (user_name,) + self.get_push_rules_enabled_for_user.invalidate, (user_id,) ) diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index 
2b90d6c622..8ec706178a 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -80,7 +80,7 @@ class PusherStore(SQLBaseStore): defer.returnValue(rows) @defer.inlineCallbacks - def add_pusher(self, user_name, access_token, profile_tag, kind, app_id, + def add_pusher(self, user_id, access_token, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, pushkey_ts, lang, data): try: @@ -90,7 +90,7 @@ class PusherStore(SQLBaseStore): dict( app_id=app_id, pushkey=pushkey, - user_name=user_name, + user_name=user_id, ), dict( access_token=access_token, @@ -112,38 +112,38 @@ class PusherStore(SQLBaseStore): raise StoreError(500, "Problem creating pusher.") @defer.inlineCallbacks - def delete_pusher_by_app_id_pushkey_user_name(self, app_id, pushkey, user_name): + def delete_pusher_by_app_id_pushkey_user_id(self, app_id, pushkey, user_id): yield self._simple_delete_one( "pushers", - {"app_id": app_id, "pushkey": pushkey, 'user_name': user_name}, - desc="delete_pusher_by_app_id_pushkey_user_name", + {"app_id": app_id, "pushkey": pushkey, 'user_name': user_id}, + desc="delete_pusher_by_app_id_pushkey_user_id", ) @defer.inlineCallbacks - def update_pusher_last_token(self, app_id, pushkey, user_name, last_token): + def update_pusher_last_token(self, app_id, pushkey, user_id, last_token): yield self._simple_update_one( "pushers", - {'app_id': app_id, 'pushkey': pushkey, 'user_name': user_name}, + {'app_id': app_id, 'pushkey': pushkey, 'user_name': user_id}, {'last_token': last_token}, desc="update_pusher_last_token", ) @defer.inlineCallbacks - def update_pusher_last_token_and_success(self, app_id, pushkey, user_name, + def update_pusher_last_token_and_success(self, app_id, pushkey, user_id, last_token, last_success): yield self._simple_update_one( "pushers", - {'app_id': app_id, 'pushkey': pushkey, 'user_name': user_name}, + {'app_id': app_id, 'pushkey': pushkey, 'user_name': user_id}, {'last_token': last_token, 'last_success': last_success}, desc="update_pusher_last_token_and_success", ) @defer.inlineCallbacks - def update_pusher_failing_since(self, app_id, pushkey, user_name, + def update_pusher_failing_since(self, app_id, pushkey, user_id, failing_since): yield self._simple_update_one( "pushers", - {'app_id': app_id, 'pushkey': pushkey, 'user_name': user_name}, + {'app_id': app_id, 'pushkey': pushkey, 'user_name': user_id}, {'failing_since': failing_since}, desc="update_pusher_failing_since", ) -- cgit 1.4.1 From 93afb40cd4413008c40f6bdde032ccf5f1cefd9f Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Tue, 5 Jan 2016 18:12:37 +0000 Subject: Skip, rather than erroring, invalid guest requests Erroring causes problems when people make illegal requests, because they don't know what limit parameter they should pass. This is definitely buggy. It leaks message counts for rooms people don't have permission to see, via tokens. But apparently we already consciously decided to allow that as a team, so this preserves that behaviour. 
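[Editor's note -- illustration only, not part of the commit message above or of the diff below. The change removes the require_all_visible_for_guests flag from _filter_events_for_client, so events a guest may not see are now dropped from the result rather than failing the whole request with a 403. A minimal, self-contained sketch of that behaviour, using a hypothetical can_see() predicate in place of Synapse's real visibility logic:

    def filter_events_for_guest(events, can_see):
        """Return only the events the guest is allowed to see.

        Old behaviour: reject the whole request if any event was not visible.
        New behaviour: silently skip the events the guest may not see.
        """
        return [event for event in events if can_see(event)]
]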
--- synapse/handlers/_base.py | 16 ++-------------- synapse/handlers/message.py | 4 ++-- synapse/handlers/room.py | 2 -- synapse/handlers/sync.py | 1 - synapse/notifier.py | 3 +-- 5 files changed, 5 insertions(+), 21 deletions(-) (limited to 'synapse') diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 5fd20285d2..b474042e84 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2014, 2015 OpenMarket Ltd +# Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -52,8 +52,7 @@ class BaseHandler(object): self.event_builder_factory = hs.get_event_builder_factory() @defer.inlineCallbacks - def _filter_events_for_client(self, user_id, events, is_guest=False, - require_all_visible_for_guests=True): + def _filter_events_for_client(self, user_id, events, is_guest=False): # Assumes that user has at some point joined the room if not is_guest. def allowed(event, membership, visibility): @@ -114,17 +113,6 @@ class BaseHandler(object): if should_include: events_to_return.append(event) - if (require_all_visible_for_guests - and is_guest - and len(events_to_return) < len(events)): - # This indicates that some events in the requested range were not - # visible to guest users. To be safe, we reject the entire request, - # so that we don't have to worry about interpreting visibility - # boundaries. - raise AuthError(403, "User %s does not have permission" % ( - user_id - )) - defer.returnValue(events_to_return) def ratelimit(self, user_id): diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index a1bed9b0dc..5805190ce8 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2014, 2015 OpenMarket Ltd +# Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -685,7 +685,7 @@ class MessageHandler(BaseHandler): ).addErrback(unwrapFirstError) messages = yield self._filter_events_for_client( - user_id, messages, is_guest=is_guest, require_all_visible_for_guests=False + user_id, messages, is_guest=is_guest, ) start_token = now_token.copy_and_replace("room_key", token[0]) diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 13f66e0df0..725b318cfe 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -879,14 +879,12 @@ class RoomContextHandler(BaseHandler): user.to_string(), results["events_before"], is_guest=is_guest, - require_all_visible_for_guests=False ) results["events_after"] = yield self._filter_events_for_client( user.to_string(), results["events_after"], is_guest=is_guest, - require_all_visible_for_guests=False ) if results["events_after"]: diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index feea407ea2..9d52d592ba 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -648,7 +648,6 @@ class SyncHandler(BaseHandler): sync_config.user.to_string(), loaded_recents, is_guest=sync_config.is_guest, - require_all_visible_for_guests=False ) loaded_recents.extend(recents) recents = loaded_recents diff --git a/synapse/notifier.py b/synapse/notifier.py index fd52578325..0a5653b8d5 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2014, 2015 OpenMarket Ltd +# Copyright 2014 - 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -386,7 +386,6 @@ class Notifier(object): user.to_string(), new_events, is_guest=is_guest, - require_all_visible_for_guests=False ) events.extend(new_events) -- cgit 1.4.1 From 49f33f64386954e08b8409842f28b449b011849e Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 13 Jan 2016 14:19:22 +0000 Subject: Add 'event' result to 'context' endpoint ... because the context isn't much use without the event. --- synapse/handlers/room.py | 35 +++++++++++++++++++++++------------ synapse/rest/client/v1/room.py | 6 ++++++ 2 files changed, 29 insertions(+), 12 deletions(-) (limited to 'synapse') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 725b318cfe..5291089366 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -864,28 +864,39 @@ class RoomContextHandler(BaseHandler): (excluding state). Returns: - dict + dict, or None if the event isn't found """ before_limit = math.floor(limit/2.) after_limit = limit - before_limit now_token = yield self.hs.get_event_sources().get_current_token() + def filter_evts(events): + return self._filter_events_for_client( + user.to_string(), + events, + is_guest=is_guest) + + event = yield self.store.get_event(event_id, get_prev_content=True, + allow_none=True) + if not event: + defer.returnValue(None) + return + + filtered = yield(filter_evts([event])) + if not filtered: + raise AuthError( + 403, + "You don't have permission to access that event." 
+ ) + results = yield self.store.get_events_around( room_id, event_id, before_limit, after_limit ) - results["events_before"] = yield self._filter_events_for_client( - user.to_string(), - results["events_before"], - is_guest=is_guest, - ) - - results["events_after"] = yield self._filter_events_for_client( - user.to_string(), - results["events_after"], - is_guest=is_guest, - ) + results["events_before"] = yield filter_evts(results["events_before"]) + results["events_after"] = yield filter_evts(results["events_after"]) + results["event"] = event if results["events_after"]: last_event_id = results["events_after"][-1].event_id diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index 6fe53f70e5..62ce0f066b 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -402,10 +402,16 @@ class RoomEventContext(ClientV1RestServlet): user, room_id, event_id, limit, is_guest ) + if not results: + raise SynapseError( + 404, "Event not found.", errcode=Codes.NOT_FOUND + ) + time_now = self.clock.time_msec() results["events_before"] = [ serialize_event(event, time_now) for event in results["events_before"] ] + results["event"] = serialize_event(results["event"], time_now) results["events_after"] = [ serialize_event(event, time_now) for event in results["events_after"] ] -- cgit 1.4.1 From 244b356a37ada45d2a4e6aec1f08986aaa7eaaa1 Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Wed, 13 Jan 2016 17:03:58 +0000 Subject: Delete unused code --- synapse/storage/appservice.py | 59 ------------------------------------------- 1 file changed, 59 deletions(-) (limited to 'synapse') diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index eab58d9ce9..1a2b4678a2 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -15,7 +15,6 @@ import logging import urllib import yaml -from simplejson import JSONDecodeError import simplejson as json from twisted.internet import defer @@ -144,64 +143,6 @@ class ApplicationServiceStore(SQLBaseStore): return rooms_for_user_matching_user_id - def _parse_services_dict(self, results): - # SQL results in the form: - # [ - # { - # 'regex': "something", - # 'url': "something", - # 'namespace': enum, - # 'as_id': 0, - # 'token': "something", - # 'hs_token': "otherthing", - # 'id': 0 - # } - # ] - services = {} - for res in results: - as_token = res["token"] - if as_token is None: - continue - if as_token not in services: - # add the service - services[as_token] = { - "id": res["id"], - "url": res["url"], - "token": as_token, - "hs_token": res["hs_token"], - "sender": res["sender"], - "namespaces": { - ApplicationService.NS_USERS: [], - ApplicationService.NS_ALIASES: [], - ApplicationService.NS_ROOMS: [] - } - } - # add the namespace regex if one exists - ns_int = res["namespace"] - if ns_int is None: - continue - try: - services[as_token]["namespaces"][ - ApplicationService.NS_LIST[ns_int]].append( - json.loads(res["regex"]) - ) - except IndexError: - logger.error("Bad namespace enum '%s'. 
%s", ns_int, res) - except JSONDecodeError: - logger.error("Bad regex object '%s'", res["regex"]) - - service_list = [] - for service in services.values(): - service_list.append(ApplicationService( - token=service["token"], - url=service["url"], - namespaces=service["namespaces"], - hs_token=service["hs_token"], - sender=service["sender"], - id=service["id"] - )) - return service_list - def _load_appservice(self, as_info): required_string_fields = [ "url", "as_token", "hs_token", "sender_localpart" -- cgit 1.4.1 From f6fcff360250eb362986835c195a96825567a03d Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Wed, 13 Jan 2016 17:09:24 +0000 Subject: Don't start server if ASes are invalidly configured --- synapse/storage/appservice.py | 1 + 1 file changed, 1 insertion(+) (limited to 'synapse') diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index eab58d9ce9..25a6f14f40 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -265,6 +265,7 @@ class ApplicationServiceStore(SQLBaseStore): except Exception as e: logger.error("Failed to load appservice from '%s'", config_file) logger.exception(e) + raise class ApplicationServiceTransactionStore(SQLBaseStore): -- cgit 1.4.1 From fcb05b4c8269ac7cb2e153e0a68c040a965d8279 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 13 Jan 2016 17:37:09 +0000 Subject: Temporarily disable notification branch --- synapse/handlers/federation.py | 13 +++++++------ synapse/handlers/sync.py | 5 ++++- synapse/push/action_generator.py | 3 +++ 3 files changed, 14 insertions(+), 7 deletions(-) (limited to 'synapse') diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 5dee24e21d..2f6359c768 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -36,7 +36,7 @@ from synapse.events.utils import prune_event from synapse.util.retryutils import NotRetryingDestination -from synapse.push.action_generator import ActionGenerator +# from synapse.push.action_generator import ActionGenerator from twisted.internet import defer @@ -244,11 +244,12 @@ class FederationHandler(BaseHandler): user = UserID.from_string(event.state_key) yield user_joined_room(self.distributor, user, event.room_id) - if not backfilled and not event.internal_metadata.is_outlier(): - action_generator = ActionGenerator(self.store) - yield action_generator.handle_push_actions_for_event( - event, self - ) + # Temporarily disable notifications due to performance concerns. + # if not backfilled and not event.internal_metadata.is_outlier(): + # action_generator = ActionGenerator(self.store) + # yield action_generator.handle_push_actions_for_event( + # event, self + # ) @defer.inlineCallbacks def _filter_events_for_server(self, server_name, room_id, events): diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 33c1a4512c..69707c47ed 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -840,7 +840,10 @@ class SyncHandler(BaseHandler): return False @defer.inlineCallbacks - def unread_notifs_for_room_id(self, room_id, sync_config, ephemeral_by_room): + def unreadnotifs_for_room_id(self, room_id, sync_config, ephemeral_by_room): + # Temporarily disable notifications due to performance concerns. 
+ return + last_unread_event_id = self.last_read_event_id_for_room_and_user( room_id, sync_config.user.to_string(), ephemeral_by_room ) diff --git a/synapse/push/action_generator.py b/synapse/push/action_generator.py index 4cf94f6c61..73467f3adc 100644 --- a/synapse/push/action_generator.py +++ b/synapse/push/action_generator.py @@ -36,6 +36,9 @@ class ActionGenerator: @defer.inlineCallbacks def handle_push_actions_for_event(self, event, handler): + # Temporarily disable notifications due to performance concerns. + return + if event.type == EventTypes.Redaction and event.redacts is not None: yield self.store.remove_push_actions_for_event_id( event.room_id, event.redacts -- cgit 1.4.1 From 2655d61d703cc6718f669dd43e814862750b7748 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 13 Jan 2016 17:43:39 +0000 Subject: Don't change signature. Return empty list --- synapse/handlers/sync.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse') diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 69707c47ed..7dfa2eeedb 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -840,9 +840,9 @@ class SyncHandler(BaseHandler): return False @defer.inlineCallbacks - def unreadnotifs_for_room_id(self, room_id, sync_config, ephemeral_by_room): + def unread_notifs_for_room_id(self, room_id, sync_config, ephemeral_by_room): # Temporarily disable notifications due to performance concerns. - return + defer.returnValue([]) last_unread_event_id = self.last_read_event_id_for_room_and_user( room_id, sync_config.user.to_string(), ephemeral_by_room -- cgit 1.4.1 From bce602eb4ea5d477abf818849ab3e689381a6954 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 14 Jan 2016 09:56:26 +0000 Subject: Use logger not logging --- synapse/handlers/sync.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse') diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 7dfa2eeedb..1942268c3c 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -671,7 +671,7 @@ class SyncHandler(BaseHandler): room_id, sync_config, now_token, since_token, ) - logging.debug("Recents %r", batch) + logger.debug("Recents %r", batch) current_state = yield self.get_state_at(room_id, now_token) @@ -734,7 +734,7 @@ class SyncHandler(BaseHandler): leave_event.room_id, sync_config, leave_token, since_token, ) - logging.debug("Recents %r", batch) + logger.debug("Recents %r", batch) state_events_at_leave = yield self.store.get_state_for_event( leave_event.event_id -- cgit 1.4.1 From 339c8f013319f812629bb9a294fb235ada1e888d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 14 Jan 2016 10:22:02 +0000 Subject: Clamp pagination limits to at most 1000 --- synapse/streams/config.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) (limited to 'synapse') diff --git a/synapse/streams/config.py b/synapse/streams/config.py index 8c082bf4e0..4f089bfb94 100644 --- a/synapse/streams/config.py +++ b/synapse/streams/config.py @@ -22,6 +22,9 @@ import logging logger = logging.getLogger(__name__) +MAX_LIMIT = 1000 + + class SourcePaginationConfig(object): """A configuration object which stores pagination parameters for a @@ -32,7 +35,7 @@ class SourcePaginationConfig(object): self.from_key = from_key self.to_key = to_key self.direction = 'f' if direction == 'f' else 'b' - self.limit = int(limit) if limit is not None else None + self.limit = min(int(limit), MAX_LIMIT) if limit is not None else None def __repr__(self): return ( @@ -49,7 
+52,7 @@ class PaginationConfig(object): self.from_token = from_token self.to_token = to_token self.direction = 'f' if direction == 'f' else 'b' - self.limit = int(limit) if limit is not None else None + self.limit = min(int(limit), MAX_LIMIT) if limit is not None else None @classmethod def from_request(cls, request, raise_invalid_params=True, -- cgit 1.4.1 From a7927c13fd2181df75f1a3f3cf47889b0a240449 Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 14 Jan 2016 10:53:44 +0000 Subject: Fix enabling & disabling push rules --- synapse/rest/client/v1/push_rule.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse') diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index df53824d2d..0cbd9fe08a 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -51,7 +51,7 @@ class PushRuleRestServlet(ClientV1RestServlet): content = _parse_json(request) if 'attr' in spec: - self.set_rule_attr(requester.user, spec, content) + self.set_rule_attr(requester.user.to_string(), spec, content) defer.returnValue((200, {})) try: -- cgit 1.4.1 From 2680043bc6a64053b93b9bab144aeb5f45007976 Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Thu, 14 Jan 2016 14:34:01 +0000 Subject: Require ID and as_token be unique for ASs Defaults ID to as_token if not specified. This will change when IDs are fully supported. --- synapse/storage/appservice.py | 26 +++++++++- tests/appservice/test_appservice.py | 1 + tests/storage/test_appservice.py | 101 ++++++++++++++++++++++++++++++------ 3 files changed, 111 insertions(+), 17 deletions(-) (limited to 'synapse') diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index f4bc457eca..b5aa55c0a3 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -20,6 +20,7 @@ from twisted.internet import defer from synapse.api.constants import Membership from synapse.appservice import ApplicationService, AppServiceTransaction +from synapse.config._base import ConfigError from synapse.storage.roommember import RoomsForUser from synapse.types import UserID from ._base import SQLBaseStore @@ -145,6 +146,7 @@ class ApplicationServiceStore(SQLBaseStore): def _load_appservice(self, as_info): required_string_fields = [ + # TODO: Add id here when it's stable to release "url", "as_token", "hs_token", "sender_localpart" ] for field in required_string_fields: @@ -186,7 +188,7 @@ class ApplicationServiceStore(SQLBaseStore): namespaces=as_info["namespaces"], hs_token=as_info["hs_token"], sender=user_id, - id=as_info["as_token"] # the token is the only unique thing here + id=as_info["id"] if "id" in as_info else as_info["as_token"], ) def _populate_appservice_cache(self, config_files): @@ -197,10 +199,32 @@ class ApplicationServiceStore(SQLBaseStore): ) return + # Dicts of value -> filename + seen_as_tokens = {} + seen_ids = {} + for config_file in config_files: try: with open(config_file, 'r') as f: appservice = self._load_appservice(yaml.load(f)) + if appservice.id in seen_ids: + raise ConfigError( + "Cannot reuse ID across application services: " + "%s (files: %s, %s)" % ( + appservice.id, config_file, seen_ids[appservice.id], + ) + ) + seen_ids[appservice.id] = config_file + if appservice.token in seen_as_tokens: + raise ConfigError( + "Cannot reuse as_token across application services: " + "%s (files: %s, %s)" % ( + appservice.token, + config_file, + seen_as_tokens[appservice.token], + ) + ) + seen_as_tokens[appservice.token] = config_file 
logger.info("Loaded application service: %s", appservice) self.services_cache.append(appservice) except Exception as e: diff --git a/tests/appservice/test_appservice.py b/tests/appservice/test_appservice.py index 191c420c4d..ef48bbc296 100644 --- a/tests/appservice/test_appservice.py +++ b/tests/appservice/test_appservice.py @@ -29,6 +29,7 @@ class ApplicationServiceTestCase(unittest.TestCase): def setUp(self): self.service = ApplicationService( + id="unique_identifier", url="some_url", token="some_token", namespaces={ diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index a5a464640f..5abecdf6e0 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -12,12 +12,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import tempfile +from synapse.config._base import ConfigError from tests import unittest from twisted.internet import defer from tests.utils import setup_test_homeserver from synapse.appservice import ApplicationService, ApplicationServiceState -from synapse.server import HomeServer from synapse.storage.appservice import ( ApplicationServiceStore, ApplicationServiceTransactionStore ) @@ -26,7 +27,6 @@ import json import os import yaml from mock import Mock -from tests.utils import SQLiteMemoryDbPool, MockClock class ApplicationServiceStoreTestCase(unittest.TestCase): @@ -41,9 +41,16 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): self.as_token = "token1" self.as_url = "some_url" - self._add_appservice(self.as_token, self.as_url, "some_hs_token", "bob") - self._add_appservice("token2", "some_url", "some_hs_token", "bob") - self._add_appservice("token3", "some_url", "some_hs_token", "bob") + self.as_id = "as1" + self._add_appservice( + self.as_token, + self.as_id, + self.as_url, + "some_hs_token", + "bob" + ) + self._add_appservice("token2", "as2", "some_url", "some_hs_token", "bob") + self._add_appservice("token3", "as3", "some_url", "some_hs_token", "bob") # must be done after inserts self.store = ApplicationServiceStore(hs) @@ -55,9 +62,9 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): except: pass - def _add_appservice(self, as_token, url, hs_token, sender): + def _add_appservice(self, as_token, id, url, hs_token, sender): as_yaml = dict(url=url, as_token=as_token, hs_token=hs_token, - sender_localpart=sender, namespaces={}) + id=id, sender_localpart=sender, namespaces={}) # use the token as the filename with open(as_token, 'w') as outfile: outfile.write(yaml.dump(as_yaml)) @@ -74,6 +81,7 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): self.as_token ) self.assertEquals(stored_service.token, self.as_token) + self.assertEquals(stored_service.id, self.as_id) self.assertEquals(stored_service.url, self.as_url) self.assertEquals( stored_service.namespaces[ApplicationService.NS_ALIASES], @@ -110,34 +118,34 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): { "token": "token1", "url": "https://matrix-as.org", - "id": "token1" + "id": "id_1" }, { "token": "alpha_tok", "url": "https://alpha.com", - "id": "alpha_tok" + "id": "id_alpha" }, { "token": "beta_tok", "url": "https://beta.com", - "id": "beta_tok" + "id": "id_beta" }, { - "token": "delta_tok", - "url": "https://delta.com", - "id": "delta_tok" + "token": "gamma_tok", + "url": "https://gamma.com", + "id": "id_gamma" }, ] for s in self.as_list: - yield self._add_service(s["url"], 
s["token"]) + yield self._add_service(s["url"], s["token"], s["id"]) self.as_yaml_files = [] self.store = TestTransactionStore(hs) - def _add_service(self, url, as_token): + def _add_service(self, url, as_token, id): as_yaml = dict(url=url, as_token=as_token, hs_token="something", - sender_localpart="a_sender", namespaces={}) + id=id, sender_localpart="a_sender", namespaces={}) # use the token as the filename with open(as_token, 'w') as outfile: outfile.write(yaml.dump(as_yaml)) @@ -405,3 +413,64 @@ class TestTransactionStore(ApplicationServiceTransactionStore, def __init__(self, hs): super(TestTransactionStore, self).__init__(hs) + + +class ApplicationServiceStoreConfigTestCase(unittest.TestCase): + + def _write_config(self, suffix, **kwargs): + vals = { + "id": "id" + suffix, + "url": "url" + suffix, + "as_token": "as_token" + suffix, + "hs_token": "hs_token" + suffix, + "sender_localpart": "sender_localpart" + suffix, + "namespaces": {}, + } + vals.update(kwargs) + + _, path = tempfile.mkstemp(prefix="as_config") + with open(path, "w") as f: + f.write(yaml.dump(vals)) + return path + + @defer.inlineCallbacks + def test_unique_works(self): + f1 = self._write_config(suffix="1") + f2 = self._write_config(suffix="2") + + config = Mock(app_service_config_files=[f1, f2]) + hs = yield setup_test_homeserver(config=config) + + ApplicationServiceStore(hs) + + @defer.inlineCallbacks + def test_duplicate_ids(self): + f1 = self._write_config(id="id", suffix="1") + f2 = self._write_config(id="id", suffix="2") + + config = Mock(app_service_config_files=[f1, f2]) + hs = yield setup_test_homeserver(config=config) + + with self.assertRaises(ConfigError) as cm: + ApplicationServiceStore(hs) + + e = cm.exception + self.assertIn(f1, e.message) + self.assertIn(f2, e.message) + self.assertIn("id", e.message) + + @defer.inlineCallbacks + def test_duplicate_as_tokens(self): + f1 = self._write_config(as_token="as_token", suffix="1") + f2 = self._write_config(as_token="as_token", suffix="2") + + config = Mock(app_service_config_files=[f1, f2]) + hs = yield setup_test_homeserver(config=config) + + with self.assertRaises(ConfigError) as cm: + ApplicationServiceStore(hs) + + e = cm.exception + self.assertIn(f1, e.message) + self.assertIn(f2, e.message) + self.assertIn("as_token", e.message) -- cgit 1.4.1 From 3f8db3d597dc631af02c31995426a3690746c8b5 Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 14 Jan 2016 17:21:04 +0000 Subject: Add specific error code for invalid user names. --- synapse/api/errors.py | 1 + synapse/handlers/register.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) (limited to 'synapse') diff --git a/synapse/api/errors.py b/synapse/api/errors.py index be0c58a4ca..e6d32acced 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -42,6 +42,7 @@ class Codes(object): EXCLUSIVE = "M_EXCLUSIVE" THREEPID_AUTH_FAILED = "M_THREEPID_AUTH_FAILED" THREEPID_IN_USE = "THREEPID_IN_USE" + INVALID_USER_NAME = "M_INVALID_USER_NAME" class CodeMessageException(RuntimeError): diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index ba26d13d49..83f4daaa8c 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -53,7 +53,8 @@ class RegistrationHandler(BaseHandler): raise SynapseError( 400, "User ID must only contain characters which do not" - " require URL encoding." 
+ " require URL encoding.", + Codes.INVALID_USER_NAME ) user = UserID(localpart, self.hs.hostname) -- cgit 1.4.1 From 5819b7a78ccbb07914d9c03ab426df084ba86f2c Mon Sep 17 00:00:00 2001 From: David Baker Date: Fri, 15 Jan 2016 10:06:34 +0000 Subject: M_INVALID_USERNAME to be consistent with the parameter name --- synapse/api/errors.py | 2 +- synapse/handlers/register.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse') diff --git a/synapse/api/errors.py b/synapse/api/errors.py index e6d32acced..ce0fc53668 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -42,7 +42,7 @@ class Codes(object): EXCLUSIVE = "M_EXCLUSIVE" THREEPID_AUTH_FAILED = "M_THREEPID_AUTH_FAILED" THREEPID_IN_USE = "THREEPID_IN_USE" - INVALID_USER_NAME = "M_INVALID_USER_NAME" + INVALID_USERNAME = "M_INVALID_USERNAME" class CodeMessageException(RuntimeError): diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 83f4daaa8c..8e601b052b 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -54,7 +54,7 @@ class RegistrationHandler(BaseHandler): 400, "User ID must only contain characters which do not" " require URL encoding.", - Codes.INVALID_USER_NAME + Codes.INVALID_USERNAME ) user = UserID(localpart, self.hs.hostname) -- cgit 1.4.1 From b5ce4f042721aaa3ce26c268dcd29787386408cb Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Fri, 15 Jan 2016 13:54:03 +0000 Subject: Remove unused parameters --- synapse/handlers/_base.py | 8 +++----- synapse/handlers/room.py | 16 +++++----------- 2 files changed, 8 insertions(+), 16 deletions(-) (limited to 'synapse') diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 66e35de6e4..bb2c6733d5 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -171,12 +171,10 @@ class BaseHandler(object): ) @defer.inlineCallbacks - def handle_new_client_event(self, event, context, extra_destinations=[], - extra_users=[], suppress_auth=False): + def handle_new_client_event(self, event, context, extra_users=[]): # We now need to go and hit out to wherever we need to hit out to. - if not suppress_auth: - self.auth.check(event, auth_events=context.current_state) + self.auth.check(event, auth_events=context.current_state) yield self.maybe_kick_guest_users(event, context.current_state.values()) @@ -258,7 +256,7 @@ class BaseHandler(object): event, self ) - destinations = set(extra_destinations) + destinations = set() for k, s in context.current_state.items(): try: if k[0] == EventTypes.Member: diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 3a26f99a8b..a410e4394c 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -397,7 +397,7 @@ class RoomMemberHandler(BaseHandler): remotedomains.add(member.domain) @defer.inlineCallbacks - def change_membership(self, event, context, do_auth=True, is_guest=False): + def change_membership(self, event, context, is_guest=False): """ Change the membership status of a user in a room. 
Args: @@ -432,7 +432,7 @@ class RoomMemberHandler(BaseHandler): if not is_guest_access_allowed: raise AuthError(403, "Guest access not allowed") - yield self._do_join(event, context, do_auth=do_auth) + yield self._do_join(event, context) else: if event.membership == Membership.LEAVE: is_host_in_room = yield self.is_host_in_room(room_id, context) @@ -459,9 +459,7 @@ class RoomMemberHandler(BaseHandler): yield self._do_local_membership_update( event, - membership=event.content["membership"], context=context, - do_auth=do_auth, ) if prev_state and prev_state.membership == Membership.JOIN: @@ -497,12 +495,12 @@ class RoomMemberHandler(BaseHandler): }) event, context = yield self._create_new_client_event(builder) - yield self._do_join(event, context, room_hosts=hosts, do_auth=True) + yield self._do_join(event, context, room_hosts=hosts) defer.returnValue({"room_id": room_id}) @defer.inlineCallbacks - def _do_join(self, event, context, room_hosts=None, do_auth=True): + def _do_join(self, event, context, room_hosts=None): room_id = event.room_id # XXX: We don't do an auth check if we are doing an invite @@ -536,9 +534,7 @@ class RoomMemberHandler(BaseHandler): yield self._do_local_membership_update( event, - membership=event.content["membership"], context=context, - do_auth=do_auth, ) prev_state = context.current_state.get((event.type, event.state_key)) @@ -603,8 +599,7 @@ class RoomMemberHandler(BaseHandler): defer.returnValue(room_ids) @defer.inlineCallbacks - def _do_local_membership_update(self, event, membership, context, - do_auth): + def _do_local_membership_update(self, event, context): yield run_on_reactor() target_user = UserID.from_string(event.state_key) @@ -613,7 +608,6 @@ class RoomMemberHandler(BaseHandler): event, context, extra_users=[target_user], - suppress_auth=(not do_auth), ) @defer.inlineCallbacks -- cgit 1.4.1 From ac5a4477adc772e4416c868e8b16ae41a2c0c4ef Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Fri, 15 Jan 2016 16:27:26 +0000 Subject: Require unbanning before other membership changes --- synapse/api/errors.py | 1 + synapse/handlers/federation.py | 4 +-- synapse/handlers/message.py | 57 +++++++++++++++++++++++++++++++++--------- synapse/handlers/room.py | 55 ++++++++++++++++++++++++++++++++++++++-- synapse/rest/client/v1/room.py | 51 +++++++++---------------------------- tests/handlers/test_room.py | 6 ++--- 6 files changed, 116 insertions(+), 58 deletions(-) (limited to 'synapse') diff --git a/synapse/api/errors.py b/synapse/api/errors.py index ce0fc53668..b106fbed6d 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -29,6 +29,7 @@ class Codes(object): USER_IN_USE = "M_USER_IN_USE" ROOM_IN_USE = "M_ROOM_IN_USE" BAD_PAGINATION = "M_BAD_PAGINATION" + BAD_STATE = "M_BAD_STATE" UNKNOWN = "M_UNKNOWN" NOT_FOUND = "M_NOT_FOUND" MISSING_TOKEN = "M_MISSING_TOKEN" diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 2f6359c768..26402ea9cd 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1693,7 +1693,7 @@ class FederationHandler(BaseHandler): self.auth.check(event, context.current_state) yield self._validate_keyserver(event, auth_events=context.current_state) member_handler = self.hs.get_handlers().room_member_handler - yield member_handler.change_membership(event, context) + yield member_handler.send_membership_event(event, context) else: destinations = set([x.split(":", 1)[-1] for x in (sender, room_id)]) yield self.replication_layer.forward_third_party_invite( @@ -1722,7 +1722,7 @@ 
class FederationHandler(BaseHandler): # TODO: Make sure the signatures actually are correct. event.signatures.update(returned_invite.signatures) member_handler = self.hs.get_handlers().room_member_handler - yield member_handler.change_membership(event, context) + yield member_handler.send_membership_event(event, context) @defer.inlineCallbacks def add_display_name_to_third_party_invite(self, event_dict, event, context): diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 5805190ce8..4c7bf2bef3 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -174,30 +174,25 @@ class MessageHandler(BaseHandler): defer.returnValue(chunk) @defer.inlineCallbacks - def create_and_send_event(self, event_dict, ratelimit=True, - token_id=None, txn_id=None, is_guest=False): - """ Given a dict from a client, create and handle a new event. + def create_event(self, event_dict, token_id=None, txn_id=None): + """ + Given a dict from a client, create a new event. Creates an FrozenEvent object, filling out auth_events, prev_events, etc. Adds display names to Join membership events. - Persists and notifies local clients and federation. - Args: event_dict (dict): An entire event + + Returns: + Tuple of created event (FrozenEvent), Context """ builder = self.event_builder_factory.new(event_dict) self.validator.validate_new(builder) - if ratelimit: - self.ratelimit(builder.user_id) - # TODO(paul): Why does 'event' not have a 'user' object? - user = UserID.from_string(builder.user_id) - assert self.hs.is_mine(user), "User must be our own: %s" % (user,) - if builder.type == EventTypes.Member: membership = builder.content.get("membership", None) if membership == Membership.JOIN: @@ -216,6 +211,25 @@ class MessageHandler(BaseHandler): event, context = yield self._create_new_client_event( builder=builder, ) + defer.returnValue((event, context)) + + @defer.inlineCallbacks + def send_event(self, event, context, ratelimit=True, is_guest=False): + """ + Persists and notifies local clients and federation of an event. + + Args: + event (FrozenEvent) the event to send. + context (Context) the context of the event. + ratelimit (bool): Whether to rate limit this send. + is_guest (bool): Whether the sender is a guest. + """ + user = UserID.from_string(event.sender) + + assert self.hs.is_mine(user), "User must be our own: %s" % (user,) + + if ratelimit: + self.ratelimit(event.sender) if event.is_state(): prev_state = context.current_state.get((event.type, event.state_key)) @@ -229,7 +243,7 @@ class MessageHandler(BaseHandler): if event.type == EventTypes.Member: member_handler = self.hs.get_handlers().room_member_handler - yield member_handler.change_membership(event, context, is_guest=is_guest) + yield member_handler.send_membership_event(event, context, is_guest=is_guest) else: yield self.handle_new_client_event( event=event, @@ -241,6 +255,25 @@ class MessageHandler(BaseHandler): with PreserveLoggingContext(): presence.bump_presence_active_time(user) + @defer.inlineCallbacks + def create_and_send_event(self, event_dict, ratelimit=True, + token_id=None, txn_id=None, is_guest=False): + """ + Creates an event, then sends it. + + See self.create_event and self.send_event. 
+ """ + event, context = yield self.create_event( + event_dict, + token_id=token_id, + txn_id=txn_id + ) + yield self.send_event( + event, + context, + ratelimit=ratelimit, + is_guest=is_guest + ) defer.returnValue(event) @defer.inlineCallbacks diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index a410e4394c..a1baf9d200 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -22,7 +22,7 @@ from synapse.types import UserID, RoomAlias, RoomID from synapse.api.constants import ( EventTypes, Membership, JoinRules, RoomCreationPreset, ) -from synapse.api.errors import AuthError, StoreError, SynapseError +from synapse.api.errors import AuthError, StoreError, SynapseError, Codes from synapse.util import stringutils, unwrapFirstError from synapse.util.async import run_on_reactor @@ -397,7 +397,58 @@ class RoomMemberHandler(BaseHandler): remotedomains.add(member.domain) @defer.inlineCallbacks - def change_membership(self, event, context, is_guest=False): + def update_membership(self, requester, target, room_id, action, txn_id=None): + effective_membership_state = action + if action in ["kick", "unban"]: + effective_membership_state = "leave" + elif action == "forget": + effective_membership_state = "leave" + + msg_handler = self.hs.get_handlers().message_handler + + content = {"membership": unicode(effective_membership_state)} + if requester.is_guest: + content["kind"] = "guest" + + event, context = yield msg_handler.create_event( + { + "type": EventTypes.Member, + "content": content, + "room_id": room_id, + "sender": requester.user.to_string(), + "state_key": target.to_string(), + }, + token_id=requester.access_token_id, + txn_id=txn_id, + ) + + old_state = context.current_state.get((EventTypes.Member, event.state_key)) + old_membership = old_state.content.get("membership") if old_state else None + if action == "unban" and old_membership != "ban": + raise SynapseError( + 403, + "Cannot unban user who was not banned (membership=%s)" % old_membership, + errcode=Codes.BAD_STATE + ) + if old_membership == "ban" and action != "unban": + raise SynapseError( + 403, + "Cannot %s user who was is banned" % (action,), + errcode=Codes.BAD_STATE + ) + + yield msg_handler.send_event( + event, + context, + ratelimit=True, + is_guest=requester.is_guest + ) + + if action == "forget": + yield self.forget(requester.user, room_id) + + @defer.inlineCallbacks + def send_membership_event(self, event, context, is_guest=False): """ Change the membership status of a user in a room. 
Args: diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index 8b1b2b852d..85b9f253e3 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -442,7 +442,7 @@ class RoomMembershipRestServlet(ClientV1RestServlet): def register(self, http_server): # /rooms/$roomid/[invite|join|leave] PATTERNS = ("/rooms/(?P[^/]*)/" - "(?Pjoin|invite|leave|ban|kick|forget)") + "(?Pjoin|invite|leave|ban|unban|kick|forget)") register_txn_path(self, PATTERNS, http_server) @defer.inlineCallbacks @@ -451,9 +451,6 @@ class RoomMembershipRestServlet(ClientV1RestServlet): request, allow_guest=True, ) - user = requester.user - - effective_membership_action = membership_action if requester.is_guest and membership_action not in { Membership.JOIN, @@ -463,13 +460,10 @@ class RoomMembershipRestServlet(ClientV1RestServlet): content = _parse_json(request) - # target user is you unless it is an invite - state_key = user.to_string() - if membership_action == "invite" and self._has_3pid_invite_keys(content): yield self.handlers.room_member_handler.do_3pid_invite( room_id, - user, + requester.user, content["medium"], content["address"], content["id_server"], @@ -478,42 +472,21 @@ class RoomMembershipRestServlet(ClientV1RestServlet): ) defer.returnValue((200, {})) return - elif membership_action in ["invite", "ban", "kick"]: - if "user_id" in content: - state_key = content["user_id"] - else: - raise SynapseError(400, "Missing user_id key.") - - # make sure it looks like a user ID; it'll throw if it's invalid. - UserID.from_string(state_key) - if membership_action == "kick": - effective_membership_action = "leave" - elif membership_action == "forget": - effective_membership_action = "leave" - - msg_handler = self.handlers.message_handler - - content = {"membership": unicode(effective_membership_action)} - if requester.is_guest: - content["kind"] = "guest" + target = requester.user + if membership_action in ["invite", "ban", "unban", "kick"]: + if "user_id" not in content: + raise SynapseError(400, "Missing user_id key.") + target = UserID.from_string(content["user_id"]) - yield msg_handler.create_and_send_event( - { - "type": EventTypes.Member, - "content": content, - "room_id": room_id, - "sender": user.to_string(), - "state_key": state_key, - }, - token_id=requester.access_token_id, + yield self.handlers.room_member_handler.update_membership( + requester=requester, + target=target, + room_id=room_id, + action=membership_action, txn_id=txn_id, - is_guest=requester.is_guest, ) - if membership_action == "forget": - yield self.handlers.room_member_handler.forget(user, room_id) - defer.returnValue((200, {})) def _has_3pid_invite_keys(self, content): diff --git a/tests/handlers/test_room.py b/tests/handlers/test_room.py index 97491848a3..e7a12a2ba2 100644 --- a/tests/handlers/test_room.py +++ b/tests/handlers/test_room.py @@ -156,7 +156,7 @@ class RoomMemberHandlerTestCase(unittest.TestCase): builder ) - yield room_handler.change_membership(event, context) + yield room_handler.send_membership_event(event, context) self.state_handler.compute_event_context.assert_called_once_with( builder @@ -232,7 +232,7 @@ class RoomMemberHandlerTestCase(unittest.TestCase): ) # Actual invocation - yield room_handler.change_membership(event, context) + yield room_handler.send_membership_event(event, context) self.federation.handle_new_event.assert_called_once_with( event, destinations=set() @@ -312,7 +312,7 @@ class RoomMemberHandlerTestCase(unittest.TestCase): 
self.distributor.observe("user_left_room", leave_signal_observer) # Actual invocation - yield room_handler.change_membership(event, context) + yield room_handler.send_membership_event(event, context) self.federation.handle_new_event.assert_called_once_with( event, destinations=set(['red']) -- cgit 1.4.1 From cc66a9a5e3fc954b0da48ba891e9f77be31aa832 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 10:45:09 +0000 Subject: Allow filtering events for multiple users at once --- synapse/handlers/_base.py | 93 +++++++++++++++++++++++++------------------ synapse/storage/roommember.py | 13 ++++++ 2 files changed, 67 insertions(+), 39 deletions(-) (limited to 'synapse') diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index bb2c6733d5..2d1167296a 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -53,16 +53,54 @@ class BaseHandler(object): self.event_builder_factory = hs.get_event_builder_factory() @defer.inlineCallbacks - def _filter_events_for_client(self, user_id, events, is_guest=False): - # Assumes that user has at some point joined the room if not is_guest. + def _filter_events_for_clients(self, users, events): + """ Returns dict of user_id -> list of events that user is allowed to + see. + """ + event_id_to_state = yield self.store.get_state_for_events( + frozenset(e.event_id for e in events), + types=( + (EventTypes.RoomHistoryVisibility, ""), + (EventTypes.Member, None), + ) + ) + + forgotten = yield defer.gatherResults([ + self.store.who_forgot_in_room( + room_id, + ) + for room_id in frozenset(e.room_id for e in events) + ], consumeErrors=True) + + # Set of membership event_ids that have been forgotten + event_id_forgotten = frozenset( + row["event_id"] for rows in forgotten for row in rows + ) + + def allowed(event, user_id, is_guest): + state = event_id_to_state[event.event_id] + + visibility_event = state.get((EventTypes.RoomHistoryVisibility, ""), None) + if visibility_event: + visibility = visibility_event.content.get("history_visibility", "shared") + else: + visibility = "shared" - def allowed(event, membership, visibility): if visibility == "world_readable": return True if is_guest: return False + membership_event = state.get((EventTypes.Member, user_id), None) + if membership_event: + if membership_event.event_id in event_id_forgotten: + membership = None + else: + membership = membership_event.membership + else: + membership = None + if membership == Membership.JOIN: return True @@ -78,43 +116,20 @@ class BaseHandler(object): return True - event_id_to_state = yield self.store.get_state_for_events( - frozenset(e.event_id for e in events), - types=( - (EventTypes.RoomHistoryVisibility, ""), - (EventTypes.Member, user_id), - ) - ) - - events_to_return = [] - for event in events: - state = event_id_to_state[event.event_id] + defer.returnValue({ + user_id: [ + event + for event in events + if allowed(event, user_id, is_guest) + ] + for user_id, is_guest in users + }) - membership_event = state.get((EventTypes.Member, user_id), None) - if membership_event: - was_forgotten_at_event = yield self.store.was_forgotten_at( - membership_event.state_key, - membership_event.room_id, - membership_event.event_id - ) - if was_forgotten_at_event: - membership = None - else: - membership = membership_event.membership - else: - membership = None - - visibility_event = state.get((EventTypes.RoomHistoryVisibility, ""), None) - if visibility_event: - visibility = visibility_event.content.get("history_visibility", "shared") - else: - visibility 
= "shared" - - should_include = allowed(event, membership, visibility) - if should_include: - events_to_return.append(event) - - defer.returnValue(events_to_return) + @defer.inlineCallbacks + def _filter_events_for_client(self, user_id, events, is_guest=False): + # Assumes that user has at some point joined the room if not is_guest. + res = yield self._filter_events_for_clients([(user_id, is_guest)], events) + defer.returnValue(res.get(user_id, [])) def ratelimit(self, user_id): time_now = self.clock.time() diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 7d3ce4579d..68ac88905f 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -287,6 +287,7 @@ class RoomMemberStore(SQLBaseStore): txn.execute(sql, (user_id, room_id)) yield self.runInteraction("forget_membership", f) self.was_forgotten_at.invalidate_all() + self.who_forgot_in_room.invalidate_all() self.did_forget.invalidate((user_id, room_id)) @cachedInlineCallbacks(num_args=2) @@ -336,3 +337,15 @@ class RoomMemberStore(SQLBaseStore): return rows[0][0] forgot = yield self.runInteraction("did_forget_membership_at", f) defer.returnValue(forgot == 1) + + @cached() + def who_forgot_in_room(self, room_id): + return self._simple_select_list( + table="room_memberships", + retcols=("user_id", "event_id"), + keyvalues={ + "room_id": room_id, + "forgotten": 1, + }, + desc="who_forgot" + ) -- cgit 1.4.1 From 2068678b8cf40a1ec7f73bd9c400bb297e031454 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 10:46:09 +0000 Subject: Make Event objects behave more like dicts --- synapse/events/__init__.py | 9 +++++++++ 1 file changed, 9 insertions(+) (limited to 'synapse') diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 869a623090..bbfa5a7265 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -117,6 +117,15 @@ class EventBase(object): def __set__(self, instance, value): raise AttributeError("Unrecognized attribute %s" % (instance,)) + def __getitem__(self, field): + return self._event_dict[field] + + def __contains__(self, field): + return field in self._event_dict + + def items(self): + return self._event_dict.items() + class FrozenEvent(EventBase): def __init__(self, event_dict, internal_metadata_dict={}, rejected_reason=None): -- cgit 1.4.1 From f59b56450797746230046137b2e2008cb66cb604 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 14:09:47 +0000 Subject: Make notifications go quicker --- synapse/push/bulk_push_rule_evaluator.py | 116 +++++++++------- synapse/push/push_rule_evaluator.py | 226 ++++++++++++++++++++----------- synapse/storage/push_rule.py | 23 +++- synapse/storage/registration.py | 26 +++- 4 files changed, 260 insertions(+), 131 deletions(-) (limited to 'synapse') diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index ce244fa959..b9f78fd598 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -14,16 +14,16 @@ # limitations under the License. 
import logging -import simplejson as json +import ujson as json from twisted.internet import defer -from synapse.types import UserID - import baserules -from push_rule_evaluator import PushRuleEvaluator +from push_rule_evaluator import PushRuleEvaluatorForEvent + +from synapse.api.constants import EventTypes +from synapse.types import UserID -from synapse.events.utils import serialize_event logger = logging.getLogger(__name__) @@ -35,28 +35,25 @@ def decode_rule_json(rule): @defer.inlineCallbacks -def evaluator_for_room_id(room_id, store): - users = yield store.get_users_in_room(room_id) - rules_by_user = yield store.bulk_get_push_rules(users) +def _get_rules(room_id, user_ids, store): + rules_by_user = yield store.bulk_get_push_rules(user_ids) rules_by_user = { uid: baserules.list_with_base_rules( - [decode_rule_json(rule_list) for rule_list in rules_by_user[uid]] - if uid in rules_by_user else [], + [decode_rule_json(rule_list) for rule_list in rules_by_user.get(uid, [])], UserID.from_string(uid), ) - for uid in users + for uid in user_ids } - member_events = yield store.get_current_state( - room_id=room_id, - event_type='m.room.member', - ) - display_names = {} - for ev in member_events: - if ev.content.get("displayname"): - display_names[ev.state_key] = ev.content.get("displayname") + defer.returnValue(rules_by_user) + + +@defer.inlineCallbacks +def evaluator_for_room_id(room_id, store): + users = yield store.get_users_in_room(room_id) + rules_by_user = yield _get_rules(room_id, users, store) defer.returnValue(BulkPushRuleEvaluator( - room_id, rules_by_user, display_names, users, store + room_id, rules_by_user, users, store )) @@ -69,10 +66,9 @@ class BulkPushRuleEvaluator: the same logic to run the actual rules, but could be optimised further (see https://matrix.org/jira/browse/SYN-562) """ - def __init__(self, room_id, rules_by_user, display_names, users_in_room, store): + def __init__(self, room_id, rules_by_user, users_in_room, store): self.room_id = room_id self.rules_by_user = rules_by_user - self.display_names = display_names self.users_in_room = users_in_room self.store = store @@ -80,15 +76,30 @@ class BulkPushRuleEvaluator: def action_for_event_by_user(self, event, handler): actions_by_user = {} + users_dict = yield self.store.are_guests(self.rules_by_user.keys()) + + filtered_by_user = yield handler._filter_events_for_clients( + users_dict.items(), [event] + ) + + evaluator = PushRuleEvaluatorForEvent.create(event, len(self.users_in_room)) + + condition_cache = {} + + member_state = yield self.store.get_state_for_event( + event.event_id, + ) + + display_names = {} + for ev in member_state.values(): + nm = ev.content.get("displayname", None) + if nm and ev.type == EventTypes.Member: + display_names[ev.state_key] = nm + for uid, rules in self.rules_by_user.items(): - display_name = None - if uid in self.display_names: - display_name = self.display_names[uid] - - is_guest = yield self.store.is_guest(UserID.from_string(uid)) - filtered = yield handler._filter_events_for_client( - uid, [event], is_guest=is_guest - ) + display_name = display_names.get(uid, None) + + filtered = filtered_by_user[uid] if len(filtered) == 0: continue @@ -96,29 +107,32 @@ class BulkPushRuleEvaluator: if 'enabled' in rule and not rule['enabled']: continue - # XXX: profile tags - if BulkPushRuleEvaluator.event_matches_rule( - event, rule, - display_name, len(self.users_in_room), None - ): + matches = _condition_checker( + evaluator, rule['conditions'], display_name, condition_cache + ) + if matches: 
actions = [x for x in rule['actions'] if x != 'dont_notify'] - if len(actions) > 0: + if actions: actions_by_user[uid] = actions break defer.returnValue(actions_by_user) - @staticmethod - def event_matches_rule(event, rule, - display_name, room_member_count, profile_tag): - matches = True - - # passing the clock all the way into here is extremely awkward and push - # rules do not care about any of the relative timestamps, so we just - # pass 0 for the current time. - client_event = serialize_event(event, 0) - - for cond in rule['conditions']: - matches &= PushRuleEvaluator._event_fulfills_condition( - client_event, cond, display_name, room_member_count, profile_tag - ) - return matches + +def _condition_checker(evaluator, conditions, display_name, cache): + for cond in conditions: + _id = cond.get("_id", None) + if _id: + res = cache.get(_id, None) + if res is False: + break + elif res is True: + continue + + res = evaluator.matches(cond, display_name, None) + if _id: + cache[_id] = res + + if res is False: + return False + + return True diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index b0283743a2..bbc8308c2d 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -15,17 +15,22 @@ from twisted.internet import defer -from synapse.types import UserID - import baserules import logging import simplejson as json import re +from synapse.types import UserID + logger = logging.getLogger(__name__) +GLOB_REGEX = re.compile(r'\\\[(\\\!|)(.*)\\\]') +IS_GLOB = re.compile(r'[\?\*\[\]]') +INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$") + + @defer.inlineCallbacks def evaluator_for_user_id_and_profile_tag(user_id, profile_tag, room_id, store): rawrules = yield store.get_push_rules_for_user(user_id) @@ -42,9 +47,34 @@ def evaluator_for_user_id_and_profile_tag(user_id, profile_tag, room_id, store): )) +def _room_member_count(ev, condition, room_member_count): + if 'is' not in condition: + return False + m = INEQUALITY_EXPR.match(condition['is']) + if not m: + return False + ineq = m.group(1) + rhs = m.group(2) + if not rhs.isdigit(): + return False + rhs = int(rhs) + + if ineq == '' or ineq == '==': + return room_member_count == rhs + elif ineq == '<': + return room_member_count < rhs + elif ineq == '>': + return room_member_count > rhs + elif ineq == '>=': + return room_member_count >= rhs + elif ineq == '<=': + return room_member_count <= rhs + else: + return False + + class PushRuleEvaluator: DEFAULT_ACTIONS = [] - INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$") def __init__(self, user_id, profile_tag, raw_rules, enabled_map, room_id, our_member_event, store): @@ -98,6 +128,8 @@ class PushRuleEvaluator: room_members = yield self.store.get_users_in_room(room_id) room_member_count = len(room_members) + evaluator = PushRuleEvaluatorForEvent.create(ev, room_member_count) + for r in self.rules: if r['rule_id'] in self.enabled_map: r['enabled'] = self.enabled_map[r['rule_id']] @@ -105,21 +137,10 @@ class PushRuleEvaluator: r['enabled'] = True if not r['enabled']: continue - matches = True conditions = r['conditions'] actions = r['actions'] - for c in conditions: - matches &= self._event_fulfills_condition( - ev, c, display_name=my_display_name, - room_member_count=room_member_count, - profile_tag=self.profile_tag - ) - logger.debug( - "Rule %s %s", - r['rule_id'], "matches" if matches else "doesn't match" - ) # ignore rules with no actions (we have an explict 'dont_notify') if len(actions) == 0: logger.warn( @@ -127,6 +148,18 
@@ class PushRuleEvaluator: r['rule_id'], self.user_id ) continue + + matches = True + for c in conditions: + matches = evaluator.matches(c, my_display_name, self.profile_tag) + if not matches: + break + + logger.debug( + "Rule %s %s", + r['rule_id'], "matches" if matches else "doesn't match" + ) + if matches: logger.info( "%s matches for user %s, event %s", @@ -145,81 +178,84 @@ class PushRuleEvaluator: ) defer.returnValue(PushRuleEvaluator.DEFAULT_ACTIONS) - @staticmethod - def _glob_to_regexp(glob): - r = re.escape(glob) - r = re.sub(r'\\\*', r'.*?', r) - r = re.sub(r'\\\?', r'.', r) - # handle [abc], [a-z] and [!a-z] style ranges. - r = re.sub(r'\\\[(\\\!|)(.*)\\\]', - lambda x: ('[%s%s]' % (x.group(1) and '^' or '', - re.sub(r'\\\-', '-', x.group(2)))), r) - return r +class PushRuleEvaluatorForEvent(object): + WORD_BOUNDARY = re.compile(r'\b') + + def __init__(self, event, body_parts, room_member_count): + self._event = event + self._body_parts = body_parts + self._room_member_count = room_member_count + + self._value_cache = _flatten_dict(event) @staticmethod - def _event_fulfills_condition(ev, condition, - display_name, room_member_count, profile_tag): - if condition['kind'] == 'event_match': - if 'pattern' not in condition: - logger.warn("event_match condition with no pattern") - return False - # XXX: optimisation: cache our pattern regexps - if condition['key'] == 'content.body': - r = r'\b%s\b' % PushRuleEvaluator._glob_to_regexp(condition['pattern']) - else: - r = r'^%s$' % PushRuleEvaluator._glob_to_regexp(condition['pattern']) - val = _value_for_dotted_key(condition['key'], ev) - if val is None: - return False - return re.search(r, val, flags=re.IGNORECASE) is not None + def create(event, room_member_count): + body = event.get("content", {}).get("body", None) + if body: + body_parts = PushRuleEvaluatorForEvent.WORD_BOUNDARY.split(body) + body_parts[:] = [ + part.lower() for part in body_parts + ] + else: + body_parts = [] + + return PushRuleEvaluatorForEvent(event, body_parts, room_member_count) + def matches(self, condition, display_name, profile_tag): + if condition['kind'] == 'event_match': + return self._event_match(condition) elif condition['kind'] == 'device': if 'profile_tag' not in condition: return True return condition['profile_tag'] == profile_tag - elif condition['kind'] == 'contains_display_name': - # This is special because display names can be different - # between rooms and so you can't really hard code it in a rule. - # Optimisation: we should cache these names and update them from - # the event stream. 
- if 'content' not in ev or 'body' not in ev['content']: - return False - if not display_name: - return False - return re.search( - r"\b%s\b" % re.escape(display_name), ev['content']['body'], - flags=re.IGNORECASE - ) is not None - + return self._contains_display_name(display_name) elif condition['kind'] == 'room_member_count': - if 'is' not in condition: - return False - m = PushRuleEvaluator.INEQUALITY_EXPR.match(condition['is']) - if not m: - return False - ineq = m.group(1) - rhs = m.group(2) - if not rhs.isdigit(): - return False - rhs = int(rhs) - - if ineq == '' or ineq == '==': - return room_member_count == rhs - elif ineq == '<': - return room_member_count < rhs - elif ineq == '>': - return room_member_count > rhs - elif ineq == '>=': - return room_member_count >= rhs - elif ineq == '<=': - return room_member_count <= rhs - else: - return False + return _room_member_count( + self._event, condition, self._room_member_count + ) else: return True + def _event_match(self, condition): + pattern = condition.get('pattern', None) + + if not pattern: + logger.warn("event_match condition with no pattern") + return False + + # XXX: optimisation: cache our pattern regexps + if condition['key'] == 'content.body': + matcher = _glob_to_matcher(pattern) + + for part in self._body_parts: + if matcher(part): + return True + return False + else: + haystack = self._get_value(condition['key']) + if haystack is None: + return False + + matcher = _glob_to_matcher(pattern) + + return matcher(haystack.lower()) + + def _contains_display_name(self, display_name): + if not display_name: + return False + + lower_display_name = display_name.lower() + for part in self._body_parts: + if part == lower_display_name: + return True + + return False + + def _get_value(self, dotted_key): + return self._value_cache.get(dotted_key, None) + def _value_for_dotted_key(dotted_key, event): parts = dotted_key.split(".") @@ -229,4 +265,42 @@ def _value_for_dotted_key(dotted_key, event): return None val = val[parts[0]] parts = parts[1:] + return val + + +def _glob_to_matcher(glob): + glob = glob.lower() + + if not IS_GLOB.search(glob): + return lambda value: value == glob + + r = re.escape(glob) + + r = r.replace(r'\*', '.*?') + r = r.replace(r'\?', '.') + + # handle [abc], [a-z] and [!a-z] style ranges. + r = GLOB_REGEX.sub( + lambda x: ( + '[%s%s]' % ( + x.group(1) and '^' or '', + x.group(2).replace(r'\\\-', '-') + ) + ), + r, + ) + + r = r + "$" + r = re.compile(r) + return lambda value: r.match(value) + + +def _flatten_dict(d, prefix=[], result={}): + for key, value in d.items(): + if isinstance(value, basestring): + result[".".join(prefix + [key])] = value.lower() + elif hasattr(value, "items"): + _flatten_dict(value, prefix=(prefix+[key]), result=result) + + return result diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index 2adfefd994..1adf28b893 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -14,7 +14,7 @@ # limitations under the License. 
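
A simplified, self-contained sketch of the glob handling the new evaluator above relies on; character-class ranges and the word-boundary body split are deliberately left out, so this is a rough illustration under those assumptions rather than the exact code:

# Illustrative sketch only: turn a push-rule glob such as "m.room.*" into an
# anchored, case-insensitive matcher.
import re

def glob_to_matcher(glob):
    glob = glob.lower()
    if not re.search(r'[\?\*\[\]]', glob):
        # No wildcard characters at all: plain equality is cheaper than a regex.
        return lambda value: value.lower() == glob
    pattern = re.escape(glob)
    pattern = pattern.replace(r'\*', '.*?')  # '*' -> any run of characters
    pattern = pattern.replace(r'\?', '.')    # '?' -> any single character
    compiled = re.compile(pattern + '$')
    return lambda value: bool(compiled.match(value.lower()))

assert glob_to_matcher("m.room.*")("m.room.message")
assert glob_to_matcher("cake")("Cake")
assert not glob_to_matcher("m.call.invite")("m.room.message")
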
from ._base import SQLBaseStore -from synapse.util.caches.descriptors import cachedInlineCallbacks +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList from twisted.internet import defer import logging @@ -60,6 +60,27 @@ class PushRuleStore(SQLBaseStore): r['rule_id']: False if r['enabled'] == 0 else True for r in results }) + @cached() + def _get_push_rules_enabled_for_user(self, user_id): + def f(txn): + sql = ( + "SELECT pr.*" + " FROM push_rules AS pr" + " LEFT JOIN push_rules_enable AS pre" + " ON pr.user_name = pre.user_name AND pr.rule_id = pre.rule_id" + " WHERE pr.user_name = ?" + " AND (pre.enabled IS NULL OR pre.enabled = 1)" + " ORDER BY pr.priority_class DESC, pr.priority DESC" + ) + txn.execute(sql, (user_id,)) + return self.cursor_to_dict(txn) + + return self.runInteraction( + "_get_push_rules_enabled_for_user", f + ) + + # @cachedList(cache=_get_push_rules_enabled_for_user.cache, list_name="user_ids", + # num_args=1, inlineCallbacks=True) @defer.inlineCallbacks def bulk_get_push_rules(self, user_ids): if not user_ids: diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 999b710fbb..70cde0d04d 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -18,7 +18,7 @@ from twisted.internet import defer from synapse.api.errors import StoreError, Codes from ._base import SQLBaseStore -from synapse.util.caches.descriptors import cached, cachedInlineCallbacks +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList class RegistrationStore(SQLBaseStore): @@ -256,10 +256,10 @@ class RegistrationStore(SQLBaseStore): defer.returnValue(res if res else False) @cachedInlineCallbacks() - def is_guest(self, user): + def is_guest(self, user_id): res = yield self._simple_select_one_onecol( table="users", - keyvalues={"name": user.to_string()}, + keyvalues={"name": user_id}, retcol="is_guest", allow_none=True, desc="is_guest", @@ -267,6 +267,26 @@ class RegistrationStore(SQLBaseStore): defer.returnValue(res if res else False) + @cachedList(cache=is_guest.cache, list_name="user_ids", num_args=1, + inlineCallbacks=True) + def are_guests(self, user_ids): + sql = "SELECT name, is_guest FROM users WHERE name IN (%s)" % ( + ",".join("?" 
for _ in user_ids), + ) + + rows = yield self._execute( + "are_guests", self.cursor_to_dict, sql, *user_ids + ) + + result = {user_id: False for user_id in user_ids} + + result.update({ + row["name"]: bool(row["is_guest"]) + for row in rows + }) + + defer.returnValue(result) + def _query_for_auth(self, txn, token): sql = ( "SELECT users.name, users.is_guest, access_tokens.id as token_id" -- cgit 1.4.1 From 63485b30298f906efbdbe39311dd7503101e7602 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 14:11:43 +0000 Subject: Re-enable unread notifications --- synapse/handlers/federation.py | 13 ++++++------- synapse/handlers/sync.py | 3 --- synapse/push/action_generator.py | 3 --- 3 files changed, 6 insertions(+), 13 deletions(-) (limited to 'synapse') diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 26402ea9cd..4b94940e99 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -36,7 +36,7 @@ from synapse.events.utils import prune_event from synapse.util.retryutils import NotRetryingDestination -# from synapse.push.action_generator import ActionGenerator +from synapse.push.action_generator import ActionGenerator from twisted.internet import defer @@ -244,12 +244,11 @@ class FederationHandler(BaseHandler): user = UserID.from_string(event.state_key) yield user_joined_room(self.distributor, user, event.room_id) - # Temporarily disable notifications due to performance concerns. - # if not backfilled and not event.internal_metadata.is_outlier(): - # action_generator = ActionGenerator(self.store) - # yield action_generator.handle_push_actions_for_event( - # event, self - # ) + if not backfilled and not event.internal_metadata.is_outlier(): + action_generator = ActionGenerator(self.store) + yield action_generator.handle_push_actions_for_event( + event, self + ) @defer.inlineCallbacks def _filter_events_for_server(self, server_name, room_id, events): diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 1942268c3c..52202d8e63 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -841,9 +841,6 @@ class SyncHandler(BaseHandler): @defer.inlineCallbacks def unread_notifs_for_room_id(self, room_id, sync_config, ephemeral_by_room): - # Temporarily disable notifications due to performance concerns. - defer.returnValue([]) - last_unread_event_id = self.last_read_event_id_for_room_and_user( room_id, sync_config.user.to_string(), ephemeral_by_room ) diff --git a/synapse/push/action_generator.py b/synapse/push/action_generator.py index 73467f3adc..4cf94f6c61 100644 --- a/synapse/push/action_generator.py +++ b/synapse/push/action_generator.py @@ -36,9 +36,6 @@ class ActionGenerator: @defer.inlineCallbacks def handle_push_actions_for_event(self, event, handler): - # Temporarily disable notifications due to performance concerns. 
- return - if event.type == EventTypes.Redaction and event.redacts is not None: yield self.store.remove_push_actions_for_event_id( event.room_id, event.redacts -- cgit 1.4.1 From 2c176e02ae910ce52197539b31f78ae1b1ef4c3c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 14:24:31 +0000 Subject: Make unit tests work --- synapse/storage/push_rule.py | 2 +- tests/handlers/test_federation.py | 141 ------------- tests/handlers/test_room.py | 418 -------------------------------------- 3 files changed, 1 insertion(+), 560 deletions(-) delete mode 100644 tests/handlers/test_federation.py delete mode 100644 tests/handlers/test_room.py (limited to 'synapse') diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index 1adf28b893..f210e6c14d 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -14,7 +14,7 @@ # limitations under the License. from ._base import SQLBaseStore -from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks from twisted.internet import defer import logging diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py deleted file mode 100644 index 11a3d94bb0..0000000000 --- a/tests/handlers/test_federation.py +++ /dev/null @@ -1,141 +0,0 @@ -# Copyright 2014-2016 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from twisted.internet import defer -from tests import unittest - -from synapse.api.constants import EventTypes -from synapse.events import FrozenEvent -from synapse.handlers.federation import FederationHandler - -from mock import NonCallableMock, ANY, Mock - -from ..utils import setup_test_homeserver - - -class FederationTestCase(unittest.TestCase): - - @defer.inlineCallbacks - def setUp(self): - - self.state_handler = NonCallableMock(spec_set=[ - "compute_event_context", - ]) - - self.auth = NonCallableMock(spec_set=[ - "check", - "check_host_in_room", - ]) - - self.hostname = "test" - hs = yield setup_test_homeserver( - self.hostname, - datastore=NonCallableMock(spec_set=[ - "persist_event", - "store_room", - "get_room", - "get_destination_retry_timings", - "set_destination_retry_timings", - "have_events", - "get_users_in_room", - "bulk_get_push_rules", - "get_current_state", - "set_push_actions_for_event_and_users", - "is_guest", - "get_state_for_events", - ]), - resource_for_federation=NonCallableMock(), - http_client=NonCallableMock(spec_set=[]), - notifier=NonCallableMock(spec_set=["on_new_room_event"]), - handlers=NonCallableMock(spec_set=[ - "room_member_handler", - "federation_handler", - ]), - auth=self.auth, - state_handler=self.state_handler, - keyring=Mock(), - ) - - self.datastore = hs.get_datastore() - self.handlers = hs.get_handlers() - self.notifier = hs.get_notifier() - self.hs = hs - - self.handlers.federation_handler = FederationHandler(self.hs) - - self.datastore.get_state_for_events.return_value = {"$a:b": {}} - - @defer.inlineCallbacks - def test_msg(self): - pdu = FrozenEvent({ - "type": EventTypes.Message, - "room_id": "foo", - "content": {"msgtype": u"fooo"}, - "origin_server_ts": 0, - "event_id": "$a:b", - "user_id":"@a:b", - "origin": "b", - "auth_events": [], - "hashes": {"sha256":"AcLrgtUIqqwaGoHhrEvYG1YLDIsVPYJdSRGhkp3jJp8"}, - }) - - self.datastore.persist_event.return_value = defer.succeed((1,1)) - self.datastore.get_room.return_value = defer.succeed(True) - self.datastore.get_users_in_room.return_value = ["@a:b"] - self.datastore.bulk_get_push_rules.return_value = {} - self.datastore.get_current_state.return_value = {} - self.auth.check_host_in_room.return_value = defer.succeed(True) - - retry_timings_res = { - "destination": "", - "retry_last_ts": 0, - "retry_interval": 0, - } - self.datastore.get_destination_retry_timings.return_value = ( - defer.succeed(retry_timings_res) - ) - - def have_events(event_ids): - return defer.succeed({}) - self.datastore.have_events.side_effect = have_events - - def annotate(ev, old_state=None, outlier=False): - context = Mock() - context.current_state = {} - context.auth_events = {} - return defer.succeed(context) - self.state_handler.compute_event_context.side_effect = annotate - - yield self.handlers.federation_handler.on_receive_pdu( - "fo", pdu, False - ) - - self.datastore.persist_event.assert_called_once_with( - ANY, - is_new_state=True, - backfilled=False, - current_state=None, - context=ANY, - ) - - self.state_handler.compute_event_context.assert_called_once_with( - ANY, old_state=None, outlier=False - ) - - self.auth.check.assert_called_once_with(ANY, auth_events={}) - - self.notifier.on_new_room_event.assert_called_once_with( - ANY, 1, 1, extra_users=[] - ) diff --git a/tests/handlers/test_room.py b/tests/handlers/test_room.py deleted file mode 100644 index e7a12a2ba2..0000000000 --- a/tests/handlers/test_room.py +++ /dev/null @@ -1,418 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2014-2016 OpenMarket Ltd 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from twisted.internet import defer -from .. import unittest - -from synapse.api.constants import EventTypes, Membership -from synapse.handlers.room import RoomMemberHandler, RoomCreationHandler -from synapse.handlers.profile import ProfileHandler -from synapse.types import UserID -from ..utils import setup_test_homeserver - -from mock import Mock, NonCallableMock - - -class RoomMemberHandlerTestCase(unittest.TestCase): - - @defer.inlineCallbacks - def setUp(self): - self.hostname = "red" - hs = yield setup_test_homeserver( - self.hostname, - ratelimiter=NonCallableMock(spec_set=[ - "send_message", - ]), - datastore=NonCallableMock(spec_set=[ - "persist_event", - "get_room_member", - "get_room", - "store_room", - "get_latest_events_in_room", - "add_event_hashes", - "get_users_in_room", - "bulk_get_push_rules", - "get_current_state", - "set_push_actions_for_event_and_users", - "get_state_for_events", - "is_guest", - ]), - resource_for_federation=NonCallableMock(), - http_client=NonCallableMock(spec_set=[]), - notifier=NonCallableMock(spec_set=["on_new_room_event"]), - handlers=NonCallableMock(spec_set=[ - "room_member_handler", - "profile_handler", - "federation_handler", - ]), - auth=NonCallableMock(spec_set=[ - "check", - "add_auth_events", - "check_host_in_room", - ]), - state_handler=NonCallableMock(spec_set=[ - "compute_event_context", - "get_current_state", - ]), - ) - - self.federation = NonCallableMock(spec_set=[ - "handle_new_event", - "send_invite", - "get_state_for_room", - ]) - - self.datastore = hs.get_datastore() - self.handlers = hs.get_handlers() - self.notifier = hs.get_notifier() - self.state_handler = hs.get_state_handler() - self.distributor = hs.get_distributor() - self.auth = hs.get_auth() - self.hs = hs - - self.handlers.federation_handler = self.federation - - self.distributor.declare("collect_presencelike_data") - - self.handlers.room_member_handler = RoomMemberHandler(self.hs) - self.handlers.profile_handler = ProfileHandler(self.hs) - self.room_member_handler = self.handlers.room_member_handler - - self.ratelimiter = hs.get_ratelimiter() - self.ratelimiter.send_message.return_value = (True, 0) - - self.datastore.persist_event.return_value = (1,1) - self.datastore.add_event_hashes.return_value = [] - self.datastore.get_users_in_room.return_value = ["@bob:red"] - self.datastore.bulk_get_push_rules.return_value = {} - - @defer.inlineCallbacks - def test_invite(self): - room_id = "!foo:red" - user_id = "@bob:red" - target_user_id = "@red:blue" - content = {"membership": Membership.INVITE} - - builder = self.hs.get_event_builder_factory().new({ - "type": EventTypes.Member, - "sender": user_id, - "state_key": target_user_id, - "room_id": room_id, - "content": content, - }) - - self.datastore.get_latest_events_in_room.return_value = ( - defer.succeed([]) - ) - self.datastore.get_current_state.return_value = {} - self.datastore.get_state_for_events = lambda event_ids,types: {x: {} for x in event_ids} - - def annotate(_): - 
ctx = Mock() - ctx.current_state = { - (EventTypes.Member, "@alice:green"): self._create_member( - user_id="@alice:green", - room_id=room_id, - ), - (EventTypes.Member, "@bob:red"): self._create_member( - user_id="@bob:red", - room_id=room_id, - ), - } - ctx.prev_state_events = [] - - return defer.succeed(ctx) - - self.state_handler.compute_event_context.side_effect = annotate - - def add_auth(_, ctx): - ctx.auth_events = ctx.current_state[ - (EventTypes.Member, "@bob:red") - ] - - return defer.succeed(True) - self.auth.add_auth_events.side_effect = add_auth - - def send_invite(domain, event): - return defer.succeed(event) - - self.federation.send_invite.side_effect = send_invite - - room_handler = self.room_member_handler - event, context = yield room_handler._create_new_client_event( - builder - ) - - yield room_handler.send_membership_event(event, context) - - self.state_handler.compute_event_context.assert_called_once_with( - builder - ) - - self.auth.add_auth_events.assert_called_once_with( - builder, context - ) - - self.federation.send_invite.assert_called_once_with( - "blue", event, - ) - - self.datastore.persist_event.assert_called_once_with( - event, context=context, - ) - self.notifier.on_new_room_event.assert_called_once_with( - event, 1, 1, extra_users=[UserID.from_string(target_user_id)] - ) - self.assertFalse(self.datastore.get_room.called) - self.assertFalse(self.datastore.store_room.called) - self.assertFalse(self.federation.get_state_for_room.called) - - @defer.inlineCallbacks - def test_simple_join(self): - room_id = "!foo:red" - user_id = "@bob:red" - user = UserID.from_string(user_id) - - join_signal_observer = Mock() - self.distributor.observe("user_joined_room", join_signal_observer) - - builder = self.hs.get_event_builder_factory().new({ - "type": EventTypes.Member, - "sender": user_id, - "state_key": user_id, - "room_id": room_id, - "content": {"membership": Membership.JOIN}, - }) - - self.datastore.get_latest_events_in_room.return_value = ( - defer.succeed([]) - ) - self.datastore.get_current_state.return_value = {} - self.datastore.get_state_for_events = lambda event_ids,types: {x: {} for x in event_ids} - - def annotate(_): - ctx = Mock() - ctx.current_state = { - (EventTypes.Member, "@bob:red"): self._create_member( - user_id="@bob:red", - room_id=room_id, - membership=Membership.INVITE - ), - } - ctx.prev_state_events = [] - - return defer.succeed(ctx) - - self.state_handler.compute_event_context.side_effect = annotate - - def add_auth(_, ctx): - ctx.auth_events = ctx.current_state[ - (EventTypes.Member, "@bob:red") - ] - - return defer.succeed(True) - self.auth.add_auth_events.side_effect = add_auth - - room_handler = self.room_member_handler - event, context = yield room_handler._create_new_client_event( - builder - ) - - # Actual invocation - yield room_handler.send_membership_event(event, context) - - self.federation.handle_new_event.assert_called_once_with( - event, destinations=set() - ) - - self.datastore.persist_event.assert_called_once_with( - event, context=context - ) - self.notifier.on_new_room_event.assert_called_once_with( - event, 1, 1, extra_users=[user] - ) - - join_signal_observer.assert_called_with( - user=user, room_id=room_id - ) - - def _create_member(self, user_id, room_id, membership=Membership.JOIN): - builder = self.hs.get_event_builder_factory().new({ - "type": EventTypes.Member, - "sender": user_id, - "state_key": user_id, - "room_id": room_id, - "content": {"membership": membership}, - }) - - return builder.build() - - 
@defer.inlineCallbacks - def test_simple_leave(self): - room_id = "!foo:red" - user_id = "@bob:red" - user = UserID.from_string(user_id) - - builder = self.hs.get_event_builder_factory().new({ - "type": EventTypes.Member, - "sender": user_id, - "state_key": user_id, - "room_id": room_id, - "content": {"membership": Membership.LEAVE}, - }) - - self.datastore.get_latest_events_in_room.return_value = ( - defer.succeed([]) - ) - self.datastore.get_current_state.return_value = {} - self.datastore.get_state_for_events = lambda event_ids,types: {x: {} for x in event_ids} - - def annotate(_): - ctx = Mock() - ctx.current_state = { - (EventTypes.Member, "@bob:red"): self._create_member( - user_id="@bob:red", - room_id=room_id, - membership=Membership.JOIN - ), - } - ctx.prev_state_events = [] - - return defer.succeed(ctx) - - self.state_handler.compute_event_context.side_effect = annotate - - def add_auth(_, ctx): - ctx.auth_events = ctx.current_state[ - (EventTypes.Member, "@bob:red") - ] - - return defer.succeed(True) - self.auth.add_auth_events.side_effect = add_auth - - room_handler = self.room_member_handler - event, context = yield room_handler._create_new_client_event( - builder - ) - - leave_signal_observer = Mock() - self.distributor.observe("user_left_room", leave_signal_observer) - - # Actual invocation - yield room_handler.send_membership_event(event, context) - - self.federation.handle_new_event.assert_called_once_with( - event, destinations=set(['red']) - ) - - self.datastore.persist_event.assert_called_once_with( - event, context=context - ) - self.notifier.on_new_room_event.assert_called_once_with( - event, 1, 1, extra_users=[user] - ) - - leave_signal_observer.assert_called_with( - user=user, room_id=room_id - ) - - -class RoomCreationTest(unittest.TestCase): - - @defer.inlineCallbacks - def setUp(self): - self.hostname = "red" - - hs = yield setup_test_homeserver( - self.hostname, - datastore=NonCallableMock(spec_set=[ - "store_room", - "snapshot_room", - "persist_event", - "get_joined_hosts_for_room", - ]), - http_client=NonCallableMock(spec_set=[]), - notifier=NonCallableMock(spec_set=["on_new_room_event"]), - handlers=NonCallableMock(spec_set=[ - "room_creation_handler", - "message_handler", - ]), - auth=NonCallableMock(spec_set=["check", "add_auth_events"]), - ratelimiter=NonCallableMock(spec_set=[ - "send_message", - ]), - ) - - self.federation = NonCallableMock(spec_set=[ - "handle_new_event", - ]) - - self.handlers = hs.get_handlers() - - self.handlers.room_creation_handler = RoomCreationHandler(hs) - self.room_creation_handler = self.handlers.room_creation_handler - - self.message_handler = self.handlers.message_handler - - self.ratelimiter = hs.get_ratelimiter() - self.ratelimiter.send_message.return_value = (True, 0) - - @defer.inlineCallbacks - def test_room_creation(self): - user_id = "@foo:red" - room_id = "!bobs_room:red" - config = {"visibility": "private"} - - yield self.room_creation_handler.create_room( - user_id=user_id, - room_id=room_id, - config=config, - ) - - self.assertTrue(self.message_handler.create_and_send_event.called) - - event_dicts = [ - e[0][0] - for e in self.message_handler.create_and_send_event.call_args_list - ] - - self.assertTrue(len(event_dicts) > 3) - - self.assertDictContainsSubset( - { - "type": EventTypes.Create, - "sender": user_id, - "room_id": room_id, - }, - event_dicts[0] - ) - - self.assertEqual(user_id, event_dicts[0]["content"]["creator"]) - - self.assertDictContainsSubset( - { - "type": EventTypes.Member, - "sender": user_id, - 
"room_id": room_id, - "state_key": user_id, - }, - event_dicts[1] - ) - - self.assertEqual( - Membership.JOIN, - event_dicts[1]["content"]["membership"] - ) -- cgit 1.4.1 From 345ff2196a8b381a3dfea537ab659aac0837045a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 14:39:34 +0000 Subject: Don't edit ruleset --- synapse/push/push_rule_evaluator.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) (limited to 'synapse') diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index bbc8308c2d..60d9f1f239 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -131,11 +131,10 @@ class PushRuleEvaluator: evaluator = PushRuleEvaluatorForEvent.create(ev, room_member_count) for r in self.rules: - if r['rule_id'] in self.enabled_map: - r['enabled'] = self.enabled_map[r['rule_id']] - elif 'enabled' not in r: - r['enabled'] = True - if not r['enabled']: + if self.enabled_map.get(r['rule_id'], None) is False: + continue + + if not r.get("enabled", True): continue conditions = r['conditions'] -- cgit 1.4.1 From 0e39dcd1352bcab53908fca574cae43a89e407f0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 14:39:43 +0000 Subject: Remove internal ids --- synapse/rest/client/v1/push_rule.py | 4 ++++ 1 file changed, 4 insertions(+) (limited to 'synapse') diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index 0cbd9fe08a..b176efd8a8 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -140,6 +140,10 @@ class PushRuleRestServlet(ClientV1RestServlet): template_name = _priority_class_to_template_name(r['priority_class']) + # Remove internal stuff. + for c in r["conditions"]: + c.pop("_id", None) + if r['priority_class'] > PRIORITY_CLASS_MAP['override']: # per-device rule profile_tag = _profile_tag_from_conditions(r["conditions"]) -- cgit 1.4.1 From d1f56f732e1c213e203f287945d84966c3eec6f3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 10:09:14 +0000 Subject: Use static for const dicts --- synapse/push/baserules.py | 364 +++++++++++++++---------------- synapse/push/bulk_push_rule_evaluator.py | 15 +- synapse/push/push_rule_evaluator.py | 20 +- synapse/rest/client/v1/push_rule.py | 8 +- 4 files changed, 209 insertions(+), 198 deletions(-) (limited to 'synapse') diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 8bac7fd6af..d8a0eda9fa 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -15,27 +15,25 @@ from synapse.push.rulekinds import PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP -def list_with_base_rules(rawrules, user_id): +def list_with_base_rules(rawrules): ruleslist = [] # shove the server default rules for each kind onto the end of each current_prio_class = PRIORITY_CLASS_INVERSE_MAP.keys()[-1] ruleslist.extend(make_base_prepend_rules( - user_id, PRIORITY_CLASS_INVERSE_MAP[current_prio_class] + PRIORITY_CLASS_INVERSE_MAP[current_prio_class] )) for r in rawrules: if r['priority_class'] < current_prio_class: while r['priority_class'] < current_prio_class: ruleslist.extend(make_base_append_rules( - user_id, PRIORITY_CLASS_INVERSE_MAP[current_prio_class] )) current_prio_class -= 1 if current_prio_class > 0: ruleslist.extend(make_base_prepend_rules( - user_id, PRIORITY_CLASS_INVERSE_MAP[current_prio_class] )) @@ -43,28 +41,26 @@ def list_with_base_rules(rawrules, user_id): while current_prio_class > 0: ruleslist.extend(make_base_append_rules( - user_id, 
PRIORITY_CLASS_INVERSE_MAP[current_prio_class] )) current_prio_class -= 1 if current_prio_class > 0: ruleslist.extend(make_base_prepend_rules( - user_id, PRIORITY_CLASS_INVERSE_MAP[current_prio_class] )) return ruleslist -def make_base_append_rules(user, kind): +def make_base_append_rules(kind): rules = [] if kind == 'override': - rules = make_base_append_override_rules() + rules = BASE_APPEND_OVRRIDE_RULES elif kind == 'underride': - rules = make_base_append_underride_rules(user) + rules = BASE_APPEND_UNDERRIDE_RULES elif kind == 'content': - rules = make_base_append_content_rules(user) + rules = BASE_APPEND_CONTENT_RULES for r in rules: r['priority_class'] = PRIORITY_CLASS_MAP[kind] @@ -73,11 +69,11 @@ def make_base_append_rules(user, kind): return rules -def make_base_prepend_rules(user, kind): +def make_base_prepend_rules(kind): rules = [] if kind == 'override': - rules = make_base_prepend_override_rules() + rules = BASE_PREPEND_OVERRIDE_RULES for r in rules: r['priority_class'] = PRIORITY_CLASS_MAP[kind] @@ -86,180 +82,182 @@ def make_base_prepend_rules(user, kind): return rules -def make_base_append_content_rules(user): - return [ - { - 'rule_id': 'global/content/.m.rule.contains_user_name', - 'conditions': [ - { - 'kind': 'event_match', - 'key': 'content.body', - 'pattern': user.localpart, # Matrix ID match - } - ], - 'actions': [ - 'notify', - { - 'set_tweak': 'sound', - 'value': 'default', - }, { - 'set_tweak': 'highlight' - } - ] - }, - ] +BASE_APPEND_CONTENT_RULES = [ + { + 'rule_id': 'global/content/.m.rule.contains_user_name', + 'conditions': [ + { + 'kind': 'event_match', + 'key': 'content.body', + 'pattern_type': 'user_localpart' + } + ], + 'actions': [ + 'notify', + { + 'set_tweak': 'sound', + 'value': 'default', + }, { + 'set_tweak': 'highlight' + } + ] + }, +] -def make_base_prepend_override_rules(): - return [ - { - 'rule_id': 'global/override/.m.rule.master', - 'enabled': False, - 'conditions': [], - 'actions': [ - "dont_notify" - ] - } - ] +BASE_PREPEND_OVERRIDE_RULES = [ + { + 'rule_id': 'global/override/.m.rule.master', + 'enabled': False, + 'conditions': [], + 'actions': [ + "dont_notify" + ] + } +] -def make_base_append_override_rules(): - return [ - { - 'rule_id': 'global/override/.m.rule.suppress_notices', - 'conditions': [ - { - 'kind': 'event_match', - 'key': 'content.msgtype', - 'pattern': 'm.notice', - } - ], - 'actions': [ - 'dont_notify', - ] - } - ] +BASE_APPEND_OVRRIDE_RULES = [ + { + 'rule_id': 'global/override/.m.rule.suppress_notices', + 'conditions': [ + { + 'kind': 'event_match', + 'key': 'content.msgtype', + 'pattern': 'm.notice', + '_id': '_suppress_notices', + } + ], + 'actions': [ + 'dont_notify', + ] + } +] -def make_base_append_underride_rules(user): - return [ - { - 'rule_id': 'global/underride/.m.rule.call', - 'conditions': [ - { - 'kind': 'event_match', - 'key': 'type', - 'pattern': 'm.call.invite', - } - ], - 'actions': [ - 'notify', - { - 'set_tweak': 'sound', - 'value': 'ring' - }, { - 'set_tweak': 'highlight', - 'value': False - } - ] - }, - { - 'rule_id': 'global/underride/.m.rule.contains_display_name', - 'conditions': [ - { - 'kind': 'contains_display_name' - } - ], - 'actions': [ - 'notify', - { - 'set_tweak': 'sound', - 'value': 'default' - }, { - 'set_tweak': 'highlight' - } - ] - }, - { - 'rule_id': 'global/underride/.m.rule.room_one_to_one', - 'conditions': [ - { - 'kind': 'room_member_count', - 'is': '2' - } - ], - 'actions': [ - 'notify', - { - 'set_tweak': 'sound', - 'value': 'default' - }, { - 'set_tweak': 'highlight', - 
'value': False - } - ] - }, - { - 'rule_id': 'global/underride/.m.rule.invite_for_me', - 'conditions': [ - { - 'kind': 'event_match', - 'key': 'type', - 'pattern': 'm.room.member', - }, - { - 'kind': 'event_match', - 'key': 'content.membership', - 'pattern': 'invite', - }, - { - 'kind': 'event_match', - 'key': 'state_key', - 'pattern': user.to_string(), - }, - ], - 'actions': [ - 'notify', - { - 'set_tweak': 'sound', - 'value': 'default' - }, { - 'set_tweak': 'highlight', - 'value': False - } - ] - }, - { - 'rule_id': 'global/underride/.m.rule.member_event', - 'conditions': [ - { - 'kind': 'event_match', - 'key': 'type', - 'pattern': 'm.room.member', - } - ], - 'actions': [ - 'notify', { - 'set_tweak': 'highlight', - 'value': False - } - ] - }, - { - 'rule_id': 'global/underride/.m.rule.message', - 'enabled': False, - 'conditions': [ - { - 'kind': 'event_match', - 'key': 'type', - 'pattern': 'm.room.message', - } - ], - 'actions': [ - 'notify', { - 'set_tweak': 'highlight', - 'value': False - } - ] - } - ] +BASE_APPEND_UNDERRIDE_RULES = [ + { + 'rule_id': 'global/underride/.m.rule.call', + 'conditions': [ + { + 'kind': 'event_match', + 'key': 'type', + 'pattern': 'm.call.invite', + '_id': '_call', + } + ], + 'actions': [ + 'notify', + { + 'set_tweak': 'sound', + 'value': 'ring' + }, { + 'set_tweak': 'highlight', + 'value': False + } + ] + }, + { + 'rule_id': 'global/underride/.m.rule.contains_display_name', + 'conditions': [ + { + 'kind': 'contains_display_name' + } + ], + 'actions': [ + 'notify', + { + 'set_tweak': 'sound', + 'value': 'default' + }, { + 'set_tweak': 'highlight' + } + ] + }, + { + 'rule_id': 'global/underride/.m.rule.room_one_to_one', + 'conditions': [ + { + 'kind': 'room_member_count', + 'is': '2' + } + ], + 'actions': [ + 'notify', + { + 'set_tweak': 'sound', + 'value': 'default' + }, { + 'set_tweak': 'highlight', + 'value': False + } + ] + }, + { + 'rule_id': 'global/underride/.m.rule.invite_for_me', + 'conditions': [ + { + 'kind': 'event_match', + 'key': 'type', + 'pattern': 'm.room.member', + '_id': '_invite_type', + }, + { + 'kind': 'event_match', + 'key': 'content.membership', + 'pattern': 'invite', + '_id': '_invite_member', + }, + { + 'kind': 'event_match', + 'key': 'state_key', + 'pattern_type': 'user_id' + }, + ], + 'actions': [ + 'notify', + { + 'set_tweak': 'sound', + 'value': 'default' + }, { + 'set_tweak': 'highlight', + 'value': False + } + ] + }, + { + 'rule_id': 'global/underride/.m.rule.member_event', + 'conditions': [ + { + 'kind': 'event_match', + 'key': 'type', + 'pattern': 'm.room.member', + '_id': '_member', + } + ], + 'actions': [ + 'notify', { + 'set_tweak': 'highlight', + 'value': False + } + ] + }, + { + 'rule_id': 'global/underride/.m.rule.message', + 'enabled': False, + 'conditions': [ + { + 'kind': 'event_match', + 'key': 'type', + 'pattern': 'm.room.message', + '_id': '_message', + } + ], + 'actions': [ + 'notify', { + 'set_tweak': 'highlight', + 'value': False + } + ] + } +] diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index b9f78fd598..f1910f7da7 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -22,7 +22,6 @@ import baserules from push_rule_evaluator import PushRuleEvaluatorForEvent from synapse.api.constants import EventTypes -from synapse.types import UserID logger = logging.getLogger(__name__) @@ -38,10 +37,10 @@ def decode_rule_json(rule): def _get_rules(room_id, user_ids, store): rules_by_user = yield 
store.bulk_get_push_rules(user_ids) rules_by_user = { - uid: baserules.list_with_base_rules( - [decode_rule_json(rule_list) for rule_list in rules_by_user.get(uid, [])], - UserID.from_string(uid), - ) + uid: baserules.list_with_base_rules([ + decode_rule_json(rule_list) + for rule_list in rules_by_user.get(uid, []) + ]) for uid in user_ids } defer.returnValue(rules_by_user) @@ -108,7 +107,7 @@ class BulkPushRuleEvaluator: continue matches = _condition_checker( - evaluator, rule['conditions'], display_name, condition_cache + evaluator, rule['conditions'], uid, display_name, condition_cache ) if matches: actions = [x for x in rule['actions'] if x != 'dont_notify'] @@ -118,7 +117,7 @@ class BulkPushRuleEvaluator: defer.returnValue(actions_by_user) -def _condition_checker(evaluator, conditions, display_name, cache): +def _condition_checker(evaluator, conditions, uid, display_name, cache): for cond in conditions: _id = cond.get("_id", None) if _id: @@ -128,7 +127,7 @@ def _condition_checker(evaluator, conditions, display_name, cache): elif res is True: continue - res = evaluator.matches(cond, display_name, None) + res = evaluator.matches(cond, uid, display_name, None) if _id: cache[_id] = res diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index 60d9f1f239..9332fe5c5e 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -91,8 +91,7 @@ class PushRuleEvaluator: rule['actions'] = json.loads(raw_rule['actions']) rules.append(rule) - user = UserID.from_string(self.user_id) - self.rules = baserules.list_with_base_rules(rules, user) + self.rules = baserules.list_with_base_rules(rules) self.enabled_map = enabled_map @@ -150,7 +149,9 @@ class PushRuleEvaluator: matches = True for c in conditions: - matches = evaluator.matches(c, my_display_name, self.profile_tag) + matches = evaluator.matches( + c, self.user_id, my_display_name, self.profile_tag + ) if not matches: break @@ -201,9 +202,9 @@ class PushRuleEvaluatorForEvent(object): return PushRuleEvaluatorForEvent(event, body_parts, room_member_count) - def matches(self, condition, display_name, profile_tag): + def matches(self, condition, user_id, display_name, profile_tag): if condition['kind'] == 'event_match': - return self._event_match(condition) + return self._event_match(condition, user_id) elif condition['kind'] == 'device': if 'profile_tag' not in condition: return True @@ -217,9 +218,16 @@ class PushRuleEvaluatorForEvent(object): else: return True - def _event_match(self, condition): + def _event_match(self, condition, user_id): pattern = condition.get('pattern', None) + if not pattern: + pattern_type = condition.get('pattern_type', None) + if pattern_type == "user_id": + pattern = user_id + elif pattern_type == "user_localpart": + pattern = UserID.from_string(user_id).localpart + if not pattern: logger.warn("event_match condition with no pattern") return False diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index b176efd8a8..aa861e7033 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -126,7 +126,7 @@ class PushRuleRestServlet(ClientV1RestServlet): rule["actions"] = json.loads(rawrule["actions"]) ruleslist.append(rule) - ruleslist = baserules.list_with_base_rules(ruleslist, user) + ruleslist = baserules.list_with_base_rules(ruleslist) rules = {'global': {}, 'device': {}} @@ -144,6 +144,12 @@ class PushRuleRestServlet(ClientV1RestServlet): for c in r["conditions"]: c.pop("_id", None) 
+ pattern_type = c.pop("pattern_type", None) + if pattern_type == "user_id": + c["pattern"] = user.to_string() + elif pattern_type == "user_localpart": + c["pattern"] = user.localpart + if r['priority_class'] > PRIORITY_CLASS_MAP['override']: # per-device rule profile_tag = _profile_tag_from_conditions(r["conditions"]) -- cgit 1.4.1 From 866fe27e783697f74f45573d0eb202e3803fe756 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 15:29:41 +0000 Subject: Do for loop once at start --- synapse/push/baserules.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) (limited to 'synapse') diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index d8a0eda9fa..8be47f0975 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -62,10 +62,6 @@ def make_base_append_rules(kind): elif kind == 'content': rules = BASE_APPEND_CONTENT_RULES - for r in rules: - r['priority_class'] = PRIORITY_CLASS_MAP[kind] - r['default'] = True # Deprecated, left for backwards compat - return rules @@ -75,10 +71,6 @@ def make_base_prepend_rules(kind): if kind == 'override': rules = BASE_PREPEND_OVERRIDE_RULES - for r in rules: - r['priority_class'] = PRIORITY_CLASS_MAP[kind] - r['default'] = True # Deprecated, left for backwards compat - return rules @@ -261,3 +253,20 @@ BASE_APPEND_UNDERRIDE_RULES = [ ] } ] + + +for r in BASE_APPEND_CONTENT_RULES: + r['priority_class'] = PRIORITY_CLASS_MAP['content'] + r['default'] = True + +for r in BASE_PREPEND_OVERRIDE_RULES: + r['priority_class'] = PRIORITY_CLASS_MAP['override'] + r['default'] = True + +for r in BASE_APPEND_OVRRIDE_RULES: + r['priority_class'] = PRIORITY_CLASS_MAP['override'] + r['default'] = True + +for r in BASE_APPEND_UNDERRIDE_RULES: + r['priority_class'] = PRIORITY_CLASS_MAP['underride'] + r['default'] = True -- cgit 1.4.1 From 7dd14e5d1c8950f50279efccce83b9ff30f0bcfb Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 15:42:23 +0000 Subject: Add comments and remove dead code --- synapse/push/push_rule_evaluator.py | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) (limited to 'synapse') diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index 9332fe5c5e..eab7fc7d5c 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -184,9 +184,14 @@ class PushRuleEvaluatorForEvent(object): def __init__(self, event, body_parts, room_member_count): self._event = event + + # This is a list of words of the content.body (if event has one). Each + # word has been converted to lower case. self._body_parts = body_parts + self._room_member_count = room_member_count + # Maps strings of e.g. 'content.body' -> event["content"]["body"] self._value_cache = _flatten_dict(event) @staticmethod @@ -264,19 +269,13 @@ class PushRuleEvaluatorForEvent(object): return self._value_cache.get(dotted_key, None) -def _value_for_dotted_key(dotted_key, event): - parts = dotted_key.split(".") - val = event - while len(parts) > 0: - if parts[0] not in val: - return None - val = val[parts[0]] - parts = parts[1:] - - return val - - def _glob_to_matcher(glob): + """Takes a glob and returns a `func(string) -> bool`, which returns if the + string matches the glob. Assumes given string is lower case. + + The matcher returned is either a simple string comparison for globs without + wildcards, or a regex matcher for globs with wildcards. 
+ """ glob = glob.lower() if not IS_GLOB.search(glob): -- cgit 1.4.1 From d16dcf642e56e2ad3e4e6fb5834844251f5383f4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 15:44:04 +0000 Subject: Drop log levels --- synapse/push/push_rule_evaluator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse') diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index eab7fc7d5c..0816b632b4 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -161,7 +161,7 @@ class PushRuleEvaluator: ) if matches: - logger.info( + logger.debug( "%s matches for user %s, event %s", r['rule_id'], self.user_id, ev['event_id'] ) @@ -172,7 +172,7 @@ class PushRuleEvaluator: defer.returnValue(actions) - logger.info( + logger.debug( "No rules match for user %s, event %s", self.user_id, ev['event_id'] ) -- cgit 1.4.1 From 74474a6d637359de6913ce6d02d93fdf82450df1 Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Mon, 18 Jan 2016 16:32:33 +0000 Subject: Pull out app service user lookup I find this a lot simpler than nested try-catches and stuff --- synapse/api/auth.py | 59 +++++++++++++++++++++++++---------------------------- 1 file changed, 28 insertions(+), 31 deletions(-) (limited to 'synapse') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index e36313e2fb..cc0296adf3 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -510,42 +510,14 @@ class Auth(object): """ # Can optionally look elsewhere in the request (e.g. headers) try: - access_token = request.args["access_token"][0] - - # Check for application service tokens with a user_id override - try: - app_service = yield self.store.get_app_service_by_token( - access_token - ) - if not app_service: - raise KeyError - - user_id = app_service.sender - if "user_id" in request.args: - user_id = request.args["user_id"][0] - if not app_service.is_interested_in_user(user_id): - raise AuthError( - 403, - "Application service cannot masquerade as this user." - ) - if not (yield self.store.get_user_by_id(user_id)): - raise AuthError( - 403, - "Application service has not registered this user" - ) - - if not user_id: - raise KeyError - + user_id = yield self._get_appservice_user_id(request.args) + if user_id: request.authenticated_entity = user_id - defer.returnValue( Requester(UserID.from_string(user_id), "", False) ) - return - except KeyError: - pass # normal users won't have the user_id query parameter set. + access_token = request.args["access_token"][0] user_info = yield self._get_user_by_access_token(access_token) user = user_info["user"] token_id = user_info["token_id"] @@ -578,6 +550,31 @@ class Auth(object): errcode=Codes.MISSING_TOKEN ) + @defer.inlineCallbacks + def _get_appservice_user_id(self, request_args): + app_service = yield self.store.get_app_service_by_token( + request_args["access_token"][0] + ) + if app_service is None: + defer.returnValue(None) + + if "user_id" not in request_args: + defer.returnValue(app_service.sender) + + user_id = request_args["user_id"][0] + + if not app_service.is_interested_in_user(user_id): + raise AuthError( + 403, + "Application service cannot masquerade as this user." + ) + if not (yield self.store.get_user_by_id(user_id)): + raise AuthError( + 403, + "Application service has not registered this user" + ) + defer.returnValue(user_id) + @defer.inlineCallbacks def _get_user_by_access_token(self, token): """ Get a registered user's ID. 
-- cgit 1.4.1 From 808a8aedab4dbd2166b5935b86edf65501cc24a3 Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Mon, 18 Jan 2016 16:33:05 +0000 Subject: Don't error on AS non-ghost user use This will probably go away either when we fix our existing ASes, or when we kill the concept of non-ghost users. --- synapse/api/auth.py | 2 ++ 1 file changed, 2 insertions(+) (limited to 'synapse') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index cc0296adf3..b5536e8565 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -562,6 +562,8 @@ class Auth(object): defer.returnValue(app_service.sender) user_id = request_args["user_id"][0] + if app_service.sender == user_id: + defer.returnValue(app_service.sender) if not app_service.is_interested_in_user(user_id): raise AuthError( -- cgit 1.4.1 From 29c353c5536520cd149e1aacf6bd42c7c3f8f4e7 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 16:48:17 +0000 Subject: Don't split at word boundaries, actually use regex --- synapse/push/bulk_push_rule_evaluator.py | 2 +- synapse/push/push_rule_evaluator.py | 109 +++++++++++++------------------ 2 files changed, 48 insertions(+), 63 deletions(-) (limited to 'synapse') diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index f1910f7da7..b0b3a38db7 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -81,7 +81,7 @@ class BulkPushRuleEvaluator: users_dict.items(), [event] ) - evaluator = PushRuleEvaluatorForEvent.create(event, len(self.users_in_room)) + evaluator = PushRuleEvaluatorForEvent(event, len(self.users_in_room)) condition_cache = {} diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index 0816b632b4..78d4b564d4 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -127,7 +127,7 @@ class PushRuleEvaluator: room_members = yield self.store.get_users_in_room(room_id) room_member_count = len(room_members) - evaluator = PushRuleEvaluatorForEvent.create(ev, room_member_count) + evaluator = PushRuleEvaluatorForEvent(ev, room_member_count) for r in self.rules: if self.enabled_map.get(r['rule_id'], None) is False: @@ -180,33 +180,13 @@ class PushRuleEvaluator: class PushRuleEvaluatorForEvent(object): - WORD_BOUNDARY = re.compile(r'\b') - - def __init__(self, event, body_parts, room_member_count): + def __init__(self, event, room_member_count): self._event = event - - # This is a list of words of the content.body (if event has one). Each - # word has been converted to lower case. - self._body_parts = body_parts - self._room_member_count = room_member_count # Maps strings of e.g. 
'content.body' -> event["content"]["body"] self._value_cache = _flatten_dict(event) - @staticmethod - def create(event, room_member_count): - body = event.get("content", {}).get("body", None) - if body: - body_parts = PushRuleEvaluatorForEvent.WORD_BOUNDARY.split(body) - body_parts[:] = [ - part.lower() for part in body_parts - ] - else: - body_parts = [] - - return PushRuleEvaluatorForEvent(event, body_parts, room_member_count) - def matches(self, condition, user_id, display_name, profile_tag): if condition['kind'] == 'event_match': return self._event_match(condition, user_id) @@ -239,67 +219,72 @@ class PushRuleEvaluatorForEvent(object): # XXX: optimisation: cache our pattern regexps if condition['key'] == 'content.body': - matcher = _glob_to_matcher(pattern) + body = self._event["content"].get("body", None) + if not body: + return False - for part in self._body_parts: - if matcher(part): - return True - return False + return _glob_matches(pattern, body, word_boundary=True) else: haystack = self._get_value(condition['key']) if haystack is None: return False - matcher = _glob_to_matcher(pattern) - - return matcher(haystack.lower()) + return _glob_matches(pattern, haystack) def _contains_display_name(self, display_name): if not display_name: return False - lower_display_name = display_name.lower() - for part in self._body_parts: - if part == lower_display_name: - return True + body = self._event["content"].get("body", None) + if not body: + return False - return False + return _glob_matches(display_name, body, word_boundary=True) def _get_value(self, dotted_key): return self._value_cache.get(dotted_key, None) -def _glob_to_matcher(glob): - """Takes a glob and returns a `func(string) -> bool`, which returns if the - string matches the glob. Assumes given string is lower case. - - The matcher returned is either a simple string comparison for globs without - wildcards, or a regex matcher for globs with wildcards. - """ - glob = glob.lower() - - if not IS_GLOB.search(glob): - return lambda value: value == glob +def _glob_matches(glob, value, word_boundary=False): + """Tests if value matches glob. - r = re.escape(glob) + Args: + glob (string) + value (string): String to test against glob. + word_boundary (bool): Whether to match against word boundaries or entire + string. Defaults to False. - r = r.replace(r'\*', '.*?') - r = r.replace(r'\?', '.') + Returns: + bool + """ + if IS_GLOB.search(glob): + r = re.escape(glob) + + r = r.replace(r'\*', '.*?') + r = r.replace(r'\?', '.') + + # handle [abc], [a-z] and [!a-z] style ranges. + r = GLOB_REGEX.sub( + lambda x: ( + '[%s%s]' % ( + x.group(1) and '^' or '', + x.group(2).replace(r'\\\-', '-') + ) + ), + r, + ) + r = r + "$" + r = re.compile(r, flags=re.IGNORECASE) - # handle [abc], [a-z] and [!a-z] style ranges. 
- r = GLOB_REGEX.sub( - lambda x: ( - '[%s%s]' % ( - x.group(1) and '^' or '', - x.group(2).replace(r'\\\-', '-') - ) - ), - r, - ) + return r.match(value) + elif word_boundary: + r = re.escape(glob) + r = "\b%s\b" % (r,) + r = re.compile(r, flags=re.IGNORECASE) - r = r + "$" - r = re.compile(r) - return lambda value: r.match(value) + return r.search(value) + else: + return value.lower() == glob.lower() def _flatten_dict(d, prefix=[], result={}): -- cgit 1.4.1 From 5cd2126a6a6e23ec1f694cfba7be7bbf29bd1506 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 16:48:37 +0000 Subject: Remove dead code --- synapse/storage/push_rule.py | 23 +---------------------- 1 file changed, 1 insertion(+), 22 deletions(-) (limited to 'synapse') diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index f210e6c14d..2adfefd994 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -14,7 +14,7 @@ # limitations under the License. from ._base import SQLBaseStore -from synapse.util.caches.descriptors import cached, cachedInlineCallbacks +from synapse.util.caches.descriptors import cachedInlineCallbacks from twisted.internet import defer import logging @@ -60,27 +60,6 @@ class PushRuleStore(SQLBaseStore): r['rule_id']: False if r['enabled'] == 0 else True for r in results }) - @cached() - def _get_push_rules_enabled_for_user(self, user_id): - def f(txn): - sql = ( - "SELECT pr.*" - " FROM push_rules AS pr" - " LEFT JOIN push_rules_enable AS pre" - " ON pr.user_name = pre.user_name AND pr.rule_id = pre.rule_id" - " WHERE pr.user_name = ?" - " AND (pre.enabled IS NULL OR pre.enabled = 1)" - " ORDER BY pr.priority_class DESC, pr.priority DESC" - ) - txn.execute(sql, (user_id,)) - return self.cursor_to_dict(txn) - - return self.runInteraction( - "_get_push_rules_enabled_for_user", f - ) - - # @cachedList(cache=_get_push_rules_enabled_for_user.cache, list_name="user_ids", - # num_args=1, inlineCallbacks=True) @defer.inlineCallbacks def bulk_get_push_rules(self, user_ids): if not user_ids: -- cgit 1.4.1 From 47f82e4408763d834a0097dceb8b2cca4b0ba4d5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 17:04:36 +0000 Subject: Fix branch didn't check word_boundary --- synapse/push/push_rule_evaluator.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) (limited to 'synapse') diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index 78d4b564d4..86d8ac5c4f 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -273,10 +273,16 @@ def _glob_matches(glob, value, word_boundary=False): ), r, ) - r = r + "$" - r = re.compile(r, flags=re.IGNORECASE) + if word_boundary: + r = "\b%s\b" % (r,) + r = re.compile(r, flags=re.IGNORECASE) + + return r.search(value) + else: + r = r + "$" + r = re.compile(r, flags=re.IGNORECASE) - return r.match(value) + return r.match(value) elif word_boundary: r = re.escape(glob) r = "\b%s\b" % (r,) -- cgit 1.4.1 From a284ad4092871d55dfb213398d5a1994bb666a78 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 17:20:44 +0000 Subject: You need to escape backslashes --- synapse/push/push_rule_evaluator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse') diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index 86d8ac5c4f..2524922131 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -274,7 +274,7 @@ def _glob_matches(glob, 
value, word_boundary=False): r, ) if word_boundary: - r = "\b%s\b" % (r,) + r = r"\b%s\b" % (r,) r = re.compile(r, flags=re.IGNORECASE) return r.search(value) @@ -285,7 +285,7 @@ def _glob_matches(glob, value, word_boundary=False): return r.match(value) elif word_boundary: r = re.escape(glob) - r = "\b%s\b" % (r,) + r = r"\b%s\b" % (r,) r = re.compile(r, flags=re.IGNORECASE) return r.search(value) -- cgit 1.4.1 From 003853e702a9ce1c58da7d326402a1f2cfa69e1f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 17:34:02 +0000 Subject: Preserve truthiness --- synapse/push/push_rule_evaluator.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'synapse') diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index 2524922131..379652c513 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -130,7 +130,8 @@ class PushRuleEvaluator: evaluator = PushRuleEvaluatorForEvent(ev, room_member_count) for r in self.rules: - if self.enabled_map.get(r['rule_id'], None) is False: + enabled = self.enabled_map.get(r['rule_id'], None) + if enabled is not None and not enabled: continue if not r.get("enabled", True): -- cgit 1.4.1 From f750a442f738ef9bdc2994810cc3220586f8c765 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 19 Jan 2016 10:14:53 +0000 Subject: Update _id --- synapse/push/baserules.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) (limited to 'synapse') diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 8be47f0975..e1217b5c52 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -171,7 +171,8 @@ BASE_APPEND_UNDERRIDE_RULES = [ 'conditions': [ { 'kind': 'room_member_count', - 'is': '2' + 'is': '2', + '_id': 'member_count', } ], 'actions': [ @@ -192,7 +193,7 @@ BASE_APPEND_UNDERRIDE_RULES = [ 'kind': 'event_match', 'key': 'type', 'pattern': 'm.room.member', - '_id': '_invite_type', + '_id': '_member', }, { 'kind': 'event_match', -- cgit 1.4.1 From 0d241e1114c20e7225d49678f34e9e4e5b3872c9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 19 Jan 2016 10:15:12 +0000 Subject: Take a deepcopy of push rules before mutating them --- synapse/rest/client/v1/push_rule.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'synapse') diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index aa861e7033..2272d66dc7 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -27,6 +27,7 @@ from synapse.push.rulekinds import ( PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP ) +import copy import simplejson as json @@ -126,7 +127,8 @@ class PushRuleRestServlet(ClientV1RestServlet): rule["actions"] = json.loads(rawrule["actions"]) ruleslist.append(rule) - ruleslist = baserules.list_with_base_rules(ruleslist) + # We're going to be mutating this a lot, so do a deep copy + ruleslist = copy.deepcopy(baserules.list_with_base_rules(ruleslist)) rules = {'global': {}, 'device': {}} -- cgit 1.4.1
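
Editor's note on the _glob_matches changes above: three of the later patches (switching from word splitting to regex matching, applying the word_boundary branch inside the glob case as well, and using raw strings so "\b" stays a regex word boundary rather than a backspace) are easiest to follow as a single end state. The standalone sketch below illustrates the behaviour the series converges on; it is an illustration only, not the synapse module itself, and the IS_GLOB and GLOB_REGEX definitions are assumed from context, since the diffs use them without showing them.

import re

# Assumed definitions: the diffs reference these constants but do not show them.
IS_GLOB = re.compile(r'[\?\*\[\]]')
GLOB_REGEX = re.compile(r'\\\[(\\\!|)(.*)\\\]')


def glob_matches(glob, value, word_boundary=False):
    """Sketch of the final behaviour: does `value` match `glob`?"""
    if IS_GLOB.search(glob):
        # Globs with wildcards become case-insensitive regexes.
        r = re.escape(glob)
        r = r.replace(r'\*', '.*?')
        r = r.replace(r'\?', '.')
        # handle [abc], [a-z] and [!a-z] style ranges (as in the diff above).
        r = GLOB_REGEX.sub(
            lambda x: '[%s%s]' % (
                x.group(1) and '^' or '',
                x.group(2).replace(r'\\\-', '-'),
            ),
            r,
        )
        if word_boundary:
            # Raw string, so \b is a regex word boundary, not a backspace.
            r = r"\b%s\b" % (r,)
            return re.search(r, value, flags=re.IGNORECASE) is not None
        return re.match(r + "$", value, flags=re.IGNORECASE) is not None
    elif word_boundary:
        # Plain patterns (e.g. display names) must match whole words.
        r = r"\b%s\b" % (re.escape(glob),)
        return re.search(r, value, flags=re.IGNORECASE) is not None
    else:
        # No wildcards and no word boundary: simple case-insensitive compare.
        return value.lower() == glob.lower()


if __name__ == "__main__":
    assert glob_matches("cake*lie", "the cake is a lie", word_boundary=True)
    assert glob_matches("alice", "Hi Alice!", word_boundary=True)
    assert not glob_matches("alice", "malice", word_boundary=True)
    assert glob_matches("m.notice", "m.notice")

Run as a script, the assertions double as a usage example: glob and display-name matches only fire on word boundaries, while plain patterns without wildcards fall back to a case-insensitive comparison, which is the intent of the "Don't split at word boundaries, actually use regex" change and its two follow-up fixes.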