From 9c1f853d58440d1f924fa55bc242b248c410dd7c Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Wed, 13 Jan 2016 13:08:59 +0000 Subject: Rename 'user_name' to 'user_id' in push to make it consistent with the rest of the code --- synapse/storage/push_rule.py | 54 ++++++++++++++++++++++---------------------- synapse/storage/pusher.py | 22 +++++++++--------- 2 files changed, 38 insertions(+), 38 deletions(-) (limited to 'synapse/storage') diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index 448009b4b6..2adfefd994 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -25,11 +25,11 @@ logger = logging.getLogger(__name__) class PushRuleStore(SQLBaseStore): @cachedInlineCallbacks() - def get_push_rules_for_user(self, user_name): + def get_push_rules_for_user(self, user_id): rows = yield self._simple_select_list( table="push_rules", keyvalues={ - "user_name": user_name, + "user_name": user_id, }, retcols=( "user_name", "rule_id", "priority_class", "priority", @@ -45,11 +45,11 @@ class PushRuleStore(SQLBaseStore): defer.returnValue(rows) @cachedInlineCallbacks() - def get_push_rules_enabled_for_user(self, user_name): + def get_push_rules_enabled_for_user(self, user_id): results = yield self._simple_select_list( table="push_rules_enable", keyvalues={ - 'user_name': user_name + 'user_name': user_id }, retcols=( "user_name", "rule_id", "enabled", @@ -122,7 +122,7 @@ class PushRuleStore(SQLBaseStore): ) defer.returnValue(ret) - def _add_push_rule_relative_txn(self, txn, user_name, **kwargs): + def _add_push_rule_relative_txn(self, txn, user_id, **kwargs): after = kwargs.pop("after", None) relative_to_rule = kwargs.pop("before", after) @@ -130,7 +130,7 @@ class PushRuleStore(SQLBaseStore): txn, table="push_rules", keyvalues={ - "user_name": user_name, + "user_name": user_id, "rule_id": relative_to_rule, }, retcols=["priority_class", "priority"], @@ -154,7 +154,7 @@ class PushRuleStore(SQLBaseStore): new_rule.pop("before", None) new_rule.pop("after", None) new_rule['priority_class'] = priority_class - new_rule['user_name'] = user_name + new_rule['user_name'] = user_id new_rule['id'] = self._push_rule_id_gen.get_next_txn(txn) # check if the priority before/after is free @@ -170,7 +170,7 @@ class PushRuleStore(SQLBaseStore): "SELECT COUNT(*) FROM push_rules" " WHERE user_name = ? AND priority_class = ? AND priority = ?" ) - txn.execute(sql, (user_name, priority_class, new_rule_priority)) + txn.execute(sql, (user_id, priority_class, new_rule_priority)) res = txn.fetchall() num_conflicting = res[0][0] @@ -187,14 +187,14 @@ class PushRuleStore(SQLBaseStore): else: sql += ">= ?" - txn.execute(sql, (user_name, priority_class, new_rule_priority)) + txn.execute(sql, (user_id, priority_class, new_rule_priority)) txn.call_after( - self.get_push_rules_for_user.invalidate, (user_name,) + self.get_push_rules_for_user.invalidate, (user_id,) ) txn.call_after( - self.get_push_rules_enabled_for_user.invalidate, (user_name,) + self.get_push_rules_enabled_for_user.invalidate, (user_id,) ) self._simple_insert_txn( @@ -203,14 +203,14 @@ class PushRuleStore(SQLBaseStore): values=new_rule, ) - def _add_push_rule_highest_priority_txn(self, txn, user_name, + def _add_push_rule_highest_priority_txn(self, txn, user_id, priority_class, **kwargs): # find the highest priority rule in that class sql = ( "SELECT COUNT(*), MAX(priority) FROM push_rules" " WHERE user_name = ? and priority_class = ?" 
) - txn.execute(sql, (user_name, priority_class)) + txn.execute(sql, (user_id, priority_class)) res = txn.fetchall() (how_many, highest_prio) = res[0] @@ -221,15 +221,15 @@ class PushRuleStore(SQLBaseStore): # and insert the new rule new_rule = kwargs new_rule['id'] = self._push_rule_id_gen.get_next_txn(txn) - new_rule['user_name'] = user_name + new_rule['user_name'] = user_id new_rule['priority_class'] = priority_class new_rule['priority'] = new_prio txn.call_after( - self.get_push_rules_for_user.invalidate, (user_name,) + self.get_push_rules_for_user.invalidate, (user_id,) ) txn.call_after( - self.get_push_rules_enabled_for_user.invalidate, (user_name,) + self.get_push_rules_enabled_for_user.invalidate, (user_id,) ) self._simple_insert_txn( @@ -239,48 +239,48 @@ class PushRuleStore(SQLBaseStore): ) @defer.inlineCallbacks - def delete_push_rule(self, user_name, rule_id): + def delete_push_rule(self, user_id, rule_id): """ Delete a push rule. Args specify the row to be deleted and can be any of the columns in the push_rule table, but below are the standard ones Args: - user_name (str): The matrix ID of the push rule owner + user_id (str): The matrix ID of the push rule owner rule_id (str): The rule_id of the rule to be deleted """ yield self._simple_delete_one( "push_rules", - {'user_name': user_name, 'rule_id': rule_id}, + {'user_name': user_id, 'rule_id': rule_id}, desc="delete_push_rule", ) - self.get_push_rules_for_user.invalidate((user_name,)) - self.get_push_rules_enabled_for_user.invalidate((user_name,)) + self.get_push_rules_for_user.invalidate((user_id,)) + self.get_push_rules_enabled_for_user.invalidate((user_id,)) @defer.inlineCallbacks - def set_push_rule_enabled(self, user_name, rule_id, enabled): + def set_push_rule_enabled(self, user_id, rule_id, enabled): ret = yield self.runInteraction( "_set_push_rule_enabled_txn", self._set_push_rule_enabled_txn, - user_name, rule_id, enabled + user_id, rule_id, enabled ) defer.returnValue(ret) - def _set_push_rule_enabled_txn(self, txn, user_name, rule_id, enabled): + def _set_push_rule_enabled_txn(self, txn, user_id, rule_id, enabled): new_id = self._push_rules_enable_id_gen.get_next_txn(txn) self._simple_upsert_txn( txn, "push_rules_enable", - {'user_name': user_name, 'rule_id': rule_id}, + {'user_name': user_id, 'rule_id': rule_id}, {'enabled': 1 if enabled else 0}, {'id': new_id}, ) txn.call_after( - self.get_push_rules_for_user.invalidate, (user_name,) + self.get_push_rules_for_user.invalidate, (user_id,) ) txn.call_after( - self.get_push_rules_enabled_for_user.invalidate, (user_name,) + self.get_push_rules_enabled_for_user.invalidate, (user_id,) ) diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index 2b90d6c622..8ec706178a 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -80,7 +80,7 @@ class PusherStore(SQLBaseStore): defer.returnValue(rows) @defer.inlineCallbacks - def add_pusher(self, user_name, access_token, profile_tag, kind, app_id, + def add_pusher(self, user_id, access_token, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, pushkey_ts, lang, data): try: @@ -90,7 +90,7 @@ class PusherStore(SQLBaseStore): dict( app_id=app_id, pushkey=pushkey, - user_name=user_name, + user_name=user_id, ), dict( access_token=access_token, @@ -112,38 +112,38 @@ class PusherStore(SQLBaseStore): raise StoreError(500, "Problem creating pusher.") @defer.inlineCallbacks - def delete_pusher_by_app_id_pushkey_user_name(self, app_id, pushkey, user_name): + def 
delete_pusher_by_app_id_pushkey_user_id(self, app_id, pushkey, user_id): yield self._simple_delete_one( "pushers", - {"app_id": app_id, "pushkey": pushkey, 'user_name': user_name}, - desc="delete_pusher_by_app_id_pushkey_user_name", + {"app_id": app_id, "pushkey": pushkey, 'user_name': user_id}, + desc="delete_pusher_by_app_id_pushkey_user_id", ) @defer.inlineCallbacks - def update_pusher_last_token(self, app_id, pushkey, user_name, last_token): + def update_pusher_last_token(self, app_id, pushkey, user_id, last_token): yield self._simple_update_one( "pushers", - {'app_id': app_id, 'pushkey': pushkey, 'user_name': user_name}, + {'app_id': app_id, 'pushkey': pushkey, 'user_name': user_id}, {'last_token': last_token}, desc="update_pusher_last_token", ) @defer.inlineCallbacks - def update_pusher_last_token_and_success(self, app_id, pushkey, user_name, + def update_pusher_last_token_and_success(self, app_id, pushkey, user_id, last_token, last_success): yield self._simple_update_one( "pushers", - {'app_id': app_id, 'pushkey': pushkey, 'user_name': user_name}, + {'app_id': app_id, 'pushkey': pushkey, 'user_name': user_id}, {'last_token': last_token, 'last_success': last_success}, desc="update_pusher_last_token_and_success", ) @defer.inlineCallbacks - def update_pusher_failing_since(self, app_id, pushkey, user_name, + def update_pusher_failing_since(self, app_id, pushkey, user_id, failing_since): yield self._simple_update_one( "pushers", - {'app_id': app_id, 'pushkey': pushkey, 'user_name': user_name}, + {'app_id': app_id, 'pushkey': pushkey, 'user_name': user_id}, {'failing_since': failing_since}, desc="update_pusher_failing_since", ) -- cgit 1.5.1 From 244b356a37ada45d2a4e6aec1f08986aaa7eaaa1 Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Wed, 13 Jan 2016 17:03:58 +0000 Subject: Delete unused code --- synapse/storage/appservice.py | 59 ------------------------------------------- 1 file changed, 59 deletions(-) (limited to 'synapse/storage') diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index eab58d9ce9..1a2b4678a2 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -15,7 +15,6 @@ import logging import urllib import yaml -from simplejson import JSONDecodeError import simplejson as json from twisted.internet import defer @@ -144,64 +143,6 @@ class ApplicationServiceStore(SQLBaseStore): return rooms_for_user_matching_user_id - def _parse_services_dict(self, results): - # SQL results in the form: - # [ - # { - # 'regex': "something", - # 'url': "something", - # 'namespace': enum, - # 'as_id': 0, - # 'token': "something", - # 'hs_token': "otherthing", - # 'id': 0 - # } - # ] - services = {} - for res in results: - as_token = res["token"] - if as_token is None: - continue - if as_token not in services: - # add the service - services[as_token] = { - "id": res["id"], - "url": res["url"], - "token": as_token, - "hs_token": res["hs_token"], - "sender": res["sender"], - "namespaces": { - ApplicationService.NS_USERS: [], - ApplicationService.NS_ALIASES: [], - ApplicationService.NS_ROOMS: [] - } - } - # add the namespace regex if one exists - ns_int = res["namespace"] - if ns_int is None: - continue - try: - services[as_token]["namespaces"][ - ApplicationService.NS_LIST[ns_int]].append( - json.loads(res["regex"]) - ) - except IndexError: - logger.error("Bad namespace enum '%s'. 
%s", ns_int, res) - except JSONDecodeError: - logger.error("Bad regex object '%s'", res["regex"]) - - service_list = [] - for service in services.values(): - service_list.append(ApplicationService( - token=service["token"], - url=service["url"], - namespaces=service["namespaces"], - hs_token=service["hs_token"], - sender=service["sender"], - id=service["id"] - )) - return service_list - def _load_appservice(self, as_info): required_string_fields = [ "url", "as_token", "hs_token", "sender_localpart" -- cgit 1.5.1 From f6fcff360250eb362986835c195a96825567a03d Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Wed, 13 Jan 2016 17:09:24 +0000 Subject: Don't start server if ASes are invalidly configured --- synapse/storage/appservice.py | 1 + 1 file changed, 1 insertion(+) (limited to 'synapse/storage') diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index eab58d9ce9..25a6f14f40 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -265,6 +265,7 @@ class ApplicationServiceStore(SQLBaseStore): except Exception as e: logger.error("Failed to load appservice from '%s'", config_file) logger.exception(e) + raise class ApplicationServiceTransactionStore(SQLBaseStore): -- cgit 1.5.1 From 2680043bc6a64053b93b9bab144aeb5f45007976 Mon Sep 17 00:00:00 2001 From: Daniel Wagner-Hall Date: Thu, 14 Jan 2016 14:34:01 +0000 Subject: Require ID and as_token be unique for ASs Defaults ID to as_token if not specified. This will change when IDs are fully supported. --- synapse/storage/appservice.py | 26 +++++++++- tests/appservice/test_appservice.py | 1 + tests/storage/test_appservice.py | 101 ++++++++++++++++++++++++++++++------ 3 files changed, 111 insertions(+), 17 deletions(-) (limited to 'synapse/storage') diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index f4bc457eca..b5aa55c0a3 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -20,6 +20,7 @@ from twisted.internet import defer from synapse.api.constants import Membership from synapse.appservice import ApplicationService, AppServiceTransaction +from synapse.config._base import ConfigError from synapse.storage.roommember import RoomsForUser from synapse.types import UserID from ._base import SQLBaseStore @@ -145,6 +146,7 @@ class ApplicationServiceStore(SQLBaseStore): def _load_appservice(self, as_info): required_string_fields = [ + # TODO: Add id here when it's stable to release "url", "as_token", "hs_token", "sender_localpart" ] for field in required_string_fields: @@ -186,7 +188,7 @@ class ApplicationServiceStore(SQLBaseStore): namespaces=as_info["namespaces"], hs_token=as_info["hs_token"], sender=user_id, - id=as_info["as_token"] # the token is the only unique thing here + id=as_info["id"] if "id" in as_info else as_info["as_token"], ) def _populate_appservice_cache(self, config_files): @@ -197,10 +199,32 @@ class ApplicationServiceStore(SQLBaseStore): ) return + # Dicts of value -> filename + seen_as_tokens = {} + seen_ids = {} + for config_file in config_files: try: with open(config_file, 'r') as f: appservice = self._load_appservice(yaml.load(f)) + if appservice.id in seen_ids: + raise ConfigError( + "Cannot reuse ID across application services: " + "%s (files: %s, %s)" % ( + appservice.id, config_file, seen_ids[appservice.id], + ) + ) + seen_ids[appservice.id] = config_file + if appservice.token in seen_as_tokens: + raise ConfigError( + "Cannot reuse as_token across application services: " + "%s (files: %s, %s)" % ( + appservice.token, 
+ config_file, + seen_as_tokens[appservice.token], + ) + ) + seen_as_tokens[appservice.token] = config_file logger.info("Loaded application service: %s", appservice) self.services_cache.append(appservice) except Exception as e: diff --git a/tests/appservice/test_appservice.py b/tests/appservice/test_appservice.py index 191c420c4d..ef48bbc296 100644 --- a/tests/appservice/test_appservice.py +++ b/tests/appservice/test_appservice.py @@ -29,6 +29,7 @@ class ApplicationServiceTestCase(unittest.TestCase): def setUp(self): self.service = ApplicationService( + id="unique_identifier", url="some_url", token="some_token", namespaces={ diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index a5a464640f..5abecdf6e0 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -12,12 +12,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import tempfile +from synapse.config._base import ConfigError from tests import unittest from twisted.internet import defer from tests.utils import setup_test_homeserver from synapse.appservice import ApplicationService, ApplicationServiceState -from synapse.server import HomeServer from synapse.storage.appservice import ( ApplicationServiceStore, ApplicationServiceTransactionStore ) @@ -26,7 +27,6 @@ import json import os import yaml from mock import Mock -from tests.utils import SQLiteMemoryDbPool, MockClock class ApplicationServiceStoreTestCase(unittest.TestCase): @@ -41,9 +41,16 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): self.as_token = "token1" self.as_url = "some_url" - self._add_appservice(self.as_token, self.as_url, "some_hs_token", "bob") - self._add_appservice("token2", "some_url", "some_hs_token", "bob") - self._add_appservice("token3", "some_url", "some_hs_token", "bob") + self.as_id = "as1" + self._add_appservice( + self.as_token, + self.as_id, + self.as_url, + "some_hs_token", + "bob" + ) + self._add_appservice("token2", "as2", "some_url", "some_hs_token", "bob") + self._add_appservice("token3", "as3", "some_url", "some_hs_token", "bob") # must be done after inserts self.store = ApplicationServiceStore(hs) @@ -55,9 +62,9 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): except: pass - def _add_appservice(self, as_token, url, hs_token, sender): + def _add_appservice(self, as_token, id, url, hs_token, sender): as_yaml = dict(url=url, as_token=as_token, hs_token=hs_token, - sender_localpart=sender, namespaces={}) + id=id, sender_localpart=sender, namespaces={}) # use the token as the filename with open(as_token, 'w') as outfile: outfile.write(yaml.dump(as_yaml)) @@ -74,6 +81,7 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): self.as_token ) self.assertEquals(stored_service.token, self.as_token) + self.assertEquals(stored_service.id, self.as_id) self.assertEquals(stored_service.url, self.as_url) self.assertEquals( stored_service.namespaces[ApplicationService.NS_ALIASES], @@ -110,34 +118,34 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): { "token": "token1", "url": "https://matrix-as.org", - "id": "token1" + "id": "id_1" }, { "token": "alpha_tok", "url": "https://alpha.com", - "id": "alpha_tok" + "id": "id_alpha" }, { "token": "beta_tok", "url": "https://beta.com", - "id": "beta_tok" + "id": "id_beta" }, { - "token": "delta_tok", - "url": "https://delta.com", - "id": "delta_tok" + "token": "gamma_tok", + 
"url": "https://gamma.com", + "id": "id_gamma" }, ] for s in self.as_list: - yield self._add_service(s["url"], s["token"]) + yield self._add_service(s["url"], s["token"], s["id"]) self.as_yaml_files = [] self.store = TestTransactionStore(hs) - def _add_service(self, url, as_token): + def _add_service(self, url, as_token, id): as_yaml = dict(url=url, as_token=as_token, hs_token="something", - sender_localpart="a_sender", namespaces={}) + id=id, sender_localpart="a_sender", namespaces={}) # use the token as the filename with open(as_token, 'w') as outfile: outfile.write(yaml.dump(as_yaml)) @@ -405,3 +413,64 @@ class TestTransactionStore(ApplicationServiceTransactionStore, def __init__(self, hs): super(TestTransactionStore, self).__init__(hs) + + +class ApplicationServiceStoreConfigTestCase(unittest.TestCase): + + def _write_config(self, suffix, **kwargs): + vals = { + "id": "id" + suffix, + "url": "url" + suffix, + "as_token": "as_token" + suffix, + "hs_token": "hs_token" + suffix, + "sender_localpart": "sender_localpart" + suffix, + "namespaces": {}, + } + vals.update(kwargs) + + _, path = tempfile.mkstemp(prefix="as_config") + with open(path, "w") as f: + f.write(yaml.dump(vals)) + return path + + @defer.inlineCallbacks + def test_unique_works(self): + f1 = self._write_config(suffix="1") + f2 = self._write_config(suffix="2") + + config = Mock(app_service_config_files=[f1, f2]) + hs = yield setup_test_homeserver(config=config) + + ApplicationServiceStore(hs) + + @defer.inlineCallbacks + def test_duplicate_ids(self): + f1 = self._write_config(id="id", suffix="1") + f2 = self._write_config(id="id", suffix="2") + + config = Mock(app_service_config_files=[f1, f2]) + hs = yield setup_test_homeserver(config=config) + + with self.assertRaises(ConfigError) as cm: + ApplicationServiceStore(hs) + + e = cm.exception + self.assertIn(f1, e.message) + self.assertIn(f2, e.message) + self.assertIn("id", e.message) + + @defer.inlineCallbacks + def test_duplicate_as_tokens(self): + f1 = self._write_config(as_token="as_token", suffix="1") + f2 = self._write_config(as_token="as_token", suffix="2") + + config = Mock(app_service_config_files=[f1, f2]) + hs = yield setup_test_homeserver(config=config) + + with self.assertRaises(ConfigError) as cm: + ApplicationServiceStore(hs) + + e = cm.exception + self.assertIn(f1, e.message) + self.assertIn(f2, e.message) + self.assertIn("as_token", e.message) -- cgit 1.5.1 From cc66a9a5e3fc954b0da48ba891e9f77be31aa832 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 10:45:09 +0000 Subject: Allow filtering events for multiple users at once --- synapse/handlers/_base.py | 93 +++++++++++++++++++++++++------------------ synapse/storage/roommember.py | 13 ++++++ 2 files changed, 67 insertions(+), 39 deletions(-) (limited to 'synapse/storage') diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index bb2c6733d5..2d1167296a 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -53,16 +53,54 @@ class BaseHandler(object): self.event_builder_factory = hs.get_event_builder_factory() @defer.inlineCallbacks - def _filter_events_for_client(self, user_id, events, is_guest=False): - # Assumes that user has at some point joined the room if not is_guest. + def _filter_events_for_clients(self, users, events): + """ Returns dict of user_id -> list of events that user is allowed to + see. 
+ """ + event_id_to_state = yield self.store.get_state_for_events( + frozenset(e.event_id for e in events), + types=( + (EventTypes.RoomHistoryVisibility, ""), + (EventTypes.Member, None), + ) + ) + + forgotten = yield defer.gatherResults([ + self.store.who_forgot_in_room( + room_id, + ) + for room_id in frozenset(e.room_id for e in events) + ], consumeErrors=True) + + # Set of membership event_ids that have been forgotten + event_id_forgotten = frozenset( + row["event_id"] for rows in forgotten for row in rows + ) + + def allowed(event, user_id, is_guest): + state = event_id_to_state[event.event_id] + + visibility_event = state.get((EventTypes.RoomHistoryVisibility, ""), None) + if visibility_event: + visibility = visibility_event.content.get("history_visibility", "shared") + else: + visibility = "shared" - def allowed(event, membership, visibility): if visibility == "world_readable": return True if is_guest: return False + membership_event = state.get((EventTypes.Member, user_id), None) + if membership_event: + if membership_event.event_id in event_id_forgotten: + membership = None + else: + membership = membership_event.membership + else: + membership = None + if membership == Membership.JOIN: return True @@ -78,43 +116,20 @@ class BaseHandler(object): return True - event_id_to_state = yield self.store.get_state_for_events( - frozenset(e.event_id for e in events), - types=( - (EventTypes.RoomHistoryVisibility, ""), - (EventTypes.Member, user_id), - ) - ) - - events_to_return = [] - for event in events: - state = event_id_to_state[event.event_id] + defer.returnValue({ + user_id: [ + event + for event in events + if allowed(event, user_id, is_guest) + ] + for user_id, is_guest in users + }) - membership_event = state.get((EventTypes.Member, user_id), None) - if membership_event: - was_forgotten_at_event = yield self.store.was_forgotten_at( - membership_event.state_key, - membership_event.room_id, - membership_event.event_id - ) - if was_forgotten_at_event: - membership = None - else: - membership = membership_event.membership - else: - membership = None - - visibility_event = state.get((EventTypes.RoomHistoryVisibility, ""), None) - if visibility_event: - visibility = visibility_event.content.get("history_visibility", "shared") - else: - visibility = "shared" - - should_include = allowed(event, membership, visibility) - if should_include: - events_to_return.append(event) - - defer.returnValue(events_to_return) + @defer.inlineCallbacks + def _filter_events_for_client(self, user_id, events, is_guest=False): + # Assumes that user has at some point joined the room if not is_guest. 
+ res = yield self._filter_events_for_clients([(user_id, is_guest)], events) + defer.returnValue(res.get(user_id, [])) def ratelimit(self, user_id): time_now = self.clock.time() diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 7d3ce4579d..68ac88905f 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -287,6 +287,7 @@ class RoomMemberStore(SQLBaseStore): txn.execute(sql, (user_id, room_id)) yield self.runInteraction("forget_membership", f) self.was_forgotten_at.invalidate_all() + self.who_forgot_in_room.invalidate_all() self.did_forget.invalidate((user_id, room_id)) @cachedInlineCallbacks(num_args=2) @@ -336,3 +337,15 @@ class RoomMemberStore(SQLBaseStore): return rows[0][0] forgot = yield self.runInteraction("did_forget_membership_at", f) defer.returnValue(forgot == 1) + + @cached() + def who_forgot_in_room(self, room_id): + return self._simple_select_list( + table="room_memberships", + retcols=("user_id", "event_id"), + keyvalues={ + "room_id": room_id, + "forgotten": 1, + }, + desc="who_forgot" + ) -- cgit 1.5.1 From f59b56450797746230046137b2e2008cb66cb604 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 14:09:47 +0000 Subject: Make notifications go quicker --- synapse/push/bulk_push_rule_evaluator.py | 116 +++++++++------- synapse/push/push_rule_evaluator.py | 226 ++++++++++++++++++++----------- synapse/storage/push_rule.py | 23 +++- synapse/storage/registration.py | 26 +++- 4 files changed, 260 insertions(+), 131 deletions(-) (limited to 'synapse/storage') diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index ce244fa959..b9f78fd598 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -14,16 +14,16 @@ # limitations under the License. 
import logging -import simplejson as json +import ujson as json from twisted.internet import defer -from synapse.types import UserID - import baserules -from push_rule_evaluator import PushRuleEvaluator +from push_rule_evaluator import PushRuleEvaluatorForEvent + +from synapse.api.constants import EventTypes +from synapse.types import UserID -from synapse.events.utils import serialize_event logger = logging.getLogger(__name__) @@ -35,28 +35,25 @@ def decode_rule_json(rule): @defer.inlineCallbacks -def evaluator_for_room_id(room_id, store): - users = yield store.get_users_in_room(room_id) - rules_by_user = yield store.bulk_get_push_rules(users) +def _get_rules(room_id, user_ids, store): + rules_by_user = yield store.bulk_get_push_rules(user_ids) rules_by_user = { uid: baserules.list_with_base_rules( - [decode_rule_json(rule_list) for rule_list in rules_by_user[uid]] - if uid in rules_by_user else [], + [decode_rule_json(rule_list) for rule_list in rules_by_user.get(uid, [])], UserID.from_string(uid), ) - for uid in users + for uid in user_ids } - member_events = yield store.get_current_state( - room_id=room_id, - event_type='m.room.member', - ) - display_names = {} - for ev in member_events: - if ev.content.get("displayname"): - display_names[ev.state_key] = ev.content.get("displayname") + defer.returnValue(rules_by_user) + + +@defer.inlineCallbacks +def evaluator_for_room_id(room_id, store): + users = yield store.get_users_in_room(room_id) + rules_by_user = yield _get_rules(room_id, users, store) defer.returnValue(BulkPushRuleEvaluator( - room_id, rules_by_user, display_names, users, store + room_id, rules_by_user, users, store )) @@ -69,10 +66,9 @@ class BulkPushRuleEvaluator: the same logic to run the actual rules, but could be optimised further (see https://matrix.org/jira/browse/SYN-562) """ - def __init__(self, room_id, rules_by_user, display_names, users_in_room, store): + def __init__(self, room_id, rules_by_user, users_in_room, store): self.room_id = room_id self.rules_by_user = rules_by_user - self.display_names = display_names self.users_in_room = users_in_room self.store = store @@ -80,15 +76,30 @@ class BulkPushRuleEvaluator: def action_for_event_by_user(self, event, handler): actions_by_user = {} + users_dict = yield self.store.are_guests(self.rules_by_user.keys()) + + filtered_by_user = yield handler._filter_events_for_clients( + users_dict.items(), [event] + ) + + evaluator = PushRuleEvaluatorForEvent.create(event, len(self.users_in_room)) + + condition_cache = {} + + member_state = yield self.store.get_state_for_event( + event.event_id, + ) + + display_names = {} + for ev in member_state.values(): + nm = ev.content.get("displayname", None) + if nm and ev.type == EventTypes.Member: + display_names[ev.state_key] = nm + for uid, rules in self.rules_by_user.items(): - display_name = None - if uid in self.display_names: - display_name = self.display_names[uid] - - is_guest = yield self.store.is_guest(UserID.from_string(uid)) - filtered = yield handler._filter_events_for_client( - uid, [event], is_guest=is_guest - ) + display_name = display_names.get(uid, None) + + filtered = filtered_by_user[uid] if len(filtered) == 0: continue @@ -96,29 +107,32 @@ class BulkPushRuleEvaluator: if 'enabled' in rule and not rule['enabled']: continue - # XXX: profile tags - if BulkPushRuleEvaluator.event_matches_rule( - event, rule, - display_name, len(self.users_in_room), None - ): + matches = _condition_checker( + evaluator, rule['conditions'], display_name, condition_cache + ) + if matches: 
actions = [x for x in rule['actions'] if x != 'dont_notify'] - if len(actions) > 0: + if actions: actions_by_user[uid] = actions break defer.returnValue(actions_by_user) - @staticmethod - def event_matches_rule(event, rule, - display_name, room_member_count, profile_tag): - matches = True - - # passing the clock all the way into here is extremely awkward and push - # rules do not care about any of the relative timestamps, so we just - # pass 0 for the current time. - client_event = serialize_event(event, 0) - - for cond in rule['conditions']: - matches &= PushRuleEvaluator._event_fulfills_condition( - client_event, cond, display_name, room_member_count, profile_tag - ) - return matches + +def _condition_checker(evaluator, conditions, display_name, cache): + for cond in conditions: + _id = cond.get("_id", None) + if _id: + res = cache.get(_id, None) + if res is False: + break + elif res is True: + continue + + res = evaluator.matches(cond, display_name, None) + if _id: + cache[_id] = res + + if res is False: + return False + + return True diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index b0283743a2..bbc8308c2d 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -15,17 +15,22 @@ from twisted.internet import defer -from synapse.types import UserID - import baserules import logging import simplejson as json import re +from synapse.types import UserID + logger = logging.getLogger(__name__) +GLOB_REGEX = re.compile(r'\\\[(\\\!|)(.*)\\\]') +IS_GLOB = re.compile(r'[\?\*\[\]]') +INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$") + + @defer.inlineCallbacks def evaluator_for_user_id_and_profile_tag(user_id, profile_tag, room_id, store): rawrules = yield store.get_push_rules_for_user(user_id) @@ -42,9 +47,34 @@ def evaluator_for_user_id_and_profile_tag(user_id, profile_tag, room_id, store): )) +def _room_member_count(ev, condition, room_member_count): + if 'is' not in condition: + return False + m = INEQUALITY_EXPR.match(condition['is']) + if not m: + return False + ineq = m.group(1) + rhs = m.group(2) + if not rhs.isdigit(): + return False + rhs = int(rhs) + + if ineq == '' or ineq == '==': + return room_member_count == rhs + elif ineq == '<': + return room_member_count < rhs + elif ineq == '>': + return room_member_count > rhs + elif ineq == '>=': + return room_member_count >= rhs + elif ineq == '<=': + return room_member_count <= rhs + else: + return False + + class PushRuleEvaluator: DEFAULT_ACTIONS = [] - INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$") def __init__(self, user_id, profile_tag, raw_rules, enabled_map, room_id, our_member_event, store): @@ -98,6 +128,8 @@ class PushRuleEvaluator: room_members = yield self.store.get_users_in_room(room_id) room_member_count = len(room_members) + evaluator = PushRuleEvaluatorForEvent.create(ev, room_member_count) + for r in self.rules: if r['rule_id'] in self.enabled_map: r['enabled'] = self.enabled_map[r['rule_id']] @@ -105,21 +137,10 @@ class PushRuleEvaluator: r['enabled'] = True if not r['enabled']: continue - matches = True conditions = r['conditions'] actions = r['actions'] - for c in conditions: - matches &= self._event_fulfills_condition( - ev, c, display_name=my_display_name, - room_member_count=room_member_count, - profile_tag=self.profile_tag - ) - logger.debug( - "Rule %s %s", - r['rule_id'], "matches" if matches else "doesn't match" - ) # ignore rules with no actions (we have an explict 'dont_notify') if len(actions) == 0: logger.warn( @@ -127,6 +148,18 
@@ class PushRuleEvaluator: r['rule_id'], self.user_id ) continue + + matches = True + for c in conditions: + matches = evaluator.matches(c, my_display_name, self.profile_tag) + if not matches: + break + + logger.debug( + "Rule %s %s", + r['rule_id'], "matches" if matches else "doesn't match" + ) + if matches: logger.info( "%s matches for user %s, event %s", @@ -145,81 +178,84 @@ class PushRuleEvaluator: ) defer.returnValue(PushRuleEvaluator.DEFAULT_ACTIONS) - @staticmethod - def _glob_to_regexp(glob): - r = re.escape(glob) - r = re.sub(r'\\\*', r'.*?', r) - r = re.sub(r'\\\?', r'.', r) - # handle [abc], [a-z] and [!a-z] style ranges. - r = re.sub(r'\\\[(\\\!|)(.*)\\\]', - lambda x: ('[%s%s]' % (x.group(1) and '^' or '', - re.sub(r'\\\-', '-', x.group(2)))), r) - return r +class PushRuleEvaluatorForEvent(object): + WORD_BOUNDARY = re.compile(r'\b') + + def __init__(self, event, body_parts, room_member_count): + self._event = event + self._body_parts = body_parts + self._room_member_count = room_member_count + + self._value_cache = _flatten_dict(event) @staticmethod - def _event_fulfills_condition(ev, condition, - display_name, room_member_count, profile_tag): - if condition['kind'] == 'event_match': - if 'pattern' not in condition: - logger.warn("event_match condition with no pattern") - return False - # XXX: optimisation: cache our pattern regexps - if condition['key'] == 'content.body': - r = r'\b%s\b' % PushRuleEvaluator._glob_to_regexp(condition['pattern']) - else: - r = r'^%s$' % PushRuleEvaluator._glob_to_regexp(condition['pattern']) - val = _value_for_dotted_key(condition['key'], ev) - if val is None: - return False - return re.search(r, val, flags=re.IGNORECASE) is not None + def create(event, room_member_count): + body = event.get("content", {}).get("body", None) + if body: + body_parts = PushRuleEvaluatorForEvent.WORD_BOUNDARY.split(body) + body_parts[:] = [ + part.lower() for part in body_parts + ] + else: + body_parts = [] + + return PushRuleEvaluatorForEvent(event, body_parts, room_member_count) + def matches(self, condition, display_name, profile_tag): + if condition['kind'] == 'event_match': + return self._event_match(condition) elif condition['kind'] == 'device': if 'profile_tag' not in condition: return True return condition['profile_tag'] == profile_tag - elif condition['kind'] == 'contains_display_name': - # This is special because display names can be different - # between rooms and so you can't really hard code it in a rule. - # Optimisation: we should cache these names and update them from - # the event stream. 
- if 'content' not in ev or 'body' not in ev['content']: - return False - if not display_name: - return False - return re.search( - r"\b%s\b" % re.escape(display_name), ev['content']['body'], - flags=re.IGNORECASE - ) is not None - + return self._contains_display_name(display_name) elif condition['kind'] == 'room_member_count': - if 'is' not in condition: - return False - m = PushRuleEvaluator.INEQUALITY_EXPR.match(condition['is']) - if not m: - return False - ineq = m.group(1) - rhs = m.group(2) - if not rhs.isdigit(): - return False - rhs = int(rhs) - - if ineq == '' or ineq == '==': - return room_member_count == rhs - elif ineq == '<': - return room_member_count < rhs - elif ineq == '>': - return room_member_count > rhs - elif ineq == '>=': - return room_member_count >= rhs - elif ineq == '<=': - return room_member_count <= rhs - else: - return False + return _room_member_count( + self._event, condition, self._room_member_count + ) else: return True + def _event_match(self, condition): + pattern = condition.get('pattern', None) + + if not pattern: + logger.warn("event_match condition with no pattern") + return False + + # XXX: optimisation: cache our pattern regexps + if condition['key'] == 'content.body': + matcher = _glob_to_matcher(pattern) + + for part in self._body_parts: + if matcher(part): + return True + return False + else: + haystack = self._get_value(condition['key']) + if haystack is None: + return False + + matcher = _glob_to_matcher(pattern) + + return matcher(haystack.lower()) + + def _contains_display_name(self, display_name): + if not display_name: + return False + + lower_display_name = display_name.lower() + for part in self._body_parts: + if part == lower_display_name: + return True + + return False + + def _get_value(self, dotted_key): + return self._value_cache.get(dotted_key, None) + def _value_for_dotted_key(dotted_key, event): parts = dotted_key.split(".") @@ -229,4 +265,42 @@ def _value_for_dotted_key(dotted_key, event): return None val = val[parts[0]] parts = parts[1:] + return val + + +def _glob_to_matcher(glob): + glob = glob.lower() + + if not IS_GLOB.search(glob): + return lambda value: value == glob + + r = re.escape(glob) + + r = r.replace(r'\*', '.*?') + r = r.replace(r'\?', '.') + + # handle [abc], [a-z] and [!a-z] style ranges. + r = GLOB_REGEX.sub( + lambda x: ( + '[%s%s]' % ( + x.group(1) and '^' or '', + x.group(2).replace(r'\\\-', '-') + ) + ), + r, + ) + + r = r + "$" + r = re.compile(r) + return lambda value: r.match(value) + + +def _flatten_dict(d, prefix=[], result={}): + for key, value in d.items(): + if isinstance(value, basestring): + result[".".join(prefix + [key])] = value.lower() + elif hasattr(value, "items"): + _flatten_dict(value, prefix=(prefix+[key]), result=result) + + return result diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index 2adfefd994..1adf28b893 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -14,7 +14,7 @@ # limitations under the License. 
from ._base import SQLBaseStore -from synapse.util.caches.descriptors import cachedInlineCallbacks +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList from twisted.internet import defer import logging @@ -60,6 +60,27 @@ class PushRuleStore(SQLBaseStore): r['rule_id']: False if r['enabled'] == 0 else True for r in results }) + @cached() + def _get_push_rules_enabled_for_user(self, user_id): + def f(txn): + sql = ( + "SELECT pr.*" + " FROM push_rules AS pr" + " LEFT JOIN push_rules_enable AS pre" + " ON pr.user_name = pre.user_name AND pr.rule_id = pre.rule_id" + " WHERE pr.user_name = ?" + " AND (pre.enabled IS NULL OR pre.enabled = 1)" + " ORDER BY pr.priority_class DESC, pr.priority DESC" + ) + txn.execute(sql, (user_id,)) + return self.cursor_to_dict(txn) + + return self.runInteraction( + "_get_push_rules_enabled_for_user", f + ) + + # @cachedList(cache=_get_push_rules_enabled_for_user.cache, list_name="user_ids", + # num_args=1, inlineCallbacks=True) @defer.inlineCallbacks def bulk_get_push_rules(self, user_ids): if not user_ids: diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 999b710fbb..70cde0d04d 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -18,7 +18,7 @@ from twisted.internet import defer from synapse.api.errors import StoreError, Codes from ._base import SQLBaseStore -from synapse.util.caches.descriptors import cached, cachedInlineCallbacks +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList class RegistrationStore(SQLBaseStore): @@ -256,10 +256,10 @@ class RegistrationStore(SQLBaseStore): defer.returnValue(res if res else False) @cachedInlineCallbacks() - def is_guest(self, user): + def is_guest(self, user_id): res = yield self._simple_select_one_onecol( table="users", - keyvalues={"name": user.to_string()}, + keyvalues={"name": user_id}, retcol="is_guest", allow_none=True, desc="is_guest", @@ -267,6 +267,26 @@ class RegistrationStore(SQLBaseStore): defer.returnValue(res if res else False) + @cachedList(cache=is_guest.cache, list_name="user_ids", num_args=1, + inlineCallbacks=True) + def are_guests(self, user_ids): + sql = "SELECT name, is_guest FROM users WHERE name IN (%s)" % ( + ",".join("?" for _ in user_ids), + ) + + rows = yield self._execute( + "are_guests", self.cursor_to_dict, sql, *user_ids + ) + + result = {user_id: False for user_id in user_ids} + + result.update({ + row["name"]: bool(row["is_guest"]) + for row in rows + }) + + defer.returnValue(result) + def _query_for_auth(self, txn, token): sql = ( "SELECT users.name, users.is_guest, access_tokens.id as token_id" -- cgit 1.5.1 From 2c176e02ae910ce52197539b31f78ae1b1ef4c3c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 14:24:31 +0000 Subject: Make unit tests work --- synapse/storage/push_rule.py | 2 +- tests/handlers/test_federation.py | 141 ------------- tests/handlers/test_room.py | 418 -------------------------------------- 3 files changed, 1 insertion(+), 560 deletions(-) delete mode 100644 tests/handlers/test_federation.py delete mode 100644 tests/handlers/test_room.py (limited to 'synapse/storage') diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index 1adf28b893..f210e6c14d 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -14,7 +14,7 @@ # limitations under the License. 
from ._base import SQLBaseStore -from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks from twisted.internet import defer import logging diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py deleted file mode 100644 index 11a3d94bb0..0000000000 --- a/tests/handlers/test_federation.py +++ /dev/null @@ -1,141 +0,0 @@ -# Copyright 2014-2016 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from twisted.internet import defer -from tests import unittest - -from synapse.api.constants import EventTypes -from synapse.events import FrozenEvent -from synapse.handlers.federation import FederationHandler - -from mock import NonCallableMock, ANY, Mock - -from ..utils import setup_test_homeserver - - -class FederationTestCase(unittest.TestCase): - - @defer.inlineCallbacks - def setUp(self): - - self.state_handler = NonCallableMock(spec_set=[ - "compute_event_context", - ]) - - self.auth = NonCallableMock(spec_set=[ - "check", - "check_host_in_room", - ]) - - self.hostname = "test" - hs = yield setup_test_homeserver( - self.hostname, - datastore=NonCallableMock(spec_set=[ - "persist_event", - "store_room", - "get_room", - "get_destination_retry_timings", - "set_destination_retry_timings", - "have_events", - "get_users_in_room", - "bulk_get_push_rules", - "get_current_state", - "set_push_actions_for_event_and_users", - "is_guest", - "get_state_for_events", - ]), - resource_for_federation=NonCallableMock(), - http_client=NonCallableMock(spec_set=[]), - notifier=NonCallableMock(spec_set=["on_new_room_event"]), - handlers=NonCallableMock(spec_set=[ - "room_member_handler", - "federation_handler", - ]), - auth=self.auth, - state_handler=self.state_handler, - keyring=Mock(), - ) - - self.datastore = hs.get_datastore() - self.handlers = hs.get_handlers() - self.notifier = hs.get_notifier() - self.hs = hs - - self.handlers.federation_handler = FederationHandler(self.hs) - - self.datastore.get_state_for_events.return_value = {"$a:b": {}} - - @defer.inlineCallbacks - def test_msg(self): - pdu = FrozenEvent({ - "type": EventTypes.Message, - "room_id": "foo", - "content": {"msgtype": u"fooo"}, - "origin_server_ts": 0, - "event_id": "$a:b", - "user_id":"@a:b", - "origin": "b", - "auth_events": [], - "hashes": {"sha256":"AcLrgtUIqqwaGoHhrEvYG1YLDIsVPYJdSRGhkp3jJp8"}, - }) - - self.datastore.persist_event.return_value = defer.succeed((1,1)) - self.datastore.get_room.return_value = defer.succeed(True) - self.datastore.get_users_in_room.return_value = ["@a:b"] - self.datastore.bulk_get_push_rules.return_value = {} - self.datastore.get_current_state.return_value = {} - self.auth.check_host_in_room.return_value = defer.succeed(True) - - retry_timings_res = { - "destination": "", - "retry_last_ts": 0, - "retry_interval": 0, - } - self.datastore.get_destination_retry_timings.return_value = ( - defer.succeed(retry_timings_res) - ) - - def have_events(event_ids): - return 
defer.succeed({}) - self.datastore.have_events.side_effect = have_events - - def annotate(ev, old_state=None, outlier=False): - context = Mock() - context.current_state = {} - context.auth_events = {} - return defer.succeed(context) - self.state_handler.compute_event_context.side_effect = annotate - - yield self.handlers.federation_handler.on_receive_pdu( - "fo", pdu, False - ) - - self.datastore.persist_event.assert_called_once_with( - ANY, - is_new_state=True, - backfilled=False, - current_state=None, - context=ANY, - ) - - self.state_handler.compute_event_context.assert_called_once_with( - ANY, old_state=None, outlier=False - ) - - self.auth.check.assert_called_once_with(ANY, auth_events={}) - - self.notifier.on_new_room_event.assert_called_once_with( - ANY, 1, 1, extra_users=[] - ) diff --git a/tests/handlers/test_room.py b/tests/handlers/test_room.py deleted file mode 100644 index e7a12a2ba2..0000000000 --- a/tests/handlers/test_room.py +++ /dev/null @@ -1,418 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2014-2016 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from twisted.internet import defer -from .. import unittest - -from synapse.api.constants import EventTypes, Membership -from synapse.handlers.room import RoomMemberHandler, RoomCreationHandler -from synapse.handlers.profile import ProfileHandler -from synapse.types import UserID -from ..utils import setup_test_homeserver - -from mock import Mock, NonCallableMock - - -class RoomMemberHandlerTestCase(unittest.TestCase): - - @defer.inlineCallbacks - def setUp(self): - self.hostname = "red" - hs = yield setup_test_homeserver( - self.hostname, - ratelimiter=NonCallableMock(spec_set=[ - "send_message", - ]), - datastore=NonCallableMock(spec_set=[ - "persist_event", - "get_room_member", - "get_room", - "store_room", - "get_latest_events_in_room", - "add_event_hashes", - "get_users_in_room", - "bulk_get_push_rules", - "get_current_state", - "set_push_actions_for_event_and_users", - "get_state_for_events", - "is_guest", - ]), - resource_for_federation=NonCallableMock(), - http_client=NonCallableMock(spec_set=[]), - notifier=NonCallableMock(spec_set=["on_new_room_event"]), - handlers=NonCallableMock(spec_set=[ - "room_member_handler", - "profile_handler", - "federation_handler", - ]), - auth=NonCallableMock(spec_set=[ - "check", - "add_auth_events", - "check_host_in_room", - ]), - state_handler=NonCallableMock(spec_set=[ - "compute_event_context", - "get_current_state", - ]), - ) - - self.federation = NonCallableMock(spec_set=[ - "handle_new_event", - "send_invite", - "get_state_for_room", - ]) - - self.datastore = hs.get_datastore() - self.handlers = hs.get_handlers() - self.notifier = hs.get_notifier() - self.state_handler = hs.get_state_handler() - self.distributor = hs.get_distributor() - self.auth = hs.get_auth() - self.hs = hs - - self.handlers.federation_handler = self.federation - - self.distributor.declare("collect_presencelike_data") - - self.handlers.room_member_handler = RoomMemberHandler(self.hs) - 
self.handlers.profile_handler = ProfileHandler(self.hs) - self.room_member_handler = self.handlers.room_member_handler - - self.ratelimiter = hs.get_ratelimiter() - self.ratelimiter.send_message.return_value = (True, 0) - - self.datastore.persist_event.return_value = (1,1) - self.datastore.add_event_hashes.return_value = [] - self.datastore.get_users_in_room.return_value = ["@bob:red"] - self.datastore.bulk_get_push_rules.return_value = {} - - @defer.inlineCallbacks - def test_invite(self): - room_id = "!foo:red" - user_id = "@bob:red" - target_user_id = "@red:blue" - content = {"membership": Membership.INVITE} - - builder = self.hs.get_event_builder_factory().new({ - "type": EventTypes.Member, - "sender": user_id, - "state_key": target_user_id, - "room_id": room_id, - "content": content, - }) - - self.datastore.get_latest_events_in_room.return_value = ( - defer.succeed([]) - ) - self.datastore.get_current_state.return_value = {} - self.datastore.get_state_for_events = lambda event_ids,types: {x: {} for x in event_ids} - - def annotate(_): - ctx = Mock() - ctx.current_state = { - (EventTypes.Member, "@alice:green"): self._create_member( - user_id="@alice:green", - room_id=room_id, - ), - (EventTypes.Member, "@bob:red"): self._create_member( - user_id="@bob:red", - room_id=room_id, - ), - } - ctx.prev_state_events = [] - - return defer.succeed(ctx) - - self.state_handler.compute_event_context.side_effect = annotate - - def add_auth(_, ctx): - ctx.auth_events = ctx.current_state[ - (EventTypes.Member, "@bob:red") - ] - - return defer.succeed(True) - self.auth.add_auth_events.side_effect = add_auth - - def send_invite(domain, event): - return defer.succeed(event) - - self.federation.send_invite.side_effect = send_invite - - room_handler = self.room_member_handler - event, context = yield room_handler._create_new_client_event( - builder - ) - - yield room_handler.send_membership_event(event, context) - - self.state_handler.compute_event_context.assert_called_once_with( - builder - ) - - self.auth.add_auth_events.assert_called_once_with( - builder, context - ) - - self.federation.send_invite.assert_called_once_with( - "blue", event, - ) - - self.datastore.persist_event.assert_called_once_with( - event, context=context, - ) - self.notifier.on_new_room_event.assert_called_once_with( - event, 1, 1, extra_users=[UserID.from_string(target_user_id)] - ) - self.assertFalse(self.datastore.get_room.called) - self.assertFalse(self.datastore.store_room.called) - self.assertFalse(self.federation.get_state_for_room.called) - - @defer.inlineCallbacks - def test_simple_join(self): - room_id = "!foo:red" - user_id = "@bob:red" - user = UserID.from_string(user_id) - - join_signal_observer = Mock() - self.distributor.observe("user_joined_room", join_signal_observer) - - builder = self.hs.get_event_builder_factory().new({ - "type": EventTypes.Member, - "sender": user_id, - "state_key": user_id, - "room_id": room_id, - "content": {"membership": Membership.JOIN}, - }) - - self.datastore.get_latest_events_in_room.return_value = ( - defer.succeed([]) - ) - self.datastore.get_current_state.return_value = {} - self.datastore.get_state_for_events = lambda event_ids,types: {x: {} for x in event_ids} - - def annotate(_): - ctx = Mock() - ctx.current_state = { - (EventTypes.Member, "@bob:red"): self._create_member( - user_id="@bob:red", - room_id=room_id, - membership=Membership.INVITE - ), - } - ctx.prev_state_events = [] - - return defer.succeed(ctx) - - self.state_handler.compute_event_context.side_effect = annotate - - 
def add_auth(_, ctx): - ctx.auth_events = ctx.current_state[ - (EventTypes.Member, "@bob:red") - ] - - return defer.succeed(True) - self.auth.add_auth_events.side_effect = add_auth - - room_handler = self.room_member_handler - event, context = yield room_handler._create_new_client_event( - builder - ) - - # Actual invocation - yield room_handler.send_membership_event(event, context) - - self.federation.handle_new_event.assert_called_once_with( - event, destinations=set() - ) - - self.datastore.persist_event.assert_called_once_with( - event, context=context - ) - self.notifier.on_new_room_event.assert_called_once_with( - event, 1, 1, extra_users=[user] - ) - - join_signal_observer.assert_called_with( - user=user, room_id=room_id - ) - - def _create_member(self, user_id, room_id, membership=Membership.JOIN): - builder = self.hs.get_event_builder_factory().new({ - "type": EventTypes.Member, - "sender": user_id, - "state_key": user_id, - "room_id": room_id, - "content": {"membership": membership}, - }) - - return builder.build() - - @defer.inlineCallbacks - def test_simple_leave(self): - room_id = "!foo:red" - user_id = "@bob:red" - user = UserID.from_string(user_id) - - builder = self.hs.get_event_builder_factory().new({ - "type": EventTypes.Member, - "sender": user_id, - "state_key": user_id, - "room_id": room_id, - "content": {"membership": Membership.LEAVE}, - }) - - self.datastore.get_latest_events_in_room.return_value = ( - defer.succeed([]) - ) - self.datastore.get_current_state.return_value = {} - self.datastore.get_state_for_events = lambda event_ids,types: {x: {} for x in event_ids} - - def annotate(_): - ctx = Mock() - ctx.current_state = { - (EventTypes.Member, "@bob:red"): self._create_member( - user_id="@bob:red", - room_id=room_id, - membership=Membership.JOIN - ), - } - ctx.prev_state_events = [] - - return defer.succeed(ctx) - - self.state_handler.compute_event_context.side_effect = annotate - - def add_auth(_, ctx): - ctx.auth_events = ctx.current_state[ - (EventTypes.Member, "@bob:red") - ] - - return defer.succeed(True) - self.auth.add_auth_events.side_effect = add_auth - - room_handler = self.room_member_handler - event, context = yield room_handler._create_new_client_event( - builder - ) - - leave_signal_observer = Mock() - self.distributor.observe("user_left_room", leave_signal_observer) - - # Actual invocation - yield room_handler.send_membership_event(event, context) - - self.federation.handle_new_event.assert_called_once_with( - event, destinations=set(['red']) - ) - - self.datastore.persist_event.assert_called_once_with( - event, context=context - ) - self.notifier.on_new_room_event.assert_called_once_with( - event, 1, 1, extra_users=[user] - ) - - leave_signal_observer.assert_called_with( - user=user, room_id=room_id - ) - - -class RoomCreationTest(unittest.TestCase): - - @defer.inlineCallbacks - def setUp(self): - self.hostname = "red" - - hs = yield setup_test_homeserver( - self.hostname, - datastore=NonCallableMock(spec_set=[ - "store_room", - "snapshot_room", - "persist_event", - "get_joined_hosts_for_room", - ]), - http_client=NonCallableMock(spec_set=[]), - notifier=NonCallableMock(spec_set=["on_new_room_event"]), - handlers=NonCallableMock(spec_set=[ - "room_creation_handler", - "message_handler", - ]), - auth=NonCallableMock(spec_set=["check", "add_auth_events"]), - ratelimiter=NonCallableMock(spec_set=[ - "send_message", - ]), - ) - - self.federation = NonCallableMock(spec_set=[ - "handle_new_event", - ]) - - self.handlers = hs.get_handlers() - - 
self.handlers.room_creation_handler = RoomCreationHandler(hs) - self.room_creation_handler = self.handlers.room_creation_handler - - self.message_handler = self.handlers.message_handler - - self.ratelimiter = hs.get_ratelimiter() - self.ratelimiter.send_message.return_value = (True, 0) - - @defer.inlineCallbacks - def test_room_creation(self): - user_id = "@foo:red" - room_id = "!bobs_room:red" - config = {"visibility": "private"} - - yield self.room_creation_handler.create_room( - user_id=user_id, - room_id=room_id, - config=config, - ) - - self.assertTrue(self.message_handler.create_and_send_event.called) - - event_dicts = [ - e[0][0] - for e in self.message_handler.create_and_send_event.call_args_list - ] - - self.assertTrue(len(event_dicts) > 3) - - self.assertDictContainsSubset( - { - "type": EventTypes.Create, - "sender": user_id, - "room_id": room_id, - }, - event_dicts[0] - ) - - self.assertEqual(user_id, event_dicts[0]["content"]["creator"]) - - self.assertDictContainsSubset( - { - "type": EventTypes.Member, - "sender": user_id, - "room_id": room_id, - "state_key": user_id, - }, - event_dicts[1] - ) - - self.assertEqual( - Membership.JOIN, - event_dicts[1]["content"]["membership"] - ) -- cgit 1.5.1 From 5cd2126a6a6e23ec1f694cfba7be7bbf29bd1506 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 18 Jan 2016 16:48:37 +0000 Subject: Remove dead code --- synapse/storage/push_rule.py | 23 +---------------------- 1 file changed, 1 insertion(+), 22 deletions(-) (limited to 'synapse/storage') diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index f210e6c14d..2adfefd994 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -14,7 +14,7 @@ # limitations under the License. from ._base import SQLBaseStore -from synapse.util.caches.descriptors import cached, cachedInlineCallbacks +from synapse.util.caches.descriptors import cachedInlineCallbacks from twisted.internet import defer import logging @@ -60,27 +60,6 @@ class PushRuleStore(SQLBaseStore): r['rule_id']: False if r['enabled'] == 0 else True for r in results }) - @cached() - def _get_push_rules_enabled_for_user(self, user_id): - def f(txn): - sql = ( - "SELECT pr.*" - " FROM push_rules AS pr" - " LEFT JOIN push_rules_enable AS pre" - " ON pr.user_name = pre.user_name AND pr.rule_id = pre.rule_id" - " WHERE pr.user_name = ?" 
- " AND (pre.enabled IS NULL OR pre.enabled = 1)" - " ORDER BY pr.priority_class DESC, pr.priority DESC" - ) - txn.execute(sql, (user_id,)) - return self.cursor_to_dict(txn) - - return self.runInteraction( - "_get_push_rules_enabled_for_user", f - ) - - # @cachedList(cache=_get_push_rules_enabled_for_user.cache, list_name="user_ids", - # num_args=1, inlineCallbacks=True) @defer.inlineCallbacks def bulk_get_push_rules(self, user_ids): if not user_ids: -- cgit 1.5.1 From 3adcc4c86aade29f502b7245acc2353326a62256 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 19 Jan 2016 11:35:50 +0000 Subject: Return highlight_count in /sync --- synapse/handlers/sync.py | 42 ++++++++++++++++++++++++++++++----- synapse/rest/client/v2_alpha/sync.py | 1 + synapse/storage/event_push_actions.py | 5 +++-- 3 files changed, 40 insertions(+), 8 deletions(-) (limited to 'synapse/storage') diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 52202d8e63..66e57bd4d6 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -55,6 +55,7 @@ class JoinedSyncResult(collections.namedtuple("JoinedSyncResult", [ "ephemeral", "account_data", "unread_notification_count", + "unread_highlight_count", ])): __slots__ = [] @@ -292,9 +293,14 @@ class SyncHandler(BaseHandler): notifs = yield self.unread_notifs_for_room_id( room_id, sync_config, ephemeral_by_room ) + notif_count = None + highlight_count = None if notifs is not None: notif_count = len(notifs) + highlight_count = len([ + 1 for notif in notifs if _action_has_highlight(notif["actions"]) + ]) current_state = yield self.get_state_at(room_id, now_token) @@ -307,6 +313,7 @@ class SyncHandler(BaseHandler): room_id, tags_by_room, account_data_by_room ), unread_notification_count=notif_count, + unread_highlight_count=highlight_count, )) def account_data_for_user(self, account_data): @@ -529,9 +536,14 @@ class SyncHandler(BaseHandler): notifs = yield self.unread_notifs_for_room_id( room_id, sync_config, all_ephemeral_by_room ) + notif_count = None + highlight_count = None if notifs is not None: notif_count = len(notifs) + highlight_count = len([ + 1 for notif in notifs if _action_has_highlight(notif["actions"]) + ]) just_joined = yield self.check_joined_room(sync_config, state) if just_joined: @@ -553,7 +565,8 @@ class SyncHandler(BaseHandler): account_data=self.account_data_for_room( room_id, tags_by_room, account_data_by_room ), - unread_notification_count=notif_count + unread_notification_count=notif_count, + unread_highlight_count=highlight_count, ) logger.debug("Result for room %s: %r", room_id, room_sync) @@ -692,9 +705,14 @@ class SyncHandler(BaseHandler): notifs = yield self.unread_notifs_for_room_id( room_id, sync_config, ephemeral_by_room ) + notif_count = None + highlight_count = None if notifs is not None: notif_count = len(notifs) + highlight_count = len([ + 1 for notif in notifs if _action_has_highlight(notif["actions"]) + ]) room_sync = JoinedSyncResult( room_id=room_id, @@ -705,6 +723,7 @@ class SyncHandler(BaseHandler): room_id, tags_by_room, account_data_by_room ), unread_notification_count=notif_count, + unread_highlight_count=highlight_count, ) logger.debug("Room sync: %r", room_sync) @@ -850,8 +869,19 @@ class SyncHandler(BaseHandler): notifs = yield self.store.get_unread_event_push_actions_by_room_for_user( room_id, sync_config.user.to_string(), last_unread_event_id ) - else: - # There is no new information in this period, so your notification - # count is whatever it was last time. 
-            defer.returnValue(None)
-        defer.returnValue(notifs)
+            defer.returnValue(notifs)
+
+        # There is no new information in this period, so your notification
+        # count is whatever it was last time.
+        defer.returnValue(None)
+
+
+def _action_has_highlight(actions):
+    for action in actions:
+        try:
+            if action.get("set_tweak", None) == "highlight":
+                return action.get("value", True)
+        except AttributeError:
+            pass
+
+    return False
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 826f9db189..e300ced214 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -313,6 +313,7 @@ class SyncRestServlet(RestServlet):
         ephemeral_events = filter.filter_room_ephemeral(room.ephemeral)
         result["ephemeral"] = {"events": ephemeral_events}
         result["unread_notification_count"] = room.unread_notification_count
+        result["unread_highlight_count"] = room.unread_highlight_count
 
         return result
 
diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py
index d99171ee87..6b7cebc9ce 100644
--- a/synapse/storage/event_push_actions.py
+++ b/synapse/storage/event_push_actions.py
@@ -17,7 +17,7 @@
 from ._base import SQLBaseStore
 from twisted.internet import defer
 
 import logging
-import simplejson as json
+import ujson as json
 
 logger = logging.getLogger(__name__)
@@ -84,7 +84,8 @@ class EventPushActionsStore(SQLBaseStore):
                 )
             )
             return [
-                {"event_id": row[0], "actions": row[1]} for row in txn.fetchall()
+                {"event_id": row[0], "actions": json.loads(row[1])}
+                for row in txn.fetchall()
             ]
 
         ret = yield self.runInteraction(
--
cgit 1.5.1

From 5a7d1ecffcab7a94caf70471a2eec56eb868573c Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Tue, 19 Jan 2016 16:01:05 +0000
Subject: Add regex cache.
 Only calculate push actions for users that have sent read receipts, and are
 on that server

---
 synapse/handlers/_base.py | 2 +-
 synapse/handlers/federation.py | 2 +-
 synapse/push/action_generator.py | 7 ++++---
 synapse/push/bulk_push_rule_evaluator.py | 15 ++++++++++-----
 synapse/push/push_rule_evaluator.py | 20 +++++++++++++++++---
 synapse/server.py | 4 ++++
 synapse/storage/receipts.py | 14 +++++++++++++-
 7 files changed, 50 insertions(+), 14 deletions(-)
(limited to 'synapse/storage')

diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index 2d1167296a..5c7617de44 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -266,7 +266,7 @@ class BaseHandler(object):
             event, context=context
         )
 
-        action_generator = ActionGenerator(self.store)
+        action_generator = ActionGenerator(self.hs)
         yield action_generator.handle_push_actions_for_event(
             event, self
         )
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 4b94940e99..6c19d6ae8c 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -245,7 +245,7 @@ class FederationHandler(BaseHandler):
             yield user_joined_room(self.distributor, user, event.room_id)
 
         if not backfilled and not event.internal_metadata.is_outlier():
-            action_generator = ActionGenerator(self.store)
+            action_generator = ActionGenerator(self.hs)
             yield action_generator.handle_push_actions_for_event(
                 event, self
             )
diff --git a/synapse/push/action_generator.py b/synapse/push/action_generator.py
index 4cf94f6c61..1d2e558f9a 100644
--- a/synapse/push/action_generator.py
+++ b/synapse/push/action_generator.py
@@ -25,8 +25,9 @@ logger = logging.getLogger(__name__)
 
 
 class ActionGenerator:
-    def __init__(self, store):
-        self.store = store
+    def __init__(self, hs):
+        self.hs = hs
+        self.store = hs.get_datastore()
         # really we want to get all user ids and all profile tags too,
         # since we want the actions for each profile tag for every user and
         # also actions for a client with no profile tag for each user.
@@ -42,7 +43,7 @@ class ActionGenerator:
         )
 
         bulk_evaluator = yield bulk_push_rule_evaluator.evaluator_for_room_id(
-            event.room_id, self.store
+            event.room_id, self.hs, self.store
         )
 
         actions_by_user = yield bulk_evaluator.action_for_event_by_user(event, handler)
diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py
index efd686fa6e..1000ae6301 100644
--- a/synapse/push/bulk_push_rule_evaluator.py
+++ b/synapse/push/bulk_push_rule_evaluator.py
@@ -36,6 +36,7 @@ def decode_rule_json(rule):
 @defer.inlineCallbacks
 def _get_rules(room_id, user_ids, store):
     rules_by_user = yield store.bulk_get_push_rules(user_ids)
+
     rules_by_user = {
         uid: baserules.list_with_base_rules([
             decode_rule_json(rule_list)
@@ -47,12 +48,16 @@ def _get_rules(room_id, user_ids, store):
 
 
 @defer.inlineCallbacks
-def evaluator_for_room_id(room_id, store):
-    users = yield store.get_users_in_room(room_id)
-    rules_by_user = yield _get_rules(room_id, users, store)
+def evaluator_for_room_id(room_id, hs, store):
+    results = yield store.get_receipts_for_room(room_id, "m.read")
+    user_ids = [
+        row["user_id"] for row in results
+        if hs.is_mine_id(row["user_id"])
+    ]
+    rules_by_user = yield _get_rules(room_id, user_ids, store)
 
     defer.returnValue(BulkPushRuleEvaluator(
-        room_id, rules_by_user, users, store
+        room_id, rules_by_user, user_ids, store
     ))
 
 
@@ -129,7 +134,7 @@ def _condition_checker(evaluator, conditions, uid, display_name, cache):
         res = evaluator.matches(cond, uid, display_name, None)
         if _id:
-            cache[_id] = res
+            cache[_id] = bool(res)
 
         if not res:
             return False
 
diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py
index 4654994d2d..753b6469e2 100644
--- a/synapse/push/push_rule_evaluator.py
+++ b/synapse/push/push_rule_evaluator.py
@@ -22,6 +22,7 @@
 import simplejson as json
 import re
 
 from synapse.types import UserID
+from synapse.util.caches.lrucache import LruCache
 
 logger = logging.getLogger(__name__)
@@ -277,18 +278,18 @@ def _glob_matches(glob, value, word_boundary=False):
         )
         if word_boundary:
             r = r"\b%s\b" % (r,)
-            r = re.compile(r, flags=re.IGNORECASE)
+            r = _compile_regex(r)
 
             return r.search(value)
         else:
             r = r + "$"
-            r = re.compile(r, flags=re.IGNORECASE)
+            r = _compile_regex(r)
 
             return r.match(value)
     elif word_boundary:
         r = re.escape(glob)
         r = r"\b%s\b" % (r,)
-        r = re.compile(r, flags=re.IGNORECASE)
+        r = _compile_regex(r)
 
         return r.search(value)
     else:
@@ -306,3 +307,16 @@ def _flatten_dict(d, prefix=[], result={}):
             _flatten_dict(value, prefix=(prefix+[key]), result=result)
 
     return result
+
+
+regex_cache = LruCache(100000)
+
+
+def _compile_regex(regex_str):
+    r = regex_cache.get(regex_str, None)
+    if r:
+        return r
+
+    r = re.compile(regex_str, flags=re.IGNORECASE)
+    regex_cache[regex_str] = r
+    return r
diff --git a/synapse/server.py b/synapse/server.py
index ffd4f936d0..63f9059837 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -96,6 +96,7 @@ class BaseHomeServer(object):
             hostname : The hostname for the server.
""" self.hostname = hostname + self.hostname_with_colon = ":" + hostname self._building = {} # Other kwargs are explicit dependencies @@ -139,6 +140,9 @@ class BaseHomeServer(object): def is_mine(self, domain_specific_string): return domain_specific_string.domain == self.hostname + def is_mine_id(self, string): + return string.endswith(self.hostname_with_colon) + # Build magic accessors for every dependency for depname in BaseHomeServer.DEPENDENCIES: BaseHomeServer._make_dependency_method(depname) diff --git a/synapse/storage/receipts.py b/synapse/storage/receipts.py index 21cf88b3da..c80e576620 100644 --- a/synapse/storage/receipts.py +++ b/synapse/storage/receipts.py @@ -14,7 +14,7 @@ # limitations under the License. from ._base import SQLBaseStore -from synapse.util.caches.descriptors import cachedInlineCallbacks, cachedList +from synapse.util.caches.descriptors import cachedInlineCallbacks, cachedList, cached from synapse.util.caches import cache_counter, caches_by_name from twisted.internet import defer @@ -33,6 +33,18 @@ class ReceiptsStore(SQLBaseStore): self._receipts_stream_cache = _RoomStreamChangeCache() + @cached(num_args=2) + def get_receipts_for_room(self, room_id, receipt_type): + return self._simple_select_list( + table="receipts_linearized", + keyvalues={ + "room_id": room_id, + "receipt_type": receipt_type, + }, + retcols=("user_id", "event_id"), + desc="get_receipts_for_room", + ) + @defer.inlineCallbacks def get_linearized_receipts_for_rooms(self, room_ids, to_key, from_key=None): """Get receipts for multiple rooms for sending to clients. -- cgit 1.5.1