From 9725c59247131d243316ff299e6864098d9bdc58 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Tue, 28 Jul 2020 19:20:55 +0100 Subject: Implement new experimental push rules with a database hack to enable them --- synapse/push/baserules.py | 217 ++++++++++++++++++++- synapse/storage/data_stores/main/push_rule.py | 35 +++- .../main/schema/delta/58/13new_push_rules_tmp.sql | 21 ++ 3 files changed, 259 insertions(+), 14 deletions(-) create mode 100644 synapse/storage/data_stores/main/schema/delta/58/13new_push_rules_tmp.sql (limited to 'synapse') diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 286374d0b5..e06b1a01e6 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -19,7 +19,7 @@ import copy from synapse.push.rulekinds import PRIORITY_CLASS_INVERSE_MAP, PRIORITY_CLASS_MAP -def list_with_base_rules(rawrules): +def list_with_base_rules(rawrules, use_new_defaults=False): """Combine the list of rules set by the user with the default push rules Args: @@ -43,7 +43,9 @@ def list_with_base_rules(rawrules): ruleslist.extend( make_base_prepend_rules( - PRIORITY_CLASS_INVERSE_MAP[current_prio_class], modified_base_rules + PRIORITY_CLASS_INVERSE_MAP[current_prio_class], + modified_base_rules, + use_new_defaults, ) ) @@ -54,6 +56,7 @@ def list_with_base_rules(rawrules): make_base_append_rules( PRIORITY_CLASS_INVERSE_MAP[current_prio_class], modified_base_rules, + use_new_defaults, ) ) current_prio_class -= 1 @@ -62,6 +65,7 @@ def list_with_base_rules(rawrules): make_base_prepend_rules( PRIORITY_CLASS_INVERSE_MAP[current_prio_class], modified_base_rules, + use_new_defaults, ) ) @@ -70,27 +74,31 @@ def list_with_base_rules(rawrules): while current_prio_class > 0: ruleslist.extend( make_base_append_rules( - PRIORITY_CLASS_INVERSE_MAP[current_prio_class], modified_base_rules + PRIORITY_CLASS_INVERSE_MAP[current_prio_class], + modified_base_rules, + use_new_defaults, ) ) current_prio_class -= 1 if current_prio_class > 0: ruleslist.extend( make_base_prepend_rules( - PRIORITY_CLASS_INVERSE_MAP[current_prio_class], modified_base_rules + PRIORITY_CLASS_INVERSE_MAP[current_prio_class], + modified_base_rules, + use_new_defaults, ) ) return ruleslist -def make_base_append_rules(kind, modified_base_rules): +def make_base_append_rules(kind, modified_base_rules, use_new_defaults=False): rules = [] if kind == "override": - rules = BASE_APPEND_OVERRIDE_RULES + rules = NEW_APPEND_OVERRIDE_RULES if use_new_defaults else BASE_APPEND_OVERRIDE_RULES elif kind == "underride": - rules = BASE_APPEND_UNDERRIDE_RULES + rules = NEW_APPEND_UNDERRIDE_RULES if use_new_defaults else BASE_APPEND_UNDERRIDE_RULES elif kind == "content": rules = BASE_APPEND_CONTENT_RULES @@ -105,11 +113,11 @@ def make_base_append_rules(kind, modified_base_rules): return rules -def make_base_prepend_rules(kind, modified_base_rules): +def make_base_prepend_rules(kind, modified_base_rules, use_new_defaults=False): rules = [] if kind == "override": - rules = BASE_PREPEND_OVERRIDE_RULES + rules = NEW_PREPEND_OVERRIDE_RULES if use_new_defaults else BASE_PREPEND_OVERRIDE_RULES # Copy the rules before modifying them rules = copy.deepcopy(rules) @@ -151,6 +159,16 @@ BASE_PREPEND_OVERRIDE_RULES = [ ] +NEW_PREPEND_OVERRIDE_RULES = [ + { + "rule_id": "global/override/.m.rule.master", + "enabled": False, + "conditions": [], + "actions": [], + } +] + + BASE_APPEND_OVERRIDE_RULES = [ { "rule_id": "global/override/.m.rule.suppress_notices", @@ -270,6 +288,141 @@ BASE_APPEND_OVERRIDE_RULES = [ ] 
+NEW_APPEND_OVERRIDE_RULES = [ + { + "rule_id": "global/override/.m.rule.encrypted", + "conditions": [ + { + "kind": "event_match", + "key": "type", + "pattern": "m.room.encrypted", + "_id": "_encrypted", + } + ], + "actions": ["notify"], + }, + { + "rule_id": "global/override/.m.rule.suppress_notices", + "conditions": [ + { + "kind": "event_match", + "key": "type", + "pattern": "m.room.message", + "_id": "_suppress_notices_type", + }, + { + "kind": "event_match", + "key": "content.msgtype", + "pattern": "m.notice", + "_id": "_suppress_notices", + } + ], + "actions": [], + }, + { + "rule_id": "global/underride/.m.rule.suppress_edits", + "conditions": [ + { + "kind": "event_match", + "key": "m.relates_to.m.rel_type", + "pattern": "m.replace", + "_id": "_suppress_edits", + } + ], + "actions": [], + }, + { + "rule_id": "global/override/.m.rule.invite_for_me", + "conditions": [ + { + "kind": "event_match", + "key": "type", + "pattern": "m.room.member", + "_id": "_member", + }, + { + "kind": "event_match", + "key": "content.membership", + "pattern": "invite", + "_id": "_invite_member", + }, + {"kind": "event_match", "key": "state_key", "pattern_type": "user_id"}, + ], + "actions": [ + "notify", + {"set_tweak": "sound", "value": "default"}, + ], + }, + { + "rule_id": "global/override/.m.rule.contains_display_name", + "conditions": [{"kind": "contains_display_name"}], + "actions": [ + "notify", + {"set_tweak": "sound", "value": "default"}, + {"set_tweak": "highlight"}, + ], + }, + { + "rule_id": "global/override/.m.rule.tombstone", + "conditions": [ + { + "kind": "event_match", + "key": "type", + "pattern": "m.room.tombstone", + "_id": "_tombstone", + }, + { + "kind": "event_match", + "key": "state_key", + "pattern": "", + "_id": "_tombstone_statekey", + }, + ], + "actions": [ + "notify", + {"set_tweak": "sound", "value": "default"}, + {"set_tweak": "highlight"}, + ], + }, + { + "rule_id": "global/override/.m.rule.roomnotif", + "conditions": [ + { + "kind": "event_match", + "key": "content.body", + "pattern": "@room", + "_id": "_roomnotif_content", + }, + { + "kind": "sender_notification_permission", + "key": "room", + "_id": "_roomnotif_pl", + }, + ], + "actions": [ + "notify", + {"set_tweak": "highlight"}, + {"set_tweak": "sound", "value": "default"}, + ], + }, + { + "rule_id": "global/override/.m.rule.call", + "conditions": [ + { + "kind": "event_match", + "key": "type", + "pattern": "m.call.invite", + "_id": "_call", + } + ], + "actions": [ + "notify", + {"set_tweak": "sound", "value": "ring"}, + ], + } +] + + BASE_APPEND_UNDERRIDE_RULES = [ { "rule_id": "global/underride/.m.rule.call", @@ -354,6 +507,29 @@ BASE_APPEND_UNDERRIDE_RULES = [ ] +NEW_APPEND_UNDERRIDE_RULES = [ + { + "rule_id": "global/underride/.m.rule.room_one_to_one", + "conditions": [ + {"kind": "room_member_count", "is": "2", "_id": "member_count"}, + {"kind": "event_match", "key": "content.body", "pattern": "*", "_id": "body"}, + ], + "actions": [ + "notify", + {"set_tweak": "sound", "value": "default"}, + ], + }, + { + "rule_id": "global/underride/.m.rule.message", + "conditions": [ + {"kind": "event_match", "key": "content.body", "pattern": "*", "_id": "body"}, + ], + "actions": ["notify"], + "enabled": False, + }, +] + + BASE_RULE_IDS = set() for r in BASE_APPEND_CONTENT_RULES: @@ -375,3 +551,26 @@ for r in BASE_APPEND_UNDERRIDE_RULES: r["priority_class"] = PRIORITY_CLASS_MAP["underride"] r["default"] = True BASE_RULE_IDS.add(r["rule_id"]) + + +NEW_RULE_IDS = set() + +for r in BASE_APPEND_CONTENT_RULES: + 
r["priority_class"] = PRIORITY_CLASS_MAP["content"] + r["default"] = True + NEW_RULE_IDS.add(r["rule_id"]) + +for r in NEW_PREPEND_OVERRIDE_RULES: + r["priority_class"] = PRIORITY_CLASS_MAP["override"] + r["default"] = True + NEW_RULE_IDS.add(r["rule_id"]) + +for r in NEW_APPEND_OVERRIDE_RULES: + r["priority_class"] = PRIORITY_CLASS_MAP["override"] + r["default"] = True + NEW_RULE_IDS.add(r["rule_id"]) + +for r in NEW_APPEND_UNDERRIDE_RULES: + r["priority_class"] = PRIORITY_CLASS_MAP["underride"] + r["default"] = True + NEW_RULE_IDS.add(r["rule_id"]) diff --git a/synapse/storage/data_stores/main/push_rule.py b/synapse/storage/data_stores/main/push_rule.py index d181488db7..c10da245d2 100644 --- a/synapse/storage/data_stores/main/push_rule.py +++ b/synapse/storage/data_stores/main/push_rule.py @@ -39,7 +39,7 @@ from synapse.util.caches.stream_change_cache import StreamChangeCache logger = logging.getLogger(__name__) -def _load_rules(rawrules, enabled_map): +def _load_rules(rawrules, enabled_map, use_new_defaults=False): ruleslist = [] for rawrule in rawrules: rule = dict(rawrule) @@ -49,7 +49,7 @@ def _load_rules(rawrules, enabled_map): ruleslist.append(rule) # We're going to be mutating this a lot, so do a deep copy - rules = list(list_with_base_rules(ruleslist)) + rules = list(list_with_base_rules(ruleslist, use_new_defaults)) for i, rule in enumerate(rules): rule_id = rule["rule_id"] @@ -115,7 +115,7 @@ class PushRulesWorkerStore( raise NotImplementedError() @cachedInlineCallbacks(max_entries=5000) - def get_push_rules_for_user(self, user_id): + def _get_push_rules_for_user(self, user_id, use_new_defaults=False): rows = yield self.db.simple_select_list( table="push_rules", keyvalues={"user_name": user_id}, @@ -134,8 +134,22 @@ class PushRulesWorkerStore( enabled_map = yield self.get_push_rules_enabled_for_user(user_id) - rules = _load_rules(rows, enabled_map) + rules = _load_rules(rows, enabled_map, use_new_defaults) + + return rules + + @defer.inlineCallbacks + def get_push_rules_for_user(self, user_id): + # Temporary hack so we can use the new experimental default push rules to some + # users without impacting others. + use_new_defaults = yield self.db.simple_select_list( + table="new_push_rules_users_tmp", + keyvalues={"user_id": user_id}, + retcols=("user_id",), + desc="get_user_new_default_push_rules", + ) + rules = yield self._get_push_rules_for_user(user_id, bool(use_new_defaults)) return rules @cachedInlineCallbacks(max_entries=5000) @@ -194,7 +208,18 @@ class PushRulesWorkerStore( enabled_map_by_user = yield self.bulk_get_push_rules_enabled(user_ids) for user_id, rules in results.items(): - results[user_id] = _load_rules(rules, enabled_map_by_user.get(user_id, {})) + # Temporary hack so we can use the new experimental default push rules to some + # users without impacting others. 
+ use_new_defaults = yield self.db.simple_select_list( + table="new_push_rules_users_tmp", + keyvalues={"user_id": user_id}, + retcols=("user_id",), + desc="get_user_new_default_push_rules", + ) + + results[user_id] = _load_rules( + rules, enabled_map_by_user.get(user_id, {}), bool(use_new_defaults), + ) return results diff --git a/synapse/storage/data_stores/main/schema/delta/58/13new_push_rules_tmp.sql b/synapse/storage/data_stores/main/schema/delta/58/13new_push_rules_tmp.sql new file mode 100644 index 0000000000..b7daf1c67b --- /dev/null +++ b/synapse/storage/data_stores/main/schema/delta/58/13new_push_rules_tmp.sql @@ -0,0 +1,21 @@ +/* Copyright 2020 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- This is a temporary table in which we store the IDs of the users for which we need to +-- serve the new experimental default push rules. The purpose of this table is to help +-- test these new defaults, so it shall be dropped when the experimentation is done. +CREATE TABLE IF NOT EXISTS new_push_rules_users_tmp ( + user_id TEXT PRIMARY KEY +); \ No newline at end of file -- cgit 1.5.1 From 60328ce9fbe90299253ba740f2648c42b9091920 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 30 Jul 2020 19:02:28 +0100 Subject: Lint --- synapse/push/baserules.py | 51 +++++++++++++++++++++++++++++------------------ 1 file changed, 32 insertions(+), 19 deletions(-) (limited to 'synapse') diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index e06b1a01e6..172fd00f19 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -96,9 +96,17 @@ def make_base_append_rules(kind, modified_base_rules, use_new_defaults=False): rules = [] if kind == "override": - rules = NEW_APPEND_OVERRIDE_RULES if use_new_defaults else BASE_APPEND_OVERRIDE_RULES + rules = ( + NEW_APPEND_OVERRIDE_RULES + if use_new_defaults + else BASE_APPEND_OVERRIDE_RULES + ) elif kind == "underride": - rules = NEW_APPEND_UNDERRIDE_RULES if use_new_defaults else BASE_APPEND_UNDERRIDE_RULES + rules = ( + NEW_APPEND_UNDERRIDE_RULES + if use_new_defaults + else BASE_APPEND_UNDERRIDE_RULES + ) elif kind == "content": rules = BASE_APPEND_CONTENT_RULES @@ -117,7 +125,11 @@ def make_base_prepend_rules(kind, modified_base_rules, use_new_defaults=False): rules = [] if kind == "override": - rules = NEW_PREPEND_OVERRIDE_RULES if use_new_defaults else BASE_PREPEND_OVERRIDE_RULES + rules = ( + NEW_PREPEND_OVERRIDE_RULES + if use_new_defaults + else BASE_PREPEND_OVERRIDE_RULES + ) # Copy the rules before modifying them rules = copy.deepcopy(rules) @@ -315,7 +327,7 @@ NEW_APPEND_OVERRIDE_RULES = [ "key": "content.msgtype", "pattern": "m.notice", "_id": "_suppress_notices", - } + }, ], "actions": [], }, @@ -348,10 +360,7 @@ NEW_APPEND_OVERRIDE_RULES = [ }, {"kind": "event_match", "key": "state_key", "pattern_type": "user_id"}, ], - "actions": [ - "notify", - {"set_tweak": "sound", "value": "default"}, - ], + "actions": ["notify", {"set_tweak": "sound", "value": "default"}], 
}, { "rule_id": "global/override/.m.rule.contains_display_name", @@ -415,11 +424,8 @@ NEW_APPEND_OVERRIDE_RULES = [ "_id": "_call", } ], - "actions": [ - "notify", - {"set_tweak": "sound", "value": "ring"}, - ], - } + "actions": ["notify", {"set_tweak": "sound", "value": "ring"}], + }, ] @@ -512,17 +518,24 @@ NEW_APPEND_UNDERRIDE_RULES = [ "rule_id": "global/underride/.m.rule.room_one_to_one", "conditions": [ {"kind": "room_member_count", "is": "2", "_id": "member_count"}, - {"kind": "event_match", "key": "content.body", "pattern": "*", "_id": "body"}, - ], - "actions": [ - "notify", - {"set_tweak": "sound", "value": "default"}, + { + "kind": "event_match", + "key": "content.body", + "pattern": "*", + "_id": "body", + }, ], + "actions": ["notify", {"set_tweak": "sound", "value": "default"}], }, { "rule_id": "global/underride/.m.rule.message", "conditions": [ - {"kind": "event_match", "key": "content.body", "pattern": "*", "_id": "body"}, + { + "kind": "event_match", + "key": "content.body", + "pattern": "*", + "_id": "body", + }, ], "actions": ["notify"], "enabled": False, -- cgit 1.5.1 From 79d991eff060abaa074ef23201f0e68cc8228e7e Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 31 Jul 2020 13:58:42 +0100 Subject: Fix cache invalidation calls --- synapse/replication/slave/storage/push_rule.py | 2 +- synapse/storage/data_stores/main/push_rule.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse') diff --git a/synapse/replication/slave/storage/push_rule.py b/synapse/replication/slave/storage/push_rule.py index 23ec1c5b11..6bebd4d5c1 100644 --- a/synapse/replication/slave/storage/push_rule.py +++ b/synapse/replication/slave/storage/push_rule.py @@ -34,7 +34,7 @@ class SlavedPushRuleStore(SlavedEventStore, PushRulesWorkerStore): if stream_name == PushRulesStream.NAME: self._push_rules_stream_id_gen.advance(token) for row in rows: - self.get_push_rules_for_user.invalidate((row.user_id,)) + self._get_push_rules_for_user.invalidate((row.user_id,)) self.get_push_rules_enabled_for_user.invalidate((row.user_id,)) self.push_rules_stream_cache.entity_has_changed(row.user_id, token) return super().process_replication_rows(stream_name, instance_name, token, rows) diff --git a/synapse/storage/data_stores/main/push_rule.py b/synapse/storage/data_stores/main/push_rule.py index 861050814d..85cd24ce72 100644 --- a/synapse/storage/data_stores/main/push_rule.py +++ b/synapse/storage/data_stores/main/push_rule.py @@ -768,7 +768,7 @@ class PushRuleStore(PushRulesWorkerStore): self.db.simple_insert_txn(txn, "push_rules_stream", values=values) - txn.call_after(self.get_push_rules_for_user.invalidate, (user_id,)) + txn.call_after(self._get_push_rules_for_user.invalidate, (user_id,)) txn.call_after(self.get_push_rules_enabled_for_user.invalidate, (user_id,)) txn.call_after( self.push_rules_stream_cache.entity_has_changed, user_id, stream_id -- cgit 1.5.1 From cf42d0a60cce6cbb4b56f58bdb25d7a90e3ecff6 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 31 Jul 2020 15:06:41 +0100 Subject: Fix cache name --- synapse/storage/data_stores/main/push_rule.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse') diff --git a/synapse/storage/data_stores/main/push_rule.py b/synapse/storage/data_stores/main/push_rule.py index 85cd24ce72..267fc5f5a3 100644 --- a/synapse/storage/data_stores/main/push_rule.py +++ b/synapse/storage/data_stores/main/push_rule.py @@ -181,7 +181,7 @@ class PushRulesWorkerStore( ) @cachedList( - 
cached_method_name="get_push_rules_for_user", + cached_method_name="_get_push_rules_for_user", list_name="user_ids", num_args=1, inlineCallbacks=True, -- cgit 1.5.1 From 1678057b56a82467c25259d4727a69097dad0ea3 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 3 Aug 2020 11:22:22 +0100 Subject: Back out the database hack and replace it with a temporary config setting --- synapse/config/server.py | 10 +++++++ synapse/replication/slave/storage/push_rule.py | 2 +- synapse/storage/data_stores/main/push_rule.py | 35 ++++++---------------- .../main/schema/delta/58/13new_push_rules_tmp.sql | 21 ------------- 4 files changed, 20 insertions(+), 48 deletions(-) delete mode 100644 synapse/storage/data_stores/main/schema/delta/58/13new_push_rules_tmp.sql (limited to 'synapse') diff --git a/synapse/config/server.py b/synapse/config/server.py index 848587d232..68d143410f 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -530,6 +530,16 @@ class ServerConfig(Config): "request_token_inhibit_3pid_errors", False, ) + # List of users trialing the new experimental default push rules. This setting is + # not included in the sample configuration file on purpose as it's a temporary + # hack, so that some users can trial the new defaults without impacting every + # user on the homeserver. + self.users_new_default_push_rules = ( + config.get("users_new_default_push_rules") or [] + ) + if not isinstance(self.users_new_default_push_rules, list): + raise ConfigError("'users_new_default_push_rules' must be a list") + def has_tls_listener(self) -> bool: return any(listener.tls for listener in self.listeners) diff --git a/synapse/replication/slave/storage/push_rule.py b/synapse/replication/slave/storage/push_rule.py index 6bebd4d5c1..23ec1c5b11 100644 --- a/synapse/replication/slave/storage/push_rule.py +++ b/synapse/replication/slave/storage/push_rule.py @@ -34,7 +34,7 @@ class SlavedPushRuleStore(SlavedEventStore, PushRulesWorkerStore): if stream_name == PushRulesStream.NAME: self._push_rules_stream_id_gen.advance(token) for row in rows: - self._get_push_rules_for_user.invalidate((row.user_id,)) + self.get_push_rules_for_user.invalidate((row.user_id,)) self.get_push_rules_enabled_for_user.invalidate((row.user_id,)) self.push_rules_stream_cache.entity_has_changed(row.user_id, token) return super().process_replication_rows(stream_name, instance_name, token, rows) diff --git a/synapse/storage/data_stores/main/push_rule.py b/synapse/storage/data_stores/main/push_rule.py index 267fc5f5a3..d644a0b8ce 100644 --- a/synapse/storage/data_stores/main/push_rule.py +++ b/synapse/storage/data_stores/main/push_rule.py @@ -105,6 +105,8 @@ class PushRulesWorkerStore( prefilled_cache=push_rules_prefill, ) + self.users_new_default_push_rules = hs.config.users_new_default_push_rules + @abc.abstractmethod def get_max_push_rules_stream_id(self): """Get the position of the push rules stream. 
@@ -115,7 +117,7 @@ class PushRulesWorkerStore( raise NotImplementedError() @cachedInlineCallbacks(max_entries=5000) - def _get_push_rules_for_user(self, user_id, use_new_defaults=False): + def get_push_rules_for_user(self, user_id): rows = yield self.db.simple_select_list( table="push_rules", keyvalues={"user_name": user_id}, @@ -134,22 +136,10 @@ class PushRulesWorkerStore( enabled_map = yield self.get_push_rules_enabled_for_user(user_id) - rules = _load_rules(rows, enabled_map, use_new_defaults) - - return rules + use_new_defaults = user_id in self.users_new_default_push_rules - @defer.inlineCallbacks - def get_push_rules_for_user(self, user_id): - # Temporary hack so we can use the new experimental default push rules to some - # users without impacting others. - use_new_defaults = yield self.db.simple_select_list( - table="new_push_rules_users_tmp", - keyvalues={"user_id": user_id}, - retcols=("user_id",), - desc="get_user_new_default_push_rules", - ) + rules = _load_rules(rows, enabled_map, use_new_defaults) - rules = yield self._get_push_rules_for_user(user_id, bool(use_new_defaults)) return rules @cachedInlineCallbacks(max_entries=5000) @@ -181,7 +171,7 @@ class PushRulesWorkerStore( ) @cachedList( - cached_method_name="_get_push_rules_for_user", + cached_method_name="get_push_rules_for_user", list_name="user_ids", num_args=1, inlineCallbacks=True, @@ -208,17 +198,10 @@ class PushRulesWorkerStore( enabled_map_by_user = yield self.bulk_get_push_rules_enabled(user_ids) for user_id, rules in results.items(): - # Temporary hack so we can use the new experimental default push rules to some - # users without impacting others. - use_new_defaults = yield self.db.simple_select_list( - table="new_push_rules_users_tmp", - keyvalues={"user_id": user_id}, - retcols=("user_id",), - desc="get_user_new_default_push_rules", - ) + use_new_defaults = user_id in self.users_new_default_push_rules results[user_id] = _load_rules( - rules, enabled_map_by_user.get(user_id, {}), bool(use_new_defaults), + rules, enabled_map_by_user.get(user_id, {}), use_new_defaults, ) return results @@ -768,7 +751,7 @@ class PushRuleStore(PushRulesWorkerStore): self.db.simple_insert_txn(txn, "push_rules_stream", values=values) - txn.call_after(self._get_push_rules_for_user.invalidate, (user_id,)) + txn.call_after(self.get_push_rules_for_user.invalidate, (user_id,)) txn.call_after(self.get_push_rules_enabled_for_user.invalidate, (user_id,)) txn.call_after( self.push_rules_stream_cache.entity_has_changed, user_id, stream_id diff --git a/synapse/storage/data_stores/main/schema/delta/58/13new_push_rules_tmp.sql b/synapse/storage/data_stores/main/schema/delta/58/13new_push_rules_tmp.sql deleted file mode 100644 index b7daf1c67b..0000000000 --- a/synapse/storage/data_stores/main/schema/delta/58/13new_push_rules_tmp.sql +++ /dev/null @@ -1,21 +0,0 @@ -/* Copyright 2020 The Matrix.org Foundation C.I.C - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - --- This is a temporary table in which we store the IDs of the users for which we need to --- serve the new experimental default push rules. The purpose of this table is to help --- test these new defaults, so it shall be dropped when the experimentation is done. -CREATE TABLE IF NOT EXISTS new_push_rules_users_tmp ( - user_id TEXT PRIMARY KEY -); \ No newline at end of file -- cgit 1.5.1 From e2f1cccc8aec83f265220283c1f9d3707d49bb7e Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 3 Aug 2020 11:52:52 +0100 Subject: Fix PUT /pushrules to use the right rule IDs --- synapse/rest/client/v1/push_rule.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) (limited to 'synapse') diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index 9fd4908136..f66b8fa7c4 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -25,7 +25,7 @@ from synapse.http.servlet import ( parse_json_value_from_request, parse_string, ) -from synapse.push.baserules import BASE_RULE_IDS +from synapse.push.baserules import BASE_RULE_IDS, NEW_RULE_IDS from synapse.push.clientformat import format_push_rules_for_user from synapse.push.rulekinds import PRIORITY_CLASS_MAP from synapse.rest.client.v2_alpha._base import client_patterns @@ -45,6 +45,8 @@ class PushRuleRestServlet(RestServlet): self.notifier = hs.get_notifier() self._is_worker = hs.config.worker_app is not None + self.users_new_default_push_rules = hs.config.users_new_default_push_rules + async def on_PUT(self, request, path): if self._is_worker: raise Exception("Cannot handle PUT /push_rules on worker") @@ -179,7 +181,12 @@ class PushRuleRestServlet(RestServlet): rule_id = spec["rule_id"] is_default_rule = rule_id.startswith(".") if is_default_rule: - if namespaced_rule_id not in BASE_RULE_IDS: + if user_id in self.users_new_default_push_rules: + rule_ids = NEW_RULE_IDS + else: + rule_ids = BASE_RULE_IDS + + if namespaced_rule_id not in rule_ids: raise SynapseError(404, "Unknown rule %r" % (namespaced_rule_id,)) return self.store.set_push_rule_actions( user_id, namespaced_rule_id, actions, is_default_rule -- cgit 1.5.1 From dd11f575a29b59aced6cfa7ea7b9faea6f968f8d Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 6 Aug 2020 10:52:26 +0100 Subject: Incorporate review --- synapse/config/server.py | 3 +++ synapse/push/baserules.py | 20 ++++---------------- synapse/rest/client/v1/push_rule.py | 4 ++-- synapse/storage/data_stores/main/push_rule.py | 6 +++--- 4 files changed, 12 insertions(+), 21 deletions(-) (limited to 'synapse') diff --git a/synapse/config/server.py b/synapse/config/server.py index 68d143410f..00fa07c225 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -540,6 +540,9 @@ class ServerConfig(Config): if not isinstance(self.users_new_default_push_rules, list): raise ConfigError("'users_new_default_push_rules' must be a list") + # Turn the list into a set to improve lookup speed. + self.users_new_default_push_rules = set(self.users_new_default_push_rules) + def has_tls_listener(self) -> bool: return any(listener.tls for listener in self.listeners) diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 172fd00f19..8047873ff1 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -24,6 +24,8 @@ def list_with_base_rules(rawrules, use_new_defaults=False): Args: rawrules(list): The rules the user has modified or set. 
+ use_new_defaults(bool): Whether to use the new experimental default rules when + appending or prepending default rules. Returns: A new list with the rules set by the user combined with the defaults. @@ -125,11 +127,7 @@ def make_base_prepend_rules(kind, modified_base_rules, use_new_defaults=False): rules = [] if kind == "override": - rules = ( - NEW_PREPEND_OVERRIDE_RULES - if use_new_defaults - else BASE_PREPEND_OVERRIDE_RULES - ) + rules = BASE_PREPEND_OVERRIDE_RULES # Copy the rules before modifying them rules = copy.deepcopy(rules) @@ -171,16 +169,6 @@ BASE_PREPEND_OVERRIDE_RULES = [ ] -NEW_PREPEND_OVERRIDE_RULES = [ - { - "rule_id": "global/override/.m.rule.master", - "enabled": False, - "conditions": [], - "actions": [], - } -] - - BASE_APPEND_OVERRIDE_RULES = [ { "rule_id": "global/override/.m.rule.suppress_notices", @@ -573,7 +561,7 @@ for r in BASE_APPEND_CONTENT_RULES: r["default"] = True NEW_RULE_IDS.add(r["rule_id"]) -for r in NEW_PREPEND_OVERRIDE_RULES: +for r in BASE_PREPEND_OVERRIDE_RULES: r["priority_class"] = PRIORITY_CLASS_MAP["override"] r["default"] = True NEW_RULE_IDS.add(r["rule_id"]) diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index f66b8fa7c4..00831879f3 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -45,7 +45,7 @@ class PushRuleRestServlet(RestServlet): self.notifier = hs.get_notifier() self._is_worker = hs.config.worker_app is not None - self.users_new_default_push_rules = hs.config.users_new_default_push_rules + self._users_new_default_push_rules = hs.config.users_new_default_push_rules async def on_PUT(self, request, path): if self._is_worker: @@ -181,7 +181,7 @@ class PushRuleRestServlet(RestServlet): rule_id = spec["rule_id"] is_default_rule = rule_id.startswith(".") if is_default_rule: - if user_id in self.users_new_default_push_rules: + if user_id in self._users_new_default_push_rules: rule_ids = NEW_RULE_IDS else: rule_ids = BASE_RULE_IDS diff --git a/synapse/storage/data_stores/main/push_rule.py b/synapse/storage/data_stores/main/push_rule.py index d644a0b8ce..6b650f8ba8 100644 --- a/synapse/storage/data_stores/main/push_rule.py +++ b/synapse/storage/data_stores/main/push_rule.py @@ -105,7 +105,7 @@ class PushRulesWorkerStore( prefilled_cache=push_rules_prefill, ) - self.users_new_default_push_rules = hs.config.users_new_default_push_rules + self._users_new_default_push_rules = hs.config.users_new_default_push_rules @abc.abstractmethod def get_max_push_rules_stream_id(self): @@ -136,7 +136,7 @@ class PushRulesWorkerStore( enabled_map = yield self.get_push_rules_enabled_for_user(user_id) - use_new_defaults = user_id in self.users_new_default_push_rules + use_new_defaults = user_id in self._users_new_default_push_rules rules = _load_rules(rows, enabled_map, use_new_defaults) @@ -198,7 +198,7 @@ class PushRulesWorkerStore( enabled_map_by_user = yield self.bulk_get_push_rules_enabled(user_ids) for user_id, rules in results.items(): - use_new_defaults = user_id in self.users_new_default_push_rules + use_new_defaults = user_id in self._users_new_default_push_rules results[user_id] = _load_rules( rules, enabled_map_by_user.get(user_id, {}), use_new_defaults, -- cgit 1.5.1 From bf33d5c457643c0b69ebcc26401b7f3090a09e6c Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 6 Aug 2020 17:52:34 +0100 Subject: Incorporate review --- synapse/config/server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse') diff --git a/synapse/config/server.py 
b/synapse/config/server.py index 00fa07c225..22673be9dd 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -534,10 +534,10 @@ class ServerConfig(Config): # not included in the sample configuration file on purpose as it's a temporary # hack, so that some users can trial the new defaults without impacting every # user on the homeserver. - self.users_new_default_push_rules = ( + users_new_default_push_rules = ( config.get("users_new_default_push_rules") or [] ) - if not isinstance(self.users_new_default_push_rules, list): + if not isinstance(users_new_default_push_rules, list): raise ConfigError("'users_new_default_push_rules' must be a list") # Turn the list into a set to improve lookup speed. -- cgit 1.5.1 From 367e9e6e9ec081a08361cff14e248dd03610f57f Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 6 Aug 2020 17:57:58 +0100 Subject: Lint --- synapse/config/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse') diff --git a/synapse/config/server.py b/synapse/config/server.py index 22673be9dd..23ddd0ab08 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -536,7 +536,7 @@ class ServerConfig(Config): # user on the homeserver. users_new_default_push_rules = ( config.get("users_new_default_push_rules") or [] - ) + ) # type: list if not isinstance(users_new_default_push_rules, list): raise ConfigError("'users_new_default_push_rules' must be a list") -- cgit 1.5.1 From cee6c6012ed11f1b8407e57679ab66ad308ff590 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 10 Aug 2020 11:10:34 +0100 Subject: why mypy why --- synapse/config/server.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'synapse') diff --git a/synapse/config/server.py b/synapse/config/server.py index 23ddd0ab08..493d2bd513 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -541,7 +541,9 @@ class ServerConfig(Config): raise ConfigError("'users_new_default_push_rules' must be a list") # Turn the list into a set to improve lookup speed. - self.users_new_default_push_rules = set(self.users_new_default_push_rules) + self.users_new_default_push_rules = ( + set(self.users_new_default_push_rules) + ) # type: set def has_tls_listener(self) -> bool: return any(listener.tls for listener in self.listeners) -- cgit 1.5.1 From 1a3aabcf3facd38f6d4c58fb4f55ad79450acefa Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 10 Aug 2020 11:13:21 +0100 Subject: Lint --- synapse/config/server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse') diff --git a/synapse/config/server.py b/synapse/config/server.py index 493d2bd513..b5c2098ecb 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -541,8 +541,8 @@ class ServerConfig(Config): raise ConfigError("'users_new_default_push_rules' must be a list") # Turn the list into a set to improve lookup speed. 
- self.users_new_default_push_rules = ( - set(self.users_new_default_push_rules) + self.users_new_default_push_rules = set( + self.users_new_default_push_rules ) # type: set def has_tls_listener(self) -> bool: -- cgit 1.5.1 From 5c43c43240a85ca6b65ad327b6a5b1c9e29bd653 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Mon, 10 Aug 2020 11:23:24 +0100 Subject: Typo --- synapse/config/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse') diff --git a/synapse/config/server.py b/synapse/config/server.py index b5c2098ecb..9f15ed109e 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -542,7 +542,7 @@ class ServerConfig(Config): # Turn the list into a set to improve lookup speed. self.users_new_default_push_rules = set( - self.users_new_default_push_rules + users_new_default_push_rules ) # type: set def has_tls_listener(self) -> bool: -- cgit 1.5.1 From fcbab08cbd46d28976411b1d014a4efb76c8b7a4 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Mon, 10 Aug 2020 12:29:47 +0100 Subject: Add an assertion on prev_events in create_new_client_event (#8041) I think this would have caught all the cases in https://github.com/matrix-org/synapse/issues/7642 - and I think a 500 makes more sense here than a 403 --- changelog.d/8041.misc | 1 + synapse/handlers/message.py | 9 +++++++++ tests/storage/test_redaction.py | 4 ++++ 3 files changed, 14 insertions(+) create mode 100644 changelog.d/8041.misc (limited to 'synapse') diff --git a/changelog.d/8041.misc b/changelog.d/8041.misc new file mode 100644 index 0000000000..eefa98d744 --- /dev/null +++ b/changelog.d/8041.misc @@ -0,0 +1 @@ +Add an assertion on prev_events in create_new_client_event. diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 708533d4d1..8ddded8389 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -768,6 +768,15 @@ class EventCreationHandler(object): else: prev_event_ids = await self.store.get_prev_events_for_room(builder.room_id) + # we now ought to have some prev_events (unless it's a create event). + # + # do a quick sanity check here, rather than waiting until we've created the + # event and then try to auth it (which fails with a somewhat confusing "No + # create event in auth events") + assert ( + builder.type == EventTypes.Create or len(prev_event_ids) > 0 + ), "Attempting to create an event with no prev_events" + event = await builder.build(prev_event_ids=prev_event_ids) context = await self.state.compute_event_context(event) if requester: diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py index 41511d479f..1ea35d60c1 100644 --- a/tests/storage/test_redaction.py +++ b/tests/storage/test_redaction.py @@ -251,6 +251,10 @@ class RedactionTestCase(unittest.HomeserverTestCase): def room_id(self): return self._base_builder.room_id + @property + def type(self): + return self._base_builder.type + event_1, context_1 = self.get_success( self.event_creation_handler.create_new_client_event( EventIdManglingBuilder( -- cgit 1.5.1 From 0cb169900ebd39b6f46dbff1b1909cc5b3c17493 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 11 Aug 2020 16:08:10 +0100 Subject: Implement login blocking based on SAML attributes (#8052) Hopefully this mostly speaks for itself. I also did a bit of cleaning up of the error handling. 
Fixes #8047 --- changelog.d/8052.feature | 1 + docs/sample_config.yaml | 11 ++++++++ synapse/config/_util.py | 49 ++++++++++++++++++++++++++++++++++ synapse/config/saml2_config.py | 50 +++++++++++++++++++++++++++++++++++ synapse/handlers/saml_handler.py | 42 ++++++++++++++++++++++++----- synapse/res/templates/saml_error.html | 17 ++++++++---- 6 files changed, 159 insertions(+), 11 deletions(-) create mode 100644 changelog.d/8052.feature create mode 100644 synapse/config/_util.py (limited to 'synapse') diff --git a/changelog.d/8052.feature b/changelog.d/8052.feature new file mode 100644 index 0000000000..6aa020c764 --- /dev/null +++ b/changelog.d/8052.feature @@ -0,0 +1 @@ +Allow login to be blocked based on the values of SAML attributes. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index fe85978a1f..9235b89fb1 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -1577,6 +1577,17 @@ saml2_config: # #grandfathered_mxid_source_attribute: upn + # It is possible to configure Synapse to only allow logins if SAML attributes + # match particular values. The requirements can be listed under + # `attribute_requirements` as shown below. All of the listed attributes must + # match for the login to be permitted. + # + #attribute_requirements: + # - attribute: userGroup + # value: "staff" + # - attribute: department + # value: "sales" + # Directory in which Synapse will try to find the template files below. # If not set, default templates from within the Synapse package will be used. # diff --git a/synapse/config/_util.py b/synapse/config/_util.py new file mode 100644 index 0000000000..cd31b1c3c9 --- /dev/null +++ b/synapse/config/_util.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Any, List + +import jsonschema + +from synapse.config._base import ConfigError +from synapse.types import JsonDict + + +def validate_config(json_schema: JsonDict, config: Any, config_path: List[str]) -> None: + """Validates a config setting against a JsonSchema definition + + This can be used to validate a section of the config file against a schema + definition. If the validation fails, a ConfigError is raised with a textual + description of the problem. + + Args: + json_schema: the schema to validate against + config: the configuration value to be validated + config_path: the path within the config file. This will be used as a basis + for the error message. + """ + try: + jsonschema.validate(config, json_schema) + except jsonschema.ValidationError as e: + # copy `config_path` before modifying it. 
+        path = list(config_path)
+        for p in list(e.path):
+            if isinstance(p, int):
+                path.append("<item %i>" % p)
+            else:
+                path.append(str(p))
+
+        raise ConfigError(
+            "Unable to parse configuration: %s at %s" % (e.message, ".".join(path))
+        )
diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py
index 293643b2de..9277b5f342 100644
--- a/synapse/config/saml2_config.py
+++ b/synapse/config/saml2_config.py
@@ -15,7 +15,9 @@
 # limitations under the License.
 
 import logging
+from typing import Any, List
 
+import attr
 import jinja2
 import pkg_resources
 
@@ -23,6 +25,7 @@ from synapse.python_dependencies import DependencyException, check_requirements
 from synapse.util.module_loader import load_module, load_python_module
 
 from ._base import Config, ConfigError
+from ._util import validate_config
 
 logger = logging.getLogger(__name__)
 
@@ -80,6 +83,11 @@ class SAML2Config(Config):
 
         self.saml2_enabled = True
 
+        attribute_requirements = saml2_config.get("attribute_requirements") or []
+        self.attribute_requirements = _parse_attribute_requirements_def(
+            attribute_requirements
+        )
+
         self.saml2_grandfathered_mxid_source_attribute = saml2_config.get(
             "grandfathered_mxid_source_attribute", "uid"
         )
@@ -341,6 +349,17 @@ class SAML2Config(Config):
           #
           #grandfathered_mxid_source_attribute: upn
 
+          # It is possible to configure Synapse to only allow logins if SAML attributes
+          # match particular values. The requirements can be listed under
+          # `attribute_requirements` as shown below. All of the listed attributes must
+          # match for the login to be permitted.
+          #
+          #attribute_requirements:
+          #  - attribute: userGroup
+          #    value: "staff"
+          #  - attribute: department
+          #    value: "sales"
+
           # Directory in which Synapse will try to find the template files below.
           # If not set, default templates from within the Synapse package will be used.
           #
@@ -368,3 +387,34 @@ class SAML2Config(Config):
         """ % {
             "config_dir_path": config_dir_path
         }
+
+
+@attr.s(frozen=True)
+class SamlAttributeRequirement:
+    """Object describing a single requirement for SAML attributes."""
+
+    attribute = attr.ib(type=str)
+    value = attr.ib(type=str)
+
+    JSON_SCHEMA = {
+        "type": "object",
+        "properties": {"attribute": {"type": "string"}, "value": {"type": "string"}},
+        "required": ["attribute", "value"],
+    }
+
+
+ATTRIBUTE_REQUIREMENTS_SCHEMA = {
+    "type": "array",
+    "items": SamlAttributeRequirement.JSON_SCHEMA,
+}
+
+
+def _parse_attribute_requirements_def(
+    attribute_requirements: Any,
+) -> List[SamlAttributeRequirement]:
+    validate_config(
+        ATTRIBUTE_REQUIREMENTS_SCHEMA,
+        attribute_requirements,
+        config_path=["saml2_config", "attribute_requirements"],
+    )
+    return [SamlAttributeRequirement(**x) for x in attribute_requirements]
diff --git a/synapse/handlers/saml_handler.py b/synapse/handlers/saml_handler.py
index 2d506dc1f2..c1fcb98454 100644
--- a/synapse/handlers/saml_handler.py
+++ b/synapse/handlers/saml_handler.py
@@ -14,15 +14,16 @@
 # limitations under the License.
import logging import re -from typing import Callable, Dict, Optional, Set, Tuple +from typing import TYPE_CHECKING, Callable, Dict, Optional, Set, Tuple import attr import saml2 import saml2.response from saml2.client import Saml2Client -from synapse.api.errors import SynapseError +from synapse.api.errors import AuthError, SynapseError from synapse.config import ConfigError +from synapse.config.saml2_config import SamlAttributeRequirement from synapse.http.servlet import parse_string from synapse.http.site import SynapseRequest from synapse.module_api import ModuleApi @@ -34,6 +35,9 @@ from synapse.types import ( from synapse.util.async_helpers import Linearizer from synapse.util.iterutils import chunk_seq +if TYPE_CHECKING: + import synapse.server + logger = logging.getLogger(__name__) @@ -49,7 +53,7 @@ class Saml2SessionData: class SamlHandler: - def __init__(self, hs): + def __init__(self, hs: "synapse.server.HomeServer"): self._saml_client = Saml2Client(hs.config.saml2_sp_config) self._auth = hs.get_auth() self._auth_handler = hs.get_auth_handler() @@ -62,6 +66,7 @@ class SamlHandler: self._grandfathered_mxid_source_attribute = ( hs.config.saml2_grandfathered_mxid_source_attribute ) + self._saml2_attribute_requirements = hs.config.saml2.attribute_requirements # plugin to do custom mapping from saml response to mxid self._user_mapping_provider = hs.config.saml2_user_mapping_provider_class( @@ -73,7 +78,7 @@ class SamlHandler: self._auth_provider_id = "saml" # a map from saml session id to Saml2SessionData object - self._outstanding_requests_dict = {} + self._outstanding_requests_dict = {} # type: Dict[str, Saml2SessionData] # a lock on the mappings self._mapping_lock = Linearizer(name="saml_mapping", clock=self._clock) @@ -165,11 +170,18 @@ class SamlHandler: saml2.BINDING_HTTP_POST, outstanding=self._outstanding_requests_dict, ) + except saml2.response.UnsolicitedResponse as e: + # the pysaml2 library helpfully logs an ERROR here, but neglects to log + # the session ID. I don't really want to put the full text of the exception + # in the (user-visible) exception message, so let's log the exception here + # so we can track down the session IDs later. + logger.warning(str(e)) + raise SynapseError(400, "Unexpected SAML2 login.") except Exception as e: - raise SynapseError(400, "Unable to parse SAML2 response: %s" % (e,)) + raise SynapseError(400, "Unable to parse SAML2 response: %s." 
% (e,))
 
         if saml2_auth.not_signed:
-            raise SynapseError(400, "SAML2 response was not signed")
+            raise SynapseError(400, "SAML2 response was not signed.")
 
         logger.debug("SAML2 response: %s", saml2_auth.origxml)
         for assertion in saml2_auth.assertions:
@@ -188,6 +200,9 @@ class SamlHandler:
             saml2_auth.in_response_to, None
         )
 
+        for requirement in self._saml2_attribute_requirements:
+            _check_attribute_requirement(saml2_auth.ava, requirement)
+
         remote_user_id = self._user_mapping_provider.get_remote_user_id(
             saml2_auth, client_redirect_url
         )
@@ -294,6 +309,21 @@ class SamlHandler:
         del self._outstanding_requests_dict[reqid]
 
 
+def _check_attribute_requirement(ava: dict, req: SamlAttributeRequirement):
+    values = ava.get(req.attribute, [])
+    for v in values:
+        if v == req.value:
+            return
+
+    logger.info(
+        "SAML2 attribute %s did not match required value '%s' (was '%s')",
+        req.attribute,
+        req.value,
+        values,
+    )
+    raise AuthError(403, "You are not authorized to log in here.")
+
+
 DOT_REPLACE_PATTERN = re.compile(
     ("[^%s]" % (re.escape("".join(mxid_localpart_allowed_characters)),))
 )
diff --git a/synapse/res/templates/saml_error.html b/synapse/res/templates/saml_error.html
index bfd6449c5d..01cd9bdaf3 100644
--- a/synapse/res/templates/saml_error.html
+++ b/synapse/res/templates/saml_error.html
@@ -2,10 +2,17 @@
 <html lang="en">
 <head>
     <meta charset="UTF-8">
-    <title>SSO error</title>
+    <title>SSO login error</title>
 </head>
 <body>
-    <p>Oops! Something went wrong during authentication.</p>
+{# a 403 means we have actively rejected their login #}
+{% if code == 403 %}
+    <p>You are not allowed to log in here.</p>
+{% else %}
+    <p>
+        There was an error during authentication:
+    </p>
+    <div id="errormsg">{{ msg }}</div>
     <p>
If you are seeing this page after clicking a link sent to you via email, make
         sure you only click the confirmation link once, and that you open the
@@ -37,9 +44,9 @@
         // to print one.
         let errorDesc = new URLSearchParams(searchStr).get("error_description")
         if (errorDesc) {
-
-            document.getElementById("errormsg").innerText = ` ("${errorDesc}")`;
+            document.getElementById("errormsg").innerText = errorDesc;
         }
+{% endif %}
 
 </body>
-</html>
\ No newline at end of file
+</html>
-- cgit 1.5.1


From db131b6b22350ea73128ac69ea82a48a6cf78a20 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Tue, 11 Aug 2020 18:09:46 +0100
Subject: Change the default log config to reduce disk I/O and storage (#8040)

* Change default log config to buffer by default.

This batches up writes to the filesystem, which is more efficient for disk I/O.
This means that it can take some time for logs to get written to disk. Note
that ERROR logs (and above) immediately flush the buffer.

This only affects new installs, as we only write the log config if started
with `--generate-config` (in the same way we do for generating signing keys).

* Default to keeping last 4 days of logs.

This hopefully reduces the amount of logs kept for new servers. Keeping the
last 1GB of logs is likely overkill for new servers, but equally may not be
enough for busy ones.

Instead, we keep the last four days' worth of logs, enough so that admins can
investigate any problems that happened over e.g. a long weekend.
---
 changelog.d/8040.misc       |  1 +
 docs/sample_log_config.yaml | 41 ++++++++++++++++++++++++++++++++++++-----
 synapse/config/logger.py    | 41 ++++++++++++++++++++++++++++++++++++-----
 3 files changed, 73 insertions(+), 10 deletions(-)
 create mode 100644 changelog.d/8040.misc

(limited to 'synapse')

diff --git a/changelog.d/8040.misc b/changelog.d/8040.misc
new file mode 100644
index 0000000000..a126151392
--- /dev/null
+++ b/changelog.d/8040.misc
@@ -0,0 +1 @@
+Change the default log config to reduce disk I/O and storage for new servers.
diff --git a/docs/sample_log_config.yaml b/docs/sample_log_config.yaml
index 1a2739455e..403ac005ee 100644
--- a/docs/sample_log_config.yaml
+++ b/docs/sample_log_config.yaml
@@ -18,13 +18,29 @@ filters:
 
 handlers:
     file:
-        class: logging.handlers.RotatingFileHandler
+        class: logging.handlers.TimedRotatingFileHandler
         formatter: precise
        filename: /var/log/matrix-synapse/homeserver.log
-        maxBytes: 104857600
-        backupCount: 10
-        filters: [context]
+        when: midnight
+        backupCount: 3  # Does not include the current log file.
         encoding: utf8
+
+    # Default to buffering writes to log file for efficiency. This means that
+    # there will be a delay for INFO/DEBUG logs to get written, but WARNING/ERROR
+    # logs will still be flushed immediately.
+    buffer:
+        class: logging.handlers.MemoryHandler
+        filters: [context]
+        target: file
+        # The capacity is the number of log lines that are buffered before
+        # being written to disk. Increasing this will lead to better
+        # performance, at the expense of it taking longer for log lines to
+        # be written to disk.
+        capacity: 10
+        flushLevel: 30  # Flush for WARNING logs as well
+
+    # A handler that writes logs to stderr. Unused by default, but can be used
+    # instead of "buffer" and "file" in the logger handlers.
     console:
         class: logging.StreamHandler
         formatter: precise
@@ -36,8 +52,23 @@ loggers:
         # information such as access tokens.
         level: INFO
 
+    twisted:
+        # We send the twisted logging directly to the file handler,
+        # to work around https://github.com/matrix-org/synapse/issues/3471
+        # when using "buffer" logger. Use "console" to log to stderr instead.
Use "console" to log to stderr instead. + handlers: [file] + propagate: false + root: level: INFO - handlers: [file, console] + + # Write logs to the `buffer` handler, which will buffer them together in memory, + # then write them to a file. + # + # Replace "buffer" with "console" to log to stderr instead. (Note that you'll + # also need to update the configuation for the `twisted` logger above, in + # this case.) + # + handlers: [buffer] disable_existing_loggers: false diff --git a/synapse/config/logger.py b/synapse/config/logger.py index dd775a97e8..493e98462d 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -62,13 +62,29 @@ filters: handlers: file: - class: logging.handlers.RotatingFileHandler + class: logging.handlers.TimedRotatingFileHandler formatter: precise filename: ${log_file} - maxBytes: 104857600 - backupCount: 10 - filters: [context] + when: midnight + backupCount: 3 # Does not include the current log file. encoding: utf8 + + # Default to buffering writes to log file for efficiency. This means that + # will be a delay for INFO/DEBUG logs to get written, but WARNING/ERROR + # logs will still be flushed immediately. + buffer: + class: logging.handlers.MemoryHandler + filters: [context] + target: file + # The capacity is the number of log lines that are buffered before + # being written to disk. Increasing this will lead to better + # performance, at the expensive of it taking longer for log lines to + # be written to disk. + capacity: 10 + flushLevel: 30 # Flush for WARNING logs as well + + # A handler that writes logs to stderr. Unused by default, but can be used + # instead of "buffer" and "file" in the logger handlers. console: class: logging.StreamHandler formatter: precise @@ -80,9 +96,24 @@ loggers: # information such as access tokens. level: INFO + twisted: + # We send the twisted logging directly to the file handler, + # to work around https://github.com/matrix-org/synapse/issues/3471 + # when using "buffer" logger. Use "console" to log to stderr instead. + handlers: [file] + propagate: false + root: level: INFO - handlers: [file, console] + + # Write logs to the `buffer` handler, which will buffer them together in memory, + # then write them to a file. + # + # Replace "buffer" with "console" to log to stderr instead. (Note that you'll + # also need to update the configuation for the `twisted` logger above, in + # this case.) + # + handlers: [buffer] disable_existing_loggers: false """ -- cgit 1.5.1