author     Erik Johnston <erik@matrix.org>   2016-05-03 16:01:24 +0100
committer  Erik Johnston <erik@matrix.org>   2016-05-04 10:16:46 +0100
commit     a438a6d2bca7fae3a4f1fa38b72b821c742cdc8a
tree       378301f592f41cc91ebee70ab501e87ad5faf057
parent     Merge pull request #758 from matrix-org/dbkr/fix_password_reset
download   synapse-a438a6d2bca7fae3a4f1fa38b72b821c742cdc8a.tar.xz
Implement basic ignore user
-rw-r--r--  synapse/api/filtering.py        |  8
-rw-r--r--  synapse/handlers/_base.py       | 26
-rw-r--r--  synapse/handlers/sync.py        | 22
-rw-r--r--  synapse/storage/account_data.py | 31
4 files changed, 78 insertions(+), 9 deletions(-)
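For context, the patch below reads the ignore list out of the m.ignored_user_list account data event, always via content.get("ignored_users", {}).keys(). A minimal sketch of the content shape that code assumes (the user ID is purely illustrative):

    # Hypothetical m.ignored_user_list content, matching what the handlers
    # below expect: "ignored_users" maps ignored user IDs to (empty) objects.
    ignored_user_list_content = {
        "ignored_users": {
            "@spammer:example.com": {},
        },
    }

    # The handlers reduce it to a set of user IDs:
    ignored = frozenset(ignored_user_list_content.get("ignored_users", {}).keys())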
diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py
index cd699ef27f..4f5a4281fa 100644
--- a/synapse/api/filtering.py
+++ b/synapse/api/filtering.py
@@ -15,6 +15,8 @@
 from synapse.api.errors import SynapseError
 from synapse.types import UserID, RoomID
 
+from twisted.internet import defer
+
 import ujson as json
 
 
@@ -24,10 +26,10 @@ class Filtering(object):
         super(Filtering, self).__init__()
         self.store = hs.get_datastore()
 
+    @defer.inlineCallbacks
     def get_user_filter(self, user_localpart, filter_id):
-        result = self.store.get_user_filter(user_localpart, filter_id)
-        result.addCallback(FilterCollection)
-        return result
+        result = yield self.store.get_user_filter(user_localpart, filter_id)
+        defer.returnValue(FilterCollection(result))
 
     def add_user_filter(self, user_localpart, user_filter):
         self.check_valid_filter(user_filter)
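The filtering.py hunk above swaps the manual addCallback chain for @defer.inlineCallbacks; callers still receive a Deferred that fires with a FilterCollection. A small sketch of how a caller consumes it, assuming standard Twisted inlineCallbacks semantics (the caller name is illustrative):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def load_filter(filtering, user_localpart, filter_id):
        # yield waits on the Deferred returned by get_user_filter and hands
        # back the FilterCollection built from the stored filter definition.
        filter_collection = yield filtering.get_user_filter(user_localpart, filter_id)
        defer.returnValue(filter_collection)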
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index 13a675b208..0912274e1a 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -84,7 +84,7 @@ class BaseHandler(object):
             events ([synapse.events.EventBase]): list of events to filter
         """
         forgotten = yield defer.gatherResults([
-            self.store.who_forgot_in_room(
+            preserve_fn(self.store.who_forgot_in_room)(
                 room_id,
             )
             for room_id in frozenset(e.room_id for e in events)
@@ -95,13 +95,33 @@ class BaseHandler(object):
             row["event_id"] for rows in forgotten for row in rows
         )
 
-        def allowed(event, user_id, is_peeking):
+        # Maps user_id -> account data content
+        ignore_dict_content = yield defer.gatherResults([
+            preserve_fn(self.store.get_global_account_data_by_type_for_user)(
+                user_id, "m.ignored_user_list"
+            ).addCallback(lambda d, u: (u, d), user_id)
+            for user_id, is_peeking in user_tuples
+        ]).addCallback(dict)
+
+        # FIXME: This will explode if people upload something incorrect.
+        ignore_dict = {
+            user_id: frozenset(
+                content.get("ignored_users", {}).keys() if content else []
+            )
+            for user_id, content in ignore_dict_content.items()
+        }
+
+        def allowed(event, user_id, is_peeking, ignore_list):
             """
             Args:
                 event (synapse.events.EventBase): event to check
                 user_id (str)
                 is_peeking (bool)
+                ignore_list (list): list of users to ignore
             """
+            if not event.is_state() and event.sender in ignore_list:
+                return False
+
             state = event_id_to_state[event.event_id]
 
             # get the room_visibility at the time of the event.
@@ -186,7 +206,7 @@ class BaseHandler(object):
             user_id: [
                 event
                 for event in events
-                if allowed(event, user_id, is_peeking)
+                if allowed(event, user_id, is_peeking, ignore_dict.get(user_id, []))
             ]
             for user_id, is_peeking in user_tuples
         })
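The _base.py hunk above gathers each user's m.ignored_user_list content in parallel, reduces it to a per-user frozenset of ignored user IDs, and then rejects non-state events whose sender is ignored; state events still pass so room state stays consistent for everyone. A standalone sketch of that visibility rule, with plain duck-typed events standing in for synapse.events.EventBase:

    def visible_to(events, ignore_list):
        # Keep state events unconditionally; drop other events whose sender
        # is in this user's ignore list (mirrors the allowed() check above).
        return [
            event
            for event in events
            if event.is_state() or event.sender not in ignore_list
        ]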
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 231140b655..27c3c1b525 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -247,6 +247,10 @@ class SyncHandler(BaseHandler):
             sync_config.user.to_string()
         )
 
+        ignored_users = account_data.get(
+            "m.ignored_user_list", {}
+        ).get("ignored_users", {}).keys()
+
         joined = []
         invited = []
         archived = []
@@ -267,6 +271,8 @@ class SyncHandler(BaseHandler):
                 )
                 joined.append(room_result)
             elif event.membership == Membership.INVITE:
+                if event.sender in ignored_users:
+                    return
                 invite = yield self.store.get_event(event.event_id)
                 invited.append(InvitedSyncResult(
                     room_id=event.room_id,
@@ -515,6 +521,15 @@ class SyncHandler(BaseHandler):
                 sync_config.user
             )
 
+        ignored_account_data = yield self.store.get_global_account_data_by_type_for_user(
+            user_id, "m.ignored_user_list"
+        )
+
+        if ignored_account_data:
+            ignored_users = ignored_account_data.get("ignored_users", {}).keys()
+        else:
+            ignored_users = frozenset()
+
         # Get a list of membership change events that have happened.
         rooms_changed = yield self.store.get_membership_changes_for_user(
             user_id, since_token.room_key, now_token.room_key
@@ -549,9 +564,10 @@ class SyncHandler(BaseHandler):
             # Only bother if we're still currently invited
             should_invite = non_joins[-1].membership == Membership.INVITE
             if should_invite:
-                room_sync = InvitedSyncResult(room_id, invite=non_joins[-1])
-                if room_sync:
-                    invited.append(room_sync)
+                if event.sender not in ignored_users:
+                    room_sync = InvitedSyncResult(room_id, invite=non_joins[-1])
+                    if room_sync:
+                        invited.append(room_sync)
 
             # Always include leave/ban events. Just take the last one.
             # TODO: How do we handle ban -> leave in same batch?
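The sync.py hunks above suppress InvitedSyncResult entries for invites sent by ignored users, in both the initial and incremental sync paths. For completeness, a hedged sketch of how a client would populate that ignore list in the first place, assuming the standard r0 account data endpoint (homeserver URL, token, and user IDs are illustrative):

    import requests

    def set_ignored_users(homeserver, access_token, user_id, ignored_user_ids):
        # PUT the m.ignored_user_list account data event; the sync code in
        # this commit picks it up and drops invites from those senders.
        url = "%s/_matrix/client/r0/user/%s/account_data/m.ignored_user_list" % (
            homeserver, user_id,
        )
        content = {"ignored_users": dict((uid, {}) for uid in ignored_user_ids)}
        resp = requests.put(url, params={"access_token": access_token}, json=content)
        resp.raise_for_status()
        return resp.json()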
diff --git a/synapse/storage/account_data.py b/synapse/storage/account_data.py
index 7a7fbf1e52..cc0b92bc89 100644
--- a/synapse/storage/account_data.py
+++ b/synapse/storage/account_data.py
@@ -16,6 +16,8 @@
 from ._base import SQLBaseStore
 from twisted.internet import defer
 
+from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
+
 import ujson as json
 import logging
 
@@ -24,6 +26,7 @@ logger = logging.getLogger(__name__)
 
 class AccountDataStore(SQLBaseStore):
 
+    @cached()
     def get_account_data_for_user(self, user_id):
         """Get all the client account_data for a user.
 
@@ -60,6 +63,28 @@ class AccountDataStore(SQLBaseStore):
             "get_account_data_for_user", get_account_data_for_user_txn
         )
 
+    @cachedInlineCallbacks(num_args=2)
+    def get_global_account_data_by_type_for_user(self, user_id, data_type):
+        """
+        Returns:
+            Deferred: A dict
+        """
+        result = yield self._simple_select_one_onecol(
+            table="account_data",
+            keyvalues={
+                "user_id": user_id,
+                "account_data_type": data_type,
+            },
+            retcol="content",
+            desc="get_global_account_data_by_type_for_user",
+            allow_none=True,
+        )
+
+        if result:
+            defer.returnValue(json.loads(result))
+        else:
+            defer.returnValue(None)
+
     def get_account_data_for_room(self, user_id, room_id):
         """Get all the client account_data for a user for a room.
 
@@ -193,6 +218,7 @@ class AccountDataStore(SQLBaseStore):
                 self._account_data_stream_cache.entity_has_changed,
                 user_id, next_id,
             )
+            txn.call_after(self.get_account_data_for_user.invalidate, (user_id,))
             self._update_max_stream_id(txn, next_id)
 
         with self._account_data_id_gen.get_next() as next_id:
@@ -232,6 +258,11 @@ class AccountDataStore(SQLBaseStore):
                 self._account_data_stream_cache.entity_has_changed,
                 user_id, next_id,
             )
+            txn.call_after(self.get_account_data_for_user.invalidate, (user_id,))
+            txn.call_after(
+                self.get_global_account_data_by_type_for_user.invalidate,
+                (user_id, account_data_type,)
+            )
             self._update_max_stream_id(txn, next_id)
 
         with self._account_data_id_gen.get_next() as next_id:
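Finally, the account_data.py hunk caches the two account data getters and registers the matching invalidations with txn.call_after, so the caches are only cleared once the surrounding database transaction has actually committed. A minimal sketch of that ordering concern, using a hypothetical stand-in for the real transaction object:

    class FakeTransaction(object):
        """Illustrative stand-in: queue callbacks, run them only on commit."""
        def __init__(self):
            self._after = []

        def call_after(self, fn, *args):
            # Defer the cache invalidation until the write is durable, so
            # readers never repopulate the cache from pre-commit state.
            self._after.append((fn, args))

        def commit(self):
            for fn, args in self._after:
                fn(*args)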