author     Mark Haines <mark.haines@matrix.org>  2016-01-13 11:46:07 +0000
committer  Mark Haines <mark.haines@matrix.org>  2016-01-13 11:46:07 +0000
commit     f4dad9f63995c8aa493ef884f793ee0155549a50 (patch)
tree       879daa5a1d430dadc74df56bbf1c97136381b59f
parent     Delete the table objects from TransactionStore (diff)
parent     bulk_get_push_rules should handle empty lists (diff)
download   synapse-f4dad9f63995c8aa493ef884f793ee0155549a50.tar.xz
Merge remote-tracking branch 'origin/erikj/bulk_get_push_rules' into markjh/table_name
Conflicts:
	synapse/storage/push_rule.py
-rw-r--r--  synapse/handlers/events.py     7
-rw-r--r--  synapse/handlers/sync.py       6
-rw-r--r--  synapse/storage/push_rule.py  26
3 files changed, 13 insertions, 26 deletions
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index 28c674730e..c73eec2b91 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -36,10 +36,6 @@ def stopped_user_eventstream(distributor, user):
     return distributor.fire("stopped_user_eventstream", user)
 
 
-def user_joined_room(distributor, user, room_id):
-    return distributor.fire("user_joined_room", user, room_id)
-
-
 class EventStreamHandler(BaseHandler):
 
     def __init__(self, hs):
@@ -136,9 +132,6 @@ class EventStreamHandler(BaseHandler):
                 # thundering herds on restart.
                 timeout = random.randint(int(timeout*0.9), int(timeout*1.1))
 
-            if is_guest:
-                yield user_joined_room(self.distributor, auth_user, room_id)
-
             events, tokens = yield self.notifier.get_events_for(
                 auth_user, pagin_config, timeout,
                 only_room_events=only_room_events,
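
The events.py hunks delete the guest-only user_joined_room firing from EventStreamHandler, leaving only the notifier call. For context on what the removed helper did, here is a minimal, illustrative sketch of a fire-and-observe distributor; it is a simplified stand-in, not Synapse's Distributor implementation:

    # Simplified stand-in for Synapse's signal distributor (illustrative only).
    class Distributor(object):
        def __init__(self):
            self._observers = {}

        def observe(self, name, observer):
            # Register a callback under a named signal.
            self._observers.setdefault(name, []).append(observer)

        def fire(self, name, *args):
            # Call every observer registered for the signal, in order.
            return [observer(*args) for observer in self._observers.get(name, [])]

    # The removed helper was a thin wrapper around fire(), i.e.
    #     distributor.fire("user_joined_room", user, room_id)
    # which the guest branch of the stream handler no longer calls.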
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 27fdbe28ee..33c1a4512c 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -585,7 +585,8 @@ class SyncHandler(BaseHandler):
                 sync_config, leave_event, since_token, tags_by_room,
                 account_data_by_room
             )
-            archived.append(room_sync)
+            if room_sync:
+                archived.append(room_sync)
 
         invited = [
             InvitedSyncResult(room_id=event.room_id, invite=event)
@@ -726,6 +727,9 @@ class SyncHandler(BaseHandler):
 
         leave_token = since_token.copy_and_replace("room_key", stream_token)
 
+        if since_token.is_after(leave_token):
+            defer.returnValue(None)
+
         batch = yield self.load_filtered_recents(
             leave_event.room_id, sync_config, leave_token, since_token,
         )
diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py
index a4dde1aac0..448009b4b6 100644
--- a/synapse/storage/push_rule.py
+++ b/synapse/storage/push_rule.py
@@ -62,13 +62,14 @@ class PushRuleStore(SQLBaseStore):
 
     @defer.inlineCallbacks
     def bulk_get_push_rules(self, user_ids):
+        if not user_ids:
+            defer.returnValue({})
+
         batch_size = 100
 
         def f(txn, user_ids_to_fetch):
             sql = (
-                "SELECT"
-                "  pr.user_name, pr.rule_id, priority_class, priority,"
-                "  conditions, actions"
+                "SELECT pr.*"
                 " FROM push_rules AS pr"
                 " LEFT JOIN push_rules_enable AS pre"
                 " ON pr.user_name = pre.user_name AND pr.rule_id = pre.rule_id"
@@ -78,29 +79,18 @@ class PushRuleStore(SQLBaseStore):
                 " ORDER BY pr.user_name, pr.priority_class DESC, pr.priority DESC"
             )
             txn.execute(sql, user_ids_to_fetch)
-            return txn.fetchall()
+            return self.cursor_to_dict(txn)
 
         results = {}
 
-        batch_start = 0
-        while batch_start < len(user_ids):
-            batch_end = min(len(user_ids), batch_size)
-            batch_user_ids = user_ids[batch_start:batch_end]
-            batch_start = batch_end
-
+        chunks = [user_ids[i:i+batch_size] for i in xrange(0, len(user_ids), batch_size)]
+        for batch_user_ids in chunks:
             rows = yield self.runInteraction(
                 "bulk_get_push_rules", f, batch_user_ids
             )
 
-            cols = (
-                "user_name", "rule_id", "priority_class", "priority",
-                "conditions", "actions",
-            )
-
             for row in rows:
-                rawdict = dict(zip(cols, rows))
-                results.setdefault(rawdict["user_name"], []).append(rawdict)
-
+                results.setdefault(row['user_name'], []).append(row)
         defer.returnValue(results)
 
     @defer.inlineCallbacks
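
The push_rule.py rewrite short-circuits on an empty user_ids list, selects whole rows and converts them with cursor_to_dict, and replaces the hand-rolled batching loop (whose end index ignored batch_start, and which zipped the column names against the full result list rather than each row) with a slice-based chunker. A minimal sketch of that chunk-and-group idiom in plain Python, with no database; chunk and group_by_user are illustrative names, not Synapse functions:

    def chunk(items, size):
        # Split a list into consecutive slices of at most `size` elements.
        return [items[i:i + size] for i in range(0, len(items), size)]

    def group_by_user(rows):
        # Collect row dicts under their user_name, preserving row order.
        results = {}
        for row in rows:
            results.setdefault(row["user_name"], []).append(row)
        return results

    # Example: batch size 2 over five user ids, then group two fake rows.
    assert chunk(["@a:hs", "@b:hs", "@c:hs", "@d:hs", "@e:hs"], 2) == [
        ["@a:hs", "@b:hs"], ["@c:hs", "@d:hs"], ["@e:hs"],
    ]
    rows = [
        {"user_name": "@a:hs", "rule_id": "r1"},
        {"user_name": "@a:hs", "rule_id": "r2"},
    ]
    assert group_by_user(rows) == {"@a:hs": rows}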