 1 file changed, 13 insertions(+), 21 deletions(-)
diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py
index 0829262f42..24d137f8ca 100644
--- a/synapse/storage/push_rule.py
+++ b/synapse/storage/push_rule.py
@@ -57,43 +57,35 @@ class PushRuleStore(SQLBaseStore):
@defer.inlineCallbacks
def bulk_get_push_rules(self, user_ids):
+ if not user_ids:
+ defer.returnValue({})
+
batch_size = 100
def f(txn, user_ids_to_fetch):
sql = (
- "SELECT " +
- ",".join("pr."+x for x in PushRuleTable.fields) +
- " FROM " + PushRuleTable.table_name + " pr " +
- " LEFT JOIN " + PushRuleEnableTable.table_name + " pre " +
- " ON pr.user_name = pre.user_name and pr.rule_id = pre.rule_id " +
- " WHERE pr.user_name " +
+ "SELECT pr.*"
+ " FROM push_rules as pr "
+ " LEFT JOIN push_rules_enable as pre "
+ " ON pr.user_name = pre.user_name and pr.rule_id = pre.rule_id "
+ " WHERE pr.user_name "
" IN (" + ",".join("?" for _ in user_ids_to_fetch) + ")"
" AND (pre.enabled is null or pre.enabled = 1)"
" ORDER BY pr.user_name, pr.priority_class DESC, pr.priority DESC"
)
txn.execute(sql, user_ids_to_fetch)
- return txn.fetchall()
+ return self.cursor_to_dict(txn)
results = {}
- batch_start = 0
- while batch_start < len(user_ids):
- batch_end = min(len(user_ids), batch_size)
- batch_user_ids = user_ids[batch_start:batch_end]
- batch_start = batch_end
-
+ chunks = [user_ids[i:i+batch_size] for i in xrange(0, len(user_ids), batch_size)]
+ for batch_user_ids in chunks:
rows = yield self.runInteraction(
"bulk_get_push_rules", f, batch_user_ids
)
- for r in rows:
- rawdict = {
- PushRuleTable.fields[i]: r[i] for i in range(len(r))
- }
-
- if rawdict['user_name'] not in results:
- results[rawdict['user_name']] = []
- results[rawdict['user_name']].append(rawdict)
+ for row in rows:
+ results.setdefault(row['user_name'], []).append(row)
defer.returnValue(results)
@defer.inlineCallbacks
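
For reference, the batch-and-group pattern the new code adopts can be sketched outside of Twisted and SQL. This is a minimal illustration only, not the Synapse implementation: it assumes plain Python 3 (range rather than the xrange used in the diff), a hypothetical fetch_rows callable standing in for the runInteraction/SQL call, and in-memory dicts in place of cursor_to_dict rows.

    def bulk_group_by_user(user_ids, fetch_rows, batch_size=100):
        """Fetch rows for user_ids in fixed-size batches, grouped by user_name.

        fetch_rows is a stand-in for the database call in the diff: it takes a
        list of user ids and returns a list of dicts, each with a 'user_name'
        key (as cursor_to_dict would produce).
        """
        if not user_ids:
            return {}

        results = {}
        # Split user_ids into chunks of at most batch_size, mirroring the
        # list comprehension in the diff (xrange there, range here).
        chunks = [
            user_ids[i:i + batch_size]
            for i in range(0, len(user_ids), batch_size)
        ]
        for batch_user_ids in chunks:
            for row in fetch_rows(batch_user_ids):
                # setdefault replaces the explicit "if key not in results"
                # check that the old code used before appending.
                results.setdefault(row['user_name'], []).append(row)
        return results


    if __name__ == "__main__":
        # Toy fetcher standing in for the real parameterised SQL query.
        def fake_fetch(ids):
            return [{"user_name": uid, "rule_id": uid + "/rule"} for uid in ids]

        print(bulk_group_by_user(["@a:example.com", "@b:example.com"], fake_fetch))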