author     Erik Johnston <erik@matrix.org>    2016-04-21 18:01:49 +0100
committer  Erik Johnston <erik@matrix.org>    2016-04-21 18:01:49 +0100
commit     8fae3d7b1eea87b48db96f1671d850a4a247e926 (patch)
tree       d0605340c09aef751b80680d636fca22745e9863 /synapse/storage/search.py
parent     Typo (diff)
download   synapse-8fae3d7b1eea87b48db96f1671d850a4a247e926.tar.xz

    Use special UPDATE syntax
Diffstat:
 -rw-r--r--  synapse/storage/search.py | 32
 1 file changed, 12 insertions(+), 20 deletions(-)
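Note on the change: the old code ran a SELECT over events joined with event_search and then wrote the copied stream_ordering/origin_server_ts values back row by row with executemany in clumps. The new code folds both steps into one PostgreSQL statement using the UPDATE ... FROM (subquery) ... RETURNING form, so the rows are rewritten server-side and the RETURNING clause reports which stream orderings were touched. Below is a minimal standalone sketch of that pattern; the table and column names mirror the diff, but the psycopg2 wiring, the dsn argument and the reindex_batch helper are illustrative assumptions, not Synapse code. (Two deliberate differences from the diff: psycopg2 uses %s placeholders where Synapse uses ?, and PostgreSQL does not accept a table alias on the columns in the SET list, so the sketch writes SET stream_ordering = ... rather than SET es.stream_ordering = ...)

# Sketch only: single-statement backfill of event_search from events,
# assuming a PostgreSQL database reachable via `dsn`.
import psycopg2

def reindex_batch(dsn, target_min_stream_id, max_stream_id, batch_size):
    """Copy stream_ordering/origin_server_ts from events into event_search
    for one batch in a single UPDATE, and return the lowest stream ordering
    that was touched (or None if nothing matched)."""
    sql = """
        UPDATE event_search AS es
           SET stream_ordering  = e.stream_ordering,
               origin_server_ts = e.origin_server_ts
          FROM (
                SELECT stream_ordering, origin_server_ts, event_id
                  FROM events
                 WHERE %s <= stream_ordering AND stream_ordering < %s
                 ORDER BY stream_ordering DESC
                 LIMIT %s
               ) AS e
         WHERE e.event_id = es.event_id
     RETURNING es.stream_ordering
    """
    conn = psycopg2.connect(dsn)
    try:
        with conn.cursor() as cur:
            cur.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
            rows = cur.fetchall()  # one row per event_search row rewritten
            conn.commit()
            # RETURNING does not guarantee any ordering, so take the minimum
            # explicitly rather than relying on the subquery's ORDER BY.
            return min(r[0] for r in rows) if rows else None
    finally:
        conn.close()

The returned minimum plays the same role as min_stream_id in the diff below: it becomes the new max_stream_id_exclusive recorded in the background-update progress dict, so the next batch continues from where this one stopped.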
diff --git a/synapse/storage/search.py b/synapse/storage/search.py
index dc47425c23..813e1e90ac 100644
--- a/synapse/storage/search.py
+++ b/synapse/storage/search.py
@@ -169,34 +169,26 @@ class SearchStore(BackgroundUpdateStore):
                 self.EVENT_SEARCH_ORDER_UPDATE_NAME, progress,
             )
 
-        INSERT_CLUMP_SIZE = 1000
-
         def reindex_search_txn(txn):
-            sql = (
-                "SELECT e.stream_ordering, e.origin_server_ts, event_id FROM events as e"
-                " INNER JOIN event_search USING (room_id, event_id)"
-                " WHERE ? <= e.stream_ordering AND e.stream_ordering < ?"
-                " ORDER BY e.stream_ordering DESC"
+            events_sql = (
+                "SELECT stream_ordering, origin_server_ts, event_id FROM events"
+                " WHERE ? <= stream_ordering AND stream_ordering < ?"
+                " ORDER BY stream_ordering DESC"
                 " LIMIT ?"
             )
 
-            txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
+            sql = (
+                "UPDATE event_search AS es SET es.stream_ordering = e.stream_ordering,"
+                " es.origin_server_ts = e.origin_server_ts"
+                " FROM (%s) AS e"
+                " WHERE e.event_id = es.event_id"
+                " RETURNING es.stream_ordering"
+            ) % (events_sql,)
 
+            txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
             rows = txn.fetchall()
-            if not rows:
-                return 0
-
             min_stream_id = rows[-1][0]
 
-            sql = (
-                "UPDATE event_search SET stream_ordering = ?, origin_server_ts = ?"
-                " WHERE event_id = ?"
-            )
-
-            for index in range(0, len(rows), INSERT_CLUMP_SIZE):
-                clump = rows[index:index + INSERT_CLUMP_SIZE]
-                txn.executemany(sql, clump)
-
             progress = {
                 "target_min_stream_id_inclusive": target_min_stream_id,
                 "max_stream_id_exclusive": min_stream_id,