author    Erik Johnston <erik@matrix.org>  2016-04-22 09:37:16 +0100
committer Erik Johnston <erik@matrix.org>  2016-04-22 10:01:57 +0100
commit    183cacac90ca237b448da244270d55920470389b (patch)
tree      9fb099d9986b135a555018d4b35b302b5c2c8be1 /synapse/storage/search.py
parent    Order NULLs first (diff)
download  synapse-183cacac90ca237b448da244270d55920470389b.tar.xz
Simplify query and handle finishing correctly
Diffstat (limited to 'synapse/storage/search.py')
-rw-r--r--  synapse/storage/search.py  30
1 files changed, 14 insertions, 16 deletions
diff --git a/synapse/storage/search.py b/synapse/storage/search.py
index 2c71db8c96..0224299625 100644
--- a/synapse/storage/search.py
+++ b/synapse/storage/search.py
@@ -169,28 +169,26 @@ class SearchStore(BackgroundUpdateStore):
             yield self.runInteraction(
                 self.EVENT_SEARCH_ORDER_UPDATE_NAME,
                 self._background_update_progress_txn,
-                self.EVENT_SEARCH_ORDER_UPDATE_NAME, progress,
+                self.EVENT_SEARCH_ORDER_UPDATE_NAME, pg,
             )
 
         def reindex_search_txn(txn):
-            events_sql = (
-                "SELECT stream_ordering, origin_server_ts, event_id FROM events"
-                " WHERE ? <= stream_ordering AND stream_ordering < ?"
-                " ORDER BY stream_ordering DESC"
-                " LIMIT ?"
-            )
-
             sql = (
                 "UPDATE event_search AS es SET stream_ordering = e.stream_ordering,"
                 " origin_server_ts = e.origin_server_ts"
-                " FROM (%s) AS e"
+                " FROM events AS e"
                 " WHERE e.event_id = es.event_id"
+                " AND ? <= e.stream_ordering AND e.stream_ordering < ?"
                 " RETURNING es.stream_ordering"
-            ) % (events_sql,)
+            )
 
-            txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
+            min_stream_id = max_stream_id - batch_size
+            txn.execute(sql, (min_stream_id, max_stream_id))
             rows = txn.fetchall()
-            min_stream_id = rows[-1][0]
+
+            if min_stream_id < target_min_stream_id:
+                # We've reached the end.
+                return len(rows), False
 
             progress = {
                 "target_min_stream_id_inclusive": target_min_stream_id,
@@ -203,16 +201,16 @@ class SearchStore(BackgroundUpdateStore):
                 txn, self.EVENT_SEARCH_ORDER_UPDATE_NAME, progress
             )
 
-            return len(rows)
+            return len(rows), True
 
-        result = yield self.runInteraction(
+        num_rows, finished = yield self.runInteraction(
             self.EVENT_SEARCH_ORDER_UPDATE_NAME, reindex_search_txn
         )
 
-        if not result:
+        if not finished:
             yield self._end_background_update(self.EVENT_SEARCH_ORDER_UPDATE_NAME)
 
-        defer.returnValue(result)
+        defer.returnValue(num_rows)
 
     @defer.inlineCallbacks
     def search_msgs(self, room_ids, search_term, keys):
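
Below is a sketch of how the reindexing transaction reads once this patch is applied, assembled from the hunks above. Only the lines visible in the diff are taken verbatim; the progress keys other than "target_min_stream_id_inclusive" and the rows_inserted counter do not appear in these hunks and are assumed here for illustration.

    # Sketch only: reconstructed from the diff above; names marked
    # "assumed" are not shown in these hunks.
    def reindex_search_txn(txn):
        # A single UPDATE ... FROM join (Postgres syntax): the old subquery
        # over events is folded away and the batch window is applied
        # directly to e.stream_ordering.
        sql = (
            "UPDATE event_search AS es SET stream_ordering = e.stream_ordering,"
            " origin_server_ts = e.origin_server_ts"
            " FROM events AS e"
            " WHERE e.event_id = es.event_id"
            " AND ? <= e.stream_ordering AND e.stream_ordering < ?"
            " RETURNING es.stream_ordering"
        )

        # The lower bound is now computed from batch_size instead of being
        # read back from the last returned row, so an empty batch can no
        # longer stall the update.
        min_stream_id = max_stream_id - batch_size
        txn.execute(sql, (min_stream_id, max_stream_id))
        rows = txn.fetchall()

        if min_stream_id < target_min_stream_id:
            # We've reached the end.
            return len(rows), False

        progress = {
            "target_min_stream_id_inclusive": target_min_stream_id,
            "max_stream_id_exclusive": min_stream_id,     # assumed key
            "rows_inserted": rows_inserted + len(rows),   # assumed key
        }

        self._background_update_progress_txn(
            txn, self.EVENT_SEARCH_ORDER_UPDATE_NAME, progress
        )

        return len(rows), True

    num_rows, finished = yield self.runInteraction(
        self.EVENT_SEARCH_ORDER_UPDATE_NAME, reindex_search_txn
    )

    # A batch that returns finished == False has moved past
    # target_min_stream_id, so the background update is ended.
    if not finished:
        yield self._end_background_update(self.EVENT_SEARCH_ORDER_UPDATE_NAME)

    defer.returnValue(num_rows)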