author     Erik Johnston <erik@matrix.org>  2016-04-21 17:16:11 +0100
committer  Erik Johnston <erik@matrix.org>  2016-04-21 17:16:11 +0100
commit     51bb339ab2130ab29ee9fcfec48d8e62f46c75f6 (patch)
tree       b2aa9fdbb39ea611fb79038a69b0274fe3f305ae /synapse/storage
parent     Add missing run_upgrade (diff)
download   synapse-51bb339ab2130ab29ee9fcfec48d8e62f46c75f6.tar.xz
Create index concurrently
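
PostgreSQL's CREATE INDEX CONCURRENTLY builds an index without holding a lock that blocks writes to event_search, but it is not allowed inside a transaction block. As a rough illustration of that constraint (not part of this commit; the psycopg2 usage and connection string are assumptions), the statements would be issued on an autocommit connection:

    # Sketch only: CREATE INDEX CONCURRENTLY must run outside a transaction
    # block on PostgreSQL, so autocommit is enabled before executing it.
    import psycopg2

    conn = psycopg2.connect("dbname=synapse")  # hypothetical DSN
    conn.autocommit = True
    with conn.cursor() as cur:
        cur.execute(
            "CREATE INDEX CONCURRENTLY event_search_room_order ON event_search("
            "room_id, origin_server_ts, stream_ordering)"
        )
        cur.execute(
            "CREATE INDEX CONCURRENTLY event_search_order ON event_search("
            "origin_server_ts, stream_ordering)"
        )
    conn.close()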
Diffstat (limited to 'synapse/storage')
-rw-r--r--  synapse/storage/schema/delta/31/search_update.py   6
-rw-r--r--  synapse/storage/search.py                          14
2 files changed, 14 insertions, 6 deletions
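
The search.py half of the change plugs into the background update machinery, which checkpoints a small JSON progress dict after each batch so the reindex (and now the index creation) survives a restart. A simplified, standalone sketch of that resume loop, using hypothetical load_progress/save_progress/process_batch helpers rather than Synapse's real API:

    # Sketch of the resumable background-update pattern; the three helpers
    # are hypothetical stand-ins for Synapse's storage layer.
    import json

    def run_background_update(load_progress, save_progress, process_batch):
        progress = json.loads(load_progress())
        target_min = progress["target_min_stream_id_inclusive"]
        while progress["max_stream_id_exclusive"] > target_min:
            # One bounded batch of work, driven by the persisted cursor.
            new_max, rows = process_batch(
                target_min, progress["max_stream_id_exclusive"]
            )
            progress = {
                "target_min_stream_id_inclusive": target_min,
                "max_stream_id_exclusive": new_max,
                "rows_inserted": progress.get("rows_inserted", 0) + rows,
                "have_added_indexes": True,
            }
            save_progress(json.dumps(progress))  # checkpoint survives a restart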
diff --git a/synapse/storage/schema/delta/31/search_update.py b/synapse/storage/schema/delta/31/search_update.py
index 989e990dbd..470ae0c005 100644
--- a/synapse/storage/schema/delta/31/search_update.py
+++ b/synapse/storage/schema/delta/31/search_update.py
@@ -24,11 +24,6 @@ logger = logging.getLogger(__name__)
 ALTER_TABLE = """
 ALTER TABLE event_search ADD COLUMN origin_server_ts BIGINT;
 ALTER TABLE event_search ADD COLUMN stream_ordering BIGINT;
-
-CREATE INDEX event_search_room_order ON event_search(
-    room_id, origin_server_ts, stream_ordering
-);
-CREATE INDEX event_search_order ON event_search(origin_server_ts, stream_ordering);
 """
 
 
@@ -52,6 +47,7 @@ def run_create(cur, database_engine, *args, **kwargs):
             "target_min_stream_id_inclusive": min_stream_id,
             "max_stream_id_exclusive": max_stream_id + 1,
             "rows_inserted": 0,
+            "have_added_indexes": False,
         }
         progress_json = ujson.dumps(progress)
 
diff --git a/synapse/storage/search.py b/synapse/storage/search.py
index 375057fa3e..548e9eeaef 100644
--- a/synapse/storage/search.py
+++ b/synapse/storage/search.py
@@ -141,10 +141,21 @@ class SearchStore(BackgroundUpdateStore):
         target_min_stream_id = progress["target_min_stream_id_inclusive"]
         max_stream_id = progress["max_stream_id_exclusive"]
         rows_inserted = progress.get("rows_inserted", 0)
+        have_added_index = progress['have_added_indexes']
 
         INSERT_CLUMP_SIZE = 1000
 
         def reindex_search_txn(txn):
+            if not have_added_index:
+                txn.execute(
+                    "CREATE INDEX CONCURRENTLY event_search_room_order ON event_search("
+                    "room_id, origin_server_ts, stream_ordering)"
+                )
+                txn.execute(
+                    "CREATE INDEX CONCURRENTLY event_search_order ON event_search("
+                    "origin_server_ts, stream_ordering)"
+                )
+
             sql = (
                 "SELECT stream_ordering, origin_server_ts, event_id FROM events"
                 " INNER JOIN event_search USING (room_id, event_id)"
@@ -173,7 +184,8 @@ class SearchStore(BackgroundUpdateStore):
             progress = {
                 "target_min_stream_id_inclusive": target_min_stream_id,
                 "max_stream_id_exclusive": min_stream_id,
-                "rows_inserted": rows_inserted + len(rows)
+                "rows_inserted": rows_inserted + len(rows),
+                "have_added_indexes": True,
             }
 
             self._background_update_progress_txn(