summary refs log tree commit diff
path: root/synapse/storage/search.py
diff options
context:
space:
mode:
authorErik Johnston <erik@matrix.org>2016-11-03 14:59:59 +0000
committerErik Johnston <erik@matrix.org>2016-11-03 15:00:03 +0000
commit8fd4d9129f17e9df42302269e4f73ae8417dde23 (patch)
tree494394e95b344e45453935b18f7de226a16d7320 /synapse/storage/search.py
parentMerge pull request #1191 from matrix-org/dbkr/non_ascii_passwords (diff)
downloadsynapse-8fd4d9129f17e9df42302269e4f73ae8417dde23.tar.xz
Replace postgres GIN with GIST
This is because GIN indexes can be slow to write to, especially when the table
gets large.
Diffstat (limited to '')
-rw-r--r--synapse/storage/search.py27
1 file changed, 27 insertions, 0 deletions
diff --git a/synapse/storage/search.py b/synapse/storage/search.py
index 12941d1775..eae90c2fd8 100644
--- a/synapse/storage/search.py
+++ b/synapse/storage/search.py
@@ -31,6 +31,7 @@ class SearchStore(BackgroundUpdateStore):
 
     EVENT_SEARCH_UPDATE_NAME = "event_search"
     EVENT_SEARCH_ORDER_UPDATE_NAME = "event_search_order"
+    EVENT_SEARCH_USE_GIST_POSTGRES_NAME = "event_search_postgres_gist"
 
     def __init__(self, hs):
         super(SearchStore, self).__init__(hs)
@@ -41,6 +42,10 @@ class SearchStore(BackgroundUpdateStore):
             self.EVENT_SEARCH_ORDER_UPDATE_NAME,
             self._background_reindex_search_order
         )
+        self.register_background_update_handler(
+            self.EVENT_SEARCH_USE_GIST_POSTGRES_NAME,
+            self._background_reindex_gist_search
+        )
 
     @defer.inlineCallbacks
     def _background_reindex_search(self, progress, batch_size):
@@ -140,6 +145,28 @@ class SearchStore(BackgroundUpdateStore):
         defer.returnValue(result)
 
     @defer.inlineCallbacks
+    def _background_reindex_gist_search(self, progress, batch_size):
+        def create_index(conn):
+            conn.rollback()
+            conn.set_session(autocommit=True)
+            c = conn.cursor()
+
+            # Build the replacement GIST index CONCURRENTLY so we don't block
+            # writes to event_search (this is why autocommit is needed above).
+            c.execute(
+                "CREATE INDEX CONCURRENTLY event_search_fts_idx_gist"
+                " ON event_search USING GIST (vector)"
+            )
+            c.execute("DROP INDEX event_search_fts_idx")
+            conn.set_session(autocommit=False)
+
+        if isinstance(self.database_engine, PostgresEngine):
+            yield self.runWithConnection(create_index)
+
+        yield self._end_background_update(self.EVENT_SEARCH_USE_GIST_POSTGRES_NAME)
+        defer.returnValue(1)
+
+    @defer.inlineCallbacks
     def _background_reindex_search_order(self, progress, batch_size):
         target_min_stream_id = progress["target_min_stream_id_inclusive"]
         max_stream_id = progress["max_stream_id_exclusive"]