summary refs log tree commit diff
path: root/synapse/storage/schema/delta/31
diff options
context:
space:
mode:
author: Erik Johnston <erik@matrix.org> 2016-04-21 16:41:39 +0100
committer: Erik Johnston <erik@matrix.org> 2016-04-21 16:56:14 +0100
commit: c877f0f0345f1ff6d329af2920d7d1a6b5659a86 (patch)
tree: 88c1372151b89c7ee5722627670eb3eb5a650f70 /synapse/storage/schema/delta/31
parent: pip install new python dependencies in jenkins.sh (diff)
download: synapse-c877f0f0345f1ff6d329af2920d7d1a6b5659a86.tar.xz
Optimise event_search in postgres
Diffstat (limited to 'synapse/storage/schema/delta/31')
-rw-r--r--synapse/storage/schema/delta/31/search_update.py65
1 files changed, 65 insertions, 0 deletions
diff --git a/synapse/storage/schema/delta/31/search_update.py b/synapse/storage/schema/delta/31/search_update.py
new file mode 100644
index 0000000000..46a3795d12
--- /dev/null
+++ b/synapse/storage/schema/delta/31/search_update.py
@@ -0,0 +1,65 @@
+# Copyright 2016 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.storage.engines import PostgresEngine
+from synapse.storage.prepare_database import get_statements
+
+import logging
+import ujson
+
+logger = logging.getLogger(__name__)
+
+
# DDL run by this delta (PostgreSQL only — see run_create below).  Adds the
# origin_server_ts / stream_ordering columns to event_search so search results
# can be ordered without joining back to the events table, plus two indexes
# covering the ordered-search access patterns.  The columns are backfilled by
# the "event_search_order" background update registered in run_create.
ALTER_TABLE = """
ALTER TABLE event_search ADD COLUMN origin_server_ts BIGINT;
ALTER TABLE event_search ADD COLUMN stream_ordering BIGINT;

CREATE INDEX event_search_room_order ON event_search(
    room_id, origin_server_ts, stream_ordering
);
CREATE INDEX event_search_order ON event_search(origin_server_ts, stream_ordering);
"""
+
+
def run_create(cur, database_engine, *args, **kwargs):
    """Apply the event_search ordering-column DDL and schedule its backfill.

    This delta is a no-op on anything other than PostgreSQL.  On PostgreSQL it
    adds the origin_server_ts/stream_ordering columns and indexes from
    ALTER_TABLE, then registers the "event_search_order" background update
    that will backfill those columns over the existing events.

    Args:
        cur: database cursor for the schema-upgrade transaction.
        database_engine: the DatabaseEngine in use.
    """
    if not isinstance(database_engine, PostgresEngine):
        return

    for statement in get_statements(ALTER_TABLE.splitlines()):
        cur.execute(statement)

    # Fetch both bounds in one round-trip instead of two separate
    # MIN/MAX queries; fetchone() suffices for a single-row aggregate.
    cur.execute("SELECT MIN(stream_ordering), MAX(stream_ordering) FROM events")
    min_stream_id, max_stream_id = cur.fetchone()

    # Both are NULL iff the events table is empty — nothing to backfill then.
    if min_stream_id is not None and max_stream_id is not None:
        progress = {
            "target_min_stream_id_inclusive": min_stream_id,
            "max_stream_id_exclusive": max_stream_id + 1,
            "rows_inserted": 0,
        }
        progress_json = ujson.dumps(progress)

        sql = (
            "INSERT into background_updates (update_name, progress_json)"
            " VALUES (?, ?)"
        )

        # Rewrite the '?' placeholders into the engine's paramstyle.
        sql = database_engine.convert_param_style(sql)

        cur.execute(sql, ("event_search_order", progress_json))