diff options
author | Erik Johnston <erik@matrix.org> | 2016-04-21 18:01:49 +0100 |
---|---|---|
committer | Erik Johnston <erik@matrix.org> | 2016-04-21 18:01:49 +0100 |
commit | 8fae3d7b1eea87b48db96f1671d850a4a247e926 (patch) | |
tree | d0605340c09aef751b80680d636fca22745e9863 | |
parent | Typo (diff) | |
download | synapse-8fae3d7b1eea87b48db96f1671d850a4a247e926.tar.xz |
Use special UPDATE syntax
 synapse/storage/schema/delta/31/search_update.py |  4 ++--
 synapse/storage/search.py                        | 32 ++++++++++---------------
 2 files changed, 14 insertions(+), 22 deletions(-)
diff --git a/synapse/storage/schema/delta/31/search_update.py b/synapse/storage/schema/delta/31/search_update.py
index 470ae0c005..2c15edd1a4 100644
--- a/synapse/storage/schema/delta/31/search_update.py
+++ b/synapse/storage/schema/delta/31/search_update.py
@@ -22,8 +22,8 @@ logger = logging.getLogger(__name__)
 
 ALTER_TABLE = """
-ALTER TABLE event_search ADD COLUMN origin_server_ts BIGINT;
-ALTER TABLE event_search ADD COLUMN stream_ordering BIGINT;
+ALTER TABLE event_search ADD COLUMN origin_server_ts BIGINT DEFAULT 0;
+ALTER TABLE event_search ADD COLUMN stream_ordering BIGINT DEFAULT 0;
 """
diff --git a/synapse/storage/search.py b/synapse/storage/search.py
index dc47425c23..813e1e90ac 100644
--- a/synapse/storage/search.py
+++ b/synapse/storage/search.py
@@ -169,34 +169,26 @@ class SearchStore(BackgroundUpdateStore):
             self.EVENT_SEARCH_ORDER_UPDATE_NAME, progress,
         )
 
-        INSERT_CLUMP_SIZE = 1000
-
         def reindex_search_txn(txn):
-            sql = (
-                "SELECT e.stream_ordering, e.origin_server_ts, event_id FROM events as e"
-                " INNER JOIN event_search USING (room_id, event_id)"
-                " WHERE ? <= e.stream_ordering AND e.stream_ordering < ?"
-                " ORDER BY e.stream_ordering DESC"
+            events_sql = (
+                "SELECT stream_ordering, origin_server_ts, event_id FROM events"
+                " WHERE ? <= stream_ordering AND stream_ordering < ?"
+                " ORDER BY stream_ordering DESC"
                 " LIMIT ?"
             )
 
-            txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
+            sql = (
+                "UPDATE event_search AS es SET es.stream_ordering = e.stream_ordering,"
+                " es.origin_server_ts = e.origin_server_ts"
+                " FROM (%s) AS e"
+                " WHERE e.event_id = es.event_id"
+                " RETURNING es.stream_ordering"
+            ) % (events_sql,)
 
+            txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
             rows = txn.fetchall()
-
             if not rows:
                 return 0
 
             min_stream_id = rows[-1][0]
 
-            sql = (
-                "UPDATE event_search SET stream_ordering = ?, origin_server_ts = ?"
-                " WHERE event_id = ?"
-            )
-
-            for index in range(0, len(rows), INSERT_CLUMP_SIZE):
-                clump = rows[index:index + INSERT_CLUMP_SIZE]
-                txn.executemany(sql, clump)
-
             progress = {
                 "target_min_stream_id_inclusive": target_min_stream_id,
                 "max_stream_id_exclusive": min_stream_id,