ANALYZE new stream ordering column (#10326)
Fixes #10325
2 files changed, 11 insertions, 0 deletions
diff --git a/changelog.d/10326.bugfix b/changelog.d/10326.bugfix
new file mode 100644
index 0000000000..7ebda7cdc2
--- /dev/null
+++ b/changelog.d/10326.bugfix
@@ -0,0 +1 @@
+Fix a long-standing bug where Synapse would return errors after 2<sup>31</sup> events were handled by the server.
diff --git a/synapse/storage/databases/main/events_bg_updates.py b/synapse/storage/databases/main/events_bg_updates.py
index 1c95c66648..29f33bac55 100644
--- a/synapse/storage/databases/main/events_bg_updates.py
+++ b/synapse/storage/databases/main/events_bg_updates.py
@@ -1146,6 +1146,16 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):
logger.info("completing stream_ordering migration: %s", sql)
txn.execute(sql)
+ # ANALYZE the new column to build stats on it, to encourage PostgreSQL to use the
+ # indexes on it.
+ # We need to pass a dummy function to execute to handle the txn's result,
+ # otherwise it tries to call fetchall() on it and fails because there's no result to fetch.
+ await self.db_pool.execute(
+ "background_analyze_new_stream_ordering_column",
+ lambda txn: None,
+ "ANALYZE events(stream_ordering2)",
+ )
+
await self.db_pool.runInteraction(
"_background_replace_stream_ordering_column", process
)