summary refs log tree commit diff
path: root/scripts
diff options
context:
space:
mode:
authorErik Johnston <erik@matrix.org>2020-11-13 11:53:51 +0000
committerGitHub <noreply@github.com>2020-11-13 11:53:51 +0000
commit1b15a3d92cbe1ee9475319ff81abe8760d6be19f (patch)
tree012224f584228dd8892a4803079ab0996a427326 /scripts
parentCache event ID to auth event IDs lookups (#8752) (diff)
downloadsynapse-1b15a3d92cbe1ee9475319ff81abe8760d6be19f.tar.xz
Fix port script so that it can be run again after failure. (#8755)
If the script fails (or is CTRL-C'ed) between porting some of the events table and copying the sequences, then the port script will immediately die if run again, due to the postgres DB having inconsistencies between sequences and tables.

The fix is to move the porting of sequences to before porting the tables, so that there is never a period where the Postgres DB is inconsistent. To do that we need to change how we port the sequences so that it calculates the values from the SQLite DB rather than the Postgres DB.

Fixes #8619 
Diffstat (limited to 'scripts')
-rwxr-xr-xscripts/synapse_port_db84
1 files changed, 54 insertions, 30 deletions
diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db
index 604b961bd2..5ad17aa90f 100755
--- a/scripts/synapse_port_db
+++ b/scripts/synapse_port_db
@@ -619,7 +619,18 @@ class Porter(object):
                 "create_port_table", create_port_table
             )
 
-            # Step 2. Get tables.
+            # Step 2. Set up sequences
+            #
+            # We do this before porting the tables so that even if we fail half
+            # way through the postgres DB always has sequences that are greater
+            # than their respective tables. If we don't then creating the
+            # `DataStore` object will fail due to the inconsistency.
+            self.progress.set_state("Setting up sequence generators")
+            await self._setup_state_group_id_seq()
+            await self._setup_user_id_seq()
+            await self._setup_events_stream_seqs()
+
+            # Step 3. Get tables.
             self.progress.set_state("Fetching tables")
             sqlite_tables = await self.sqlite_store.db_pool.simple_select_onecol(
                 table="sqlite_master", keyvalues={"type": "table"}, retcol="name"
@@ -634,7 +645,7 @@ class Porter(object):
             tables = set(sqlite_tables) & set(postgres_tables)
             logger.info("Found %d tables", len(tables))
 
-            # Step 3. Figure out what still needs copying
+            # Step 4. Figure out what still needs copying
             self.progress.set_state("Checking on port progress")
             setup_res = await make_deferred_yieldable(
                 defer.gatherResults(
@@ -651,7 +662,7 @@ class Porter(object):
             # of: `postgres_size`, `table_size`, `forward_chunk`, `backward_chunk`.
             tables_to_port_info_map = {r[0]: r[1:] for r in setup_res}
 
-            # Step 4. Do the copying.
+            # Step 5. Do the copying.
             #
             # This is slightly convoluted as we need to ensure tables are ported
             # in the correct order due to foreign key constraints.
@@ -685,12 +696,6 @@ class Porter(object):
 
                 tables_ported.update(tables_to_port)
 
-            # Step 5. Set up sequences
-            self.progress.set_state("Setting up sequence generators")
-            await self._setup_state_group_id_seq()
-            await self._setup_user_id_seq()
-            await self._setup_events_stream_seqs()
-
             self.progress.done()
         except Exception as e:
             global end_error_exec_info
@@ -848,43 +853,62 @@ class Porter(object):
 
         return done, remaining + done
 
-    def _setup_state_group_id_seq(self):
+    async def _setup_state_group_id_seq(self):
+        curr_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
+            table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
+        )
+
+        if not curr_id:
+            return
+
         def r(txn):
-            txn.execute("SELECT MAX(id) FROM state_groups")
-            curr_id = txn.fetchone()[0]
-            if not curr_id:
-                return
             next_id = curr_id + 1
             txn.execute("ALTER SEQUENCE state_group_id_seq RESTART WITH %s", (next_id,))
 
-        return self.postgres_store.db_pool.runInteraction("setup_state_group_id_seq", r)
+        await self.postgres_store.db_pool.runInteraction("setup_state_group_id_seq", r)
+
+    async def _setup_user_id_seq(self):
+        curr_id = await self.sqlite_store.db_pool.runInteraction(
+            "setup_user_id_seq", find_max_generated_user_id_localpart
+        )
 
-    def _setup_user_id_seq(self):
         def r(txn):
-            next_id = find_max_generated_user_id_localpart(txn) + 1
+            next_id = curr_id + 1
             txn.execute("ALTER SEQUENCE user_id_seq RESTART WITH %s", (next_id,))
 
         return self.postgres_store.db_pool.runInteraction("setup_user_id_seq", r)
 
-    def _setup_events_stream_seqs(self):
-        def r(txn):
-            txn.execute("SELECT MAX(stream_ordering) FROM events")
-            curr_id = txn.fetchone()[0]
-            if curr_id:
-                next_id = curr_id + 1
+    async def _setup_events_stream_seqs(self):
+        """Set the event stream sequences to the correct values.
+        """
+
+        # We get called before we've ported the events table, so we need to
+        # fetch the current positions from the SQLite store.
+        curr_forward_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
+            table="events", keyvalues={}, retcol="MAX(stream_ordering)", allow_none=True
+        )
+
+        curr_backward_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
+            table="events",
+            keyvalues={},
+            retcol="MAX(-MIN(stream_ordering), 1)",
+            allow_none=True,
+        )
+
+        def _setup_events_stream_seqs_set_pos(txn):
+            if curr_forward_id:
                 txn.execute(
-                    "ALTER SEQUENCE events_stream_seq RESTART WITH %s", (next_id,)
+                    "ALTER SEQUENCE events_stream_seq RESTART WITH %s",
+                    (curr_forward_id + 1,),
                 )
 
-            txn.execute("SELECT GREATEST(-MIN(stream_ordering), 1) FROM events")
-            curr_id = txn.fetchone()[0]
-            next_id = curr_id + 1
             txn.execute(
-                "ALTER SEQUENCE events_backfill_stream_seq RESTART WITH %s", (next_id,),
+                "ALTER SEQUENCE events_backfill_stream_seq RESTART WITH %s",
+                (curr_backward_id + 1,),
             )
 
-        return self.postgres_store.db_pool.runInteraction(
-            "_setup_events_stream_seqs", r
+        return await self.postgres_store.db_pool.runInteraction(
+            "_setup_events_stream_seqs", _setup_events_stream_seqs_set_pos,
         )