author     Erik Johnston <erik@matrix.org>  2015-11-19 13:04:28 +0000
committer  Erik Johnston <erik@matrix.org>  2015-11-19 13:04:28 +0000
commit     e5d91b8e57c132b54f9fb11027465f2da1e02904 (patch)
tree       05b74c1a87ee0a5693ee9748046e425b2891d2d7
parent     Merge pull request #381 from matrix-org/daniel/jenkins-sytest-cached (diff)
parent     Spelling (diff)
download   synapse-e5d91b8e57c132b54f9fb11027465f2da1e02904.tar.xz
Merge pull request #387 from matrix-org/erikj/fix_port_script
Fix database port script to work with new event_search table
-rwxr-xr-x  scripts/synapse_port_db  55
1 file changed, 44 insertions, 11 deletions
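
Why event_search needs the special case: in the SQLite schema the searchable text lives in a plain "value" column, while the Postgres schema keeps a precomputed "vector" column (a tsvector), so rows cannot be copied verbatim and the text has to be passed through to_tsvector() at insert time. Below is a minimal sketch of that per-chunk copy using plain sqlite3/psycopg2 connections instead of Synapse's store classes; the file name, dbname and LIMIT are assumptions, and the column names follow the diff that follows.

# Illustrative sketch only, not part of the commit: copying one batch of
# event_search rows by hand (connection details and chunk size are assumed).
import sqlite3
import psycopg2

sqlite_conn = sqlite3.connect("homeserver.db")
pg_conn = psycopg2.connect("dbname=synapse")

# SQLite keeps the searchable text verbatim in the "value" column.
rows = sqlite_conn.execute(
    "SELECT event_id, room_id, key, sender, value FROM event_search LIMIT 500"
).fetchall()

# Postgres expects a tsvector, so the text is converted server-side with
# to_tsvector() rather than copied as-is.
with pg_conn, pg_conn.cursor() as cur:
    cur.executemany(
        "INSERT INTO event_search (event_id, room_id, key, sender, vector)"
        " VALUES (%s, %s, %s, %s, to_tsvector('english', %s))",
        rows,
    )
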
diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db
index 62515997b1..d4772fcf6e 100755
--- a/scripts/synapse_port_db
+++ b/scripts/synapse_port_db
@@ -68,6 +68,7 @@ APPEND_ONLY_TABLES = [
     "state_groups_state",
     "event_to_state_groups",
     "rejections",
+    "event_search",
 ]
 
 
@@ -229,19 +230,51 @@ class Porter(object):
             if rows:
                 next_chunk = rows[-1][0] + 1
 
-                self._convert_rows(table, headers, rows)
+                if table == "event_search":
+                    # We have to treat event_search differently since it has a
+                    # different structure in the two different databases.
+                    def insert(txn):
+                        sql = (
+                            "INSERT INTO event_search (event_id, room_id, key, sender, vector)"
+                            " VALUES (?,?,?,?,to_tsvector('english', ?))"
+                        )
 
-                def insert(txn):
-                    self.postgres_store.insert_many_txn(
-                        txn, table, headers[1:], rows
-                    )
+                        rows_dict = [
+                            dict(zip(headers, row))
+                            for row in rows
+                        ]
+
+                        txn.executemany(sql, [
+                            (
+                                row["event_id"],
+                                row["room_id"],
+                                row["key"],
+                                row["sender"],
+                                row["value"],
+                            )
+                            for row in rows_dict
+                        ])
+
+                        self.postgres_store._simple_update_one_txn(
+                            txn,
+                            table="port_from_sqlite3",
+                            keyvalues={"table_name": table},
+                            updatevalues={"rowid": next_chunk},
+                        )
+                else:
+                    self._convert_rows(table, headers, rows)
 
-                    self.postgres_store._simple_update_one_txn(
-                        txn,
-                        table="port_from_sqlite3",
-                        keyvalues={"table_name": table},
-                        updatevalues={"rowid": next_chunk},
-                    )
+                    def insert(txn):
+                        self.postgres_store.insert_many_txn(
+                            txn, table, headers[1:], rows
+                        )
+
+                        self.postgres_store._simple_update_one_txn(
+                            txn,
+                            table="port_from_sqlite3",
+                            keyvalues={"table_name": table},
+                            updatevalues={"rowid": next_chunk},
+                        )
 
                 yield self.postgres_store.execute(insert)
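
Both branches finish by updating port_from_sqlite3 in the same transaction as the inserts, so the recorded rowid and the copied rows commit (or fail) together and an interrupted port can resume from next_chunk. A rough, hypothetical sketch of picking up from that checkpoint (simplified; not the script's exact queries):

# Illustrative sketch: resuming from the checkpoint that the insert
# transaction above keeps up to date (helper name and chunk size assumed).
def next_event_search_rows(sqlite_conn, pg_cur, chunk_size=500):
    # Where did the previous run get to for this table?
    pg_cur.execute(
        "SELECT rowid FROM port_from_sqlite3 WHERE table_name = 'event_search'"
    )
    row = pg_cur.fetchone()
    next_chunk = row[0] if row else 0

    # Pull the next chunk of not-yet-ported rows from SQLite, ordered by
    # rowid so the checkpoint only ever moves forward (rowid is the first
    # column, matching rows[-1][0] in the diff above).
    return sqlite_conn.execute(
        "SELECT rowid, event_id, room_id, key, sender, value"
        " FROM event_search WHERE rowid >= ? ORDER BY rowid LIMIT ?",
        (next_chunk, chunk_size),
    ).fetchall()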