Diffstat (limited to 'synapse/storage')
-rw-r--r--  synapse/storage/databases/main/filtering.py                                  | 12
-rw-r--r--  synapse/storage/databases/main/profile.py                                    | 12
-rw-r--r--  synapse/storage/schema/__init__.py                                           |  5
-rw-r--r--  synapse/storage/schema/main/delta/78/01_validate_and_update_profiles.py      | 92
-rw-r--r--  synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py  | 95
5 files changed, 203 insertions, 13 deletions
diff --git a/synapse/storage/databases/main/filtering.py b/synapse/storage/databases/main/filtering.py
index f777777cbf..fff417f9e3 100644
--- a/synapse/storage/databases/main/filtering.py
+++ b/synapse/storage/databases/main/filtering.py
@@ -145,7 +145,7 @@ class FilteringWorkerStore(SQLBaseStore):
 
     @cached(num_args=2)
     async def get_user_filter(
-        self, user_localpart: str, filter_id: Union[int, str]
+        self, user_id: UserID, filter_id: Union[int, str]
     ) -> JsonDict:
         # filter_id is BIGINT UNSIGNED, so if it isn't a number, fail
         # with a coherent error message rather than 500 M_UNKNOWN.
@@ -156,7 +156,7 @@ class FilteringWorkerStore(SQLBaseStore):
 
         def_json = await self.db_pool.simple_select_one_onecol(
             table="user_filters",
-            keyvalues={"user_id": user_localpart, "filter_id": filter_id},
+            keyvalues={"full_user_id": user_id.to_string(), "filter_id": filter_id},
             retcol="filter_json",
             allow_none=False,
             desc="get_user_filter",
@@ -172,15 +172,15 @@ class FilteringWorkerStore(SQLBaseStore):
         def _do_txn(txn: LoggingTransaction) -> int:
             sql = (
                 "SELECT filter_id FROM user_filters "
-                "WHERE user_id = ? AND filter_json = ?"
+                "WHERE full_user_id = ? AND filter_json = ?"
             )
-            txn.execute(sql, (user_id.localpart, bytearray(def_json)))
+            txn.execute(sql, (user_id.to_string(), bytearray(def_json)))
             filter_id_response = txn.fetchone()
             if filter_id_response is not None:
                 return filter_id_response[0]
 
-            sql = "SELECT MAX(filter_id) FROM user_filters WHERE user_id = ?"
-            txn.execute(sql, (user_id.localpart,))
+            sql = "SELECT MAX(filter_id) FROM user_filters WHERE full_user_id = ?"
+            txn.execute(sql, (user_id.to_string(),))
             max_id = cast(Tuple[Optional[int]], txn.fetchone())[0]
             if max_id is None:
                 filter_id = 0
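
For reference, a minimal caller-side sketch of the updated filtering API (not part of the diff): `store` stands in for a FilteringWorkerStore instance and the user is illustrative, but `UserID.from_string`/`to_string` are the existing helpers from synapse.types.

    from synapse.types import UserID

    async def fetch_filter_example(store, filter_id: int):
        # Callers now pass the full Matrix ID; the store keys its lookup on the
        # new full_user_id column rather than the bare localpart.
        user = UserID.from_string("@alice:example.com")  # illustrative user
        return await store.get_user_filter(user, filter_id)
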
diff --git a/synapse/storage/databases/main/profile.py b/synapse/storage/databases/main/profile.py
index 21d54c7a7a..3ba9cc8853 100644
--- a/synapse/storage/databases/main/profile.py
+++ b/synapse/storage/databases/main/profile.py
@@ -137,11 +137,11 @@ class ProfileWorkerStore(SQLBaseStore):
 
         return 50
 
-    async def get_profileinfo(self, user_localpart: str) -> ProfileInfo:
+    async def get_profileinfo(self, user_id: UserID) -> ProfileInfo:
         try:
             profile = await self.db_pool.simple_select_one(
                 table="profiles",
-                keyvalues={"user_id": user_localpart},
+                keyvalues={"full_user_id": user_id.to_string()},
                 retcols=("displayname", "avatar_url"),
                 desc="get_profileinfo",
             )
@@ -156,18 +156,18 @@ class ProfileWorkerStore(SQLBaseStore):
             avatar_url=profile["avatar_url"], display_name=profile["displayname"]
         )
 
-    async def get_profile_displayname(self, user_localpart: str) -> Optional[str]:
+    async def get_profile_displayname(self, user_id: UserID) -> Optional[str]:
         return await self.db_pool.simple_select_one_onecol(
             table="profiles",
-            keyvalues={"user_id": user_localpart},
+            keyvalues={"full_user_id": user_id.to_string()},
             retcol="displayname",
             desc="get_profile_displayname",
         )
 
-    async def get_profile_avatar_url(self, user_localpart: str) -> Optional[str]:
+    async def get_profile_avatar_url(self, user_id: UserID) -> Optional[str]:
         return await self.db_pool.simple_select_one_onecol(
             table="profiles",
-            keyvalues={"user_id": user_localpart},
+            keyvalues={"full_user_id": user_id.to_string()},
             retcol="avatar_url",
             desc="get_profile_avatar_url",
         )
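
The profile accessors follow the same pattern; a hedged sketch (again, `store` is an assumed ProfileWorkerStore handle), with ProfileInfo's fields taken from the hunk above.

    from synapse.types import UserID

    async def fetch_profile_example(store):
        # Profile reads are now keyed on the full MXID as well.
        user = UserID.from_string("@alice:example.com")  # illustrative user
        info = await store.get_profileinfo(user)
        return info.display_name, info.avatar_url
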
diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py
index 5cc786f030..fc190a8b13 100644
--- a/synapse/storage/schema/__init__.py
+++ b/synapse/storage/schema/__init__.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-SCHEMA_VERSION = 77  # remember to update the list below when updating
+SCHEMA_VERSION = 78  # remember to update the list below when updating
 """Represents the expectations made by the codebase about the database schema
 
 This should be incremented whenever the codebase changes its requirements on the
@@ -103,6 +103,9 @@ Changes in SCHEMA_VERSION = 76:
 
 Changes in SCHEMA_VERSION = 77
     - (Postgres) Add NOT VALID CHECK (full_user_id IS NOT NULL) to tables profiles and user_filters
+
+Changes in SCHEMA_VERSION = 78
+    - Validate check (full_user_id IS NOT NULL) on tables profiles and user_filters
 """
 
 
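
A sketch of the two-phase Postgres pattern these changelog entries describe (constraint and table names taken from the deltas below; the NOT VALID half landed in schema 77 and is not part of this diff):

    def two_phase_not_null_sketch(cur):
        # Phase 1 (schema 77): attach the check as NOT VALID, so Postgres only
        # enforces it for new writes and does not scan existing rows.
        cur.execute(
            """
            ALTER TABLE profiles
                ADD CONSTRAINT full_user_id_not_null
                CHECK (full_user_id IS NOT NULL) NOT VALID
            """
        )
        # Phase 2 (schema 78, this diff): once the backfill is done, validate
        # the constraint; this scans the table but does not block concurrent
        # reads or writes.
        cur.execute("ALTER TABLE profiles VALIDATE CONSTRAINT full_user_id_not_null")
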
diff --git a/synapse/storage/schema/main/delta/78/01_validate_and_update_profiles.py b/synapse/storage/schema/main/delta/78/01_validate_and_update_profiles.py
new file mode 100644
index 0000000000..8398d8f548
--- /dev/null
+++ b/synapse/storage/schema/main/delta/78/01_validate_and_update_profiles.py
@@ -0,0 +1,92 @@
+# Copyright 2023 The Matrix.org Foundation C.I.C
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+from synapse.config.homeserver import HomeServerConfig
+from synapse.storage.database import LoggingTransaction
+from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine
+
+
+def run_upgrade(
+    cur: LoggingTransaction,
+    database_engine: BaseDatabaseEngine,
+    config: HomeServerConfig,
+) -> None:
+    """
+    Part 3 of a multi-step migration to drop the column `user_id` and replace it with
+    `full_user_id`. See the database schema docs for more information on the full
+    migration steps.
+    """
+    hostname = config.server.server_name
+
+    if isinstance(database_engine, PostgresEngine):
+        # check if the constraint can be validated
+        check_sql = """
+        SELECT user_id from profiles WHERE full_user_id IS NULL
+        """
+        cur.execute(check_sql)
+        res = cur.fetchall()
+
+        if res:
+            # there are rows the background job missed, finish them here before we validate the constraint
+            process_rows_sql = """
+            UPDATE profiles
+            SET full_user_id = '@' || user_id || ?
+            WHERE user_id IN (
+                SELECT user_id FROM profiles WHERE full_user_id IS NULL
+            )
+            """
+            cur.execute(process_rows_sql, (f":{hostname}",))
+
+        # Now we can validate
+        validate_sql = """
+        ALTER TABLE profiles VALIDATE CONSTRAINT full_user_id_not_null
+        """
+        cur.execute(validate_sql)
+
+    else:
+        # in SQLite we need to rewrite the table to add the constraint.
+        # First drop any temporary table that might be here from a previous failed migration.
+        cur.execute("DROP TABLE IF EXISTS temp_profiles")
+
+        create_sql = """
+        CREATE TABLE temp_profiles (
+            full_user_id text NOT NULL,
+            user_id text,
+            displayname text,
+            avatar_url text,
+            UNIQUE (full_user_id),
+            UNIQUE (user_id)
+        )
+        """
+        cur.execute(create_sql)
+
+        copy_sql = """
+        INSERT INTO temp_profiles (
+            user_id,
+            displayname,
+            avatar_url,
+            full_user_id)
+            SELECT user_id, displayname, avatar_url, '@' || user_id || ':' || ? FROM profiles
+        """
+        cur.execute(copy_sql, (f"{hostname}",))
+
+        drop_sql = """
+        DROP TABLE profiles
+        """
+        cur.execute(drop_sql)
+
+        rename_sql = """
+        ALTER TABLE temp_profiles RENAME to profiles
+        """
+        cur.execute(rename_sql)
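
A worked example (with an assumed hostname) of what the backfill UPDATE above computes for a single row; the concatenation mirrors `'@' || user_id || ?` with the parameter bound to f":{hostname}".

    hostname = "example.com"   # config.server.server_name (assumed value)
    user_id = "alice"          # legacy localpart stored in profiles.user_id
    full_user_id = "@" + user_id + f":{hostname}"
    assert full_user_id == "@alice:example.com"
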
diff --git a/synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py b/synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py
new file mode 100644
index 0000000000..8ef63335e7
--- /dev/null
+++ b/synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py
@@ -0,0 +1,95 @@
+# Copyright 2023 The Matrix.org Foundation C.I.C
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+from synapse.config.homeserver import HomeServerConfig
+from synapse.storage.database import LoggingTransaction
+from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine
+
+
+def run_upgrade(
+    cur: LoggingTransaction,
+    database_engine: BaseDatabaseEngine,
+    config: HomeServerConfig,
+) -> None:
+    """
+    Part 3 of a multi-step migration to drop the column `user_id` and replace it with
+    `full_user_id`. See the database schema docs for more information on the full
+    migration steps.
+    """
+    hostname = config.server.server_name
+
+    if isinstance(database_engine, PostgresEngine):
+        # check if the constraint can be validated
+        check_sql = """
+        SELECT user_id from user_filters WHERE full_user_id IS NULL
+        """
+        cur.execute(check_sql)
+        res = cur.fetchall()
+
+        if res:
+            # there are rows the background job missed; finish them here before we validate the constraint
+            process_rows_sql = """
+            UPDATE user_filters
+            SET full_user_id = '@' || user_id || ?
+            WHERE user_id IN (
+                SELECT user_id FROM user_filters WHERE full_user_id IS NULL
+            )
+            """
+            cur.execute(process_rows_sql, (f":{hostname}",))
+
+        # Now we can validate
+        validate_sql = """
+        ALTER TABLE user_filters VALIDATE CONSTRAINT full_user_id_not_null
+        """
+        cur.execute(validate_sql)
+
+    else:
+        cur.execute("DROP TABLE IF EXISTS temp_user_filters")
+        create_sql = """
+        CREATE TABLE temp_user_filters (
+            full_user_id text NOT NULL,
+            user_id text NOT NULL,
+            filter_id bigint NOT NULL,
+            filter_json bytea NOT NULL,
+            UNIQUE (full_user_id),
+            UNIQUE (user_id)
+        )
+        """
+        cur.execute(create_sql)
+
+        index_sql = """
+        CREATE UNIQUE INDEX IF NOT EXISTS user_filters_unique ON
+            temp_user_filters (user_id, filter_id)
+        """
+        cur.execute(index_sql)
+
+        copy_sql = """
+        INSERT INTO temp_user_filters (
+            user_id,
+            filter_id,
+            filter_json,
+            full_user_id)
+            SELECT user_id, filter_id, filter_json, '@' || user_id || ':' || ? FROM user_filters
+        """
+        cur.execute(copy_sql, (f"{hostname}",))
+
+        drop_sql = """
+        DROP TABLE user_filters
+        """
+        cur.execute(drop_sql)
+
+        rename_sql = """
+        ALTER TABLE temp_user_filters RENAME to user_filters
+        """
+        cur.execute(rename_sql)
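
Not part of the diff, but a hedged post-migration sanity check that works on either engine: after both deltas have run, neither table should retain a NULL `full_user_id`.

    def assert_backfill_complete(cur):
        # Purely illustrative; the deltas above already guarantee this invariant.
        for table in ("profiles", "user_filters"):
            cur.execute(f"SELECT count(*) FROM {table} WHERE full_user_id IS NULL")
            assert cur.fetchone()[0] == 0, f"{table} still has NULL full_user_id rows"
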