diff --git a/changelog.d/10754.misc b/changelog.d/10754.misc
new file mode 100644
index 0000000000..3b7acff03f
--- /dev/null
+++ b/changelog.d/10754.misc
@@ -0,0 +1 @@
+Minor speed ups when joining large rooms over federation.
diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index 95d2caff62..0084d9f96c 100644
--- a/synapse/storage/database.py
+++ b/synapse/storage/database.py
@@ -280,18 +280,18 @@ class LoggingTransaction:
else:
self.executemany(sql, args)
- def execute_values(self, sql: str, *args: Any) -> List[Tuple]:
+ def execute_values(self, sql: str, *args: Any, fetch: bool = True) -> List[Tuple]:
"""Corresponds to psycopg2.extras.execute_values. Only available when
using postgres.
- Always sets fetch=True when caling `execute_values`, so will return the
- results.
+ The `fetch` parameter must be set to False if the query does not return
+ rows (e.g. INSERTs).
"""
assert isinstance(self.database_engine, PostgresEngine)
from psycopg2.extras import execute_values # type: ignore
return self._do_execute(
- lambda *x: execute_values(self.txn, *x, fetch=True), sql, *args
+ lambda *x: execute_values(self.txn, *x, fetch=fetch), sql, *args
)
def execute(self, sql: str, *args: Any) -> None:
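For context, this is roughly the psycopg2 call that `LoggingTransaction.execute_values` wraps; the DSN and table below are hypothetical, and within Synapse the `?` placeholder style is converted to psycopg2's `%s` before the statement reaches the driver. The `fetch` flag decides whether psycopg2 tries to collect a result set afterwards:

import psycopg2
from psycopg2.extras import execute_values

conn = psycopg2.connect("dbname=demo user=demo")  # hypothetical DSN

with conn, conn.cursor() as cur:
    cur.execute("CREATE TABLE IF NOT EXISTS demo_kv (k TEXT PRIMARY KEY, v TEXT)")

    # fetch=False: a plain INSERT produces no result set, so asking psycopg2
    # to fetch afterwards would fail with "no results to fetch".
    execute_values(
        cur,
        "INSERT INTO demo_kv (k, v) VALUES %s",
        [("a", "1"), ("b", "2")],
        fetch=False,
    )

    # fetch=True: collect the rows when the statement does return some,
    # e.g. via a RETURNING clause.
    rows = execute_values(
        cur,
        "INSERT INTO demo_kv (k, v) VALUES %s ON CONFLICT (k) DO NOTHING RETURNING k",
        [("c", "3")],
        fetch=True,
    )
    print(rows)  # [('c',)]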
@@ -920,13 +920,23 @@ class DatabasePool:
if k != keys[0]:
raise RuntimeError("All items must have the same keys")
- sql = "INSERT INTO %s (%s) VALUES(%s)" % (
- table,
- ", ".join(k for k in keys[0]),
- ", ".join("?" for _ in keys[0]),
- )
+ if isinstance(txn.database_engine, PostgresEngine):
+ # We use `execute_values` as it can be a lot faster than `execute_batch`,
+ # but it's only available on postgres.
+ sql = "INSERT INTO %s (%s) VALUES ?" % (
+ table,
+ ", ".join(k for k in keys[0]),
+ )
- txn.execute_batch(sql, vals)
+ txn.execute_values(sql, vals, fetch=False)
+ else:
+ sql = "INSERT INTO %s (%s) VALUES(%s)" % (
+ table,
+ ", ".join(k for k in keys[0]),
+ ", ".join("?" for _ in keys[0]),
+ )
+
+ txn.execute_batch(sql, vals)
async def simple_upsert(
self,
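To make the new branch concrete, the snippet below (not part of the patch; table and column names are invented) shows the two SQL strings `simple_insert_many_txn` now builds for the same input. `execute_batch` re-runs a per-row statement for every tuple, whereas `execute_values` expands a single `VALUES ?` placeholder into one multi-row VALUES list, which is why it pairs with `fetch=False` and tends to be faster for large batches:

# Mirrors what simple_insert_many_txn derives from its `values` argument.
keys = [("event_id", "room_id", "type")]
vals = [
    ("$e1", "!room:example.com", "m.room.message"),
    ("$e2", "!room:example.com", "m.room.member"),
]
table = "demo_events"  # hypothetical table name

# Postgres path: one "?" placeholder for the whole batch; psycopg2's
# execute_values expands it into a single multi-row VALUES clause.
postgres_sql = "INSERT INTO %s (%s) VALUES ?" % (table, ", ".join(keys[0]))
# -> INSERT INTO demo_events (event_id, room_id, type) VALUES ?

# SQLite (and fallback) path: one "?" per column; execute_batch re-executes
# the statement for each tuple in `vals`.
sqlite_sql = "INSERT INTO %s (%s) VALUES(%s)" % (
    table,
    ", ".join(keys[0]),
    ", ".join("?" for _ in keys[0]),
)
# -> INSERT INTO demo_events (event_id, room_id, type) VALUES(?, ?, ?)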
@@ -1281,20 +1291,33 @@ class DatabasePool:
k + "=EXCLUDED." + k for k in value_names
)
- sql = "INSERT INTO %s (%s) VALUES (%s) ON CONFLICT (%s) DO %s" % (
- table,
- ", ".join(k for k in allnames),
- ", ".join("?" for _ in allnames),
- ", ".join(key_names),
- latter,
- )
-
args = []
for x, y in zip(key_values, value_values):
args.append(tuple(x) + tuple(y))
- return txn.execute_batch(sql, args)
+ if isinstance(txn.database_engine, PostgresEngine):
+ # We use `execute_values` as it can be a lot faster than `execute_batch`,
+ # but it's only available on postgres.
+ sql = "INSERT INTO %s (%s) VALUES ? ON CONFLICT (%s) DO %s" % (
+ table,
+ ", ".join(k for k in allnames),
+ ", ".join(key_names),
+ latter,
+ )
+
+ txn.execute_values(sql, args, fetch=False)
+
+ else:
+ sql = "INSERT INTO %s (%s) VALUES (%s) ON CONFLICT (%s) DO %s" % (
+ table,
+ ", ".join(k for k in allnames),
+ ", ".join("?" for _ in allnames),
+ ", ".join(key_names),
+ latter,
+ )
+
+ return txn.execute_batch(sql, args)
@overload
async def simple_select_one(
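The upsert variant follows the same pattern. Below is a sketch (hypothetical table and columns) of the statement the postgres branch produces, plus the row layout expected by the `args` list built above, with key columns first and value columns after:

table = "demo_profiles"  # hypothetical
key_names = ["user_id"]
value_names = ["displayname", "avatar_url"]
allnames = key_names + value_names
latter = "UPDATE SET " + ", ".join(k + "=EXCLUDED." + k for k in value_names)

sql = "INSERT INTO %s (%s) VALUES ? ON CONFLICT (%s) DO %s" % (
    table,
    ", ".join(allnames),
    ", ".join(key_names),
    latter,
)
# -> INSERT INTO demo_profiles (user_id, displayname, avatar_url) VALUES ?
#    ON CONFLICT (user_id) DO UPDATE SET displayname=EXCLUDED.displayname,
#    avatar_url=EXCLUDED.avatar_url

# Each args entry is one row: key columns first, then value columns, matching
# the zip(key_values, value_values) loop above.
args = [("@alice:example.com", "Alice", None)]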