summary refs log tree commit diff
path: root/synapse/storage/databases/main/event_federation.py
diff options
context:
space:
mode:
authorErik Johnston <erikj@element.io>2024-02-09 10:51:00 +0000
committerGitHub <noreply@github.com>2024-02-09 10:51:00 +0000
commit02a147039cc1694b1ea3d80636697b11e6a4461b (patch)
treefe346768c06747c8535d045222b18add3c05ebb2 /synapse/storage/databases/main/event_federation.py
parent1.101.0rc1 (diff)
downloadsynapse-02a147039cc1694b1ea3d80636697b11e6a4461b.tar.xz
Increase batching when fetching auth chains (#16893)
This basically reverts a change that was in
https://github.com/element-hq/synapse/pull/16833, where we reduced the
batching.

The smaller batch size can cause performance issues on busy servers and
databases.
Diffstat (limited to 'synapse/storage/databases/main/event_federation.py')
-rw-r--r--synapse/storage/databases/main/event_federation.py4
1 file changed, 2 insertions, 2 deletions
diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py
index 12e882062a..846c3f363a 100644
--- a/synapse/storage/databases/main/event_federation.py
+++ b/synapse/storage/databases/main/event_federation.py
@@ -310,7 +310,7 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
         # Add all linked chains reachable from initial set of chains.
         chains_to_fetch = set(event_chains.keys())
         while chains_to_fetch:
-            batch2 = tuple(itertools.islice(chains_to_fetch, 100))
+            batch2 = tuple(itertools.islice(chains_to_fetch, 1000))
             chains_to_fetch.difference_update(batch2)
             clause, args = make_in_list_sql_clause(
                 txn.database_engine, "origin_chain_id", batch2
@@ -593,7 +593,7 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
         # the loop)
         chains_to_fetch = set(seen_chains)
         while chains_to_fetch:
-            batch2 = tuple(itertools.islice(chains_to_fetch, 100))
+            batch2 = tuple(itertools.islice(chains_to_fetch, 1000))
             clause, args = make_in_list_sql_clause(
                 txn.database_engine, "origin_chain_id", batch2
             )