summary refs log tree commit diff
diff options
context:
space:
mode:
authorErik Johnston <erik@matrix.org>2020-02-19 11:37:35 +0000
committerGitHub <noreply@github.com>2020-02-19 11:37:35 +0000
commit099c96b89b54b58ecb9b6b6ed781f66f97dea112 (patch)
tree8ff31f99b13ae5693ca11757fff97ad0cd6e3f68
parentMerge pull request #6949 from matrix-org/rav/list_room_aliases_peekable (diff)
downloadsynapse-099c96b89b54b58ecb9b6b6ed781f66f97dea112.tar.xz
Revert `get_auth_chain_ids` changes (#6951)
-rw-r--r--changelog.d/6951.misc1
-rw-r--r--synapse/storage/data_stores/main/event_federation.py28
2 files changed, 1 insertion, 28 deletions
diff --git a/changelog.d/6951.misc b/changelog.d/6951.misc
new file mode 100644
index 0000000000..378f52f0a7
--- /dev/null
+++ b/changelog.d/6951.misc
@@ -0,0 +1 @@
+Revert #6937.
diff --git a/synapse/storage/data_stores/main/event_federation.py b/synapse/storage/data_stores/main/event_federation.py
index dcc375b840..60c67457b4 100644
--- a/synapse/storage/data_stores/main/event_federation.py
+++ b/synapse/storage/data_stores/main/event_federation.py
@@ -26,7 +26,6 @@ from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause
 from synapse.storage.data_stores.main.events_worker import EventsWorkerStore
 from synapse.storage.data_stores.main.signatures import SignatureWorkerStore
 from synapse.storage.database import Database
-from synapse.storage.engines import PostgresEngine
 from synapse.util.caches.descriptors import cached
 
 logger = logging.getLogger(__name__)
@@ -67,33 +66,6 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
         else:
             results = set()
 
-        if isinstance(self.database_engine, PostgresEngine):
-            # For efficiency we make the database do this if we can.
-
-            # We need to be a little careful with querying large amounts at
-            # once, for some reason postgres really doesn't like it. We do this
-            # by only asking for auth chain of 500 events at a time.
-            event_ids = list(event_ids)
-            chunks = [event_ids[x : x + 500] for x in range(0, len(event_ids), 500)]
-            for chunk in chunks:
-                sql = """
-                    WITH RECURSIVE auth_chain(event_id) AS (
-                        SELECT auth_id FROM event_auth WHERE event_id = ANY(?)
-                        UNION
-                        SELECT auth_id FROM event_auth
-                        INNER JOIN auth_chain USING (event_id)
-                    )
-                    SELECT event_id FROM auth_chain
-                """
-                txn.execute(sql, (chunk,))
-
-                results.update(event_id for event_id, in txn)
-
-            return list(results)
-
-        # Database doesn't necessarily support recursive CTE, so we fall
-        # back to do doing it manually.
-
         base_sql = "SELECT auth_id FROM event_auth WHERE "
 
         front = set(event_ids)