From 19f0497ac319a7e6797a38d00e8aa1e5dd62948e Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Mon, 22 Jun 2015 18:06:13 +0100
Subject: Batch outside transactions

---
 synapse/storage/events.py | 33 +++++++++++++--------------------
 1 file changed, 13 insertions(+), 20 deletions(-)

diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index 29491dc947..2e7ef9a93b 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -70,13 +70,19 @@ class EventsStore(SQLBaseStore):
         for (event, _), stream in zip(events_and_contexts, stream_orderings):
             event.internal_metadata.stream_ordering = stream
 
-        yield self.runInteraction(
-            "persist_events",
-            self._persist_events_txn,
-            events_and_contexts=events_and_contexts,
-            backfilled=backfilled,
-            is_new_state=is_new_state,
-        )
+        chunks = [
+            events_and_contexts[x:x+100]
+            for x in xrange(0, len(events_and_contexts), 100)
+        ]
+
+        for chunk in chunks:
+            yield self.runInteraction(
+                "persist_events",
+                self._persist_events_txn,
+                events_and_contexts=chunk,
+                backfilled=backfilled,
+                is_new_state=is_new_state,
+            )
 
     @defer.inlineCallbacks
     @log_function
@@ -416,19 +422,6 @@ class EventsStore(SQLBaseStore):
     def _persist_events_txn(self, txn, events_and_contexts, backfilled,
                             is_new_state=True):
 
-        if len(events_and_contexts) > 100:
-            chunks = [
-                events_and_contexts[x:x+100]
-                for x in xrange(0, len(events_and_contexts), 100)
-            ]
-
-            for chunk in chunks:
-                self._persist_events_txn(
-                    txn,
-                    chunk, backfilled, is_new_state,
-                )
-            return
-
         # Remove the any existing cache entries for the event_ids
         for event, _ in events_and_contexts:
             txn.call_after(self._invalidate_get_event_cache, event.event_id)
-- 
cgit 1.4.1
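
The patch above moves the 100-event chunking out of _persist_events_txn (which previously
recursed on itself inside a single database transaction) and up into persist_events, so that
each chunk of at most 100 events is passed to its own runInteraction call, i.e. persisted in
its own transaction. Below is a minimal, standalone sketch of that pattern, under the
assumption that chunking happens before the transaction is opened; the names chunk_list,
persist_events_chunked, and run_interaction are illustrative only and are not part of the
Synapse codebase.

# Illustrative sketch only: the helpers here mirror the shape of the change,
# not Synapse's actual API.

def chunk_list(items, chunk_size=100):
    """Split `items` into consecutive chunks of at most `chunk_size` entries."""
    return [items[i:i + chunk_size] for i in range(0, len(items), chunk_size)]


def persist_events_chunked(events, run_interaction):
    """Persist `events` one chunk at a time, each chunk in its own transaction.

    `run_interaction` stands in for something like runInteraction in the patch:
    a callable that opens a transaction and invokes the supplied function in it.
    """
    for chunk in chunk_list(events, 100):
        run_interaction(_persist_chunk_txn, chunk)


def _persist_chunk_txn(txn, chunk):
    # Placeholder for the real per-transaction work (_persist_events_txn above).
    for event in chunk:
        txn.append(event)


if __name__ == "__main__":
    transactions = []

    def fake_run_interaction(func, *args):
        txn = []                  # stand-in for a database transaction
        func(txn, *args)
        transactions.append(txn)

    persist_events_chunked(list(range(250)), fake_run_interaction)
    print([len(t) for t in transactions])   # -> [100, 100, 50]

The effect, as the subject line "Batch outside transactions" suggests, is that a large batch
of events is split across several short transactions rather than handled by recursive calls
inside one long-running transaction.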