author     Richard van der Hoff <richard@matrix.org>  2018-10-22 16:12:11 +0100
committer  Richard van der Hoff <richard@matrix.org>  2018-10-22 16:12:11 +0100
commit     e7a16c6210191e9556fb1c11cbff2d98f0a5206c (patch)
tree       306f39afc6b054f22d04076278c44153865d8e08 /synapse/push
parent     Give pushers their own background logcontext (diff)
download   synapse-e7a16c6210191e9556fb1c11cbff2d98f0a5206c.tar.xz
Remove redundant run_as_background_process() from pusherpool
`on_new_notifications` and `on_new_receipts` in `HttpPusher` and `EmailPusher`
now always return synchronously, so we can remove the `defer.gatherResults` on
their results, and the `run_as_background_process` wrappers can be removed too
because the PusherPool methods will now complete quickly enough.
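
To illustrate the shape of the change: a minimal Twisted-only sketch of the old fan-out versus the new plain loop (`FakePusher`, `notify_all_old` and `notify_all_new` are illustrative stand-ins, not Synapse code, and `defer.maybeDeferred` stands in for Synapse's logcontext-aware `run_in_background`):

from twisted.internet import defer


class FakePusher(object):
    def on_new_notifications(self, min_stream_id, max_stream_id):
        # Now purely synchronous: kick off any work and return immediately.
        print("pusher woken for %d..%d" % (min_stream_id, max_stream_id))


@defer.inlineCallbacks
def notify_all_old(pushers, min_stream_id, max_stream_id):
    # Old shape: one Deferred per pusher, gathered before returning.
    deferreds = [
        defer.maybeDeferred(p.on_new_notifications, min_stream_id, max_stream_id)
        for p in pushers
    ]
    yield defer.gatherResults(deferreds, consumeErrors=True)


def notify_all_new(pushers, min_stream_id, max_stream_id):
    # New shape: the callbacks return synchronously, so a plain loop suffices.
    for p in pushers:
        p.on_new_notifications(min_stream_id, max_stream_id)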
Diffstat (limited to 'synapse/push')
-rw-r--r--  synapse/push/emailpusher.py |  3
-rw-r--r--  synapse/push/httppusher.py  |  2
-rw-r--r--  synapse/push/pusherpool.py  | 47
3 files changed, 8 insertions, 44 deletions
diff --git a/synapse/push/emailpusher.py b/synapse/push/emailpusher.py
index 0c9c0201e8..d5a99b838c 100644
--- a/synapse/push/emailpusher.py
+++ b/synapse/push/emailpusher.py
@@ -94,13 +94,12 @@ class EmailPusher(object):
     def on_new_notifications(self, min_stream_ordering, max_stream_ordering):
         self.max_stream_ordering = max(max_stream_ordering, self.max_stream_ordering)
         self._start_processing()
-        return defer.succeed(None)
 
     def on_new_receipts(self, min_stream_id, max_stream_id):
         # We could wake up and cancel the timer but there tend to be quite a
         # lot of read receipts so it's probably less work to just let the
         # timer fire
-        return defer.succeed(None)
+        pass
 
     def on_timer(self):
         self.timed_call = None
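
The `defer.succeed(None)` returns dropped above only ever mattered to callers that treated the result as a Deferred. As a hedged aside in plain Twisted (`EmailPusherStub` and `caller` are stand-ins, not Synapse code): yielding a plain `None` inside an `inlineCallbacks` function behaves like yielding an already-fired Deferred, so even a caller that still happened to yield the result would be unaffected:

from twisted.internet import defer, task


class EmailPusherStub(object):
    def on_new_receipts(self, min_stream_id, max_stream_id):
        # Lots of read receipts arrive, so just let the existing timer fire.
        pass  # implicitly returns None; nothing gathers the result any more


@defer.inlineCallbacks
def caller(pusher):
    # Yielding the plain None return value is a no-op inside inlineCallbacks,
    # so dropping the defer.succeed(None) wrapper does not change behaviour.
    result = yield pusher.on_new_receipts(0, 10)
    assert result is None


def main(reactor):
    return caller(EmailPusherStub())


if __name__ == "__main__":
    task.react(main)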
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py
index 5f6b21bc67..770f55feae 100644
--- a/synapse/push/httppusher.py
+++ b/synapse/push/httppusher.py
@@ -98,7 +98,6 @@ class HttpPusher(object):
     def on_new_notifications(self, min_stream_ordering, max_stream_ordering):
         self.max_stream_ordering = max(max_stream_ordering, self.max_stream_ordering or 0)
         self._start_processing()
-        return defer.succeed(None)
 
     def on_new_receipts(self, min_stream_id, max_stream_id):
         # Note that the min here shouldn't be relied upon to be accurate.
@@ -106,7 +105,6 @@ class HttpPusher(object):
         # We could check the receipts are actually m.read receipts here,
         # but currently that's the only type of receipt anyway...
         run_as_background_process("http_pusher.on_new_receipts", self._update_badge)
-        return defer.succeed(None)
 
     @defer.inlineCallbacks
     def _update_badge(self):
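
In the httppusher.py hunk above, `on_new_receipts` still launches `_update_badge` through `run_as_background_process` and returns straight away, so the trailing `defer.succeed(None)` carried no information. A hedged sketch of that fire-and-forget shape in plain Twisted (`update_badge` and `on_new_receipts` below are stand-ins and do not reproduce the logcontext/metrics handling of `run_as_background_process`):

import logging

from twisted.internet import defer

logger = logging.getLogger(__name__)


@defer.inlineCallbacks
def update_badge():
    # Stand-in for HttpPusher._update_badge: some asynchronous work.
    yield defer.succeed(None)
    logger.info("badge updated")


def on_new_receipts(min_stream_id, max_stream_id):
    # Start the background work but do not wait for it: the caller gets
    # control back immediately, so there is nothing useful to return.
    d = update_badge()
    d.addErrback(lambda f: logger.error("badge update failed: %s", f))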
diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py
index b9b68ec829..a4d1ce3aad 100644
--- a/synapse/push/pusherpool.py
+++ b/synapse/push/pusherpool.py
@@ -18,9 +18,8 @@ import logging
 
 from twisted.internet import defer
 
-from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.push.pusher import PusherFactory
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
+from synapse.util.logcontext import run_in_background
 
 logger = logging.getLogger(__name__)
 
@@ -122,45 +121,23 @@ class PusherPool:
                     p['app_id'], p['pushkey'], p['user_name'],
                 )
 
-    def on_new_notifications(self, min_stream_id, max_stream_id):
-        run_as_background_process(
-            "on_new_notifications",
-            self._on_new_notifications, min_stream_id, max_stream_id,
-        )
-
     @defer.inlineCallbacks
-    def _on_new_notifications(self, min_stream_id, max_stream_id):
+    def on_new_notifications(self, min_stream_id, max_stream_id):
         try:
             users_affected = yield self.store.get_push_action_users_in_range(
                 min_stream_id, max_stream_id
             )
 
-            deferreds = []
-
             for u in users_affected:
                 if u in self.pushers:
                     for p in self.pushers[u].values():
-                        deferreds.append(
-                            run_in_background(
-                                p.on_new_notifications,
-                                min_stream_id, max_stream_id,
-                            )
-                        )
-
-            yield make_deferred_yieldable(
-                defer.gatherResults(deferreds, consumeErrors=True),
-            )
+                        p.on_new_notifications(min_stream_id, max_stream_id)
+
         except Exception:
             logger.exception("Exception in pusher on_new_notifications")
 
-    def on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
-        run_as_background_process(
-            "on_new_receipts",
-            self._on_new_receipts, min_stream_id, max_stream_id, affected_room_ids,
-        )
-
     @defer.inlineCallbacks
-    def _on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
+    def on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
         try:
             # Need to subtract 1 from the minimum because the lower bound here
             # is not inclusive
@@ -170,21 +147,11 @@ class PusherPool:
             # This returns a tuple, user_id is at index 3
             users_affected = set([r[3] for r in updated_receipts])
 
-            deferreds = []
-
             for u in users_affected:
                 if u in self.pushers:
                     for p in self.pushers[u].values():
-                        deferreds.append(
-                            run_in_background(
-                                p.on_new_receipts,
-                                min_stream_id, max_stream_id,
-                            )
-                        )
-
-            yield make_deferred_yieldable(
-                defer.gatherResults(deferreds, consumeErrors=True),
-            )
+                        p.on_new_receipts(min_stream_id, max_stream_id)
+
         except Exception:
             logger.exception("Exception in pusher on_new_receipts")
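
With the wrappers gone, the PusherPool methods keep `@defer.inlineCallbacks` only for the single store lookup; after that one yield, notifying each pusher is an ordinary synchronous loop inside the existing try/except, which is why they now complete quickly enough without their own background process. A hedged, self-contained sketch of that resulting shape (`FakeStore`, `FakePusher` and `FakePusherPool` are illustrative stand-ins, not Synapse classes):

import logging

from twisted.internet import defer, task

logger = logging.getLogger(__name__)


class FakeStore(object):
    def get_push_action_users_in_range(self, min_stream_id, max_stream_id):
        # Stand-in for the real database call, which returns a Deferred.
        return defer.succeed(["@alice:example.com"])


class FakePusher(object):
    def on_new_notifications(self, min_stream_id, max_stream_id):
        logger.info("pusher woken for %d..%d", min_stream_id, max_stream_id)


class FakePusherPool(object):
    def __init__(self):
        self.store = FakeStore()
        self.pushers = {"@alice:example.com": {"pushkey1": FakePusher()}}

    @defer.inlineCallbacks
    def on_new_notifications(self, min_stream_id, max_stream_id):
        try:
            users_affected = yield self.store.get_push_action_users_in_range(
                min_stream_id, max_stream_id
            )
            # The per-pusher callbacks are synchronous; no gathering needed.
            for u in users_affected:
                if u in self.pushers:
                    for p in self.pushers[u].values():
                        p.on_new_notifications(min_stream_id, max_stream_id)
        except Exception:
            logger.exception("Exception in pusher on_new_notifications")


def main(reactor):
    return FakePusherPool().on_new_notifications(0, 10)


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    task.react(main)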