Diffstat (limited to 'synapse/util')
-rw-r--r--   synapse/util/__init__.py                |  2
-rw-r--r--   synapse/util/async_helpers.py           | 16
-rw-r--r--   synapse/util/batching_queue.py          |  2
-rw-r--r--   synapse/util/caches/cached_call.py      |  2
-rw-r--r--   synapse/util/caches/descriptors.py      |  2
-rw-r--r--   synapse/util/distributor.py             |  2
-rw-r--r--   synapse/util/patch_inline_callbacks.py  |  2
-rw-r--r--   synapse/util/ratelimitutils.py          |  6
-rw-r--r--   synapse/util/retryutils.py              |  2
9 files changed, 18 insertions, 18 deletions
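The one change repeated across all nine files is appending a `# type: ignore[unused-awaitable]` comment to calls whose awaitable return value (typically a Twisted `Deferred`) is deliberately discarded. A minimal illustrative sketch, not Synapse code, assuming mypy is run with its optional `unused-awaitable` error code enabled (e.g. `mypy --enable-error-code unused-awaitable`):

from twisted.internet import defer
from twisted.python.failure import Failure


def log_failure(failure: Failure) -> None:
    print("call failed:", failure)


d: "defer.Deferred[int]" = defer.Deferred()

# addErrback() returns the Deferred itself, which is awaitable, so an
# expression statement that drops the return value trips the check:
d.addErrback(log_failure)  # mypy: value is not used  [unused-awaitable]

# Silencing the check on a single line, as the hunks below do, documents
# that the value is intentionally fired and forgotten:
d.addErrback(log_failure)  # type: ignore[unused-awaitable]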
diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py
index 7be9d5f113..714be27d86 100644
--- a/synapse/util/__init__.py
+++ b/synapse/util/__init__.py
@@ -132,7 +132,7 @@ class Clock:
         call = task.LoopingCall(f, *args, **kwargs)
         call.clock = self._reactor
         d = call.start(msec / 1000.0, now=False)
-        d.addErrback(log_failure, "Looping call died", consumeErrors=False)
+        d.addErrback(log_failure, "Looping call died", consumeErrors=False)  # type: ignore[unused-awaitable]
         return call

     def call_later(
diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py
index 01e3cd46f6..d612fca03d 100644
--- a/synapse/util/async_helpers.py
+++ b/synapse/util/async_helpers.py
@@ -154,7 +154,7 @@ class ObservableDeferred(Generic[_T], AbstractObservableDeferred[_T]):
             else:
                 return f

-        deferred.addCallbacks(callback, errback)
+        deferred.addCallbacks(callback, errback)  # type: ignore[unused-awaitable]

     def observe(self) -> "defer.Deferred[_T]":
         """Observe the underlying deferred.
@@ -635,7 +635,7 @@ class ReadWriteLock:
             # writer waiting for us and it completed entirely within the
             # `new_defer.callback()` call above.
             if self.key_to_current_writer.get(key) == new_defer:
-                self.key_to_current_writer.pop(key)
+                self.key_to_current_writer.pop(key)  # type: ignore[unused-awaitable]

         return _ctx_manager()
@@ -693,7 +693,7 @@ def timeout_deferred(
             raise defer.TimeoutError("Timed out after %gs" % (timeout,))
         return value

-    deferred.addErrback(convert_cancelled)
+    deferred.addErrback(convert_cancelled)  # type: ignore[unused-awaitable]

     def cancel_timeout(result: _T) -> _T:
         # stop the pending call to cancel the deferred if it's been fired
@@ -701,7 +701,7 @@ def timeout_deferred(
             delayed_call.cancel()
         return result

-    deferred.addBoth(cancel_timeout)
+    deferred.addBoth(cancel_timeout)  # type: ignore[unused-awaitable]

     def success_cb(val: _T) -> None:
         if not new_d.called:
@@ -711,7 +711,7 @@ def timeout_deferred(
         if not new_d.called:
             new_d.errback(val)

-    deferred.addCallbacks(success_cb, failure_cb)
+    deferred.addCallbacks(success_cb, failure_cb)  # type: ignore[unused-awaitable]

     return new_d
@@ -759,7 +759,7 @@ def stop_cancellation(deferred: "defer.Deferred[T]") -> "defer.Deferred[T]":
         wrapped with `make_deferred_yieldable`.
     """
     new_deferred: "defer.Deferred[T]" = defer.Deferred()
-    deferred.chainDeferred(new_deferred)
+    deferred.chainDeferred(new_deferred)  # type: ignore[unused-awaitable]
     return new_deferred
@@ -821,10 +821,10 @@ def delay_cancellation(awaitable: Awaitable[T]) -> Awaitable[T]:
         new_deferred.pause()
         new_deferred.errback(Failure(CancelledError()))

-        deferred.addBoth(lambda _: new_deferred.unpause())
+        deferred.addBoth(lambda _: new_deferred.unpause())  # type: ignore[unused-awaitable]

     new_deferred: "defer.Deferred[T]" = defer.Deferred(handle_cancel)
-    deferred.chainDeferred(new_deferred)
+    deferred.chainDeferred(new_deferred)  # type: ignore[unused-awaitable]
     return new_deferred
diff --git a/synapse/util/batching_queue.py b/synapse/util/batching_queue.py
index 2a903004a9..72a13cd1a4 100644
--- a/synapse/util/batching_queue.py
+++ b/synapse/util/batching_queue.py
@@ -128,7 +128,7 @@ class BatchingQueue(Generic[V, R]):
         # If we're not currently processing the key fire off a background
         # process to start processing.
         if key not in self._processing_keys:
-            run_as_background_process(self._name, self._process_queue, key)
+            run_as_background_process(self._name, self._process_queue, key)  # type: ignore[unused-awaitable]

         with self._number_in_flight_metric.track_inprogress():
             return await make_deferred_yieldable(d)
diff --git a/synapse/util/caches/cached_call.py b/synapse/util/caches/cached_call.py
index e325f44da3..4061db56a8 100644
--- a/synapse/util/caches/cached_call.py
+++ b/synapse/util/caches/cached_call.py
@@ -89,7 +89,7 @@ class CachedCall(Generic[TV]):
             def got_result(r: Union[TV, Failure]) -> None:
                 self._result = r

-            self._deferred.addBoth(got_result)
+            self._deferred.addBoth(got_result)  # type: ignore[unused-awaitable]

             # TODO: consider cancellation semantics. Currently, if the call to get()
             # is cancelled, the underlying call will continue (and any future calls
diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py
index 81df71a0c5..740d9585cf 100644
--- a/synapse/util/caches/descriptors.py
+++ b/synapse/util/caches/descriptors.py
@@ -377,7 +377,7 @@ class DeferredCacheListDescriptor(_CacheDescriptorBase):
                 for k, v in r.items():
                     results[cache_key_to_arg(k)] = v

-            pending_deferred.addCallback(update_results)
+            pending_deferred.addCallback(update_results)  # type: ignore[unused-awaitable]
             cached_defers.append(pending_deferred)

         if missing:
diff --git a/synapse/util/distributor.py b/synapse/util/distributor.py
index b580bdd0de..c5019a1074 100644
--- a/synapse/util/distributor.py
+++ b/synapse/util/distributor.py
@@ -84,7 +84,7 @@ class Distributor:
         if name not in self.signals:
             raise KeyError("%r does not have a signal named %s" % (self, name))

-        run_as_background_process(name, self.signals[name].fire, *args, **kwargs)
+        run_as_background_process(name, self.signals[name].fire, *args, **kwargs)  # type: ignore[unused-awaitable]


 P = ParamSpec("P")
diff --git a/synapse/util/patch_inline_callbacks.py b/synapse/util/patch_inline_callbacks.py
index d00d34e652..8078ab0bef 100644
--- a/synapse/util/patch_inline_callbacks.py
+++ b/synapse/util/patch_inline_callbacks.py
@@ -108,7 +108,7 @@ def do_patch() -> None:
                     raise Exception(err)
                 return r

-            res.addBoth(check_ctx)
+            res.addBoth(check_ctx)  # type: ignore[unused-awaitable]
             return res

         return wrapped
diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py
index f262bf95a0..e01645f1ab 100644
--- a/synapse/util/ratelimitutils.py
+++ b/synapse/util/ratelimitutils.py
@@ -334,7 +334,7 @@ class _PerHostRatelimiter:
                 queue_defer = queue_request()
                 return queue_defer

-            ret_defer.addBoth(on_wait_finished)
+            ret_defer.addBoth(on_wait_finished)  # type: ignore[unused-awaitable]
         else:
             ret_defer = queue_request()
@@ -358,8 +358,8 @@ class _PerHostRatelimiter:
             self.ready_request_queue.pop(request_id, None)
             return r

-        ret_defer.addCallbacks(on_start, on_err)
-        ret_defer.addBoth(on_both)
+        ret_defer.addCallbacks(on_start, on_err)  # type: ignore[unused-awaitable]
+        ret_defer.addBoth(on_both)  # type: ignore[unused-awaitable]
         return make_deferred_yieldable(ret_defer)

     def _on_exit(self, request_id: object) -> None:
diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py
index dcc037b982..9ddc00fddd 100644
--- a/synapse/util/retryutils.py
+++ b/synapse/util/retryutils.py
@@ -265,4 +265,4 @@ class RetryDestinationLimiter:
                 logger.exception("Failed to store destination_retry_timings")

         # we deliberately do this in the background.
-        run_as_background_process("store_retry_timings", store_retry_timings)
+        run_as_background_process("store_retry_timings", store_retry_timings)  # type: ignore[unused-awaitable]