diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py
index b2a22dbd5c..3ad4b28fc7 100644
--- a/synapse/util/__init__.py
+++ b/synapse/util/__init__.py
@@ -46,7 +46,7 @@ def unwrapFirstError(failure):
@attr.s
-class Clock(object):
+class Clock:
"""
A Clock wraps a Twisted reactor and provides utilities on top of it.
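Note: dropping the explicit `object` base class is a no-op on Python 3, where every class is already new-style. A quick standalone check (plain Python, nothing Synapse-specific):

    # Both spellings produce an identical class on Python 3: each still
    # inherits from object and has the same method resolution order.
    class WithExplicitBase(object):
        pass

    class WithoutExplicitBase:
        pass

    assert WithExplicitBase.__mro__[-1] is object
    assert WithoutExplicitBase.__mro__[-1] is object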
diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py
index dfefbd996d..bb57e27beb 100644
--- a/synapse/util/async_helpers.py
+++ b/synapse/util/async_helpers.py
@@ -36,7 +36,7 @@ from synapse.util import Clock, unwrapFirstError
logger = logging.getLogger(__name__)
-class ObservableDeferred(object):
+class ObservableDeferred:
"""Wraps a deferred object so that we can add observer deferreds. These
observer deferreds do not affect the callback chain of the original
deferred.
@@ -188,7 +188,7 @@ def yieldable_gather_results(func, iter, *args, **kwargs):
).addErrback(unwrapFirstError)
-class Linearizer(object):
+class Linearizer:
"""Limits concurrent access to resources based on a key. Useful to ensure
only a few things happen at a time on a given resource.
@@ -338,7 +338,7 @@ class Linearizer(object):
return new_defer
-class ReadWriteLock(object):
+class ReadWriteLock:
"""An async read write lock.
Example:
@@ -502,7 +502,7 @@ def timeout_deferred(deferred, timeout, reactor, on_timeout_cancel=None):
@attr.s(slots=True, frozen=True)
-class DoneAwaitable(object):
+class DoneAwaitable:
"""Simple awaitable that returns the provided value.
"""
diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py
index dd356bf156..237f588658 100644
--- a/synapse/util/caches/__init__.py
+++ b/synapse/util/caches/__init__.py
@@ -43,7 +43,7 @@ response_cache_total = Gauge("synapse_util_caches_response_cache:total", "", ["n
@attr.s
-class CacheMetric(object):
+class CacheMetric:
_cache = attr.ib()
_cache_type = attr.ib(type=str)
diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py
index 825810eb16..98b34f2223 100644
--- a/synapse/util/caches/descriptors.py
+++ b/synapse/util/caches/descriptors.py
@@ -64,7 +64,7 @@ cache_pending_metric = Gauge(
_CacheSentinel = object()
-class CacheEntry(object):
+class CacheEntry:
__slots__ = ["deferred", "callbacks", "invalidated"]
def __init__(self, deferred, callbacks):
@@ -80,7 +80,7 @@ class CacheEntry(object):
self.callbacks.clear()
-class Cache(object):
+class Cache:
__slots__ = (
"cache",
"name",
@@ -288,7 +288,7 @@ class Cache(object):
self._pending_deferred_cache.clear()
-class _CacheDescriptorBase(object):
+class _CacheDescriptorBase:
def __init__(self, orig: _CachedFunction, num_args, cache_context=False):
self.orig = orig
@@ -705,7 +705,7 @@ def cachedList(
Example:
- class Example(object):
+ class Example:
@cached(num_args=2)
def do_something(self, first_arg):
...
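Note: a hedged sketch of the @cached pattern the docstring example above refers to; the invalidate() call on the wrapped method is an assumption drawn from how these descriptors are used in the datastores, and _fetch_from_db/_write_to_db are placeholder helpers:

    # Illustrative only; signatures are assumptions, not part of this diff.
    from synapse.util.caches.descriptors import cached

    class ExampleStore:
        @cached(num_args=1)
        def get_thing(self, thing_id):
            # Expensive lookup; the decorator memoises the result keyed on
            # the arguments after `self`.
            return self._fetch_from_db(thing_id)

        def store_thing(self, thing_id, value):
            self._write_to_db(thing_id, value)
            # Cache keys are tuples of the cached function's arguments.
            self.get_thing.invalidate((thing_id,))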
diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py
index 6834e6f3ae..8592b93689 100644
--- a/synapse/util/caches/dictionary_cache.py
+++ b/synapse/util/caches/dictionary_cache.py
@@ -40,7 +40,7 @@ class DictionaryEntry(namedtuple("DictionaryEntry", ("full", "known_absent", "va
return len(self.value)
-class DictionaryCache(object):
+class DictionaryCache:
"""Caches key -> dictionary lookups, supporting caching partial dicts, i.e.
fetching a subset of dictionary keys for a particular key.
"""
@@ -53,7 +53,7 @@ class DictionaryCache(object):
self.thread = None
# caches_by_name[name] = self.cache
- class Sentinel(object):
+ class Sentinel:
__slots__ = []
self.sentinel = Sentinel()
diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py
index 89a3420f92..e15f7ee698 100644
--- a/synapse/util/caches/expiringcache.py
+++ b/synapse/util/caches/expiringcache.py
@@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
SENTINEL = object()
-class ExpiringCache(object):
+class ExpiringCache:
def __init__(
self,
cache_name,
@@ -190,7 +190,7 @@ class ExpiringCache(object):
return False
-class _CacheEntry(object):
+class _CacheEntry:
__slots__ = ["time", "value"]
def __init__(self, time, value):
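Note: a hedged sketch of ExpiringCache usage; the constructor arguments shown are assumptions based on the rest of the module:

    from twisted.internet import reactor

    from synapse.util import Clock
    from synapse.util.caches.expiringcache import ExpiringCache

    cache = ExpiringCache(
        cache_name="example",
        clock=Clock(reactor),
        max_len=1000,               # evict oldest entries beyond this size
        expiry_ms=30 * 60 * 1000,   # prune entries older than 30 minutes
    )
    cache["key"] = "value"
    cache.get("key")  # -> "value" until it expires or is evicted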
diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index df4ea5901d..4bc1a67b58 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -30,7 +30,7 @@ def enumerate_leaves(node, depth):
yield m
-class _Node(object):
+class _Node:
__slots__ = ["prev_node", "next_node", "key", "value", "callbacks"]
def __init__(self, prev_node, next_node, key, value, callbacks=set()):
@@ -41,7 +41,7 @@ class _Node(object):
self.callbacks = callbacks
-class LruCache(object):
+class LruCache:
"""
Least-recently-used cache.
Supports del_multi only if cache_type=TreeCache
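Note: a hedged sketch of basic LruCache behaviour; the get/set shapes are assumptions based on how the cache is used elsewhere in the codebase:

    from synapse.util.caches.lrucache import LruCache

    cache = LruCache(max_size=2)
    cache["a"] = 1
    cache["b"] = 2
    cache["c"] = 3               # evicts "a", the least recently used entry
    assert cache.get("a") is None
    assert cache.get("c") == 3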
diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py
index a6c60888e5..df1a721add 100644
--- a/synapse/util/caches/response_cache.py
+++ b/synapse/util/caches/response_cache.py
@@ -23,7 +23,7 @@ from synapse.util.caches import register_cache
logger = logging.getLogger(__name__)
-class ResponseCache(object):
+class ResponseCache:
"""
This caches a deferred response. Until the deferred completes it will be
returned from the cache. This means that if the client retries the request
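Note: a hedged sketch of the request deduplication described above; wrap()'s signature is an assumption based on the rest of the module, and ExampleHandler/_do_work are placeholders:

    from synapse.util.caches.response_cache import ResponseCache

    class ExampleHandler:
        def __init__(self, hs):
            # Deduplicates identical in-flight requests, then keeps the
            # finished result around for a further 30 seconds.
            self._response_cache = ResponseCache(
                hs, "example_requests", timeout_ms=30 * 1000
            )

        async def handle_request(self, request_key, request):
            # While an earlier call for the same key is still pending (or
            # within the timeout), callers share its result instead of
            # redoing the work.
            return await self._response_cache.wrap(
                request_key, self._do_work, request
            )

        async def _do_work(self, request):
            ...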
diff --git a/synapse/util/caches/treecache.py b/synapse/util/caches/treecache.py
index ecd9948e79..eb4d98f683 100644
--- a/synapse/util/caches/treecache.py
+++ b/synapse/util/caches/treecache.py
@@ -3,7 +3,7 @@ from typing import Dict
SENTINEL = object()
-class TreeCache(object):
+class TreeCache:
"""
Tree-based backing store for LruCache. Allows subtrees of data to be deleted
efficiently.
@@ -89,7 +89,7 @@ def iterate_tree_cache_entry(d):
yield d
-class _Entry(object):
+class _Entry:
__slots__ = ["value"]
def __init__(self, value):
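Note: a hedged sketch of the subtree deletion described above; the tuple-key and prefix-pop behaviour is inferred from the docstring and from LruCache's del_multi support, not verified by this diff:

    from synapse.util.caches.treecache import TreeCache, iterate_tree_cache_entry

    cache = TreeCache()
    cache[("room1", "alice")] = "a"
    cache[("room1", "bob")] = "b"
    cache[("room2", "carol")] = "c"

    # Popping a key prefix drops the whole subtree in one operation.
    subtree = cache.pop(("room1",))
    assert sorted(iterate_tree_cache_entry(subtree)) == ["a", "b"]
    assert cache.get(("room1", "alice")) is None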
diff --git a/synapse/util/caches/ttlcache.py b/synapse/util/caches/ttlcache.py
index 6437aa907e..3e180cafd3 100644
--- a/synapse/util/caches/ttlcache.py
+++ b/synapse/util/caches/ttlcache.py
@@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
SENTINEL = object()
-class TTLCache(object):
+class TTLCache:
"""A key/value cache implementation where each entry has its own TTL"""
def __init__(self, cache_name, timer=time.time):
@@ -154,7 +154,7 @@ class TTLCache(object):
@attr.s(frozen=True, slots=True)
-class _CacheEntry(object):
+class _CacheEntry:
"""TTLCache entry"""
# expiry_time is the first attribute, so that entries are sorted by expiry.
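Note: a hedged sketch of TTLCache usage; the set()/get() shapes are assumptions based on the rest of the module:

    from synapse.util.caches.ttlcache import TTLCache

    cache = TTLCache("example")
    cache.set("key", "value", ttl=30)   # TTL in the timer's units (seconds by default)
    cache.get("key")                    # -> "value"
    # Once the TTL has passed, get() raises KeyError unless a default is given:
    cache.get("key", default=None)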
diff --git a/synapse/util/distributor.py b/synapse/util/distributor.py
index 22a857a306..a750261e77 100644
--- a/synapse/util/distributor.py
+++ b/synapse/util/distributor.py
@@ -34,7 +34,7 @@ def user_joined_room(distributor, user, room_id):
distributor.fire("user_joined_room", user=user, room_id=room_id)
-class Distributor(object):
+class Distributor:
"""A central dispatch point for loosely-connected pieces of code to
register, observe, and fire signals.
@@ -103,7 +103,7 @@ def maybeAwaitableDeferred(f, *args, **kw):
return succeed(result)
-class Signal(object):
+class Signal:
"""A Signal is a dispatch point that stores a list of callables as
observers of it.
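Note: a hedged sketch of the signal flow; declare() and observe() are assumptions based on the rest of the module — only fire() appears in this diff:

    from synapse.util.distributor import Distributor

    distributor = Distributor()
    distributor.declare("user_joined_room")

    async def on_user_joined(user, room_id):
        ...  # observers may be plain callables or coroutines

    distributor.observe("user_joined_room", on_user_joined)

    # Elsewhere, mirroring the helper shown at the top of this file:
    # distributor.fire("user_joined_room", user=user, room_id=room_id)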
diff --git a/synapse/util/file_consumer.py b/synapse/util/file_consumer.py
index 6a3f6177b1..733f5e26e6 100644
--- a/synapse/util/file_consumer.py
+++ b/synapse/util/file_consumer.py
@@ -20,7 +20,7 @@ from twisted.internet import threads
from synapse.logging.context import make_deferred_yieldable, run_in_background
-class BackgroundFileConsumer(object):
+class BackgroundFileConsumer:
"""A consumer that writes to a file like object. Supports both push
and pull producers
diff --git a/synapse/util/jsonobject.py b/synapse/util/jsonobject.py
index 6dce03dd3a..50516926f3 100644
--- a/synapse/util/jsonobject.py
+++ b/synapse/util/jsonobject.py
@@ -14,7 +14,7 @@
# limitations under the License.
-class JsonEncodedObject(object):
+class JsonEncodedObject:
""" A common base class for defining protocol units that are represented
as JSON.
diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py
index 13775b43f9..6e57c1ee72 100644
--- a/synapse/util/metrics.py
+++ b/synapse/util/metrics.py
@@ -93,7 +93,7 @@ def measure_func(name: Optional[str] = None) -> Callable[[T], T]:
return wrapper
-class Measure(object):
+class Measure:
__slots__ = [
"clock",
"name",
diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py
index e5efdfcd02..70d11e1ec3 100644
--- a/synapse/util/ratelimitutils.py
+++ b/synapse/util/ratelimitutils.py
@@ -29,7 +29,7 @@ from synapse.logging.context import (
logger = logging.getLogger(__name__)
-class FederationRateLimiter(object):
+class FederationRateLimiter:
def __init__(self, clock, config):
"""
Args:
@@ -60,7 +60,7 @@ class FederationRateLimiter(object):
return self.ratelimiters[host].ratelimit()
-class _PerHostRatelimiter(object):
+class _PerHostRatelimiter:
def __init__(self, clock, config):
"""
Args:
diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py
index 919988d3bc..79869aaa44 100644
--- a/synapse/util/retryutils.py
+++ b/synapse/util/retryutils.py
@@ -114,7 +114,7 @@ async def get_retry_limiter(destination, clock, store, ignore_backoff=False, **k
)
-class RetryDestinationLimiter(object):
+class RetryDestinationLimiter:
def __init__(
self,
destination,
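Note: a hedged sketch of the backoff pattern around RetryDestinationLimiter, following the usage described in get_retry_limiter's docstring; do_request is a placeholder:

    from synapse.util.retryutils import NotRetryingDestination, get_retry_limiter

    async def send_to_destination(destination, clock, store):
        try:
            limiter = await get_retry_limiter(destination, clock, store)
        except NotRetryingDestination:
            return  # still backing off from earlier failures; skip this host

        with limiter:
            # A failure inside this block extends the destination's backoff;
            # a clean exit resets it.
            return await do_request(destination)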
diff --git a/synapse/util/wheel_timer.py b/synapse/util/wheel_timer.py
index 023beb5ede..be3b22469d 100644
--- a/synapse/util/wheel_timer.py
+++ b/synapse/util/wheel_timer.py
@@ -14,7 +14,7 @@
# limitations under the License.
-class _Entry(object):
+class _Entry:
__slots__ = ["end_key", "queue"]
def __init__(self, end_key):
@@ -22,7 +22,7 @@ class _Entry(object):
self.queue = []
-class WheelTimer(object):
+class WheelTimer:
"""Stores arbitrary objects that will be returned after their timers have
expired.
"""