diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index 60bb6ff642..20c8e2d9f5 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -57,12 +57,14 @@ def enumerate_leaves(node, depth):
class _Node:
     __slots__ = ["prev_node", "next_node", "key", "value", "callbacks"]
 
-    def __init__(self, prev_node, next_node, key, value, callbacks=set()):
+ def __init__(
+ self, prev_node, next_node, key, value, callbacks: Optional[set] = None
+ ):
self.prev_node = prev_node
self.next_node = next_node
self.key = key
self.value = value
- self.callbacks = callbacks
+        self.callbacks = callbacks or set()
 
 
 class LruCache(Generic[KT, VT]):
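
Review note on the hunk above: Python evaluates a default argument once, when the
`def` statement runs, so the old `callbacks=set()` made every `_Node` constructed
without an explicit `callbacks` argument alias one shared set. A minimal sketch of
that failure mode, using a hypothetical `Stub` class rather than the real `_Node`:

    class Stub:
        # Same shape as the old _Node signature: the default set is built
        # once, at definition time, then reused by every later call.
        def __init__(self, callbacks=set()):
            self.callbacks = callbacks

    a = Stub()
    b = Stub()
    a.callbacks.add("cb")
    assert a.callbacks is b.callbacks  # one shared set across instances
    assert b.callbacks == {"cb"}       # b observes a's callback: the bug
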
@@ -176,10 +178,10 @@ class LruCache(Generic[KT, VT]):
         self.len = synchronized(cache_len)
 
-        def add_node(key, value, callbacks=set()):
+ def add_node(key, value, callbacks: Optional[set] = None):
prev_node = list_root
next_node = prev_node.next_node
- node = _Node(prev_node, next_node, key, value, callbacks)
+ node = _Node(prev_node, next_node, key, value, callbacks or set())
prev_node.next_node = node
next_node.prev_node = node
cache[key] = node
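
The same `None`-sentinel fix is applied to `add_node`. One subtlety worth flagging
in review: `callbacks or set()` tests truthiness, not identity, so an explicitly
passed empty set is also swapped for a fresh one. That severs aliasing with the
caller's set, which looks harmless here but is a behaviour change in principle. A
small sketch of the pattern, with a hypothetical free function standing in for the
nested `add_node`:

    from typing import Optional

    def fresh_callbacks(callbacks: Optional[set] = None) -> set:
        # None and the empty set are both falsy, so either way the caller
        # gets a set no other call site can mutate behind its back.
        return callbacks or set()

    assert fresh_callbacks() is not fresh_callbacks()  # no shared default
    empty = set()
    assert fresh_callbacks(empty) is not empty         # truthiness nuance
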
@@ -237,7 +239,7 @@ class LruCache(Generic[KT, VT]):
def cache_get(
key: KT,
default: Optional[T] = None,
- callbacks: Iterable[Callable[[], None]] = [],
+ callbacks: Iterable[Callable[[], None]] = (),
update_metrics: bool = True,
):
node = cache.get(key, None)
@@ -253,7 +255,7 @@ class LruCache(Generic[KT, VT]):
             return default
 
         @synchronized
- def cache_set(key: KT, value: VT, callbacks: Iterable[Callable[[], None]] = []):
+ def cache_set(key: KT, value: VT, callbacks: Iterable[Callable[[], None]] = ()):
node = cache.get(key, None)
if node is not None:
# We sometimes store large objects, e.g. dicts, which cause
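
The last two hunks fix the same pitfall in `cache_get` and `cache_set`, where the
parameter is typed `Iterable` and only ever read. There an empty tuple is the
cheaper fix: the single `()` is still shared across calls, but tuples are
immutable, so the sharing cannot leak state the way the old `[]` default could. A
sketch under that assumption, with a hypothetical `run_callbacks` standing in for
the real call sites:

    from typing import Callable, Iterable

    def run_callbacks(callbacks: Iterable[Callable[[], None]] = ()) -> None:
        # The shared () default is safe because nothing can append to it; a
        # [] default would be one stray .append() away from leaking callbacks
        # into every subsequent call.
        for cb in callbacks:
            cb()

    run_callbacks()                       # no-op on the immutable default
    run_callbacks([lambda: print("hi")])  # explicit callbacks still fire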