diff --git a/changelog.d/16894.bugfix b/changelog.d/16894.bugfix
new file mode 100644
index 0000000000..7e05370be7
--- /dev/null
+++ b/changelog.d/16894.bugfix
@@ -0,0 +1 @@
+Do not send multiple concurrent requests for keys for the same server.
diff --git a/changelog.d/16905.misc b/changelog.d/16905.misc
new file mode 100644
index 0000000000..c5f47eb3e9
--- /dev/null
+++ b/changelog.d/16905.misc
@@ -0,0 +1 @@
+Don't invalidate the entire event cache when we purge history.
diff --git a/changelog.d/16909.misc b/changelog.d/16909.misc
new file mode 100644
index 0000000000..f958936584
--- /dev/null
+++ b/changelog.d/16909.misc
@@ -0,0 +1 @@
+Add an experimental config option to suppress device list updates for specific users when they register new devices.
diff --git a/poetry.lock b/poetry.lock
index c9e5ad9e15..834d6512d7 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -46,21 +46,22 @@ wrapt = [
[[package]]
name = "attrs"
-version = "23.1.0"
+version = "23.2.0"
description = "Classes Without Boilerplate"
optional = false
python-versions = ">=3.7"
files = [
- {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
- {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
+ {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
+ {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
]
[package.extras]
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
-dev = ["attrs[docs,tests]", "pre-commit"]
+dev = ["attrs[tests]", "pre-commit"]
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
tests = ["attrs[tests-no-zope]", "zope-interface"]
-tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
+tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
[[package]]
name = "authlib"
@@ -110,32 +111,38 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""}
[[package]]
name = "bcrypt"
-version = "4.0.1"
+version = "4.1.2"
description = "Modern password hashing for your software and your servers"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
- {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"},
- {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"},
- {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"},
- {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"},
- {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"},
- {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"},
- {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"},
- {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"},
- {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"},
- {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"},
- {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"},
- {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"},
- {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"},
- {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"},
- {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"},
- {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"},
- {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"},
- {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"},
- {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"},
- {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"},
- {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"},
+ {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"},
+ {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"},
+ {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"},
+ {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"},
+ {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"},
+ {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"},
+ {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"},
+ {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"},
+ {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"},
+ {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"},
+ {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"},
+ {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"},
+ {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"},
+ {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"},
+ {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"},
+ {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"},
+ {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"},
+ {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"},
+ {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"},
+ {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"},
+ {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"},
+ {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"},
+ {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"},
+ {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"},
+ {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"},
+ {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"},
+ {file = "bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"},
]
[package.extras]
@@ -1959,20 +1966,19 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pygithub"
-version = "2.1.1"
+version = "2.2.0"
description = "Use the full Github API v3"
optional = false
python-versions = ">=3.7"
files = [
- {file = "PyGithub-2.1.1-py3-none-any.whl", hash = "sha256:4b528d5d6f35e991ea5fd3f942f58748f24938805cb7fcf24486546637917337"},
- {file = "PyGithub-2.1.1.tar.gz", hash = "sha256:ecf12c2809c44147bce63b047b3d2e9dac8a41b63e90fcb263c703f64936b97c"},
+ {file = "PyGithub-2.2.0-py3-none-any.whl", hash = "sha256:41042ea53e4c372219db708c38d2ca1fd4fadab75475bac27d89d339596cfad1"},
+ {file = "PyGithub-2.2.0.tar.gz", hash = "sha256:e39be7c4dc39418bdd6e3ecab5931c636170b8b21b4d26f9ecf7e6102a3b51c3"},
]
[package.dependencies]
Deprecated = "*"
pyjwt = {version = ">=2.4.0", extras = ["crypto"]}
pynacl = ">=1.4.0"
-python-dateutil = "*"
requests = ">=2.14.0"
typing-extensions = ">=4.0.0"
urllib3 = ">=1.26.0"
@@ -2119,7 +2125,7 @@ s2repoze = ["paste", "repoze.who", "zope.interface"]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
-optional = false
+optional = true
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
@@ -2477,13 +2483,13 @@ doc = ["Sphinx", "sphinx-rtd-theme"]
[[package]]
name = "sentry-sdk"
-version = "1.40.0"
+version = "1.40.3"
description = "Python client for Sentry (https://sentry.io)"
optional = true
python-versions = "*"
files = [
- {file = "sentry-sdk-1.40.0.tar.gz", hash = "sha256:34ad8cfc9b877aaa2a8eb86bfe5296a467fffe0619b931a05b181c45f6da59bf"},
- {file = "sentry_sdk-1.40.0-py2.py3-none-any.whl", hash = "sha256:78575620331186d32f34b7ece6edea97ce751f58df822547d3ab85517881a27a"},
+ {file = "sentry-sdk-1.40.3.tar.gz", hash = "sha256:3c2b027979bb400cd65a47970e64f8cef8acda86b288a27f42a98692505086cd"},
+ {file = "sentry_sdk-1.40.3-py2.py3-none-any.whl", hash = "sha256:73383f28311ae55602bb6cc3b013830811135ba5521e41333a6e68f269413502"},
]
[package.dependencies]
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index 9e2b1f3de1..3fe0f050cd 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -237,6 +237,14 @@ class RegistrationConfig(Config):
self.inhibit_user_in_use_error = config.get("inhibit_user_in_use_error", False)
+ # List of user IDs for whom we should not send out device list updates
+ # when they register new devices. This is useful for bot accounts.
+ #
+ # Note: device list updates will still be sent if the device is later
+ # updated, e.g. when end-to-end keys are added.
+ dont_notify_new_devices_for = config.get("dont_notify_new_devices_for", [])
+ self.dont_notify_new_devices_for = frozenset(dont_notify_new_devices_for)
+
def generate_config_section(
self, generate_secrets: bool = False, **kwargs: Any
) -> str:
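For illustration only (not part of the PR): a minimal sketch of what the new `dont_notify_new_devices_for` option could look like in `homeserver.yaml`, and how the registration config turns it into a frozenset for cheap membership checks. The YAML layout and the surrounding script are assumptions; only the option name and the `config.get(..., [])` / `frozenset` handling come from the diff above.

```python
# Illustrative sketch; assumes PyYAML is installed. The YAML layout is an
# assumption, the frozenset handling mirrors the registration.py change above.
import yaml

config_yaml = """
dont_notify_new_devices_for:
  - "@bot1:example.com"
  - "@bot2:example.com"
"""

config = yaml.safe_load(config_yaml)

# Default to an empty list, then freeze into a set so the per-device
# membership test is O(1) and the value cannot be mutated later.
dont_notify_new_devices_for = frozenset(
    config.get("dont_notify_new_devices_for", [])
)

print("@bot1:example.com" in dont_notify_new_devices_for)   # True
print("@alice:example.com" in dont_notify_new_devices_for)  # False
```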
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index 1e7e5f70fe..8c301e077c 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -839,11 +839,12 @@ class ServerKeyFetcher(BaseV2KeyFetcher):
Map from server_name -> key_id -> FetchKeyResult
"""
- results = {}
+ # We only need to do one request per server.
+ servers_to_fetch = {k.server_name for k in keys_to_fetch}
- async def get_keys(key_to_fetch_item: _FetchKeyRequest) -> None:
- server_name = key_to_fetch_item.server_name
+ results = {}
+ async def get_keys(server_name: str) -> None:
try:
keys = await self.get_server_verify_keys_v2_direct(server_name)
results[server_name] = keys
@@ -852,7 +853,7 @@ class ServerKeyFetcher(BaseV2KeyFetcher):
except Exception:
logger.exception("Error getting keys from %s", server_name)
- await yieldable_gather_results(get_keys, keys_to_fetch)
+ await yieldable_gather_results(get_keys, servers_to_fetch)
return results
async def get_server_verify_keys_v2_direct(
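The keyring change above collapses the outstanding key requests into a set of server names, so each server is contacted at most once per batch even when several pending requests name the same server. A minimal sketch of the same dedup-then-gather pattern, assuming plain `asyncio.gather` in place of Synapse's `yieldable_gather_results`, and a hypothetical `fetch_keys_from_server` standing in for `get_server_verify_keys_v2_direct`:

```python
# Sketch only: fetch_keys_from_server is a hypothetical stand-in for the real
# /_matrix/key/v2/server request; as in the diff, a failing server simply
# contributes no results.
import asyncio
from dataclasses import dataclass
from typing import Dict, List, Tuple


@dataclass
class FetchKeyRequest:
    server_name: str
    key_ids: Tuple[str, ...]


async def fetch_keys_from_server(server_name: str) -> Dict[str, str]:
    await asyncio.sleep(0)  # placeholder for the real HTTP round trip
    return {"ed25519:auto": f"verify-key-for-{server_name}"}


async def fetch_all(requests: List[FetchKeyRequest]) -> Dict[str, Dict[str, str]]:
    # We only need to make one request per server, regardless of how many
    # pending requests reference it.
    servers_to_fetch = {r.server_name for r in requests}

    results: Dict[str, Dict[str, str]] = {}

    async def get_keys(server_name: str) -> None:
        try:
            results[server_name] = await fetch_keys_from_server(server_name)
        except Exception:
            pass  # logged-and-ignored in the real code

    await asyncio.gather(*(get_keys(s) for s in servers_to_fetch))
    return results


if __name__ == "__main__":
    reqs = [
        FetchKeyRequest("matrix.org", ("ed25519:a",)),
        FetchKeyRequest("matrix.org", ("ed25519:b",)),  # same server, no extra request
        FetchKeyRequest("example.com", ("ed25519:c",)),
    ]
    print(asyncio.run(fetch_all(reqs)))
```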
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index 9062fac91a..67953a3ed9 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -429,6 +429,10 @@ class DeviceHandler(DeviceWorkerHandler):
self._storage_controllers = hs.get_storage_controllers()
self.db_pool = hs.get_datastores().main.db_pool
+ self._dont_notify_new_devices_for = (
+ hs.config.registration.dont_notify_new_devices_for
+ )
+
self.device_list_updater = DeviceListUpdater(hs, self)
federation_registry = hs.get_federation_registry()
@@ -505,6 +509,9 @@ class DeviceHandler(DeviceWorkerHandler):
self._check_device_name_length(initial_device_display_name)
+ # Check if we should send out device list updates for this new device.
+ notify = user_id not in self._dont_notify_new_devices_for
+
if device_id is not None:
new_device = await self.store.store_device(
user_id=user_id,
@@ -514,7 +521,8 @@ class DeviceHandler(DeviceWorkerHandler):
auth_provider_session_id=auth_provider_session_id,
)
if new_device:
- await self.notify_device_update(user_id, [device_id])
+ if notify:
+ await self.notify_device_update(user_id, [device_id])
return device_id
# if the device id is not specified, we'll autogen one, but loop a few
@@ -530,7 +538,8 @@ class DeviceHandler(DeviceWorkerHandler):
auth_provider_session_id=auth_provider_session_id,
)
if new_device:
- await self.notify_device_update(user_id, [new_device_id])
+ if notify:
+ await self.notify_device_update(user_id, [new_device_id])
return new_device_id
attempts += 1
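The handler change gates `notify_device_update` on the new option: suppressed users still have their devices stored, but no device list update is fanned out at registration time. A small sketch of that gate; `DeviceHandlerSketch`, `store_device` and `notify_device_update` here are illustrative stubs, and only the `notify = user_id not in ...` logic mirrors the diff.

```python
# Sketch of the notify gate; the store/notify methods are stubs that just
# record their calls so the suppression is visible.
import asyncio
from typing import FrozenSet, List, Tuple


class DeviceHandlerSketch:
    def __init__(self, dont_notify_new_devices_for: FrozenSet[str]) -> None:
        self._dont_notify_new_devices_for = dont_notify_new_devices_for
        self.stored: List[Tuple[str, str]] = []
        self.notified: List[Tuple[str, List[str]]] = []

    async def store_device(self, user_id: str, device_id: str) -> bool:
        self.stored.append((user_id, device_id))
        return True  # pretend this was a brand-new device

    async def notify_device_update(self, user_id: str, device_ids: List[str]) -> None:
        self.notified.append((user_id, device_ids))

    async def check_device_registered(self, user_id: str, device_id: str) -> str:
        # Decide up front whether device list updates should be sent for this user.
        notify = user_id not in self._dont_notify_new_devices_for

        new_device = await self.store_device(user_id, device_id)
        if new_device and notify:
            await self.notify_device_update(user_id, [device_id])
        return device_id


async def main() -> None:
    handler = DeviceHandlerSketch(frozenset({"@bot:example.com"}))
    await handler.check_device_registered("@bot:example.com", "BOTDEVICE")
    await handler.check_device_registered("@alice:example.com", "ALICEDEVICE")
    print(handler.stored)    # both devices are stored
    print(handler.notified)  # only @alice's device triggers an update


if __name__ == "__main__":
    asyncio.run(main())
```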
diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py
index 7314d87404..bfd492d95d 100644
--- a/synapse/storage/databases/main/cache.py
+++ b/synapse/storage/databases/main/cache.py
@@ -373,7 +373,7 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
deleted.
"""
- self._invalidate_local_get_event_cache_all() # type: ignore[attr-defined]
+ self._invalidate_local_get_event_cache_room_id(room_id) # type: ignore[attr-defined]
self._attempt_to_invalidate_cache("have_seen_event", (room_id,))
self._attempt_to_invalidate_cache("get_latest_event_ids_in_room", (room_id,))
diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
index 1fd458b510..9c3775bb7c 100644
--- a/synapse/storage/databases/main/events_worker.py
+++ b/synapse/storage/databases/main/events_worker.py
@@ -268,6 +268,8 @@ class EventsWorkerStore(SQLBaseStore):
] = AsyncLruCache(
cache_name="*getEvent*",
max_size=hs.config.caches.event_cache_size,
+ # `extra_index_cb` returns a tuple, as that matches the cache's key type.
+ extra_index_cb=lambda _, v: (v.event.room_id,),
)
# Map from event ID to a deferred that will result in a map from event
@@ -782,9 +784,9 @@ class EventsWorkerStore(SQLBaseStore):
if missing_events_ids:
- async def get_missing_events_from_cache_or_db() -> Dict[
- str, EventCacheEntry
- ]:
+ async def get_missing_events_from_cache_or_db() -> (
+ Dict[str, EventCacheEntry]
+ ):
"""Fetches the events in `missing_event_ids` from the database.
Also creates entries in `self._current_event_fetches` to allow
@@ -910,12 +912,12 @@ class EventsWorkerStore(SQLBaseStore):
self._event_ref.pop(event_id, None)
self._current_event_fetches.pop(event_id, None)
- def _invalidate_local_get_event_cache_all(self) -> None:
- """Clears the in-memory get event caches.
+ def _invalidate_local_get_event_cache_room_id(self, room_id: str) -> None:
+ """Clears the in-memory get event caches for a room.
Used when we purge room history.
"""
- self._get_event_cache.clear()
+ self._get_event_cache.invalidate_on_extra_index_local((room_id,))
self._event_ref.clear()
self._current_event_fetches.clear()
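For illustration (not code from the PR): the event cache now passes `extra_index_cb=lambda _, v: (v.event.room_id,)`, so each cached event is also indexed under its room ID wrapped in a one-element tuple, matching the shape of the cache's own keys. A toy, dict-based version of that keying shows what room-scoped invalidation buys during a history purge; `CachedEvent`, `cache_event` and `invalidate_room` are made-up names.

```python
# Toy illustration (plain dicts, no LRU behaviour): events cached by
# (event_id,) with a secondary index keyed by (room_id,).
from dataclasses import dataclass
from typing import Dict, Set, Tuple


@dataclass
class CachedEvent:
    event_id: str
    room_id: str


cache: Dict[Tuple[str, ...], CachedEvent] = {}
room_index: Dict[Tuple[str, ...], Set[Tuple[str, ...]]] = {}


def cache_event(ev: CachedEvent) -> None:
    key = (ev.event_id,)
    cache[key] = ev
    # Same shape as extra_index_cb=lambda _, v: (v.event.room_id,)
    room_index.setdefault((ev.room_id,), set()).add(key)


def invalidate_room(room_id: str) -> None:
    # Analogous in spirit to invalidate_on_extra_index_local((room_id,)):
    # drop only the events belonging to the purged room.
    for key in room_index.pop((room_id,), set()):
        cache.pop(key, None)


cache_event(CachedEvent("$a", "!purged:example.com"))
cache_event(CachedEvent("$b", "!purged:example.com"))
cache_event(CachedEvent("$c", "!kept:example.com"))

invalidate_room("!purged:example.com")
print(sorted(k[0] for k in cache))  # ['$c'] — events in other rooms stay cached
```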
diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index 6e8c1e84ac..a1b4f5b6a7 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -35,6 +35,7 @@ from typing import (
Iterable,
List,
Optional,
+ Set,
Tuple,
Type,
TypeVar,
@@ -386,6 +387,7 @@ class LruCache(Generic[KT, VT]):
apply_cache_factor_from_config: bool = True,
clock: Optional[Clock] = None,
prune_unread_entries: bool = True,
+ extra_index_cb: Optional[Callable[[KT, VT], KT]] = None,
):
"""
Args:
@@ -416,6 +418,20 @@ class LruCache(Generic[KT, VT]):
prune_unread_entries: If True, cache entries that haven't been read recently
will be evicted from the cache in the background. Set to False to
opt-out of this behaviour.
+
+ extra_index_cb: If provided, the cache keeps a second index that maps
+ a (different) key, computed by this callback, to the set of cache
+ entries it applies to. This can then be used to invalidate all
+ entries that share that second key.
+
+ For example, for the event cache this would be a callback that
+ maps an event to its room ID, allowing invalidation of all
+ events in a given room.
+
+ Note: Though both kinds of key share the same type, they live in
+ different namespaces.
+
+ Note: The index key does not have to be unique; several cache entries
+ may map to the same index key.
"""
# Default `clock` to something sensible. Note that we rename it to
# `real_clock` so that mypy doesn't think its still `Optional`.
@@ -463,6 +479,8 @@ class LruCache(Generic[KT, VT]):
lock = threading.Lock()
+ extra_index: Dict[KT, Set[KT]] = {}
+
def evict() -> None:
while cache_len() > self.max_size:
# Get the last node in the list (i.e. the oldest node).
@@ -521,6 +539,11 @@ class LruCache(Generic[KT, VT]):
if size_callback:
cached_cache_len[0] += size_callback(node.value)
+ if extra_index_cb:
+ index_key = extra_index_cb(node.key, node.value)
+ mapped_keys = extra_index.setdefault(index_key, set())
+ mapped_keys.add(node.key)
+
if caches.TRACK_MEMORY_USAGE and metrics:
metrics.inc_memory_usage(node.memory)
@@ -537,6 +560,14 @@ class LruCache(Generic[KT, VT]):
node.run_and_clear_callbacks()
+ if extra_index_cb:
+ index_key = extra_index_cb(node.key, node.value)
+ mapped_keys = extra_index.get(index_key)
+ if mapped_keys is not None:
+ mapped_keys.discard(node.key)
+ if not mapped_keys:
+ extra_index.pop(index_key, None)
+
if caches.TRACK_MEMORY_USAGE and metrics:
metrics.dec_memory_usage(node.memory)
@@ -748,6 +779,8 @@ class LruCache(Generic[KT, VT]):
if size_callback:
cached_cache_len[0] = 0
+ extra_index.clear()
+
if caches.TRACK_MEMORY_USAGE and metrics:
metrics.clear_memory_usage()
@@ -755,6 +788,28 @@ class LruCache(Generic[KT, VT]):
def cache_contains(key: KT) -> bool:
return key in cache
+ @synchronized
+ def cache_invalidate_on_extra_index(index_key: KT) -> None:
+ """Invalidates all entries that match the given extra index key.
+
+ This can only be called when `extra_index_cb` was specified.
+ """
+
+ assert extra_index_cb is not None
+
+ keys = extra_index.pop(index_key, None)
+ if not keys:
+ return
+
+ for key in keys:
+ node = cache.pop(key, None)
+ if not node:
+ continue
+
+ evicted_len = delete_node(node)
+ if metrics:
+ metrics.inc_evictions(EvictionReason.invalidation, evicted_len)
+
# make sure that we clear out any excess entries after we get resized.
self._on_resize = evict
@@ -771,6 +826,7 @@ class LruCache(Generic[KT, VT]):
self.len = synchronized(cache_len)
self.contains = cache_contains
self.clear = cache_clear
+ self.invalidate_on_extra_index = cache_invalidate_on_extra_index
def __getitem__(self, key: KT) -> VT:
result = self.get(key, _Sentinel.sentinel)
@@ -864,6 +920,9 @@ class AsyncLruCache(Generic[KT, VT]):
# This method should invalidate any external cache and then invalidate the LruCache.
return self._lru_cache.invalidate(key)
+ def invalidate_on_extra_index_local(self, index_key: KT) -> None:
+ self._lru_cache.invalidate_on_extra_index(index_key)
+
def invalidate_local(self, key: KT) -> None:
"""Remove an entry from the local cache
diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py
index dcc2b4be89..3f0d8139f8 100644
--- a/tests/util/test_lrucache.py
+++ b/tests/util/test_lrucache.py
@@ -383,3 +383,34 @@ class MemoryEvictionTestCase(unittest.HomeserverTestCase):
# the items should still be in the cache
self.assertEqual(cache.get("key1"), 1)
self.assertEqual(cache.get("key2"), 2)
+
+
+class ExtraIndexLruCacheTestCase(unittest.HomeserverTestCase):
+ def test_invalidate_simple(self) -> None:
+ cache: LruCache[str, int] = LruCache(10, extra_index_cb=lambda k, v: str(v))
+ cache["key1"] = 1
+ cache["key2"] = 2
+
+ cache.invalidate_on_extra_index("key1")
+ self.assertEqual(cache.get("key1"), 1)
+ self.assertEqual(cache.get("key2"), 2)
+
+ cache.invalidate_on_extra_index("1")
+ self.assertEqual(cache.get("key1"), None)
+ self.assertEqual(cache.get("key2"), 2)
+
+ def test_invalidate_multi(self) -> None:
+ cache: LruCache[str, int] = LruCache(10, extra_index_cb=lambda k, v: str(v))
+ cache["key1"] = 1
+ cache["key2"] = 1
+ cache["key3"] = 2
+
+ cache.invalidate_on_extra_index("key1")
+ self.assertEqual(cache.get("key1"), 1)
+ self.assertEqual(cache.get("key2"), 1)
+ self.assertEqual(cache.get("key3"), 2)
+
+ cache.invalidate_on_extra_index("1")
+ self.assertEqual(cache.get("key1"), None)
+ self.assertEqual(cache.get("key2"), None)
+ self.assertEqual(cache.get("key3"), 2)
|