diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 1a97809a26..ebf866e3d5 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -30,7 +30,7 @@ jobs:
run: docker buildx inspect
- name: Install Cosign
- uses: sigstore/cosign-installer@v3.6.0
+ uses: sigstore/cosign-installer@v3.7.0
- name: Checkout repository
uses: actions/checkout@v4
diff --git a/changelog.d/17627.doc b/changelog.d/17627.doc
new file mode 100644
index 0000000000..487a0aea0d
--- /dev/null
+++ b/changelog.d/17627.doc
@@ -0,0 +1 @@
+Clarify when the `user_may_invite` and `user_may_send_3pid_invite` module callbacks are called.
\ No newline at end of file
diff --git a/changelog.d/17708.feature b/changelog.d/17708.feature
new file mode 100644
index 0000000000..90ec810f50
--- /dev/null
+++ b/changelog.d/17708.feature
@@ -0,0 +1 @@
+Add the `display_name_claim` option to the JWT configuration. This option allows specifying the claim key that contains the user's display name in the JWT payload.
\ No newline at end of file
diff --git a/changelog.d/17718.misc b/changelog.d/17718.misc
new file mode 100644
index 0000000000..ea73a03f53
--- /dev/null
+++ b/changelog.d/17718.misc
@@ -0,0 +1 @@
+Slight optimization when fetching state/events for Sliding Sync.
diff --git a/changelog.d/17736.bugfix b/changelog.d/17736.bugfix
new file mode 100644
index 0000000000..0d3fd06962
--- /dev/null
+++ b/changelog.d/17736.bugfix
@@ -0,0 +1 @@
+Fix saving of PNG thumbnails when the original image is in the CMYK color space.
diff --git a/changelog.d/17783.feature b/changelog.d/17783.feature
new file mode 100644
index 0000000000..ce8c216418
--- /dev/null
+++ b/changelog.d/17783.feature
@@ -0,0 +1 @@
+Implement [MSC4210](https://github.com/matrix-org/matrix-spec-proposals/pull/4210): Remove legacy mentions. Contributed by @tulir @ Beeper.
diff --git a/changelog.d/17785.bugfix b/changelog.d/17785.bugfix
new file mode 100644
index 0000000000..df2898f54e
--- /dev/null
+++ b/changelog.d/17785.bugfix
@@ -0,0 +1 @@
+Fix bug with sliding sync where the server would not return state that was added to the `required_state` config.
diff --git a/changelog.d/17802.doc b/changelog.d/17802.doc
new file mode 100644
index 0000000000..72e653d3c4
--- /dev/null
+++ b/changelog.d/17802.doc
@@ -0,0 +1 @@
+Correct documentation to refer to the `--config-path` argument instead of `--config-file`.
diff --git a/changelog.d/17803.misc b/changelog.d/17803.misc
new file mode 100644
index 0000000000..a267df8b83
--- /dev/null
+++ b/changelog.d/17803.misc
@@ -0,0 +1 @@
+Test GitHub token before running release script steps.
diff --git a/changelog.d/17805.bugfix b/changelog.d/17805.bugfix
new file mode 100644
index 0000000000..df2898f54e
--- /dev/null
+++ b/changelog.d/17805.bugfix
@@ -0,0 +1 @@
+Fix bug with sliding sync where the server would not return state that was added to the `required_state` config.
diff --git a/changelog.d/17824.misc b/changelog.d/17824.misc
new file mode 100644
index 0000000000..22574f00ec
--- /dev/null
+++ b/changelog.d/17824.misc
@@ -0,0 +1 @@
+Build Debian packages for new Ubuntu versions, and stop building for versions that are no longer supported.
diff --git a/changelog.d/17825.doc b/changelog.d/17825.doc
new file mode 100644
index 0000000000..ee43667417
--- /dev/null
+++ b/changelog.d/17825.doc
@@ -0,0 +1 @@
+Fix typo in `target_cache_memory_usage` docs.
\ No newline at end of file
diff --git a/changelog.d/17826.misc b/changelog.d/17826.misc
new file mode 100644
index 0000000000..9148c96a0d
--- /dev/null
+++ b/changelog.d/17826.misc
@@ -0,0 +1 @@
+Enable the `.org.matrix.msc4028.encrypted_event` push rule by default in accordance with [MSC4028](https://github.com/matrix-org/matrix-spec-proposals/pull/4028). Note that the corresponding experimental feature must still be switched on for this push rule to have any effect.
\ No newline at end of file
diff --git a/docs/modules/spam_checker_callbacks.md b/docs/modules/spam_checker_callbacks.md
index ffdfe6082e..ec306d81ab 100644
--- a/docs/modules/spam_checker_callbacks.md
+++ b/docs/modules/spam_checker_callbacks.md
@@ -76,8 +76,9 @@ _Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_a
async def user_may_invite(inviter: str, invitee: str, room_id: str) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
```
-Called when processing an invitation. Both inviter and invitee are
-represented by their Matrix user ID (e.g. `@alice:example.com`).
+Called when processing an invitation, both when one is created locally and when
+one is received over federation. Both inviter and invitee are represented by
+their Matrix user ID (e.g. `@alice:example.com`).
The callback must return one of:
@@ -112,7 +113,9 @@ async def user_may_send_3pid_invite(
```
Called when processing an invitation using a third-party identifier (also called a 3PID,
-e.g. an email address or a phone number).
+e.g. an email address or a phone number). It is only called when a 3PID invite is created
+locally - not when one is received in a room over federation. If the 3PID is already associated
+with a Matrix ID, the spam check will go through the `user_may_invite` callback instead.
The inviter is represented by their Matrix user ID (e.g. `@alice:example.com`), and the
invitee is represented by its medium (e.g. "email") and its address
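For illustration, a minimal sketch of a module registering this callback. The module class and its `blocked_inviters` option are hypothetical; the `register_spam_checker_callbacks` API and the return values are the ones described in this document:

```python
# Hypothetical module: rejects invites from a configured list of users.
# Returning NOT_SPAM allows the invite; returning a Codes value rejects it.
from typing import Union

import synapse.module_api
from synapse.module_api import NOT_SPAM, ModuleApi
from synapse.module_api.errors import Codes


class InviteBlockerModule:
    def __init__(self, config: dict, api: ModuleApi):
        # Hypothetical config option: users who may not send invites.
        self._blocked_inviters = set(config.get("blocked_inviters", []))
        api.register_spam_checker_callbacks(user_may_invite=self.user_may_invite)

    async def user_may_invite(
        self, inviter: str, invitee: str, room_id: str
    ) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]:
        # Per the clarified docs above, this runs both for locally created
        # invites and for invites received over federation.
        if inviter in self._blocked_inviters:
            return Codes.FORBIDDEN
        return NOT_SPAM
```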
diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
index 29f3528c7e..47e3ef1287 100644
--- a/docs/usage/configuration/config_documentation.md
+++ b/docs/usage/configuration/config_documentation.md
@@ -1434,7 +1434,7 @@ number of entries that can be stored.
Please see the [Config Conventions](#config-conventions) for information on how to specify memory size and cache expiry
durations.
* `max_cache_memory_usage` sets a ceiling on how much memory the cache can use before caches begin to be continuously evicted.
- They will continue to be evicted until the memory usage drops below the `target_memory_usage`, set in
+ They will continue to be evicted until the memory usage drops below the `target_cache_memory_usage`, set in
the setting below, or until the `min_cache_ttl` is hit. There is no default value for this option.
* `target_cache_memory_usage` sets a rough target for the desired memory usage of the caches. There is no default value
for this option.
@@ -3722,6 +3722,8 @@ Additional sub-options for this setting include:
Required if `enabled` is set to true.
* `subject_claim`: Name of the claim containing a unique identifier for the user.
Optional, defaults to `sub`.
+* `display_name_claim`: Name of the claim containing the display name for the user. Optional.
+ If provided, the display name will be set to the value of this claim upon first login.
* `issuer`: The issuer to validate the "iss" claim against. Optional. If provided the
"iss" claim will be required and validated for all JSON web tokens.
* `audiences`: A list of audiences to validate the "aud" claim against. Optional.
@@ -3736,6 +3738,7 @@ jwt_config:
secret: "provided-by-your-issuer"
algorithm: "provided-by-your-issuer"
subject_claim: "name_of_claim"
+ display_name_claim: "name_of_claim"
issuer: "provided-by-your-issuer"
audiences:
- "provided-by-your-issuer"
diff --git a/docs/workers.md b/docs/workers.md
index 51b22fef9b..0116c455bc 100644
--- a/docs/workers.md
+++ b/docs/workers.md
@@ -177,11 +177,11 @@ The following applies to Synapse installations that have been installed from sou
You can start the main Synapse process with Poetry by running the following command:
```console
-poetry run synapse_homeserver --config-file [your homeserver.yaml]
+poetry run synapse_homeserver --config-path [your homeserver.yaml]
```
For worker setups, you can run the following command
```console
-poetry run synapse_worker --config-file [your homeserver.yaml] --config-file [your worker.yaml]
+poetry run synapse_worker --config-path [your homeserver.yaml] --config-path [your worker.yaml]
```
## Available worker applications
diff --git a/poetry.lock b/poetry.lock
index bf30fbbe15..9fd95ff447 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1377,16 +1377,17 @@ files = [
[[package]]
name = "mypy-zope"
-version = "1.0.5"
+version = "1.0.7"
description = "Plugin for mypy to support zope interfaces"
optional = false
python-versions = "*"
files = [
- {file = "mypy_zope-1.0.5.tar.gz", hash = "sha256:2440406d49c0e1199c1cd819c92a2c4957de65579c6abc8a081c927f4bdc8d49"},
+ {file = "mypy_zope-1.0.7-py3-none-any.whl", hash = "sha256:f19de249574319d81083b15f8a022c6b15583582f23340a860922141f1b651ca"},
+ {file = "mypy_zope-1.0.7.tar.gz", hash = "sha256:32a79ce78647c0bea61e7e0c0eb1233fcb97bb94e8950cca73f17d3419c602f7"},
]
[package.dependencies]
-mypy = ">=1.0.0,<1.11.0"
+mypy = ">=1.0.0,<1.12.0"
"zope.interface" = "*"
"zope.schema" = "*"
@@ -1447,13 +1448,13 @@ dev = ["jinja2"]
[[package]]
name = "phonenumbers"
-version = "8.13.46"
+version = "8.13.47"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
optional = false
python-versions = "*"
files = [
- {file = "phonenumbers-8.13.46-py2.py3-none-any.whl", hash = "sha256:519422d407af066fdbf98e179ea2e214487060f26526d67871f817eefbbb2134"},
- {file = "phonenumbers-8.13.46.tar.gz", hash = "sha256:94bf18ba9725bb6868d29473b13f78ef01e2585c5cb561ec0200be7676e77452"},
+ {file = "phonenumbers-8.13.47-py2.py3-none-any.whl", hash = "sha256:5d3c0142ef7055ca5551884352e3b6b93bfe002a0bc95b8eaba39b0e2184541b"},
+ {file = "phonenumbers-8.13.47.tar.gz", hash = "sha256:53c5e7c6d431cafe4efdd44956078404ae9bc8b0eacc47be3105d3ccc88aaffa"},
]
[[package]]
@@ -2277,29 +2278,29 @@ files = [
[[package]]
name = "ruff"
-version = "0.6.8"
+version = "0.6.9"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
- {file = "ruff-0.6.8-py3-none-linux_armv6l.whl", hash = "sha256:77944bca110ff0a43b768f05a529fecd0706aac7bcce36d7f1eeb4cbfca5f0f2"},
- {file = "ruff-0.6.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:27b87e1801e786cd6ede4ada3faa5e254ce774de835e6723fd94551464c56b8c"},
- {file = "ruff-0.6.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:cd48f945da2a6334f1793d7f701725a76ba93bf3d73c36f6b21fb04d5338dcf5"},
- {file = "ruff-0.6.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:677e03c00f37c66cea033274295a983c7c546edea5043d0c798833adf4cf4c6f"},
- {file = "ruff-0.6.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f1476236b3eacfacfc0f66aa9e6cd39f2a624cb73ea99189556015f27c0bdeb"},
- {file = "ruff-0.6.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f5a2f17c7d32991169195d52a04c95b256378bbf0de8cb98478351eb70d526f"},
- {file = "ruff-0.6.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5fd0d4b7b1457c49e435ee1e437900ced9b35cb8dc5178921dfb7d98d65a08d0"},
- {file = "ruff-0.6.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8034b19b993e9601f2ddf2c517451e17a6ab5cdb1c13fdff50c1442a7171d87"},
- {file = "ruff-0.6.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6cfb227b932ba8ef6e56c9f875d987973cd5e35bc5d05f5abf045af78ad8e098"},
- {file = "ruff-0.6.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef0411eccfc3909269fed47c61ffebdcb84a04504bafa6b6df9b85c27e813b0"},
- {file = "ruff-0.6.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:007dee844738c3d2e6c24ab5bc7d43c99ba3e1943bd2d95d598582e9c1b27750"},
- {file = "ruff-0.6.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ce60058d3cdd8490e5e5471ef086b3f1e90ab872b548814e35930e21d848c9ce"},
- {file = "ruff-0.6.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1085c455d1b3fdb8021ad534379c60353b81ba079712bce7a900e834859182fa"},
- {file = "ruff-0.6.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:70edf6a93b19481affd287d696d9e311388d808671bc209fb8907b46a8c3af44"},
- {file = "ruff-0.6.8-py3-none-win32.whl", hash = "sha256:792213f7be25316f9b46b854df80a77e0da87ec66691e8f012f887b4a671ab5a"},
- {file = "ruff-0.6.8-py3-none-win_amd64.whl", hash = "sha256:ec0517dc0f37cad14a5319ba7bba6e7e339d03fbf967a6d69b0907d61be7a263"},
- {file = "ruff-0.6.8-py3-none-win_arm64.whl", hash = "sha256:8d3bb2e3fbb9875172119021a13eed38849e762499e3cfde9588e4b4d70968dc"},
- {file = "ruff-0.6.8.tar.gz", hash = "sha256:a5bf44b1aa0adaf6d9d20f86162b34f7c593bfedabc51239953e446aefc8ce18"},
+ {file = "ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd"},
+ {file = "ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec"},
+ {file = "ruff-0.6.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c"},
+ {file = "ruff-0.6.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645d7d8761f915e48a00d4ecc3686969761df69fb561dd914a773c1a8266e14e"},
+ {file = "ruff-0.6.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eae02b700763e3847595b9d2891488989cac00214da7f845f4bcf2989007d577"},
+ {file = "ruff-0.6.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d5ccc9e58112441de8ad4b29dcb7a86dc25c5f770e3c06a9d57e0e5eba48829"},
+ {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:417b81aa1c9b60b2f8edc463c58363075412866ae4e2b9ab0f690dc1e87ac1b5"},
+ {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c866b631f5fbce896a74a6e4383407ba7507b815ccc52bcedabb6810fdb3ef7"},
+ {file = "ruff-0.6.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b118afbb3202f5911486ad52da86d1d52305b59e7ef2031cea3425142b97d6f"},
+ {file = "ruff-0.6.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa"},
+ {file = "ruff-0.6.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3ef0cc774b00fec123f635ce5c547dac263f6ee9fb9cc83437c5904183b55ceb"},
+ {file = "ruff-0.6.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:12edd2af0c60fa61ff31cefb90aef4288ac4d372b4962c2864aeea3a1a2460c0"},
+ {file = "ruff-0.6.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:55bb01caeaf3a60b2b2bba07308a02fca6ab56233302406ed5245180a05c5625"},
+ {file = "ruff-0.6.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:925d26471fa24b0ce5a6cdfab1bb526fb4159952385f386bdcc643813d472039"},
+ {file = "ruff-0.6.9-py3-none-win32.whl", hash = "sha256:eb61ec9bdb2506cffd492e05ac40e5bc6284873aceb605503d8494180d6fc84d"},
+ {file = "ruff-0.6.9-py3-none-win_amd64.whl", hash = "sha256:785d31851c1ae91f45b3d8fe23b8ae4b5170089021fbb42402d811135f0b7117"},
+ {file = "ruff-0.6.9-py3-none-win_arm64.whl", hash = "sha256:a9641e31476d601f83cd602608739a0840e348bda93fec9f1ee816f8b6798b93"},
+ {file = "ruff-0.6.9.tar.gz", hash = "sha256:b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2"},
]
[[package]]
@@ -2334,13 +2335,13 @@ doc = ["Sphinx", "sphinx-rtd-theme"]
[[package]]
name = "sentry-sdk"
-version = "2.14.0"
+version = "2.16.0"
description = "Python client for Sentry (https://sentry.io)"
optional = true
python-versions = ">=3.6"
files = [
- {file = "sentry_sdk-2.14.0-py2.py3-none-any.whl", hash = "sha256:b8bc3dc51d06590df1291b7519b85c75e2ced4f28d9ea655b6d54033503b5bf4"},
- {file = "sentry_sdk-2.14.0.tar.gz", hash = "sha256:1e0e2eaf6dad918c7d1e0edac868a7bf20017b177f242cefe2a6bcd47955961d"},
+ {file = "sentry_sdk-2.16.0-py2.py3-none-any.whl", hash = "sha256:49139c31ebcd398f4f6396b18910610a0c1602f6e67083240c33019d1f6aa30c"},
+ {file = "sentry_sdk-2.16.0.tar.gz", hash = "sha256:90f733b32e15dfc1999e6b7aca67a38688a567329de4d6e184154a73f96c6892"},
]
[package.dependencies]
@@ -2363,6 +2364,7 @@ falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"]
grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"]
+http2 = ["httpcore[http2] (==1.*)"]
httpx = ["httpx (>=0.16.0)"]
huey = ["huey (>=2)"]
huggingface-hub = ["huggingface-hub (>=0.22)"]
@@ -2535,13 +2537,13 @@ twisted = ["twisted"]
[[package]]
name = "tomli"
-version = "2.0.1"
+version = "2.0.2"
description = "A lil' TOML parser"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
- {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+ {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"},
+ {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"},
]
[[package]]
@@ -2833,13 +2835,13 @@ urllib3 = ">=2"
[[package]]
name = "types-setuptools"
-version = "75.1.0.20240917"
+version = "75.1.0.20241014"
description = "Typing stubs for setuptools"
optional = false
python-versions = ">=3.8"
files = [
- {file = "types-setuptools-75.1.0.20240917.tar.gz", hash = "sha256:12f12a165e7ed383f31def705e5c0fa1c26215dd466b0af34bd042f7d5331f55"},
- {file = "types_setuptools-75.1.0.20240917-py3-none-any.whl", hash = "sha256:06f78307e68d1bbde6938072c57b81cf8a99bc84bd6dc7e4c5014730b097dc0c"},
+ {file = "types-setuptools-75.1.0.20241014.tar.gz", hash = "sha256:29b0560a8d4b4a91174be085847002c69abfcb048e20b33fc663005aedf56804"},
+ {file = "types_setuptools-75.1.0.20241014-py3-none-any.whl", hash = "sha256:caab58366741fb99673d0138b6e2d760717f154cfb981b74fea5e8de40f0b703"},
]
[[package]]
@@ -3114,4 +3116,4 @@ user-search = ["pyicu"]
[metadata]
lock-version = "2.0"
python-versions = "^3.8.0"
-content-hash = "304d03b74d2886def69ae44ce5afaed21318db9f09aae91281e0f182e1660ffd"
+content-hash = "c8a22f901970b2f851151e731532757fd3acf7ba02930952636d2e6c5c9c0c90"
diff --git a/pyproject.toml b/pyproject.toml
index 078ad3bc95..658771a89a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -320,7 +320,7 @@ all = [
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
# This helps prevent merge conflicts when running a batch of dependabot updates.
-ruff = "0.6.8"
+ruff = "0.6.9"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"
diff --git a/rust/benches/evaluator.rs b/rust/benches/evaluator.rs
index 4fea035b96..28537e187e 100644
--- a/rust/benches/evaluator.rs
+++ b/rust/benches/evaluator.rs
@@ -60,6 +60,7 @@ fn bench_match_exact(b: &mut Bencher) {
true,
vec![],
false,
+ false,
)
.unwrap();
@@ -105,6 +106,7 @@ fn bench_match_word(b: &mut Bencher) {
true,
vec![],
false,
+ false,
)
.unwrap();
@@ -150,6 +152,7 @@ fn bench_match_word_miss(b: &mut Bencher) {
true,
vec![],
false,
+ false,
)
.unwrap();
@@ -195,6 +198,7 @@ fn bench_eval_message(b: &mut Bencher) {
true,
vec![],
false,
+ false,
)
.unwrap();
@@ -205,6 +209,7 @@ fn bench_eval_message(b: &mut Bencher) {
false,
false,
false,
+ false,
);
b.iter(|| eval.run(&rules, Some("bob"), Some("person")));
diff --git a/rust/src/push/base_rules.rs b/rust/src/push/base_rules.rs
index 74f02d6001..e0832ada1c 100644
--- a/rust/src/push/base_rules.rs
+++ b/rust/src/push/base_rules.rs
@@ -81,7 +81,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
))]),
actions: Cow::Borrowed(&[Action::Notify]),
default: true,
- default_enabled: false,
+ default_enabled: true,
},
PushRule {
rule_id: Cow::Borrowed("global/override/.m.rule.suppress_notices"),
diff --git a/rust/src/push/evaluator.rs b/rust/src/push/evaluator.rs
index 2f4b6d47bb..0d436a1d7b 100644
--- a/rust/src/push/evaluator.rs
+++ b/rust/src/push/evaluator.rs
@@ -105,6 +105,9 @@ pub struct PushRuleEvaluator {
/// If MSC3931 (room version feature flags) is enabled. Usually controlled by the same
/// flag as MSC1767 (extensible events core).
msc3931_enabled: bool,
+
+ /// If MSC4210 (remove legacy mentions) is enabled.
+ msc4210_enabled: bool,
}
#[pymethods]
@@ -122,6 +125,7 @@ impl PushRuleEvaluator {
related_event_match_enabled,
room_version_feature_flags,
msc3931_enabled,
+ msc4210_enabled,
))]
pub fn py_new(
flattened_keys: BTreeMap<String, JsonValue>,
@@ -133,6 +137,7 @@ impl PushRuleEvaluator {
related_event_match_enabled: bool,
room_version_feature_flags: Vec<String>,
msc3931_enabled: bool,
+ msc4210_enabled: bool,
) -> Result<Self, Error> {
let body = match flattened_keys.get("content.body") {
Some(JsonValue::Value(SimpleJsonValue::Str(s))) => s.clone().into_owned(),
@@ -150,6 +155,7 @@ impl PushRuleEvaluator {
related_event_match_enabled,
room_version_feature_flags,
msc3931_enabled,
+ msc4210_enabled,
})
}
@@ -176,7 +182,8 @@ impl PushRuleEvaluator {
// For backwards-compatibility the legacy mention rules are disabled
// if the event contains the 'm.mentions' property.
- if self.has_mentions
+ // Additionally, MSC4210 always disables the legacy rules.
+ if (self.has_mentions || self.msc4210_enabled)
&& (rule_id == "global/override/.m.rule.contains_display_name"
|| rule_id == "global/content/.m.rule.contains_user_name"
|| rule_id == "global/override/.m.rule.roomnotif")
@@ -526,6 +533,7 @@ fn push_rule_evaluator() {
true,
vec![],
true,
+ false,
)
.unwrap();
@@ -555,6 +563,7 @@ fn test_requires_room_version_supports_condition() {
false,
flags,
true,
+ false,
)
.unwrap();
@@ -582,7 +591,7 @@ fn test_requires_room_version_supports_condition() {
};
let rules = PushRules::new(vec![custom_rule]);
result = evaluator.run(
- &FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false),
+ &FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false, false),
None,
None,
);
diff --git a/rust/src/push/mod.rs b/rust/src/push/mod.rs
index 2a452b69a3..ef8ed150d4 100644
--- a/rust/src/push/mod.rs
+++ b/rust/src/push/mod.rs
@@ -534,6 +534,7 @@ pub struct FilteredPushRules {
msc3381_polls_enabled: bool,
msc3664_enabled: bool,
msc4028_push_encrypted_events: bool,
+ msc4210_enabled: bool,
}
#[pymethods]
@@ -546,6 +547,7 @@ impl FilteredPushRules {
msc3381_polls_enabled: bool,
msc3664_enabled: bool,
msc4028_push_encrypted_events: bool,
+ msc4210_enabled: bool,
) -> Self {
Self {
push_rules,
@@ -554,6 +556,7 @@ impl FilteredPushRules {
msc3381_polls_enabled,
msc3664_enabled,
msc4028_push_encrypted_events,
+ msc4210_enabled,
}
}
@@ -596,6 +599,14 @@ impl FilteredPushRules {
return false;
}
+ if self.msc4210_enabled
+ && (rule.rule_id == "global/override/.m.rule.contains_display_name"
+ || rule.rule_id == "global/content/.m.rule.contains_user_name"
+ || rule.rule_id == "global/override/.m.rule.roomnotif")
+ {
+ return false;
+ }
+
true
})
.map(|r| {
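For readability, the new filtering behaviour expressed as a Python sketch (the rule IDs are the real ones from the Rust above; the function name is illustrative):

```python
# Sketch of the Rust filter above: with MSC4210 enabled, the three legacy
# mention rules are dropped from the filtered rule set entirely.
LEGACY_MENTION_RULE_IDS = frozenset(
    {
        "global/override/.m.rule.contains_display_name",
        "global/content/.m.rule.contains_user_name",
        "global/override/.m.rule.roomnotif",
    }
)


def keep_rule(rule_id: str, msc4210_enabled: bool) -> bool:
    """Mirror of the filter arm added to FilteredPushRules.rules()."""
    return not (msc4210_enabled and rule_id in LEGACY_MENTION_RULE_IDS)
```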
diff --git a/scripts-dev/build_debian_packages.py b/scripts-dev/build_debian_packages.py
index de2a134544..88c8419400 100755
--- a/scripts-dev/build_debian_packages.py
+++ b/scripts-dev/build_debian_packages.py
@@ -32,8 +32,8 @@ DISTS = (
"debian:sid", # (EOL not specified yet) (our EOL forced by Python 3.11 is 2027-10-24)
"ubuntu:focal", # 20.04 LTS (EOL 2025-04) (our EOL forced by Python 3.8 is 2024-10-14)
"ubuntu:jammy", # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04)
- "ubuntu:lunar", # 23.04 (EOL 2024-01) (our EOL forced by Python 3.11 is 2027-10-24)
- "ubuntu:mantic", # 23.10 (EOL 2024-07) (our EOL forced by Python 3.11 is 2027-10-24)
+ "ubuntu:noble", # 24.04 LTS (EOL 2029-06)
+ "ubuntu:oracular", # 24.10 (EOL 2025-07)
"debian:trixie", # (EOL not specified yet)
)
diff --git a/scripts-dev/release.py b/scripts-dev/release.py
index 4435624267..b14b61c705 100755
--- a/scripts-dev/release.py
+++ b/scripts-dev/release.py
@@ -40,7 +40,7 @@ import commonmark
import git
from click.exceptions import ClickException
from git import GitCommandError, Repo
-from github import Github
+from github import BadCredentialsException, Github
from packaging import version
@@ -323,10 +323,8 @@ def tag(gh_token: Optional[str]) -> None:
def _tag(gh_token: Optional[str]) -> None:
"""Tags the release and generates a draft GitHub release"""
- if gh_token:
- # Test that the GH Token is valid before continuing.
- gh = Github(gh_token)
- gh.get_user()
+ # Test that the GH Token is valid before continuing.
+ check_valid_gh_token(gh_token)
# Make sure we're in a git repo.
repo = get_repo_and_check_clean_checkout()
@@ -469,10 +467,8 @@ def upload(gh_token: Optional[str]) -> None:
def _upload(gh_token: Optional[str]) -> None:
"""Upload release to pypi."""
- if gh_token:
- # Test that the GH Token is valid before continuing.
- gh = Github(gh_token)
- gh.get_user()
+ # Test that the GH Token is valid before continuing.
+ check_valid_gh_token(gh_token)
current_version = get_package_version()
tag_name = f"v{current_version}"
@@ -569,10 +565,8 @@ def wait_for_actions(gh_token: Optional[str]) -> None:
def _wait_for_actions(gh_token: Optional[str]) -> None:
- if gh_token:
- # Test that the GH Token is valid before continuing.
- gh = Github(gh_token)
- gh.get_user()
+ # Test that the GH Token is valid before continuing.
+ check_valid_gh_token(gh_token)
# Find out the version and tag name.
current_version = get_package_version()
@@ -806,6 +800,22 @@ def get_repo_and_check_clean_checkout(
return repo
+def check_valid_gh_token(gh_token: Optional[str]) -> None:
+ """Check that a github token is valid, if supplied"""
+
+ if not gh_token:
+ # No GitHub token supplied, so nothing to do.
+ return
+
+ try:
+ gh = Github(gh_token)
+
+ # We need to look up the name to trigger a request.
+ _name = gh.get_user().name
+ except BadCredentialsException as e:
+ raise ClickException(f"GitHub credentials are bad: {e}") from e
+
+
def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
"""Find the branch/ref, looking first locally then in the remote."""
if ref_name in repo.references:
diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py
index 99185db93d..fd14db0d02 100644
--- a/synapse/config/experimental.py
+++ b/synapse/config/experimental.py
@@ -447,3 +447,6 @@ class ExperimentalConfig(Config):
# MSC4151: Report room API (Client-Server API)
self.msc4151_enabled: bool = experimental.get("msc4151_enabled", False)
+
+ # MSC4210: Remove legacy mentions
+ self.msc4210_enabled: bool = experimental.get("msc4210_enabled", False)
diff --git a/synapse/config/jwt.py b/synapse/config/jwt.py
index b41f2dc08f..5c76551f33 100644
--- a/synapse/config/jwt.py
+++ b/synapse/config/jwt.py
@@ -38,6 +38,7 @@ class JWTConfig(Config):
self.jwt_algorithm = jwt_config["algorithm"]
self.jwt_subject_claim = jwt_config.get("subject_claim", "sub")
+ self.jwt_display_name_claim = jwt_config.get("display_name_claim")
# The issuer and audiences are optional, if provided, it is asserted
# that the claims exist on the JWT.
@@ -49,5 +50,6 @@ class JWTConfig(Config):
self.jwt_secret = None
self.jwt_algorithm = None
self.jwt_subject_claim = None
+ self.jwt_display_name_claim = None
self.jwt_issuer = None
self.jwt_audiences = None
diff --git a/synapse/handlers/jwt.py b/synapse/handlers/jwt.py
index 5fa7a305ad..400f3a59aa 100644
--- a/synapse/handlers/jwt.py
+++ b/synapse/handlers/jwt.py
@@ -18,7 +18,7 @@
# [This file includes modifications made by New Vector Limited]
#
#
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional, Tuple
from authlib.jose import JsonWebToken, JWTClaims
from authlib.jose.errors import BadSignatureError, InvalidClaimError, JoseError
@@ -36,11 +36,12 @@ class JwtHandler:
self.jwt_secret = hs.config.jwt.jwt_secret
self.jwt_subject_claim = hs.config.jwt.jwt_subject_claim
+ self.jwt_display_name_claim = hs.config.jwt.jwt_display_name_claim
self.jwt_algorithm = hs.config.jwt.jwt_algorithm
self.jwt_issuer = hs.config.jwt.jwt_issuer
self.jwt_audiences = hs.config.jwt.jwt_audiences
- def validate_login(self, login_submission: JsonDict) -> str:
+ def validate_login(self, login_submission: JsonDict) -> Tuple[str, Optional[str]]:
"""
Authenticates the user for the /login API
@@ -49,7 +50,8 @@ class JwtHandler:
(including 'type' and other relevant fields)
Returns:
- The user ID that is logging in.
+ A tuple of (user_id, display_name) of the user that is logging in.
+ If the JWT does not contain a display name, the second element of the tuple will be None.
Raises:
LoginError if there was an authentication problem.
@@ -109,4 +111,10 @@ class JwtHandler:
if user is None:
raise LoginError(403, "Invalid JWT", errcode=Codes.FORBIDDEN)
- return UserID(user, self.hs.hostname).to_string()
+ default_display_name = None
+ if self.jwt_display_name_claim:
+ display_name_claim = claims.get(self.jwt_display_name_claim)
+ if display_name_claim is not None:
+ default_display_name = display_name_claim
+
+ return UserID(user, self.hs.hostname).to_string(), default_display_name
diff --git a/synapse/handlers/sliding_sync/__init__.py b/synapse/handlers/sliding_sync/__init__.py
index 8c12cea8eb..a1a6728fb9 100644
--- a/synapse/handlers/sliding_sync/__init__.py
+++ b/synapse/handlers/sliding_sync/__init__.py
@@ -14,7 +14,7 @@
import logging
from itertools import chain
-from typing import TYPE_CHECKING, Dict, List, Mapping, Optional, Set, Tuple
+from typing import TYPE_CHECKING, AbstractSet, Dict, List, Mapping, Optional, Set, Tuple
from prometheus_client import Histogram
from typing_extensions import assert_never
@@ -452,13 +452,11 @@ class SlidingSyncHandler:
to_token=to_token,
)
- event_map = await self.store.get_events(list(state_ids.values()))
+ events = await self.store.get_events_as_list(list(state_ids.values()))
state_map = {}
- for key, event_id in state_ids.items():
- event = event_map.get(event_id)
- if event:
- state_map[key] = event
+ for event in events:
+ state_map[(event.type, event.state_key)] = event
return state_map
@@ -522,6 +520,8 @@ class SlidingSyncHandler:
state_reset_out_of_room = True
+ prev_room_sync_config = previous_connection_state.room_configs.get(room_id)
+
# Determine whether we should limit the timeline to the token range.
#
# We should return historical messages (before token range) in the
@@ -550,7 +550,6 @@ class SlidingSyncHandler:
# or `limited` mean for clients that interpret them correctly. In future this
# behavior is almost certainly going to change.
#
- # TODO: Also handle changes to `required_state`
from_bound = None
initial = True
ignore_timeline_bound = False
@@ -571,7 +570,6 @@ class SlidingSyncHandler:
log_kv({"sliding_sync.room_status": room_status})
- prev_room_sync_config = previous_connection_state.room_configs.get(room_id)
if prev_room_sync_config is not None:
# Check if the timeline limit has increased, if so ignore the
# timeline bound and record the change (see "XXX: Odd behavior"
@@ -582,8 +580,6 @@ class SlidingSyncHandler:
):
ignore_timeline_bound = True
- # TODO: Check for changes in `required_state``
-
log_kv(
{
"sliding_sync.from_bound": from_bound,
@@ -997,6 +993,10 @@ class SlidingSyncHandler:
include_others=required_state_filter.include_others,
)
+ # The required state map to store in the room sync config, if it has
+ # changed.
+ changed_required_state_map: Optional[Mapping[str, AbstractSet[str]]] = None
+
# We can return all of the state that was requested if this was the first
# time we've sent the room down this connection.
room_state: StateMap[EventBase] = {}
@@ -1010,6 +1010,29 @@ class SlidingSyncHandler:
else:
assert from_bound is not None
+ if prev_room_sync_config is not None:
+ # Check if there are any changes to the required state config
+ # that we need to handle.
+ changed_required_state_map, added_state_filter = (
+ _required_state_changes(
+ user.to_string(),
+ previous_room_config=prev_room_sync_config,
+ room_sync_config=room_sync_config,
+ state_deltas=room_state_delta_id_map,
+ )
+ )
+
+ if added_state_filter:
+ # Some state entries got added, so we pull out the current
+ # state for them. If we don't do this we'd only send down new deltas.
+ state_ids = await self.get_current_state_ids_at(
+ room_id=room_id,
+ room_membership_for_user_at_to_token=room_membership_for_user_at_to_token,
+ state_filter=added_state_filter,
+ to_token=to_token,
+ )
+ room_state_delta_id_map.update(state_ids)
+
events = await self.store.get_events(
state_filter.filter_state(room_state_delta_id_map).values()
)
@@ -1108,10 +1131,13 @@ class SlidingSyncHandler:
# sensible order again.
bump_stamp = 0
- unstable_expanded_timeline = False
- prev_room_sync_config = previous_connection_state.room_configs.get(room_id)
+ room_sync_required_state_map_to_persist = room_sync_config.required_state_map
+ if changed_required_state_map:
+ room_sync_required_state_map_to_persist = changed_required_state_map
+
# Record the `room_sync_config` if `ignore_timeline_bound` is set (which means
# that the `timeline_limit` has increased)
+ unstable_expanded_timeline = False
if ignore_timeline_bound:
# FIXME: We signal the fact that we're sending down more events to
# the client by setting `unstable_expanded_timeline` to true (see
@@ -1120,7 +1146,7 @@ class SlidingSyncHandler:
new_connection_state.room_configs[room_id] = RoomSyncConfig(
timeline_limit=room_sync_config.timeline_limit,
- required_state_map=room_sync_config.required_state_map,
+ required_state_map=room_sync_required_state_map_to_persist,
)
elif prev_room_sync_config is not None:
# If the result is `limited` then we need to record that the
@@ -1149,10 +1175,14 @@ class SlidingSyncHandler:
):
new_connection_state.room_configs[room_id] = RoomSyncConfig(
timeline_limit=room_sync_config.timeline_limit,
- required_state_map=room_sync_config.required_state_map,
+ required_state_map=room_sync_required_state_map_to_persist,
)
- # TODO: Record changes in required_state.
+ elif changed_required_state_map is not None:
+ new_connection_state.room_configs[room_id] = RoomSyncConfig(
+ timeline_limit=room_sync_config.timeline_limit,
+ required_state_map=room_sync_required_state_map_to_persist,
+ )
else:
new_connection_state.room_configs[room_id] = room_sync_config
@@ -1285,3 +1315,185 @@ class SlidingSyncHandler:
return new_bump_event_pos.stream
return None
+
+
+def _required_state_changes(
+ user_id: str,
+ *,
+ previous_room_config: "RoomSyncConfig",
+ room_sync_config: RoomSyncConfig,
+ state_deltas: StateMap[str],
+) -> Tuple[Optional[Mapping[str, AbstractSet[str]]], StateFilter]:
+ """Calculates the changes between the required state room config from the
+ previous requests compared with the current request.
+
+ This does two things. First, it calculates if we need to update the room
+ config due to changes to required state. Secondly, it works out which state
+ entries we need to pull from current state and return due to the state entry
+ now appearing in the required state when it previously wasn't (on top of the
+ state deltas).
+
+ This function takes care to handle the case where a state entry is
+ added, removed and then added again to the required state. In that case we
+ only want to re-send that entry down sync if it has changed.
+
+ Returns:
+ A 2-tuple of updated required state config (or None if there is no update)
+ and the state filter to use to fetch extra current state that we need to
+ return.
+ """
+
+ prev_required_state_map = previous_room_config.required_state_map
+ request_required_state_map = room_sync_config.required_state_map
+
+ if prev_required_state_map == request_required_state_map:
+ # There has been no change. Return immediately.
+ return None, StateFilter.none()
+
+ prev_wildcard = prev_required_state_map.get(StateValues.WILDCARD, set())
+ request_wildcard = request_required_state_map.get(StateValues.WILDCARD, set())
+
+ # If we were previously fetching everything ("*", "*"), always update the effective
+ # room required state config to match the request. And since we were previously
+ # already fetching everything, we don't have to fetch anything now that they've
+ # narrowed.
+ if StateValues.WILDCARD in prev_wildcard:
+ return request_required_state_map, StateFilter.none()
+
+ # If an event type wildcard has been added or removed, we don't try to do
+ # anything fancy, and instead always update the effective room required
+ # state config to match the request.
+ if request_wildcard - prev_wildcard:
+ # Some keys were added, so we need to fetch everything
+ return request_required_state_map, StateFilter.all()
+ if prev_wildcard - request_wildcard:
+ # Keys were only removed, so we don't have to fetch everything.
+ return request_required_state_map, StateFilter.none()
+
+ # Contains updates to the required state map compared with the previous room
+ # config. This has the same format as `RoomSyncConfig.required_state`
+ changes: Dict[str, AbstractSet[str]] = {}
+
+ # The set of types/state keys that we need to fetch and return to the
+ # client. Passed to `StateFilter.from_types(...)`
+ added: List[Tuple[str, Optional[str]]] = []
+
+ # First we calculate what, if anything, has been *added*.
+ for event_type in (
+ prev_required_state_map.keys() | request_required_state_map.keys()
+ ):
+ old_state_keys = prev_required_state_map.get(event_type, set())
+ request_state_keys = request_required_state_map.get(event_type, set())
+
+ if old_state_keys == request_state_keys:
+ # No change to this type
+ continue
+
+ if not request_state_keys - old_state_keys:
+ # Nothing *added*, so we skip. Removals happen below.
+ continue
+
+ # Always update changes to include the newly added keys
+ changes[event_type] = request_state_keys
+
+ if StateValues.WILDCARD in old_state_keys:
+ # We were previously fetching everything for this type, so we don't need to
+ # fetch anything new.
+ continue
+
+ # Record the new state keys to fetch for this type.
+ if StateValues.WILDCARD in request_state_keys:
+ # If we have added a wildcard then we always just fetch everything.
+ added.append((event_type, None))
+ else:
+ for state_key in request_state_keys - old_state_keys:
+ if state_key == StateValues.ME:
+ added.append((event_type, user_id))
+ elif state_key == StateValues.LAZY:
+ # We handle lazy loading separately (outside this function),
+ # so don't need to explicitly add anything here.
+ #
+ # LAZY values should also be ignored for event types that are
+ # not membership.
+ pass
+ else:
+ added.append((event_type, state_key))
+
+ added_state_filter = StateFilter.from_types(added)
+
+ # Convert the list of state deltas to map from type to state_keys that have
+ # changed.
+ changed_types_to_state_keys: Dict[str, Set[str]] = {}
+ for event_type, state_key in state_deltas:
+ changed_types_to_state_keys.setdefault(event_type, set()).add(state_key)
+
+ # Figure out what changes we need to apply to the effective required state
+ # config.
+ for event_type, changed_state_keys in changed_types_to_state_keys.items():
+ old_state_keys = prev_required_state_map.get(event_type, set())
+ request_state_keys = request_required_state_map.get(event_type, set())
+
+ if old_state_keys == request_state_keys:
+ # No change.
+ continue
+
+ if request_state_keys - old_state_keys:
+ # We've expanded the set of state keys, so we just clobber the
+ # current set with the new set.
+ #
+ # We could also ensure that we keep entries where the state hasn't
+ # changed, but are no longer in the requested required state, but
+ # that's enough of an edge case that we can ignore (as it's only a
+ # performance optimization).
+ changes[event_type] = request_state_keys
+ continue
+
+ old_state_key_wildcard = StateValues.WILDCARD in old_state_keys
+ request_state_key_wildcard = StateValues.WILDCARD in request_state_keys
+
+ if old_state_key_wildcard != request_state_key_wildcard:
+ # If a state_key wildcard has been added or removed, we always update the
+ # effective room required state config to match the request.
+ changes[event_type] = request_state_keys
+ continue
+
+ if event_type == EventTypes.Member:
+ old_state_key_lazy = StateValues.LAZY in old_state_keys
+ request_state_key_lazy = StateValues.LAZY in request_state_keys
+
+ if old_state_key_lazy != request_state_key_lazy:
+ # If a "$LAZY" has been added or removed we always update the effective room
+ # required state config to match the request.
+ changes[event_type] = request_state_keys
+ continue
+
+ # Handle "$ME" values by adding "$ME" if the state key matches the user
+ # ID.
+ if user_id in changed_state_keys:
+ changed_state_keys.add(StateValues.ME)
+
+ # At this point there are no wildcards and no additions to the set of
+ # state keys requested, only deletions.
+ #
+ # We only remove state keys from the effective state if they've been
+ # removed from the request *and* the state has changed. This ensures
+ # that if a client removes and then re-adds a state key, we only send
+ # down the associated current state event if it's changed (rather than
+ # sending down the same event twice).
+ invalidated = (old_state_keys - request_state_keys) & changed_state_keys
+ if invalidated:
+ changes[event_type] = old_state_keys - invalidated
+
+ if changes:
+ # Update the required state config based on the changes.
+ new_required_state_map = dict(prev_required_state_map)
+ for event_type, state_keys in changes.items():
+ if state_keys:
+ new_required_state_map[event_type] = state_keys
+ else:
+ # Remove entries with empty state keys.
+ new_required_state_map.pop(event_type, None)
+
+ return new_required_state_map, added_state_filter
+ else:
+ return None, added_state_filter
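A usage sketch of the new helper, mirroring the "simple_add_type" test case further down (assumes a Synapse checkout on the path; `RoomSyncConfig` is constructed exactly as the handler does above):

```python
from synapse.handlers.sliding_sync import RoomSyncConfig, _required_state_changes

prev = RoomSyncConfig(timeline_limit=10, required_state_map={"type1": {"state_key"}})
request = RoomSyncConfig(
    timeline_limit=10,
    required_state_map={"type1": {"state_key"}, "type2": {"state_key"}},
)

changed_map, added_filter = _required_state_changes(
    "@user:example.com",
    previous_room_config=prev,
    room_sync_config=request,
    state_deltas={("type2", "state_key"): "$event_id"},
)
# changed_map now includes "type2", so the change is persisted, and
# added_filter selects ("type2", "state_key") so its *current* state is
# fetched and sent down, not just future deltas.
```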
diff --git a/synapse/media/thumbnailer.py b/synapse/media/thumbnailer.py
index ee1118a53a..3845067835 100644
--- a/synapse/media/thumbnailer.py
+++ b/synapse/media/thumbnailer.py
@@ -206,7 +206,7 @@ class Thumbnailer:
def _encode_image(self, output_image: Image.Image, output_type: str) -> BytesIO:
output_bytes_io = BytesIO()
fmt = self.FORMATS[output_type]
- if fmt == "JPEG":
+ if fmt == "JPEG" or fmt == "PNG" and output_image.mode == "CMYK":
output_image = output_image.convert("RGB")
output_image.save(output_bytes_io, fmt, quality=80)
return output_bytes_io
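The bug this fixes can be reproduced with Pillow alone, since PNG has no CMYK mode (a standalone sketch):

```python
from io import BytesIO

from PIL import Image

image = Image.new("CMYK", (32, 32))

try:
    image.save(BytesIO(), "PNG")  # raises: cannot write mode CMYK as PNG
except OSError as e:
    print(f"unconverted save fails: {e}")

# Converting to RGB first, as the thumbnailer now does, succeeds.
image.convert("RGB").save(BytesIO(), "PNG")
```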
diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py
index 679cbe9afa..9c0592a902 100644
--- a/synapse/push/bulk_push_rule_evaluator.py
+++ b/synapse/push/bulk_push_rule_evaluator.py
@@ -436,6 +436,7 @@ class BulkPushRuleEvaluator:
self._related_event_match_enabled,
event.room_version.msc3931_push_features,
self.hs.config.experimental.msc1767_enabled, # MSC3931 flag
+ self.hs.config.experimental.msc4210_enabled,
)
for uid, rules in rules_by_user.items():
diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py
index 03b1e7edc4..3271b02d40 100644
--- a/synapse/rest/client/login.py
+++ b/synapse/rest/client/login.py
@@ -363,6 +363,7 @@ class LoginRestServlet(RestServlet):
login_submission: JsonDict,
callback: Optional[Callable[[LoginResponse], Awaitable[None]]] = None,
create_non_existent_users: bool = False,
+ default_display_name: Optional[str] = None,
ratelimit: bool = True,
auth_provider_id: Optional[str] = None,
should_issue_refresh_token: bool = False,
@@ -410,7 +411,8 @@ class LoginRestServlet(RestServlet):
canonical_uid = await self.auth_handler.check_user_exists(user_id)
if not canonical_uid:
canonical_uid = await self.registration_handler.register_user(
- localpart=UserID.from_string(user_id).localpart
+ localpart=UserID.from_string(user_id).localpart,
+ default_display_name=default_display_name,
)
user_id = canonical_uid
@@ -546,11 +548,14 @@ class LoginRestServlet(RestServlet):
Returns:
The body of the JSON response.
"""
- user_id = self.hs.get_jwt_handler().validate_login(login_submission)
+ user_id, default_display_name = self.hs.get_jwt_handler().validate_login(
+ login_submission
+ )
return await self._complete_login(
user_id,
login_submission,
create_non_existent_users=True,
+ default_display_name=default_display_name,
should_issue_refresh_token=should_issue_refresh_token,
request_info=request_info,
)
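End to end, this path is exercised by a standard JWT login request (a sketch; the homeserver URL and token are placeholders, and `org.matrix.login.jwt` is the login type from Synapse's JWT documentation):

```python
import requests

resp = requests.post(
    "https://homeserver.example.com/_matrix/client/v3/login",
    json={"type": "org.matrix.login.jwt", "token": "<jwt-from-your-issuer>"},
    timeout=10,
)
resp.raise_for_status()
# On first login, a configured display_name_claim seeds the user's profile.
print(resp.json()["user_id"])
```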
diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
index c029228422..403407068c 100644
--- a/synapse/storage/databases/main/events_worker.py
+++ b/synapse/storage/databases/main/events_worker.py
@@ -61,7 +61,13 @@ from synapse.logging.context import (
current_context,
make_deferred_yieldable,
)
-from synapse.logging.opentracing import start_active_span, tag_args, trace
+from synapse.logging.opentracing import (
+ SynapseTags,
+ set_tag,
+ start_active_span,
+ tag_args,
+ trace,
+)
from synapse.metrics.background_process_metrics import (
run_as_background_process,
wrap_as_background_process,
@@ -525,6 +531,7 @@ class EventsWorkerStore(SQLBaseStore):
return event
+ @trace
async def get_events(
self,
event_ids: Collection[str],
@@ -556,6 +563,11 @@ class EventsWorkerStore(SQLBaseStore):
Returns:
A mapping from event_id to event.
"""
+ set_tag(
+ SynapseTags.FUNC_ARG_PREFIX + "event_ids.length",
+ str(len(event_ids)),
+ )
+
events = await self.get_events_as_list(
event_ids,
redact_behaviour=redact_behaviour,
@@ -603,6 +615,10 @@ class EventsWorkerStore(SQLBaseStore):
Note that the returned list may be smaller than the list of event
IDs if not all events could be fetched.
"""
+ set_tag(
+ SynapseTags.FUNC_ARG_PREFIX + "event_ids.length",
+ str(len(event_ids)),
+ )
if not event_ids:
return []
@@ -723,10 +739,11 @@ class EventsWorkerStore(SQLBaseStore):
return events
+ @trace
@cancellable
async def get_unredacted_events_from_cache_or_db(
self,
- event_ids: Iterable[str],
+ event_ids: Collection[str],
allow_rejected: bool = False,
) -> Dict[str, EventCacheEntry]:
"""Fetch a bunch of events from the cache or the database.
@@ -748,6 +765,11 @@ class EventsWorkerStore(SQLBaseStore):
Returns:
map from event id to result
"""
+ set_tag(
+ SynapseTags.FUNC_ARG_PREFIX + "event_ids.length",
+ str(len(event_ids)),
+ )
+
# Shortcut: check if we have any events in the *in memory* cache - this function
# may be called repeatedly for the same event so at this point we cannot reach
# out to any external cache for performance reasons. The external cache is
@@ -936,7 +958,7 @@ class EventsWorkerStore(SQLBaseStore):
events, update_metrics=update_metrics
)
- missing_event_ids = (e for e in events if e not in event_map)
+ missing_event_ids = [e for e in events if e not in event_map]
event_map.update(
await self._get_events_from_external_cache(
events=missing_event_ids,
@@ -946,8 +968,9 @@ class EventsWorkerStore(SQLBaseStore):
return event_map
+ @trace
async def _get_events_from_external_cache(
- self, events: Iterable[str], update_metrics: bool = True
+ self, events: Collection[str], update_metrics: bool = True
) -> Dict[str, EventCacheEntry]:
"""Fetch events from any configured external cache.
@@ -957,6 +980,10 @@ class EventsWorkerStore(SQLBaseStore):
events: list of event_ids to fetch
update_metrics: Whether to update the cache hit ratio metrics
"""
+ set_tag(
+ SynapseTags.FUNC_ARG_PREFIX + "events.length",
+ str(len(events)),
+ )
event_map = {}
for event_id in events:
@@ -1222,6 +1249,7 @@ class EventsWorkerStore(SQLBaseStore):
with PreserveLoggingContext():
self.hs.get_reactor().callFromThread(fire_errback, e)
+ @trace
async def _get_events_from_db(
self, event_ids: Collection[str]
) -> Dict[str, EventCacheEntry]:
@@ -1240,6 +1268,11 @@ class EventsWorkerStore(SQLBaseStore):
map from event id to result. May return extra events which
weren't asked for.
"""
+ set_tag(
+ SynapseTags.FUNC_ARG_PREFIX + "event_ids.length",
+ str(len(event_ids)),
+ )
+
fetched_event_ids: Set[str] = set()
fetched_events: Dict[str, _EventRow] = {}
diff --git a/synapse/storage/databases/main/push_rule.py b/synapse/storage/databases/main/push_rule.py
index bbdde17711..86c87f78bf 100644
--- a/synapse/storage/databases/main/push_rule.py
+++ b/synapse/storage/databases/main/push_rule.py
@@ -109,6 +109,7 @@ def _load_rules(
msc3664_enabled=experimental_config.msc3664_enabled,
msc3381_polls_enabled=experimental_config.msc3381_polls_enabled,
msc4028_push_encrypted_events=experimental_config.msc4028_push_encrypted_events,
+ msc4210_enabled=experimental_config.msc4210_enabled,
)
return filtered_rules
diff --git a/synapse/storage/databases/main/sliding_sync.py b/synapse/storage/databases/main/sliding_sync.py
index f2df37fec1..7b357c1ffe 100644
--- a/synapse/storage/databases/main/sliding_sync.py
+++ b/synapse/storage/databases/main/sliding_sync.py
@@ -386,8 +386,8 @@ class SlidingSyncStore(SQLBaseStore):
required_state_map: Dict[int, Dict[str, Set[str]]] = {}
for row in rows:
state = required_state_map[row[0]] = {}
- for event_type, state_keys in db_to_json(row[1]):
- state[event_type] = set(state_keys)
+ for event_type, state_key in db_to_json(row[1]):
+ state.setdefault(event_type, set()).add(state_key)
# Get all the room configs, looking up the required state from the map
# above.
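The parsing bug being fixed is easy to demonstrate with a plain dict (made-up rows; the stored JSON is a list of `(event_type, state_key)` pairs):

```python
rows = [("m.room.member", "@alice:example.com"), ("m.room.member", "@bob:example.com")]

# Old loop: treated the second element as an iterable of state keys, exploding
# the string into characters, and each pair clobbered earlier pairs for its type.
broken = {}
for event_type, state_keys in rows:
    broken[event_type] = set(state_keys)  # e.g. {'@', 'a', ':', ...}

# Fixed loop: group state keys per event type.
fixed = {}
for event_type, state_key in rows:
    fixed.setdefault(event_type, set()).add(state_key)

print(fixed)  # {'m.room.member': {'@alice:example.com', '@bob:example.com'}}
```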
diff --git a/synapse/synapse_rust/push.pyi b/synapse/synapse_rust/push.pyi
index 27a974e1bb..3f317c3288 100644
--- a/synapse/synapse_rust/push.pyi
+++ b/synapse/synapse_rust/push.pyi
@@ -48,6 +48,7 @@ class FilteredPushRules:
msc3381_polls_enabled: bool,
msc3664_enabled: bool,
msc4028_push_encrypted_events: bool,
+ msc4210_enabled: bool,
): ...
def rules(self) -> Collection[Tuple[PushRule, bool]]: ...
@@ -65,6 +66,7 @@ class PushRuleEvaluator:
related_event_match_enabled: bool,
room_version_feature_flags: Tuple[str, ...],
msc3931_enabled: bool,
+ msc4210_enabled: bool,
): ...
def run(
self,
diff --git a/synapse/types/state.py b/synapse/types/state.py
index 1141c4b5c1..67d1c3fe97 100644
--- a/synapse/types/state.py
+++ b/synapse/types/state.py
@@ -616,6 +616,13 @@ class StateFilter:
return False
+ def __bool__(self) -> bool:
+ """Returns true if this state filter will match any state, or false if
+ this is the empty filter"""
+ if self.include_others:
+ return True
+ return bool(self.types)
+
_ALL_STATE_FILTER = StateFilter(types=immutabledict(), include_others=True)
_ALL_NON_MEMBER_STATE_FILTER = StateFilter(
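A quick sketch of the truthiness this adds, which is what lets the sliding sync handler guard with `if added_state_filter:` above:

```python
from synapse.types.state import StateFilter

assert not StateFilter.none()                         # matches no state
assert StateFilter.all()                              # include_others=True
assert StateFilter.from_types([("m.room.name", "")])  # explicit types present
```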
diff --git a/tests/handlers/test_sliding_sync.py b/tests/handlers/test_sliding_sync.py
index e2c7a94ce2..9a68d1dd95 100644
--- a/tests/handlers/test_sliding_sync.py
+++ b/tests/handlers/test_sliding_sync.py
@@ -18,9 +18,10 @@
#
#
import logging
-from typing import AbstractSet, Dict, Optional, Tuple
+from typing import AbstractSet, Dict, Mapping, Optional, Set, Tuple
from unittest.mock import patch
+import attr
from parameterized import parameterized
from twisted.test.proto_helpers import MemoryReactor
@@ -35,15 +36,18 @@ from synapse.handlers.sliding_sync import (
RoomsForUserType,
RoomSyncConfig,
StateValues,
+ _required_state_changes,
)
from synapse.rest import admin
from synapse.rest.client import knock, login, room
from synapse.server import HomeServer
from synapse.storage.util.id_generators import MultiWriterIdGenerator
-from synapse.types import JsonDict, StreamToken, UserID
+from synapse.types import JsonDict, StateMap, StreamToken, UserID
from synapse.types.handlers.sliding_sync import SlidingSyncConfig
+from synapse.types.state import StateFilter
from synapse.util import Clock
+from tests import unittest
from tests.replication._base import BaseMultiWorkerStreamTestCase
from tests.unittest import HomeserverTestCase, TestCase
@@ -3213,3 +3217,689 @@ class SortRoomsTestCase(HomeserverTestCase):
# We only care about the *latest* event in the room.
[room_id1, room_id2],
)
+
+
+@attr.s(slots=True, auto_attribs=True, frozen=True)
+class RequiredStateChangesTestParameters:
+ previous_required_state_map: Dict[str, Set[str]]
+ request_required_state_map: Dict[str, Set[str]]
+ state_deltas: StateMap[str]
+ expected_with_state_deltas: Tuple[
+ Optional[Mapping[str, AbstractSet[str]]], StateFilter
+ ]
+ expected_without_state_deltas: Tuple[
+ Optional[Mapping[str, AbstractSet[str]]], StateFilter
+ ]
+
+
+class RequiredStateChangesTestCase(unittest.TestCase):
+ """Test cases for `_required_state_changes`"""
+
+ @parameterized.expand(
+ [
+ (
+ "simple_no_change",
+ """Test no change to required state""",
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={"type1": {"state_key"}},
+ request_required_state_map={"type1": {"state_key"}},
+ state_deltas={("type1", "state_key"): "$event_id"},
+ # No changes
+ expected_with_state_deltas=(None, StateFilter.none()),
+ expected_without_state_deltas=(None, StateFilter.none()),
+ ),
+ ),
+ (
+ "simple_add_type",
+ """Test adding a type to the config""",
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={"type1": {"state_key"}},
+ request_required_state_map={
+ "type1": {"state_key"},
+ "type2": {"state_key"},
+ },
+ state_deltas={("type2", "state_key"): "$event_id"},
+ expected_with_state_deltas=(
+ # We've added a type so we should persist the changed required state
+ # config.
+ {"type1": {"state_key"}, "type2": {"state_key"}},
+ # We should see the new type added
+ StateFilter.from_types([("type2", "state_key")]),
+ ),
+ expected_without_state_deltas=(
+ {"type1": {"state_key"}, "type2": {"state_key"}},
+ StateFilter.from_types([("type2", "state_key")]),
+ ),
+ ),
+ ),
+ (
+ "simple_add_type_from_nothing",
+ """Test adding a type to the config when previously requesting nothing""",
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={},
+ request_required_state_map={
+ "type1": {"state_key"},
+ "type2": {"state_key"},
+ },
+ state_deltas={("type2", "state_key"): "$event_id"},
+ expected_with_state_deltas=(
+ # We've added a type so we should persist the changed required state
+ # config.
+ {"type1": {"state_key"}, "type2": {"state_key"}},
+ # We should see the new types added
+ StateFilter.from_types(
+ [("type1", "state_key"), ("type2", "state_key")]
+ ),
+ ),
+ expected_without_state_deltas=(
+ {"type1": {"state_key"}, "type2": {"state_key"}},
+ StateFilter.from_types(
+ [("type1", "state_key"), ("type2", "state_key")]
+ ),
+ ),
+ ),
+ ),
+ (
+ "simple_add_state_key",
+ """Test adding a state key to the config""",
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={"type": {"state_key1"}},
+ request_required_state_map={"type": {"state_key1", "state_key2"}},
+ state_deltas={("type", "state_key2"): "$event_id"},
+ expected_with_state_deltas=(
+ # We've added a key so we should persist the changed required state
+ # config.
+ {"type": {"state_key1", "state_key2"}},
+ # We should see the new state_keys added
+ StateFilter.from_types([("type", "state_key2")]),
+ ),
+ expected_without_state_deltas=(
+ {"type": {"state_key1", "state_key2"}},
+ StateFilter.from_types([("type", "state_key2")]),
+ ),
+ ),
+ ),
+ (
+ "simple_remove_type",
+ """
+ Test that removing a type from the config when there is a matching state
+ delta causes the persisted required state config to change.
+
+ Test that removing a type from the config when there are no matching state
+ deltas does *not* cause the persisted required state config to change.
+ """,
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={
+ "type1": {"state_key"},
+ "type2": {"state_key"},
+ },
+ request_required_state_map={"type1": {"state_key"}},
+ state_deltas={("type2", "state_key"): "$event_id"},
+ expected_with_state_deltas=(
+ # Remove `type2` since there's been a change to that state,
+ # (persist the change to required state). That way next time,
+ # they request `type2`, we see that we haven't sent it before
+ # and send the new state. (we should still keep track that we've
+ # sent `type1` before).
+ {"type1": {"state_key"}},
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ expected_without_state_deltas=(
+ # `type2` is no longer requested but since that state hasn't
+ # changed, nothing should change (we should still keep track
+ # that we've sent `type2` before).
+ None,
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ ),
+ ),
+ (
+ "simple_remove_type_to_nothing",
+ """
+ Test removing a type from the config and no longer requesting any state
+ """,
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={
+ "type1": {"state_key"},
+ "type2": {"state_key"},
+ },
+ request_required_state_map={},
+ state_deltas={("type2", "state_key"): "$event_id"},
+ expected_with_state_deltas=(
+ # Remove `type2` since there's been a change to that state,
+ # (persist the change to required state). That way next time,
+ # they request `type2`, we see that we haven't sent it before
+ # and send the new state. (we should still keep track that we've
+ # sent `type1` before).
+ {"type1": {"state_key"}},
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ expected_without_state_deltas=(
+ # `type2` is no longer requested but since that state hasn't
+ # changed, nothing should change (we should still keep track
+ # that we've sent `type2` before).
+ None,
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ ),
+ ),
+ (
+ "simple_remove_state_key",
+ """
+ Test removing a state_key from the config
+ """,
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={"type": {"state_key1", "state_key2"}},
+ request_required_state_map={"type": {"state_key1"}},
+ state_deltas={("type", "state_key2"): "$event_id"},
+ expected_with_state_deltas=(
+ # Remove `(type, state_key2)` since there's been a change
+ # to that state (persist the change to required state).
+ # That way, next time they request `(type, state_key2)`, we see
+ # that we haven't sent it before and send the new state. (We
+ # should still keep track that we've sent `(type, state_key1)`
+ # before.)
+ {"type": {"state_key1"}},
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ expected_without_state_deltas=(
+ # `(type, state_key2)` is no longer requested but since that
+ # state hasn't changed, nothing should change (we should still
+ # keep track that we've sent `(type, state_key1)` and `(type,
+ # state_key2)` before).
+ None,
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ ),
+ ),
+ (
+ "type_wildcards_add",
+ """
+ Test that adding a wildcard type causes the persisted required state
+ config to change and that we request everything.
+
+ If an event type wildcard has been added or removed we don't try and do
+ anything fancy, and instead always update the effective room required
+ state config to match the request.
+ """,
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={"type1": {"state_key2"}},
+ request_required_state_map={
+ "type1": {"state_key2"},
+ StateValues.WILDCARD: {"state_key"},
+ },
+ state_deltas={
+ ("other_type", "state_key"): "$event_id",
+ },
+ # We've added a wildcard, so we persist the change and request everything
+ expected_with_state_deltas=(
+ {"type1": {"state_key2"}, StateValues.WILDCARD: {"state_key"}},
+ StateFilter.all(),
+ ),
+ expected_without_state_deltas=(
+ {"type1": {"state_key2"}, StateValues.WILDCARD: {"state_key"}},
+ StateFilter.all(),
+ ),
+ ),
+ ),
+ (
+ "type_wildcards_remove",
+ """
+ Test that removing a wildcard type causes the persisted required state
+ config to change and that we request nothing.
+
+ If an event type wildcard has been added or removed we don't try and do
+ anything fancy, and instead always update the effective room required
+ state config to match the request.
+ """,
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={
+ "type1": {"state_key2"},
+ StateValues.WILDCARD: {"state_key"},
+ },
+ request_required_state_map={"type1": {"state_key2"}},
+ state_deltas={
+ ("other_type", "state_key"): "$event_id",
+ },
+ # We've removed a type wildcard, so we persist the change but don't request anything
+ expected_with_state_deltas=(
+ {"type1": {"state_key2"}},
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ expected_without_state_deltas=(
+ {"type1": {"state_key2"}},
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ ),
+ ),
+ (
+ "state_key_wildcards_add",
+ """Test adding a wildcard state_key""",
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={"type1": {"state_key"}},
+ request_required_state_map={
+ "type1": {"state_key"},
+ "type2": {StateValues.WILDCARD},
+ },
+ state_deltas={("type2", "state_key"): "$event_id"},
+ # We've added a wildcard state_key, so we persist the change and
+ # request all of the state for that type
+ expected_with_state_deltas=(
+ {"type1": {"state_key"}, "type2": {StateValues.WILDCARD}},
+ StateFilter.from_types([("type2", None)]),
+ ),
+ expected_without_state_deltas=(
+ {"type1": {"state_key"}, "type2": {StateValues.WILDCARD}},
+ StateFilter.from_types([("type2", None)]),
+ ),
+ ),
+ ),
+ (
+ "state_key_wildcards_remove",
+ """Test removing a wildcard state_key""",
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={
+ "type1": {"state_key"},
+ "type2": {StateValues.WILDCARD},
+ },
+ request_required_state_map={"type1": {"state_key"}},
+ state_deltas={("type2", "state_key"): "$event_id"},
+ # We've removed a state_key wildcard, so we persist the change and
+ # request nothing
+ expected_with_state_deltas=(
+ {"type1": {"state_key"}},
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ # We've removed a state_key wildcard but there have been no
+ # matching state changes, so no changes are needed; keep the
+ # previously persisted required state map as-is.
+ expected_without_state_deltas=(
+ None,
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ ),
+ ),
+ (
+ "state_key_remove_some",
+ """
+ Test that removing state keys works when only some of the state keys
+ have changed
+ """,
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={
+ "type1": {"state_key1", "state_key2", "state_key3"}
+ },
+ request_required_state_map={"type1": {"state_key1"}},
+ state_deltas={("type1", "state_key3"): "$event_id"},
+ expected_with_state_deltas=(
+ # We've removed some state keys from the type, but only state_key3 was
+ # changed so only that one should be removed.
+ {"type1": {"state_key1", "state_key2"}},
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ expected_without_state_deltas=(
+ # No changes needed; keep the previously persisted
+ # required state map as-is
+ None,
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ ),
+ ),
+ (
+ "state_key_me_add",
+ """
+ Test adding state keys work when using "$ME"
+ """,
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={},
+ request_required_state_map={"type1": {StateValues.ME}},
+ state_deltas={("type1", "@user:test"): "$event_id"},
+ expected_with_state_deltas=(
+ # We've added a type so we should persist the changed required state
+ # config.
+ {"type1": {StateValues.ME}},
+ # We should see the new state_keys added
+ StateFilter.from_types([("type1", "@user:test")]),
+ ),
+ expected_without_state_deltas=(
+ {"type1": {StateValues.ME}},
+ StateFilter.from_types([("type1", "@user:test")]),
+ ),
+ ),
+ ),
+ (
+ "state_key_me_remove",
+ """
+ Test removing state keys work when using "$ME"
+ """,
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={"type1": {StateValues.ME}},
+ request_required_state_map={},
+ state_deltas={("type1", "@user:test"): "$event_id"},
+ expected_with_state_deltas=(
+ # Remove `type1` since there's been a change to that state
+ # (persist the change to required state). That way, next time
+ # they request `type1`, we see that we haven't sent it before
+ # and send the new state. (If we were tracking that we had sent
+ # any other state, we should still keep track of that.)
+ {},
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ expected_without_state_deltas=(
+ # `type1` is no longer requested but since that state hasn't
+ # changed, nothing should change (we should still keep track
+ # that we've sent `type1` before).
+ None,
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ ),
+ ),
+ (
+ "state_key_user_id_add",
+ """
+ Test that adding state keys works when using your own user ID
+ """,
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={},
+ request_required_state_map={"type1": {"@user:test"}},
+ state_deltas={("type1", "@user:test"): "$event_id"},
+ expected_with_state_deltas=(
+ # We've added a type so we should persist the changed required state
+ # config.
+ {"type1": {"@user:test"}},
+ # We should see the new state_keys added
+ StateFilter.from_types([("type1", "@user:test")]),
+ ),
+ expected_without_state_deltas=(
+ {"type1": {"@user:test"}},
+ StateFilter.from_types([("type1", "@user:test")]),
+ ),
+ ),
+ ),
+ (
+ "state_key_me_remove",
+ """
+ Test removing state keys work when using your own user ID
+ """,
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={"type1": {"@user:test"}},
+ request_required_state_map={},
+ state_deltas={("type1", "@user:test"): "$event_id"},
+ expected_with_state_deltas=(
+ # Remove `type1` since there's been a change to that state
+ # (persist the change to required state). That way, next time
+ # they request `type1`, we see that we haven't sent it before
+ # and send the new state. (If we were tracking that we had sent
+ # any other state, we should still keep track of that.)
+ {},
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ expected_without_state_deltas=(
+ # `type1` is no longer requested but since that state hasn't
+ # changed, nothing should change (we should still keep track
+ # that we've sent `type1` before).
+ None,
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ ),
+ ),
+ (
+ "state_key_lazy_add",
+ """
+ Test adding state keys work when using "$LAZY"
+ """,
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={},
+ request_required_state_map={EventTypes.Member: {StateValues.LAZY}},
+ state_deltas={(EventTypes.Member, "@user:test"): "$event_id"},
+ expected_with_state_deltas=(
+ # If a "$LAZY" has been added or removed we always update the
+ # required state to what was requested for simplicity.
+ {EventTypes.Member: {StateValues.LAZY}},
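+ # Nothing needs fetching eagerly here; lazy-loaded members
+ # are assumed to be sent based on timeline senders instead.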
+ StateFilter.none(),
+ ),
+ expected_without_state_deltas=(
+ {EventTypes.Member: {StateValues.LAZY}},
+ StateFilter.none(),
+ ),
+ ),
+ ),
+ (
+ "state_key_lazy_remove",
+ """
+ Test removing state keys work when using "$LAZY"
+ """,
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={EventTypes.Member: {StateValues.LAZY}},
+ request_required_state_map={},
+ state_deltas={(EventTypes.Member, "@user:test"): "$event_id"},
+ expected_with_state_deltas=(
+ # If a "$LAZY" has been added or removed we always update the
+ # required state to what was requested for simplicity.
+ {},
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ expected_without_state_deltas=(
+ # `EventTypes.Member` is no longer requested but since that
+ # state hasn't changed, nothing should change (we should still
+ # keep track that we've sent `EventTypes.Member` before).
+ None,
+ # We don't need to request anything more if they are requesting
+ # less state now
+ StateFilter.none(),
+ ),
+ ),
+ ),
+ (
+ "type_wildcard_with_state_key_wildcard_to_explicit_state_keys",
+ """
+ Test switching from a wildcard ("*", "*") to explicit state keys
+ """,
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={
+ StateValues.WILDCARD: {StateValues.WILDCARD}
+ },
+ request_required_state_map={
+ StateValues.WILDCARD: {"state_key1", "state_key2", "state_key3"}
+ },
+ state_deltas={("type1", "state_key1"): "$event_id"},
+ # If we were previously fetching everything ("*", "*"), always update the effective
+ # room required state config to match the request. And since we were previously
+ # already fetching everything, we don't have to fetch anything now that they've
+ # narrowed.
+ expected_with_state_deltas=(
+ {
+ StateValues.WILDCARD: {
+ "state_key1",
+ "state_key2",
+ "state_key3",
+ }
+ },
+ StateFilter.none(),
+ ),
+ expected_without_state_deltas=(
+ {
+ StateValues.WILDCARD: {
+ "state_key1",
+ "state_key2",
+ "state_key3",
+ }
+ },
+ StateFilter.none(),
+ ),
+ ),
+ ),
+ (
+ "type_wildcard_with_explicit_state_keys_to_wildcard_state_key",
+ """
+ Test switching from explicit to wildcard state keys ("*", "*")
+ """,
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={
+ StateValues.WILDCARD: {"state_key1", "state_key2", "state_key3"}
+ },
+ request_required_state_map={
+ StateValues.WILDCARD: {StateValues.WILDCARD}
+ },
+ state_deltas={("type1", "state_key1"): "$event_id"},
+ # We've added a wildcard, so we persist the change and request everything
+ expected_with_state_deltas=(
+ {StateValues.WILDCARD: {StateValues.WILDCARD}},
+ StateFilter.all(),
+ ),
+ expected_without_state_deltas=(
+ {StateValues.WILDCARD: {StateValues.WILDCARD}},
+ StateFilter.all(),
+ ),
+ ),
+ ),
+ (
+ "state_key_wildcard_to_explicit_state_keys",
+ """Test switching from a wildcard to explicit state keys with a concrete type""",
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={"type1": {StateValues.WILDCARD}},
+ request_required_state_map={
+ "type1": {"state_key1", "state_key2", "state_key3"}
+ },
+ state_deltas={("type1", "state_key1"): "$event_id"},
+ # If a state_key wildcard has been added or removed, we always
+ # update the effective room required state config to match the
+ # request. And since we were previously already fetching
+ # everything, we don't have to fetch anything now that they've
+ # narrowed.
+ expected_with_state_deltas=(
+ {
+ "type1": {
+ "state_key1",
+ "state_key2",
+ "state_key3",
+ }
+ },
+ StateFilter.none(),
+ ),
+ expected_without_state_deltas=(
+ {
+ "type1": {
+ "state_key1",
+ "state_key2",
+ "state_key3",
+ }
+ },
+ StateFilter.none(),
+ ),
+ ),
+ ),
+ (
+ "state_key_wildcard_to_explicit_state_keys",
+ """Test switching from a wildcard to explicit state keys with a concrete type""",
+ RequiredStateChangesTestParameters(
+ previous_required_state_map={
+ "type1": {"state_key1", "state_key2", "state_key3"}
+ },
+ request_required_state_map={"type1": {StateValues.WILDCARD}},
+ state_deltas={("type1", "state_key1"): "$event_id"},
+ # If a state_key wildcard has been added or removed, we always
+ # update the effective room required state config to match the
+ # request. And we need to request all of the state for that type
+ # because we previously only sent down a few keys.
+ expected_with_state_deltas=(
+ {"type1": {StateValues.WILDCARD}},
+ StateFilter.from_types([("type1", None)]),
+ ),
+ expected_without_state_deltas=(
+ {"type1": {StateValues.WILDCARD}},
+ StateFilter.from_types([("type1", None)]),
+ ),
+ ),
+ ),
+ ]
+ )
+ def test_required_state_changes(
+ self,
+ _test_label: str,
+ _test_description: str,
+ test_parameters: RequiredStateChangesTestParameters,
+ ) -> None:
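+ """
+ Run `_required_state_changes` against the given parameters twice:
+ once without any state deltas and once with them, and check the
+ returned required state map and state filter against the expected
+ values for each case.
+ """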
+ # Without `state_deltas`
+ changed_required_state_map, added_state_filter = _required_state_changes(
+ user_id="@user:test",
+ previous_room_config=RoomSyncConfig(
+ timeline_limit=0,
+ required_state_map=test_parameters.previous_required_state_map,
+ ),
+ room_sync_config=RoomSyncConfig(
+ timeline_limit=0,
+ required_state_map=test_parameters.request_required_state_map,
+ ),
+ state_deltas={},
+ )
+
+ self.assertEqual(
+ changed_required_state_map,
+ test_parameters.expected_without_state_deltas[0],
+ "changed_required_state_map does not match (without state_deltas)",
+ )
+ self.assertEqual(
+ added_state_filter,
+ test_parameters.expected_without_state_deltas[1],
+ "added_state_filter does not match (without state_deltas)",
+ )
+
+ # With `state_deltas`
+ changed_required_state_map, added_state_filter = _required_state_changes(
+ user_id="@user:test",
+ previous_room_config=RoomSyncConfig(
+ timeline_limit=0,
+ required_state_map=test_parameters.previous_required_state_map,
+ ),
+ room_sync_config=RoomSyncConfig(
+ timeline_limit=0,
+ required_state_map=test_parameters.request_required_state_map,
+ ),
+ state_deltas=test_parameters.state_deltas,
+ )
+
+ self.assertEqual(
+ changed_required_state_map,
+ test_parameters.expected_with_state_deltas[0],
+ "changed_required_state_map does not match (with state_deltas)",
+ )
+ self.assertEqual(
+ added_state_filter,
+ test_parameters.expected_with_state_deltas[1],
+ "added_state_filter does not match (with state_deltas)",
+ )
diff --git a/tests/push/test_push_rule_evaluator.py b/tests/push/test_push_rule_evaluator.py
index 420fbea998..c1f8e18bd9 100644
--- a/tests/push/test_push_rule_evaluator.py
+++ b/tests/push/test_push_rule_evaluator.py
@@ -149,6 +149,7 @@ class PushRuleEvaluatorTestCase(unittest.TestCase):
content: JsonMapping,
*,
related_events: Optional[JsonDict] = None,
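+ # Whether to create the evaluator with MSC4210 support enabled.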
+ msc4210: bool = False,
) -> PushRuleEvaluator:
event = FrozenEvent(
{
@@ -174,6 +175,7 @@ class PushRuleEvaluatorTestCase(unittest.TestCase):
related_event_match_enabled=True,
room_version_feature_flags=event.room_version.msc3931_push_features,
msc3931_enabled=True,
+ msc4210_enabled=msc4210,
)
def test_display_name(self) -> None:
diff --git a/tests/rest/client/sliding_sync/test_rooms_required_state.py b/tests/rest/client/sliding_sync/test_rooms_required_state.py
index 91ac6c5a0e..7da51d4954 100644
--- a/tests/rest/client/sliding_sync/test_rooms_required_state.py
+++ b/tests/rest/client/sliding_sync/test_rooms_required_state.py
@@ -862,3 +862,264 @@ class SlidingSyncRoomsRequiredStateTestCase(SlidingSyncBase):
exact=True,
message=f"Expected only fully-stated rooms to show up for test_key={list_key}.",
)
+
+ def test_rooms_required_state_expand(self) -> None:
+ """Test that when we expand the required state argument we get the
+ expanded state, and not just the changes to the new expanded."""
+
+ user1_id = self.register_user("user1", "pass")
+ user1_tok = self.login(user1_id, "pass")
+
+ # Create a room with a room name.
+ room_id1 = self.helper.create_room_as(
+ user1_id, tok=user1_tok, extra_content={"name": "Foo"}
+ )
+
+ # Only request the create event to begin with
+ sync_body = {
+ "lists": {
+ "foo-list": {
+ "ranges": [[0, 1]],
+ "required_state": [
+ [EventTypes.Create, ""],
+ ],
+ "timeline_limit": 1,
+ }
+ }
+ }
+ response_body, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+ state_map = self.get_success(
+ self.storage_controllers.state.get_current_state(room_id1)
+ )
+
+ self._assertRequiredStateIncludes(
+ response_body["rooms"][room_id1]["required_state"],
+ {
+ state_map[(EventTypes.Create, "")],
+ },
+ exact=True,
+ )
+
+ # Send a message so the room comes down sync.
+ self.helper.send(room_id1, "msg", tok=user1_tok)
+
+ # Update the sliding sync requests to include the room name
+ sync_body["lists"]["foo-list"]["required_state"] = [
+ [EventTypes.Create, ""],
+ [EventTypes.Name, ""],
+ ]
+ response_body, from_token = self.do_sync(
+ sync_body, since=from_token, tok=user1_tok
+ )
+
+ # We should see the room name, even though there haven't been any
+ # changes.
+ self._assertRequiredStateIncludes(
+ response_body["rooms"][room_id1]["required_state"],
+ {
+ state_map[(EventTypes.Name, "")],
+ },
+ exact=True,
+ )
+
+ # Send a message so the room comes down sync.
+ self.helper.send(room_id1, "msg", tok=user1_tok)
+
+ # We should not see any state changes.
+ response_body, from_token = self.do_sync(
+ sync_body, since=from_token, tok=user1_tok
+ )
+ self.assertIsNone(response_body["rooms"][room_id1].get("required_state"))
+
+ def test_rooms_required_state_expand_retract_expand(self) -> None:
+ """Test that when expanding, retracting and then expanding the required
+ state, we get the changes that happened."""
+
+ user1_id = self.register_user("user1", "pass")
+ user1_tok = self.login(user1_id, "pass")
+
+ # Create a room with a room name.
+ room_id1 = self.helper.create_room_as(
+ user1_id, tok=user1_tok, extra_content={"name": "Foo"}
+ )
+
+ # Only request the create event to begin with
+ sync_body = {
+ "lists": {
+ "foo-list": {
+ "ranges": [[0, 1]],
+ "required_state": [
+ [EventTypes.Create, ""],
+ ],
+ "timeline_limit": 1,
+ }
+ }
+ }
+ response_body, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+ state_map = self.get_success(
+ self.storage_controllers.state.get_current_state(room_id1)
+ )
+
+ self._assertRequiredStateIncludes(
+ response_body["rooms"][room_id1]["required_state"],
+ {
+ state_map[(EventTypes.Create, "")],
+ },
+ exact=True,
+ )
+
+ # Send a message so the room comes down sync.
+ self.helper.send(room_id1, "msg", tok=user1_tok)
+
+ # Update the sliding sync requests to include the room name
+ sync_body["lists"]["foo-list"]["required_state"] = [
+ [EventTypes.Create, ""],
+ [EventTypes.Name, ""],
+ ]
+ response_body, from_token = self.do_sync(
+ sync_body, since=from_token, tok=user1_tok
+ )
+
+ # We should see the room name, even though there haven't been any
+ # changes.
+ self._assertRequiredStateIncludes(
+ response_body["rooms"][room_id1]["required_state"],
+ {
+ state_map[(EventTypes.Name, "")],
+ },
+ exact=True,
+ )
+
+ # Update the room name
+ self.helper.send_state(
+ room_id1, "m.room.name", {"name": "Bar"}, state_key="", tok=user1_tok
+ )
+
+ # Update the sliding sync requests to exclude the room name again
+ sync_body["lists"]["foo-list"]["required_state"] = [
+ [EventTypes.Create, ""],
+ ]
+ response_body, from_token = self.do_sync(
+ sync_body, since=from_token, tok=user1_tok
+ )
+
+ # We should not see the updated room name in state (though it will be in
+ # the timeline).
+ self.assertIsNone(response_body["rooms"][room_id1].get("required_state"))
+
+ # Send a message so the room comes down sync.
+ self.helper.send(room_id1, "msg", tok=user1_tok)
+
+ # Update the sliding sync requests to include the room name again
+ sync_body["lists"]["foo-list"]["required_state"] = [
+ [EventTypes.Create, ""],
+ [EventTypes.Name, ""],
+ ]
+ response_body, from_token = self.do_sync(
+ sync_body, since=from_token, tok=user1_tok
+ )
+
+ # We should see the *new* room name, even though it hasn't changed
+ # since the previous request.
+ state_map = self.get_success(
+ self.storage_controllers.state.get_current_state(room_id1)
+ )
+ self._assertRequiredStateIncludes(
+ response_body["rooms"][room_id1]["required_state"],
+ {
+ state_map[(EventTypes.Name, "")],
+ },
+ exact=True,
+ )
+
+ def test_rooms_required_state_expand_deduplicate(self) -> None:
+ """Test that when expanding, retracting and then expanding the required
+ state, we don't get the state down again if it hasn't changed."""
+
+ user1_id = self.register_user("user1", "pass")
+ user1_tok = self.login(user1_id, "pass")
+
+ # Create a room with a room name.
+ room_id1 = self.helper.create_room_as(
+ user1_id, tok=user1_tok, extra_content={"name": "Foo"}
+ )
+
+ # Only request the create event to begin with
+ sync_body = {
+ "lists": {
+ "foo-list": {
+ "ranges": [[0, 1]],
+ "required_state": [
+ [EventTypes.Create, ""],
+ ],
+ "timeline_limit": 1,
+ }
+ }
+ }
+ response_body, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+ state_map = self.get_success(
+ self.storage_controllers.state.get_current_state(room_id1)
+ )
+
+ self._assertRequiredStateIncludes(
+ response_body["rooms"][room_id1]["required_state"],
+ {
+ state_map[(EventTypes.Create, "")],
+ },
+ exact=True,
+ )
+
+ # Send a message so the room comes down sync.
+ self.helper.send(room_id1, "msg", tok=user1_tok)
+
+ # Update the sliding sync requests to include the room name
+ sync_body["lists"]["foo-list"]["required_state"] = [
+ [EventTypes.Create, ""],
+ [EventTypes.Name, ""],
+ ]
+ response_body, from_token = self.do_sync(
+ sync_body, since=from_token, tok=user1_tok
+ )
+
+ # We should see the room name, even though there haven't been any
+ # changes.
+ self._assertRequiredStateIncludes(
+ response_body["rooms"][room_id1]["required_state"],
+ {
+ state_map[(EventTypes.Name, "")],
+ },
+ exact=True,
+ )
+
+ # Send a message so the room comes down sync.
+ self.helper.send(room_id1, "msg", tok=user1_tok)
+
+ # Update the sliding sync requests to exclude the room name again
+ sync_body["lists"]["foo-list"]["required_state"] = [
+ [EventTypes.Create, ""],
+ ]
+ response_body, from_token = self.do_sync(
+ sync_body, since=from_token, tok=user1_tok
+ )
+
+ # We should not see any state updates
+ self.assertIsNone(response_body["rooms"][room_id1].get("required_state"))
+
+ # Send a message so the room comes down sync.
+ self.helper.send(room_id1, "msg", tok=user1_tok)
+
+ # Update the sliding sync requests to include the room name again
+ sync_body["lists"]["foo-list"]["required_state"] = [
+ [EventTypes.Create, ""],
+ [EventTypes.Name, ""],
+ ]
+ response_body, from_token = self.do_sync(
+ sync_body, since=from_token, tok=user1_tok
+ )
+
+ # We should not see the room name again, as we have already sent that
+ # down.
+ self.assertIsNone(response_body["rooms"][room_id1].get("required_state"))
diff --git a/tests/rest/client/test_login.py b/tests/rest/client/test_login.py
index 2b1e44381b..cbd6d8d4bf 100644
--- a/tests/rest/client/test_login.py
+++ b/tests/rest/client/test_login.py
@@ -1047,6 +1047,7 @@ class JWTTestCase(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
login.register_servlets,
+ profile.register_servlets,
]
jwt_secret = "secret"
@@ -1202,6 +1203,30 @@ class JWTTestCase(unittest.HomeserverTestCase):
self.assertEqual(channel.code, 200, msg=channel.result)
self.assertEqual(channel.json_body["user_id"], "@frog:test")
+ @override_config(
+ {"jwt_config": {**base_config, "display_name_claim": "display_name"}}
+ )
+ def test_login_custom_display_name(self) -> None:
+ """Test setting a custom display name."""
+ localpart = "pinkie"
+ user_id = f"@{localpart}:test"
+ display_name = "Pinkie Pie"
+
+ # Perform the login, specifying a custom display name.
+ channel = self.jwt_login({"sub": localpart, "display_name": display_name})
+ self.assertEqual(channel.code, 200, msg=channel.result)
+ self.assertEqual(channel.json_body["user_id"], user_id)
+
+ # Fetch the user's display name and check that it was set correctly.
+ access_token = channel.json_body["access_token"]
+ channel = self.make_request(
+ "GET",
+ f"/_matrix/client/v3/profile/{user_id}/displayname",
+ access_token=access_token,
+ )
+ self.assertEqual(channel.code, 200, msg=channel.result)
+ self.assertEqual(channel.json_body["displayname"], display_name)
+
def test_login_no_token(self) -> None:
params = {"type": "org.matrix.login.jwt"}
channel = self.make_request(b"POST", LOGIN_URL, params)
diff --git a/tests/storage/databases/main/test_events_worker.py b/tests/storage/databases/main/test_events_worker.py
index fd1f5e7fd5..104d141a72 100644
--- a/tests/storage/databases/main/test_events_worker.py
+++ b/tests/storage/databases/main/test_events_worker.py
@@ -20,7 +20,7 @@
#
import json
from contextlib import contextmanager
-from typing import Generator, List, Tuple
+from typing import Generator, List, Set, Tuple
from unittest import mock
from twisted.enterprise.adbapi import ConnectionPool
@@ -295,6 +295,53 @@ class EventCacheTestCase(unittest.HomeserverTestCase):
self.assertEqual(ctx.get_resource_usage().evt_db_fetch_count, 1)
+class GetEventsTestCase(unittest.HomeserverTestCase):
+ """Test `get_events(...)`/`get_events_as_list(...)`"""
+
+ servlets = [
+ admin.register_servlets,
+ room.register_servlets,
+ login.register_servlets,
+ ]
+
+ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
+ self.store: EventsWorkerStore = hs.get_datastores().main
+
+ def test_get_lots_of_messages(self) -> None:
+ """Sanity check that `get_events(...)`/`get_events_as_list(...)` works"""
+ num_events = 100
+
+ user_id = self.register_user("user", "pass")
+ user_tok = self.login(user_id, "pass")
+
+ room_id = self.helper.create_room_as(user_id, tok=user_tok)
+
+ event_ids: Set[str] = set()
+ for i in range(num_events):
+ event = self.get_success(
+ inject_event(
+ self.hs,
+ room_id=room_id,
+ type="m.room.message",
+ sender=user_id,
+ content={
+ "body": f"foo{i}",
+ "msgtype": "m.text",
+ },
+ )
+ )
+ event_ids.add(event.event_id)
+
+ # Sanity check that we actually created the events
+ self.assertEqual(len(event_ids), num_events)
+
+ # This is the function under test
+ fetched_event_map = self.get_success(self.store.get_events(event_ids))
+
+ # Sanity check that we got the events back
+ self.assertIncludes(fetched_event_map.keys(), event_ids, exact=True)
+
+
class DatabaseOutageTestCase(unittest.HomeserverTestCase):
"""Test event fetching during a database outage."""
|