diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 6574550447..391e9c96ff 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -30,7 +30,7 @@ jobs:
run: docker buildx inspect
- name: Install Cosign
- uses: sigstore/cosign-installer@v3.4.0
+ uses: sigstore/cosign-installer@v3.5.0
- name: Checkout repository
uses: actions/checkout@v4
diff --git a/.github/workflows/docs-pr.yaml b/.github/workflows/docs-pr.yaml
index 652ef90095..07dc301b1a 100644
--- a/.github/workflows/docs-pr.yaml
+++ b/.github/workflows/docs-pr.yaml
@@ -19,7 +19,7 @@ jobs:
fetch-depth: 0
- name: Setup mdbook
- uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
+ uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
with:
mdbook-version: '0.4.17'
@@ -53,7 +53,7 @@ jobs:
- uses: actions/checkout@v4
- name: Setup mdbook
- uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
+ uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
with:
mdbook-version: '0.4.17'
diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
index d611fdc924..fe3212f82a 100644
--- a/.github/workflows/docs.yaml
+++ b/.github/workflows/docs.yaml
@@ -56,7 +56,7 @@ jobs:
fetch-depth: 0
- name: Setup mdbook
- uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
+ uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
with:
mdbook-version: '0.4.17'
@@ -80,7 +80,7 @@ jobs:
# Deploy to the target directory.
- name: Deploy to gh pages
- uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
+ uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./book
@@ -110,7 +110,7 @@ jobs:
# Deploy to the target directory.
- name: Deploy to gh pages
- uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
+ uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./dev-docs/_build/html
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 295461aad6..20afe311fe 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -81,7 +81,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Install Rust
- uses: dtolnay/rust-toolchain@1.65.0
+ uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
- uses: matrix-org/setup-python-poetry@v1
with:
@@ -148,7 +148,7 @@ jobs:
uses: actions/checkout@v4
- name: Install Rust
- uses: dtolnay/rust-toolchain@1.65.0
+ uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
- name: Setup Poetry
@@ -208,7 +208,7 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Install Rust
- uses: dtolnay/rust-toolchain@1.65.0
+ uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
- uses: matrix-org/setup-python-poetry@v1
with:
@@ -225,7 +225,7 @@ jobs:
- uses: actions/checkout@v4
- name: Install Rust
- uses: dtolnay/rust-toolchain@1.65.0
+ uses: dtolnay/rust-toolchain@1.66.0
with:
components: clippy
- uses: Swatinem/rust-cache@v2
@@ -344,7 +344,7 @@ jobs:
postgres:${{ matrix.job.postgres-version }}
- name: Install Rust
- uses: dtolnay/rust-toolchain@1.65.0
+ uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
- uses: matrix-org/setup-python-poetry@v1
@@ -386,7 +386,7 @@ jobs:
- uses: actions/checkout@v4
- name: Install Rust
- uses: dtolnay/rust-toolchain@1.65.0
+ uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
# There aren't wheels for some of the older deps, so we need to install
@@ -498,7 +498,7 @@ jobs:
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
- name: Install Rust
- uses: dtolnay/rust-toolchain@1.65.0
+ uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
- name: Run SyTest
@@ -642,7 +642,7 @@ jobs:
path: synapse
- name: Install Rust
- uses: dtolnay/rust-toolchain@1.65.0
+ uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
- name: Prepare Complement's Prerequisites
@@ -674,7 +674,7 @@ jobs:
- uses: actions/checkout@v4
- name: Install Rust
- uses: dtolnay/rust-toolchain@1.65.0
+ uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
- run: cargo test
diff --git a/Cargo.lock b/Cargo.lock
index 630d38c2f4..faac6b3c8a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -13,9 +13,9 @@ dependencies = [
[[package]]
name = "anyhow"
-version = "1.0.81"
+version = "1.0.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247"
+checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519"
[[package]]
name = "arc-swap"
@@ -30,6 +30,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
+name = "base64"
+version = "0.21.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
+
+[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -54,12 +60,27 @@ dependencies = [
]
[[package]]
+name = "bytes"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9"
+
+[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
+name = "cpufeatures"
+version = "0.2.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
+dependencies = [
+ "libc",
+]
+
+[[package]]
name = "crypto-common"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -81,6 +102,12 @@ dependencies = [
]
[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
name = "generic-array"
version = "0.14.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -91,6 +118,30 @@ dependencies = [
]
[[package]]
+name = "headers"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "322106e6bd0cba2d5ead589ddb8150a13d7c4217cf80d7c4f682ca994ccc6aa9"
+dependencies = [
+ "base64",
+ "bytes",
+ "headers-core",
+ "http",
+ "httpdate",
+ "mime",
+ "sha1",
+]
+
+[[package]]
+name = "headers-core"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4"
+dependencies = [
+ "http",
+]
+
+[[package]]
name = "heck"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -103,6 +154,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
+name = "http"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258"
+dependencies = [
+ "bytes",
+ "fnv",
+ "itoa",
+]
+
+[[package]]
+name = "httpdate"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
+
+[[package]]
name = "indoc"
version = "2.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -122,9 +190,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.135"
+version = "0.2.153"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"
+checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
[[package]]
name = "lock_api"
@@ -158,6 +226,12 @@ dependencies = [
]
[[package]]
+name = "mime"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+
+[[package]]
name = "once_cell"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -377,6 +451,17 @@ dependencies = [
]
[[package]]
+name = "sha1"
+version = "0.10.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
+[[package]]
name = "smallvec"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -405,7 +490,10 @@ version = "0.1.0"
dependencies = [
"anyhow",
"blake2",
+ "bytes",
+ "headers",
"hex",
+ "http",
"lazy_static",
"log",
"pyo3",
diff --git a/changelog.d/16819.feature b/changelog.d/16819.feature
new file mode 100644
index 0000000000..1af6f466b7
--- /dev/null
+++ b/changelog.d/16819.feature
@@ -0,0 +1 @@
+Send an email if the address is already bound to a user account.
diff --git a/changelog.d/16920.bugfix b/changelog.d/16920.bugfix
new file mode 100644
index 0000000000..460f4f7160
--- /dev/null
+++ b/changelog.d/16920.bugfix
@@ -0,0 +1 @@
+Add validation to ensure that the `limit` parameter on `/publicRooms` is non-negative.
diff --git a/changelog.d/16923.bugfix b/changelog.d/16923.bugfix
new file mode 100644
index 0000000000..bd6f24925e
--- /dev/null
+++ b/changelog.d/16923.bugfix
@@ -0,0 +1 @@
+Return `400 M_NOT_JSON` upon receiving invalid JSON in query parameters across various client and admin endpoints, rather than an internal server error.
\ No newline at end of file
diff --git a/changelog.d/16943.bugfix b/changelog.d/16943.bugfix
new file mode 100644
index 0000000000..4360741132
--- /dev/null
+++ b/changelog.d/16943.bugfix
@@ -0,0 +1 @@
+Make the CSAPI endpoint `/keys/device_signing/upload` idempotent.
\ No newline at end of file
diff --git a/changelog.d/17032.misc b/changelog.d/17032.misc
new file mode 100644
index 0000000000..b03f6f42e5
--- /dev/null
+++ b/changelog.d/17032.misc
@@ -0,0 +1 @@
+Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar).
diff --git a/changelog.d/17036.misc b/changelog.d/17036.misc
new file mode 100644
index 0000000000..3296668059
--- /dev/null
+++ b/changelog.d/17036.misc
@@ -0,0 +1 @@
+Fix mypy with latest Twisted release.
diff --git a/changelog.d/17069.doc b/changelog.d/17069.doc
new file mode 100644
index 0000000000..f5a7f599d1
--- /dev/null
+++ b/changelog.d/17069.doc
@@ -0,0 +1 @@
+Add a prompt in the contributing guide to manually configure icu4c.
diff --git a/changelog.d/17079.misc b/changelog.d/17079.misc
new file mode 100644
index 0000000000..340e40d194
--- /dev/null
+++ b/changelog.d/17079.misc
@@ -0,0 +1 @@
+Bump minimum supported Rust version to 1.66.0.
diff --git a/changelog.d/17081.misc b/changelog.d/17081.misc
new file mode 100644
index 0000000000..d1ab69126c
--- /dev/null
+++ b/changelog.d/17081.misc
@@ -0,0 +1 @@
+Add helpers to transform Twisted requests to Rust http Requests/Responses.
diff --git a/changelog.d/17086.feature b/changelog.d/17086.feature
new file mode 100644
index 0000000000..08b407d316
--- /dev/null
+++ b/changelog.d/17086.feature
@@ -0,0 +1 @@
+Support delegating the rendezvous mechanism described in MSC4108 to an external implementation.
diff --git a/changelog.d/17096.misc b/changelog.d/17096.misc
new file mode 100644
index 0000000000..b03f6f42e5
--- /dev/null
+++ b/changelog.d/17096.misc
@@ -0,0 +1 @@
+Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar).
diff --git a/changelog.d/17099.doc b/changelog.d/17099.doc
new file mode 100644
index 0000000000..d8d10fa53a
--- /dev/null
+++ b/changelog.d/17099.doc
@@ -0,0 +1 @@
+Clarify what part of message retention is still experimental.
diff --git a/docker/complement/conf/workers-shared-extra.yaml.j2 b/docker/complement/conf/workers-shared-extra.yaml.j2
index 2b11b487f6..32eada4419 100644
--- a/docker/complement/conf/workers-shared-extra.yaml.j2
+++ b/docker/complement/conf/workers-shared-extra.yaml.j2
@@ -102,6 +102,8 @@ experimental_features:
msc3391_enabled: true
# Filtering /messages by relation type.
msc3874_enabled: true
+ # No UIA required for the first-time upload of cross-signing keys
+ msc3967_enabled: true
server_notices:
system_mxid_localpart: _server
diff --git a/docs/development/contributing_guide.md b/docs/development/contributing_guide.md
index ac8a7039d1..76c3e790cd 100644
--- a/docs/development/contributing_guide.md
+++ b/docs/development/contributing_guide.md
@@ -86,6 +86,8 @@ poetry install --extras all
This will install the runtime and developer dependencies for the project. Be sure to check
that the `poetry install` step completed cleanly.
+On macOS, be sure to set `PKG_CONFIG_PATH` so that `icu4c` can be found. Run `brew info icu4c` for more details.
+
## Running Synapse via poetry
To start a local instance of Synapse in the locked poetry environment, create a config file:
diff --git a/docs/message_retention_policies.md b/docs/message_retention_policies.md
index 2746a106b3..c64d1539b0 100644
--- a/docs/message_retention_policies.md
+++ b/docs/message_retention_policies.md
@@ -7,8 +7,10 @@ follow the semantics described in
and allow server and room admins to configure how long messages should
be kept in a homeserver's database before being purged from it.
**Please note that, as this feature isn't part of the Matrix
-specification yet, this implementation is to be considered as
-experimental.**
+specification yet, the use of `m.room.retention` events for per-room
+retention policies is to be considered experimental. However, the use
+of a default message retention policy is considered a stable feature
+in Synapse.**
A message retention policy is mainly defined by its `max_lifetime`
parameter, which defines how long a message can be kept around after
diff --git a/poetry.lock b/poetry.lock
index 814877b70a..d916c627a0 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1848,17 +1848,17 @@ files = [
[[package]]
name = "pyasn1-modules"
-version = "0.3.0"
+version = "0.4.0"
description = "A collection of ASN.1-based protocols modules"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+python-versions = ">=3.8"
files = [
- {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"},
- {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"},
+ {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"},
+ {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"},
]
[package.dependencies]
-pyasn1 = ">=0.4.6,<0.6.0"
+pyasn1 = ">=0.4.6,<0.7.0"
[[package]]
name = "pycparser"
@@ -1983,13 +1983,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pygithub"
-version = "2.2.0"
+version = "2.3.0"
description = "Use the full Github API v3"
optional = false
python-versions = ">=3.7"
files = [
- {file = "PyGithub-2.2.0-py3-none-any.whl", hash = "sha256:41042ea53e4c372219db708c38d2ca1fd4fadab75475bac27d89d339596cfad1"},
- {file = "PyGithub-2.2.0.tar.gz", hash = "sha256:e39be7c4dc39418bdd6e3ecab5931c636170b8b21b4d26f9ecf7e6102a3b51c3"},
+ {file = "PyGithub-2.3.0-py3-none-any.whl", hash = "sha256:65b499728be3ce7b0cd2cd760da3b32f0f4d7bc55e5e0677617f90f6564e793e"},
+ {file = "PyGithub-2.3.0.tar.gz", hash = "sha256:0148d7347a1cdeed99af905077010aef81a4dad988b0ba51d4108bf66b443f7e"},
]
[package.dependencies]
@@ -2444,28 +2444,28 @@ files = [
[[package]]
name = "ruff"
-version = "0.3.5"
+version = "0.3.7"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
- {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:aef5bd3b89e657007e1be6b16553c8813b221ff6d92c7526b7e0227450981eac"},
- {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:89b1e92b3bd9fca249153a97d23f29bed3992cff414b222fcd361d763fc53f12"},
- {file = "ruff-0.3.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e55771559c89272c3ebab23326dc23e7f813e492052391fe7950c1a5a139d89"},
- {file = "ruff-0.3.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dabc62195bf54b8a7876add6e789caae0268f34582333cda340497c886111c39"},
- {file = "ruff-0.3.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a05f3793ba25f194f395578579c546ca5d83e0195f992edc32e5907d142bfa3"},
- {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dfd3504e881082959b4160ab02f7a205f0fadc0a9619cc481982b6837b2fd4c0"},
- {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87258e0d4b04046cf1d6cc1c56fadbf7a880cc3de1f7294938e923234cf9e498"},
- {file = "ruff-0.3.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:712e71283fc7d9f95047ed5f793bc019b0b0a29849b14664a60fd66c23b96da1"},
- {file = "ruff-0.3.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a532a90b4a18d3f722c124c513ffb5e5eaff0cc4f6d3aa4bda38e691b8600c9f"},
- {file = "ruff-0.3.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:122de171a147c76ada00f76df533b54676f6e321e61bd8656ae54be326c10296"},
- {file = "ruff-0.3.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d80a6b18a6c3b6ed25b71b05eba183f37d9bc8b16ace9e3d700997f00b74660b"},
- {file = "ruff-0.3.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a7b6e63194c68bca8e71f81de30cfa6f58ff70393cf45aab4c20f158227d5936"},
- {file = "ruff-0.3.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a759d33a20c72f2dfa54dae6e85e1225b8e302e8ac655773aff22e542a300985"},
- {file = "ruff-0.3.5-py3-none-win32.whl", hash = "sha256:9d8605aa990045517c911726d21293ef4baa64f87265896e491a05461cae078d"},
- {file = "ruff-0.3.5-py3-none-win_amd64.whl", hash = "sha256:dc56bb16a63c1303bd47563c60482a1512721053d93231cf7e9e1c6954395a0e"},
- {file = "ruff-0.3.5-py3-none-win_arm64.whl", hash = "sha256:faeeae9905446b975dcf6d4499dc93439b131f1443ee264055c5716dd947af55"},
- {file = "ruff-0.3.5.tar.gz", hash = "sha256:a067daaeb1dc2baf9b82a32dae67d154d95212080c80435eb052d95da647763d"},
+ {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"},
+ {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"},
+ {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"},
+ {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"},
+ {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"},
+ {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"},
+ {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"},
+ {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"},
+ {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"},
+ {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"},
+ {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"},
+ {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"},
+ {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"},
+ {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"},
+ {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"},
+ {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"},
+ {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"},
]
[[package]]
@@ -2954,13 +2954,13 @@ docs = ["sphinx (<7.0.0)"]
[[package]]
name = "twine"
-version = "4.0.2"
+version = "5.0.0"
description = "Collection of utilities for publishing packages on PyPI"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "twine-4.0.2-py3-none-any.whl", hash = "sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8"},
- {file = "twine-4.0.2.tar.gz", hash = "sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8"},
+ {file = "twine-5.0.0-py3-none-any.whl", hash = "sha256:a262933de0b484c53408f9edae2e7821c1c45a3314ff2df9bdd343aa7ab8edc0"},
+ {file = "twine-5.0.0.tar.gz", hash = "sha256:89b0cc7d370a4b66421cc6102f269aa910fe0f1861c124f573cf2ddedbc10cf4"},
]
[package.dependencies]
@@ -3109,13 +3109,13 @@ files = [
[[package]]
name = "types-pillow"
-version = "10.2.0.20240406"
+version = "10.2.0.20240415"
description = "Typing stubs for Pillow"
optional = false
python-versions = ">=3.8"
files = [
- {file = "types-Pillow-10.2.0.20240406.tar.gz", hash = "sha256:62e0cc1f17caba40e72e7154a483f4c7f3bea0e1c34c0ebba9de3c7745bc306d"},
- {file = "types_Pillow-10.2.0.20240406-py3-none-any.whl", hash = "sha256:5ac182e8afce53de30abca2fdf9cbec7b2500e549d0be84da035a729a84c7c47"},
+ {file = "types-Pillow-10.2.0.20240415.tar.gz", hash = "sha256:dd6058027639bcdc66ba78b228cc25fdae42524c2150c78c804da427e7e76e70"},
+ {file = "types_Pillow-10.2.0.20240415-py3-none-any.whl", hash = "sha256:f933332b7e96010bae9b9cf82a4c9979ff0c270d63f5c5bbffb2d789b85cd00b"},
]
[[package]]
@@ -3451,4 +3451,4 @@ user-search = ["pyicu"]
[metadata]
lock-version = "2.0"
python-versions = "^3.8.0"
-content-hash = "4abda113a01f162bb3978b0372956d569364533aa39f57863c234363f8449a4f"
+content-hash = "1951f2b4623138d47db08a405edd970e67599d05804bb459af21a085e1665f69"
diff --git a/pyproject.toml b/pyproject.toml
index 508d31d8d7..ed0f5ef4ba 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -321,7 +321,7 @@ all = [
# This helps prevents merge conflicts when running a batch of dependabot updates.
isort = ">=5.10.1"
black = ">=22.7.0"
-ruff = "0.3.5"
+ruff = "0.3.7"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"
diff --git a/rust/Cargo.toml b/rust/Cargo.toml
index d89def1843..9ac766182b 100644
--- a/rust/Cargo.toml
+++ b/rust/Cargo.toml
@@ -7,7 +7,7 @@ name = "synapse"
version = "0.1.0"
edition = "2021"
-rust-version = "1.65.0"
+rust-version = "1.66.0"
[lib]
name = "synapse"
@@ -23,6 +23,9 @@ name = "synapse.synapse_rust"
[dependencies]
anyhow = "1.0.63"
+bytes = "1.6.0"
+headers = "0.4.0"
+http = "1.1.0"
lazy_static = "1.4.0"
log = "0.4.17"
pyo3 = { version = "0.20.0", features = [
diff --git a/rust/src/errors.rs b/rust/src/errors.rs
new file mode 100644
index 0000000000..4e580e3e8c
--- /dev/null
+++ b/rust/src/errors.rs
@@ -0,0 +1,60 @@
+/*
+ * This file is licensed under the Affero General Public License (AGPL) version 3.
+ *
+ * Copyright (C) 2024 New Vector, Ltd
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * See the GNU Affero General Public License for more details:
+ * <https://www.gnu.org/licenses/agpl-3.0.html>.
+ */
+
+#![allow(clippy::new_ret_no_self)]
+
+use std::collections::HashMap;
+
+use http::{HeaderMap, StatusCode};
+use pyo3::{exceptions::PyValueError, import_exception};
+
+import_exception!(synapse.api.errors, SynapseError);
+
+impl SynapseError {
+ pub fn new(
+ code: StatusCode,
+ message: String,
+ errcode: &'static str,
+ additional_fields: Option<HashMap<String, String>>,
+ headers: Option<HeaderMap>,
+ ) -> pyo3::PyErr {
+ // Transform the HeaderMap into a HashMap<String, String>
+ let headers = if let Some(headers) = headers {
+ let mut map = HashMap::with_capacity(headers.len());
+ for (key, value) in headers.iter() {
+ let Ok(value) = value.to_str() else {
+ // This should never happen, but we don't want to panic in case it does
+ return PyValueError::new_err(
+ "Could not construct SynapseError: header value is not valid ASCII",
+ );
+ };
+
+ map.insert(key.as_str().to_owned(), value.to_owned());
+ }
+ Some(map)
+ } else {
+ None
+ };
+
+ SynapseError::new_err((code.as_u16(), message, errcode, additional_fields, headers))
+ }
+}
+
+import_exception!(synapse.api.errors, NotFoundError);
+
+impl NotFoundError {
+ pub fn new() -> pyo3::PyErr {
+ NotFoundError::new_err(())
+ }
+}
diff --git a/rust/src/http.rs b/rust/src/http.rs
new file mode 100644
index 0000000000..74098f4c8b
--- /dev/null
+++ b/rust/src/http.rs
@@ -0,0 +1,165 @@
+/*
+ * This file is licensed under the Affero General Public License (AGPL) version 3.
+ *
+ * Copyright (C) 2024 New Vector, Ltd
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * See the GNU Affero General Public License for more details:
+ * <https://www.gnu.org/licenses/agpl-3.0.html>.
+ */
+
+use bytes::{Buf, BufMut, Bytes, BytesMut};
+use headers::{Header, HeaderMapExt};
+use http::{HeaderName, HeaderValue, Method, Request, Response, StatusCode, Uri};
+use pyo3::{
+ exceptions::PyValueError,
+ types::{PyBytes, PySequence, PyTuple},
+ PyAny, PyResult,
+};
+
+use crate::errors::SynapseError;
+
+/// Read a file-like Python object in chunks
+///
+/// # Errors
+///
+/// Returns an error if calling the `read` method on the Python object fails
+fn read_io_body(body: &PyAny, chunk_size: usize) -> PyResult<Bytes> {
+ let mut buf = BytesMut::new();
+ loop {
+ let bytes: &PyBytes = body.call_method1("read", (chunk_size,))?.downcast()?;
+ if bytes.as_bytes().is_empty() {
+ return Ok(buf.into());
+ }
+ buf.put(bytes.as_bytes());
+ }
+}
+
+/// Transform a Twisted `IRequest` to an [`http::Request`]
+///
+/// It uses the following members of `IRequest`:
+/// - `content`, which is expected to be a file-like object with a `read` method
+/// - `uri`, which is expected to be a valid URI as `bytes`
+/// - `method`, which is expected to be a valid HTTP method as `bytes`
+/// - `requestHeaders`, which is expected to have a `getAllRawHeaders` method
+///
+/// # Errors
+///
+/// Returns an error if the Python object doesn't properly implement `IRequest`
+pub fn http_request_from_twisted(request: &PyAny) -> PyResult<Request<Bytes>> {
+ let content = request.getattr("content")?;
+ let body = read_io_body(content, 4096)?;
+
+ let mut req = Request::new(body);
+
+ let uri: &PyBytes = request.getattr("uri")?.downcast()?;
+ *req.uri_mut() =
+ Uri::try_from(uri.as_bytes()).map_err(|_| PyValueError::new_err("invalid uri"))?;
+
+ let method: &PyBytes = request.getattr("method")?.downcast()?;
+ *req.method_mut() = Method::from_bytes(method.as_bytes())
+ .map_err(|_| PyValueError::new_err("invalid method"))?;
+
+ let headers_iter = request
+ .getattr("requestHeaders")?
+ .call_method0("getAllRawHeaders")?
+ .iter()?;
+
+ for header in headers_iter {
+ let header = header?;
+ let header: &PyTuple = header.downcast()?;
+ let name: &PyBytes = header.get_item(0)?.downcast()?;
+ let name = HeaderName::from_bytes(name.as_bytes())
+ .map_err(|_| PyValueError::new_err("invalid header name"))?;
+
+ let values: &PySequence = header.get_item(1)?.downcast()?;
+ for index in 0..values.len()? {
+ let value: &PyBytes = values.get_item(index)?.downcast()?;
+ let value = HeaderValue::from_bytes(value.as_bytes())
+ .map_err(|_| PyValueError::new_err("invalid header value"))?;
+ req.headers_mut().append(name.clone(), value);
+ }
+ }
+
+ Ok(req)
+}
+
+/// Send an [`http::Response`] through a Twisted `IRequest`
+///
+/// It uses the following members of `IRequest`:
+///
+/// - `responseHeaders`, which is expected to have an `addRawHeader(bytes, bytes)` method
+/// - `setResponseCode(int)` method
+/// - `write(bytes)` method
+/// - `finish()` method
+///
+/// # Errors
+///
+/// Returns an error if the Python object doesn't properly implement `IRequest`
+pub fn http_response_to_twisted<B>(request: &PyAny, response: Response<B>) -> PyResult<()>
+where
+ B: Buf,
+{
+ let (parts, mut body) = response.into_parts();
+
+ request.call_method1("setResponseCode", (parts.status.as_u16(),))?;
+
+ let response_headers = request.getattr("responseHeaders")?;
+ for (name, value) in parts.headers.iter() {
+ response_headers.call_method1("addRawHeader", (name.as_str(), value.as_bytes()))?;
+ }
+
+ while body.remaining() != 0 {
+ let chunk = body.chunk();
+ request.call_method1("write", (chunk,))?;
+ body.advance(chunk.len());
+ }
+
+ request.call_method0("finish")?;
+
+ Ok(())
+}
+
+/// An extension trait for [`HeaderMap`] that provides typed access to headers, and raises the
+/// right Python exceptions when the header is missing or fails to parse.
+///
+/// [`HeaderMap`]: headers::HeaderMap
+pub trait HeaderMapPyExt: HeaderMapExt {
+ /// Get a header from the map, returning an error if it is missing or invalid.
+ fn typed_get_required<H>(&self) -> PyResult<H>
+ where
+ H: Header,
+ {
+ self.typed_get_optional::<H>()?.ok_or_else(|| {
+ SynapseError::new(
+ StatusCode::BAD_REQUEST,
+ format!("Missing required header: {}", H::name()),
+ "M_MISSING_PARAM",
+ None,
+ None,
+ )
+ })
+ }
+
+ /// Get a header from the map, returning `None` if it is missing and an error if it is invalid.
+ fn typed_get_optional<H>(&self) -> PyResult<Option<H>>
+ where
+ H: Header,
+ {
+ self.typed_try_get::<H>().map_err(|_| {
+ SynapseError::new(
+ StatusCode::BAD_REQUEST,
+ format!("Invalid header: {}", H::name()),
+ "M_INVALID_PARAM",
+ None,
+ None,
+ )
+ })
+ }
+}
+
+impl<T: HeaderMapExt> HeaderMapPyExt for T {}
diff --git a/rust/src/lib.rs b/rust/src/lib.rs
index 7b3b579e55..36a3d64528 100644
--- a/rust/src/lib.rs
+++ b/rust/src/lib.rs
@@ -3,7 +3,9 @@ use pyo3::prelude::*;
use pyo3_log::ResetHandle;
pub mod acl;
+pub mod errors;
pub mod events;
+pub mod http;
pub mod push;
lazy_static! {
diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh
index b1a8724b7e..2a779f8255 100755
--- a/scripts-dev/complement.sh
+++ b/scripts-dev/complement.sh
@@ -214,7 +214,7 @@ fi
extra_test_args=()
-test_packages="./tests/csapi ./tests ./tests/msc3874 ./tests/msc3890 ./tests/msc3391 ./tests/msc3930 ./tests/msc3902"
+test_packages="./tests/csapi ./tests ./tests/msc3874 ./tests/msc3890 ./tests/msc3391 ./tests/msc3930 ./tests/msc3902 ./tests/msc3967"
# Enable dirty runs, so tests will reuse the same container where possible.
# This significantly speeds up tests, but increases the possibility of test pollution.
diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py
index a4dc9db03e..8033fa2e52 100644
--- a/synapse/config/emailconfig.py
+++ b/synapse/config/emailconfig.py
@@ -52,6 +52,7 @@ DEFAULT_SUBJECTS = {
"invite_from_person_to_space": "[%(app)s] %(person)s has invited you to join the %(space)s space on %(app)s...",
"password_reset": "[%(server_name)s] Password reset",
"email_validation": "[%(server_name)s] Validate your email",
+ "email_already_in_use": "[%(server_name)s] Email already in use",
}
LEGACY_TEMPLATE_DIR_WARNING = """
@@ -76,6 +77,7 @@ class EmailSubjectConfig:
invite_from_person_to_space: str
password_reset: str
email_validation: str
+ email_already_in_use: str
class EmailConfig(Config):
@@ -180,6 +182,12 @@ class EmailConfig(Config):
registration_template_text = email_config.get(
"registration_template_text", "registration.txt"
)
+ already_in_use_template_html = email_config.get(
+ "already_in_use_template_html", "already_in_use.html"
+ )
+ already_in_use_template_text = email_config.get(
+ "already_in_use_template_html", "already_in_use.txt"
+ )
add_threepid_template_html = email_config.get(
"add_threepid_template_html", "add_threepid.html"
)
@@ -215,6 +223,8 @@ class EmailConfig(Config):
self.email_password_reset_template_text,
self.email_registration_template_html,
self.email_registration_template_text,
+ self.email_already_in_use_template_html,
+ self.email_already_in_use_template_text,
self.email_add_threepid_template_html,
self.email_add_threepid_template_text,
self.email_password_reset_template_confirmation_html,
@@ -230,6 +240,8 @@ class EmailConfig(Config):
password_reset_template_text,
registration_template_html,
registration_template_text,
+ already_in_use_template_html,
+ already_in_use_template_text,
add_threepid_template_html,
add_threepid_template_text,
"password_reset_confirmation.html",
diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py
index fcc78d2d81..353ae23f91 100644
--- a/synapse/config/experimental.py
+++ b/synapse/config/experimental.py
@@ -411,3 +411,14 @@ class ExperimentalConfig(Config):
self.msc4069_profile_inhibit_propagation = experimental.get(
"msc4069_profile_inhibit_propagation", False
)
+
+ # MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code
+ self.msc4108_delegation_endpoint: Optional[str] = experimental.get(
+ "msc4108_delegation_endpoint", None
+ )
+
+ if self.msc4108_delegation_endpoint is not None and not self.msc3861.enabled:
+ raise ConfigError(
+ "MSC4108 requires MSC3861 to be enabled",
+ ("experimental", "msc4108_delegation_endpoint"),
+ )
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index 63e00f102e..1ece54ccfc 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -1476,6 +1476,42 @@ class E2eKeysHandler:
else:
return exists, self.clock.time_msec() < ts_replacable_without_uia_before
+ async def has_different_keys(self, user_id: str, body: JsonDict) -> bool:
+ """
+ Check if a key provided in `body` differs from the same key stored in the DB. Returns
+ True on the first difference. If a key exists in `body` but does not exist in the DB,
+ returns True. If `body` has no keys, this always returns False.
+ Note: by 'key' we mean a Matrix cross-signing key rather than a JSON dict key.
+
+ The purpose of this function is to detect whether or not we need to apply UIA checks.
+ We must apply UIA checks if any key in the database is being overwritten. If a key is
+ being inserted for the first time, or if the key exactly matches what is in the database,
+ then no UIA check needs to be performed.
+
+ Args:
+ user_id: The user who sent the `body`.
+ body: The JSON request body from POST /keys/device_signing/upload
+ Returns:
+ True if any key in `body` has a different value in the database.
+ """
+ # Ensure that each key provided in the request body exactly matches the one we have stored.
+ # The first time we see the DB having a different key to the matching request key, bail.
+ # Note: we do not care if the DB has a key which the request does not specify, as we only
+ # care about *replacements* or *insertions* (i.e. UPSERT)
+ req_body_key_to_db_key = {
+ "master_key": "master",
+ "self_signing_key": "self_signing",
+ "user_signing_key": "user_signing",
+ }
+ for req_body_key, db_key in req_body_key_to_db_key.items():
+ if req_body_key in body:
+ existing_key = await self.store.get_e2e_cross_signing_key(
+ user_id, db_key
+ )
+ if existing_key != body[req_body_key]:
+ return True
+ return False
+
def _check_cross_signing_key(
key: JsonDict, user_id: str, key_type: str, signing_key: Optional[VerifyKey] = None
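To make the semantics of `has_different_keys` concrete, here is a hedged, self-contained sketch of the same comparison using plain dicts. It does not call into Synapse; the key shapes are simplified stand-ins for real cross-signing keys.

```python
# Standalone illustration of the comparison performed by has_different_keys.
# The stored/body dicts below are simplified stand-ins, not real cross-signing keys.

REQ_BODY_KEY_TO_DB_KEY = {
    "master_key": "master",
    "self_signing_key": "self_signing",
    "user_signing_key": "user_signing",
}


def has_different_keys(stored: dict, body: dict) -> bool:
    # Only keys present in the request body are compared; a key that is absent
    # from the DB but present in the body counts as "different" (an insertion).
    return any(
        req_key in body and stored.get(db_key) != body[req_key]
        for req_key, db_key in REQ_BODY_KEY_TO_DB_KEY.items()
    )


stored = {"master": {"usage": ["master"], "keys": {"ed25519:abc": "abc"}}}

# Re-uploading an identical key: no difference, so no UIA prompt is needed.
assert has_different_keys(stored, {"master_key": stored["master"]}) is False

# Uploading a replacement key: a difference, so UIA is required.
new_body = {"master_key": {"usage": ["master"], "keys": {"ed25519:def": "def"}}}
assert has_different_keys(stored, new_body) is True

# An empty body never differs.
assert has_different_keys(stored, {}) is False
```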
diff --git a/synapse/http/proxy.py b/synapse/http/proxy.py
index 6cbbd5741b..5b5ded757b 100644
--- a/synapse/http/proxy.py
+++ b/synapse/http/proxy.py
@@ -262,7 +262,8 @@ class _ProxyResponseBody(protocol.Protocol):
self._request.finish()
else:
# Abort the underlying request since our remote request also failed.
- self._request.transport.abortConnection()
+ if self._request.channel:
+ self._request.channel.forceAbortClient()
class ProxySite(Site):
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 632284712c..45b2cbffcd 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -153,9 +153,9 @@ def return_json_error(
# Only respond with an error response if we haven't already started writing,
# otherwise lets just kill the connection
if request.startedWriting:
- if request.transport:
+ if request.channel:
try:
- request.transport.abortConnection()
+ request.channel.forceAbortClient()
except Exception:
# abortConnection throws if the connection is already closed
pass
@@ -909,7 +909,18 @@ def set_cors_headers(request: "SynapseRequest") -> None:
request.setHeader(
b"Access-Control-Allow-Methods", b"GET, HEAD, POST, PUT, DELETE, OPTIONS"
)
- if request.experimental_cors_msc3886:
+ if request.path is not None and request.path.startswith(
+ b"/_matrix/client/unstable/org.matrix.msc4108/rendezvous"
+ ):
+ request.setHeader(
+ b"Access-Control-Allow-Headers",
+ b"Content-Type, If-Match, If-None-Match",
+ )
+ request.setHeader(
+ b"Access-Control-Expose-Headers",
+ b"Synapse-Trace-Id, Server, ETag",
+ )
+ elif request.experimental_cors_msc3886:
request.setHeader(
b"Access-Control-Allow-Headers",
b"X-Requested-With, Content-Type, Authorization, Date, If-Match, If-None-Match",
diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py
index b73d06f1d3..ab12951da8 100644
--- a/synapse/http/servlet.py
+++ b/synapse/http/servlet.py
@@ -19,9 +19,11 @@
#
#
-""" This module contains base REST classes for constructing REST servlets. """
+"""This module contains base REST classes for constructing REST servlets."""
+
import enum
import logging
+import urllib.parse as urlparse
from http import HTTPStatus
from typing import (
TYPE_CHECKING,
@@ -65,17 +67,49 @@ def parse_integer(request: Request, name: str, default: int) -> int: ...
@overload
-def parse_integer(request: Request, name: str, *, required: Literal[True]) -> int: ...
+def parse_integer(
+ request: Request, name: str, *, default: int, negative: bool
+) -> int: ...
+
+
+@overload
+def parse_integer(
+ request: Request, name: str, *, default: int, negative: bool = False
+) -> int: ...
+
+
+@overload
+def parse_integer(
+ request: Request, name: str, *, required: Literal[True], negative: bool = False
+) -> int: ...
@overload
def parse_integer(
- request: Request, name: str, default: Optional[int] = None, required: bool = False
+ request: Request, name: str, *, default: Literal[None], negative: bool = False
+) -> None: ...
+
+
+@overload
+def parse_integer(request: Request, name: str, *, negative: bool) -> Optional[int]: ...
+
+
+@overload
+def parse_integer(
+ request: Request,
+ name: str,
+ default: Optional[int] = None,
+ required: bool = False,
+ negative: bool = False,
) -> Optional[int]: ...
def parse_integer(
- request: Request, name: str, default: Optional[int] = None, required: bool = False
+ request: Request,
+ name: str,
+ default: Optional[int] = None,
+ required: bool = False,
+ negative: bool = False,
) -> Optional[int]:
"""Parse an integer parameter from the request string
@@ -85,16 +119,17 @@ def parse_integer(
default: value to use if the parameter is absent, defaults to None.
required: whether to raise a 400 SynapseError if the parameter is absent,
defaults to False.
-
+ negative: whether to allow negative integers, defaults to False.
Returns:
An int value or the default.
Raises:
- SynapseError: if the parameter is absent and required, or if the
- parameter is present and not an integer.
+ SynapseError: if the parameter is absent and required, if the
+ parameter is present and not an integer, or if the
+ parameter is negative when negative values are not permitted.
"""
args: Mapping[bytes, Sequence[bytes]] = request.args # type: ignore
- return parse_integer_from_args(args, name, default, required)
+ return parse_integer_from_args(args, name, default, required, negative)
@overload
@@ -120,6 +155,7 @@ def parse_integer_from_args(
name: str,
default: Optional[int] = None,
required: bool = False,
+ negative: bool = False,
) -> Optional[int]: ...
@@ -128,6 +164,7 @@ def parse_integer_from_args(
name: str,
default: Optional[int] = None,
required: bool = False,
+ negative: bool = True,
) -> Optional[int]:
"""Parse an integer parameter from the request string
@@ -137,33 +174,37 @@ def parse_integer_from_args(
default: value to use if the parameter is absent, defaults to None.
required: whether to raise a 400 SynapseError if the parameter is absent,
defaults to False.
+ negative: whether to allow negative integers, defaults to True.
Returns:
An int value or the default.
Raises:
- SynapseError: if the parameter is absent and required, or if the
- parameter is present and not an integer.
+ SynapseError: if the parameter is absent and required, if the
+ parameter is present and not an integer, or if the
+ parameter is negative when negative values are not permitted.
"""
name_bytes = name.encode("ascii")
- if name_bytes in args:
- try:
- return int(args[name_bytes][0])
- except Exception:
- message = "Query parameter %r must be an integer" % (name,)
- raise SynapseError(
- HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM
- )
- else:
- if required:
- message = "Missing integer query parameter %r" % (name,)
- raise SynapseError(
- HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM
- )
- else:
+ if name_bytes not in args:
+ if not required:
return default
+ message = f"Missing required integer query parameter {name}"
+ raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM)
+
+ try:
+ integer = int(args[name_bytes][0])
+ except Exception:
+ message = f"Query parameter {name} must be an integer"
+ raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM)
+
+ if not negative and integer < 0:
+ message = f"Query parameter {name} must be a positive integer."
+ raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM)
+
+ return integer
+
@overload
def parse_boolean(request: Request, name: str, default: bool) -> bool: ...
@@ -410,6 +451,87 @@ def parse_string(
)
+def parse_json(
+ request: Request,
+ name: str,
+ default: Optional[dict] = None,
+ required: bool = False,
+ encoding: str = "ascii",
+) -> Optional[JsonDict]:
+ """
+ Parse a JSON parameter from the request query string.
+
+ Args:
+ request: the twisted HTTP request.
+ name: the name of the query parameter.
+ default: value to use if the parameter is absent,
+ defaults to None.
+ required: whether to raise a 400 SynapseError if the
+ parameter is absent, defaults to False.
+ encoding: The encoding to decode the string content with.
+
+ Returns:
+ A JSON value, or `default` if the named query parameter was not found
+ and `required` was False.
+
+ Raises:
+ SynapseError if the parameter is absent and required, or if the
+ parameter is present and not a JSON object.
+ """
+ args: Mapping[bytes, Sequence[bytes]] = request.args # type: ignore
+ return parse_json_from_args(
+ args,
+ name,
+ default,
+ required=required,
+ encoding=encoding,
+ )
+
+
+def parse_json_from_args(
+ args: Mapping[bytes, Sequence[bytes]],
+ name: str,
+ default: Optional[dict] = None,
+ required: bool = False,
+ encoding: str = "ascii",
+) -> Optional[JsonDict]:
+ """
+ Parse a JSON parameter from the request query string.
+
+ Args:
+ args: a mapping of request args as bytes to a list of bytes (e.g. request.args).
+ name: the name of the query parameter.
+ default: value to use if the parameter is absent,
+ defaults to None.
+ required: whether to raise a 400 SynapseError if the
+ parameter is absent, defaults to False.
+ encoding: the encoding to decode the string content with.
+
+ A JSON value, or `default` if the named query parameter was not found
+ and `required` was False.
+
+ Raises:
+ SynapseError if the parameter is absent and required, or if the
+ parameter is present and not a JSON object.
+ """
+ name_bytes = name.encode("ascii")
+
+ if name_bytes not in args:
+ if not required:
+ return default
+
+ message = f"Missing required integer query parameter {name}"
+ raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM)
+
+ json_str = parse_string_from_args(args, name, required=True, encoding=encoding)
+
+ try:
+ return json_decoder.decode(urlparse.unquote(json_str))
+ except Exception:
+ message = f"Query parameter {name} must be a valid JSON object"
+ raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.NOT_JSON)
+
+
EnumT = TypeVar("EnumT", bound=enum.Enum)
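Taken together, the new `negative` flag and the `parse_json` helper let servlets drop their hand-rolled validation. A hedged sketch of how a hypothetical servlet might use them (the servlet class below is illustrative and not part of this patch):

```python
# Hypothetical servlet; only the two helper calls are the point here.
from typing import Tuple

from synapse.http.servlet import RestServlet, parse_integer, parse_json
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict


class ExampleListServlet(RestServlet):
    async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
        # Rejects values below zero with 400 M_INVALID_PARAM, replacing the
        # per-servlet `if start < 0: raise SynapseError(...)` boilerplate.
        start = parse_integer(request, "from", default=0, negative=False)
        limit = parse_integer(request, "limit", default=100, negative=False)

        # Decodes a URL-encoded JSON `filter` query parameter, returning
        # 400 M_NOT_JSON instead of a 500 if the value is not valid JSON.
        filter_json = parse_json(request, "filter", encoding="utf-8")

        return 200, {"from": start, "limit": limit, "filter": filter_json}
```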
diff --git a/synapse/http/site.py b/synapse/http/site.py
index 682b28e4c6..a5b5780679 100644
--- a/synapse/http/site.py
+++ b/synapse/http/site.py
@@ -150,7 +150,8 @@ class SynapseRequest(Request):
self.get_method(),
self.get_redacted_uri(),
)
- self.transport.abortConnection()
+ if self.channel:
+ self.channel.forceAbortClient()
return
super().handleContentChunk(data)
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index f1ffc8115f..7c15eb7440 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -205,6 +205,22 @@ class Mailer:
template_vars,
)
+ emails_sent_counter.labels("already_in_use")
+
+ async def send_already_in_use_mail(self, email_address: str) -> None:
+ """Send an email if the address is already bound to an user account
+
+ Args:
+ email_address: the email address to send the "already in use" mail to
+ """
+
+ await self.send_email(
+ email_address,
+ self.email_subjects.email_already_in_use
+ % {"server_name": self.hs.config.server.server_name, "app": self.app_name},
+ {},
+ )
+
emails_sent_counter.labels("add_threepid")
async def send_add_threepid_mail(
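A hedged sketch of a call site for the new mail, assuming a configured `Mailer` and a threepid lookup on the datastore (`get_user_id_by_threepid` is an existing store method; the wrapper function itself is illustrative and not part of this patch):

```python
# Illustrative helper: notify the owner of an already-bound address instead of
# revealing anything to the requester.
from typing import Optional


async def maybe_send_already_in_use_mail(mailer, store, email_address: str) -> Optional[str]:
    existing_user_id = await store.get_user_id_by_threepid("email", email_address)
    if existing_user_id is not None:
        # Uses the "email_already_in_use" subject and the already_in_use.html/.txt
        # templates added in this change.
        await mailer.send_already_in_use_mail(email_address)
    return existing_user_id
```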
diff --git a/synapse/res/templates/already_in_use.html b/synapse/res/templates/already_in_use.html
new file mode 100644
index 0000000000..4c4c3c36a7
--- /dev/null
+++ b/synapse/res/templates/already_in_use.html
@@ -0,0 +1,12 @@
+{% extends "_base.html" %}
+{% block title %}Email already in use{% endblock %}
+
+{% block body %}
+<p>You have asked us to register this email with a new Matrix account, but this email is already registered with an existing account.</p>
+
+<p>Please reset your password if needed.</p>
+
+<p>If this was not you, you can safely disregard this email.</p>
+
+<p>Thank you.</p>
+{% endblock %}
diff --git a/synapse/res/templates/already_in_use.txt b/synapse/res/templates/already_in_use.txt
new file mode 100644
index 0000000000..c60401a940
--- /dev/null
+++ b/synapse/res/templates/already_in_use.txt
@@ -0,0 +1,10 @@
+Hello there,
+
+You have asked us to register this email with a new Matrix account,
+but this email is already registered with an existing account.
+
+Please reset your password if needed.
+
+If this was not you, you can safely disregard this email.
+
+Thank you.
diff --git a/synapse/rest/admin/federation.py b/synapse/rest/admin/federation.py
index 045153e0cb..14ab4644cb 100644
--- a/synapse/rest/admin/federation.py
+++ b/synapse/rest/admin/federation.py
@@ -23,7 +23,7 @@ from http import HTTPStatus
from typing import TYPE_CHECKING, Tuple
from synapse.api.constants import Direction
-from synapse.api.errors import Codes, NotFoundError, SynapseError
+from synapse.api.errors import NotFoundError, SynapseError
from synapse.federation.transport.server import Authenticator
from synapse.http.servlet import RestServlet, parse_enum, parse_integer, parse_string
from synapse.http.site import SynapseRequest
@@ -61,22 +61,8 @@ class ListDestinationsRestServlet(RestServlet):
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
await assert_requester_is_admin(self._auth, request)
- start = parse_integer(request, "from", default=0)
- limit = parse_integer(request, "limit", default=100)
-
- if start < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter from must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
-
- if limit < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter limit must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
+ start = parse_integer(request, "from", default=0, negative=False)
+ limit = parse_integer(request, "limit", default=100, negative=False)
destination = parse_string(request, "destination")
@@ -195,22 +181,8 @@ class DestinationMembershipRestServlet(RestServlet):
if not await self._store.is_destination_known(destination):
raise NotFoundError("Unknown destination")
- start = parse_integer(request, "from", default=0)
- limit = parse_integer(request, "limit", default=100)
-
- if start < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter from must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
-
- if limit < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter limit must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
+ start = parse_integer(request, "from", default=0, negative=False)
+ limit = parse_integer(request, "limit", default=100, negative=False)
direction = parse_enum(request, "dir", Direction, default=Direction.FORWARDS)
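With the validation moved into `parse_integer`, a negative paging parameter on the admin destinations list now fails fast. A hedged example request (host and token are placeholders; the endpoint path is the existing admin API route):

```python
import requests

# Illustrative only: query the admin destinations list with a negative `from`.
# Replace the host and token with real values.
resp = requests.get(
    "https://synapse.example.org/_synapse/admin/v1/federation/destinations",
    params={"from": -1, "limit": 100},
    headers={"Authorization": "Bearer <admin access token>"},
)

# parse_integer(..., negative=False) now rejects the value before the handler
# body runs, so the response is 400 with errcode M_INVALID_PARAM.
assert resp.status_code == 400
assert resp.json()["errcode"] == "M_INVALID_PARAM"
```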
diff --git a/synapse/rest/admin/media.py b/synapse/rest/admin/media.py
index 27f0808658..a05b7252ec 100644
--- a/synapse/rest/admin/media.py
+++ b/synapse/rest/admin/media.py
@@ -311,29 +311,17 @@ class DeleteMediaByDateSize(RestServlet):
) -> Tuple[int, JsonDict]:
await assert_requester_is_admin(self.auth, request)
- before_ts = parse_integer(request, "before_ts", required=True)
- size_gt = parse_integer(request, "size_gt", default=0)
+ before_ts = parse_integer(request, "before_ts", required=True, negative=False)
+ size_gt = parse_integer(request, "size_gt", default=0, negative=False)
keep_profiles = parse_boolean(request, "keep_profiles", default=True)
- if before_ts < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter before_ts must be a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
- elif before_ts < 30000000000: # Dec 1970 in milliseconds, Aug 2920 in seconds
+ if before_ts < 30000000000: # Dec 1970 in milliseconds, Aug 2920 in seconds
raise SynapseError(
HTTPStatus.BAD_REQUEST,
"Query parameter before_ts you provided is from the year 1970. "
+ "Double check that you are providing a timestamp in milliseconds.",
errcode=Codes.INVALID_PARAM,
)
- if size_gt < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter size_gt must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
# This check is useless, we keep it for the legacy endpoint only.
if server_name is not None and self.server_name != server_name:
@@ -389,22 +377,8 @@ class UserMediaRestServlet(RestServlet):
if user is None:
raise NotFoundError("Unknown user")
- start = parse_integer(request, "from", default=0)
- limit = parse_integer(request, "limit", default=100)
-
- if start < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter from must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
-
- if limit < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter limit must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
+ start = parse_integer(request, "from", default=0, negative=False)
+ limit = parse_integer(request, "limit", default=100, negative=False)
# If neither `order_by` nor `dir` is set, set the default order
# to newest media is on top for backward compatibility.
@@ -447,22 +421,8 @@ class UserMediaRestServlet(RestServlet):
if user is None:
raise NotFoundError("Unknown user")
- start = parse_integer(request, "from", default=0)
- limit = parse_integer(request, "limit", default=100)
-
- if start < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter from must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
-
- if limit < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter limit must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
+ start = parse_integer(request, "from", default=0, negative=False)
+ limit = parse_integer(request, "limit", default=100, negative=False)
# If neither `order_by` nor `dir` is set, set the default order
# to newest media is on top for backward compatibility.
diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py
index 4252f98a6c..0d86a4e15f 100644
--- a/synapse/rest/admin/rooms.py
+++ b/synapse/rest/admin/rooms.py
@@ -21,7 +21,6 @@
import logging
from http import HTTPStatus
from typing import TYPE_CHECKING, List, Optional, Tuple, cast
-from urllib import parse as urlparse
import attr
@@ -38,6 +37,7 @@ from synapse.http.servlet import (
assert_params_in_dict,
parse_enum,
parse_integer,
+ parse_json,
parse_json_object_from_request,
parse_string,
)
@@ -51,7 +51,6 @@ from synapse.storage.databases.main.room import RoomSortOrder
from synapse.streams.config import PaginationConfig
from synapse.types import JsonDict, RoomID, ScheduledTask, UserID, create_requester
from synapse.types.state import StateFilter
-from synapse.util import json_decoder
if TYPE_CHECKING:
from synapse.api.auth import Auth
@@ -776,14 +775,8 @@ class RoomEventContextServlet(RestServlet):
limit = parse_integer(request, "limit", default=10)
# picking the API shape for symmetry with /messages
- filter_str = parse_string(request, "filter", encoding="utf-8")
- if filter_str:
- filter_json = urlparse.unquote(filter_str)
- event_filter: Optional[Filter] = Filter(
- self._hs, json_decoder.decode(filter_json)
- )
- else:
- event_filter = None
+ filter_json = parse_json(request, "filter", encoding="utf-8")
+ event_filter = Filter(self._hs, filter_json) if filter_json else None
event_context = await self.room_context_handler.get_event_context(
requester,
@@ -914,21 +907,16 @@ class RoomMessagesRestServlet(RestServlet):
)
# Twisted will have processed the args by now.
assert request.args is not None
+
+ filter_json = parse_json(request, "filter", encoding="utf-8")
+ event_filter = Filter(self._hs, filter_json) if filter_json else None
+
as_client_event = b"raw" not in request.args
- filter_str = parse_string(request, "filter", encoding="utf-8")
- if filter_str:
- filter_json = urlparse.unquote(filter_str)
- event_filter: Optional[Filter] = Filter(
- self._hs, json_decoder.decode(filter_json)
- )
- if (
- event_filter
- and event_filter.filter_json.get("event_format", "client")
- == "federation"
- ):
- as_client_event = False
- else:
- event_filter = None
+ if (
+ event_filter
+ and event_filter.filter_json.get("event_format", "client") == "federation"
+ ):
+ as_client_event = False
msgs = await self._pagination_handler.get_messages(
room_id=room_id,
diff --git a/synapse/rest/admin/statistics.py b/synapse/rest/admin/statistics.py
index 832f20402e..dc27a41dd9 100644
--- a/synapse/rest/admin/statistics.py
+++ b/synapse/rest/admin/statistics.py
@@ -63,38 +63,12 @@ class UserMediaStatisticsRestServlet(RestServlet):
),
)
- start = parse_integer(request, "from", default=0)
- if start < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter from must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
-
- limit = parse_integer(request, "limit", default=100)
- if limit < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter limit must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
+ start = parse_integer(request, "from", default=0, negative=False)
+ limit = parse_integer(request, "limit", default=100, negative=False)
+ from_ts = parse_integer(request, "from_ts", default=0, negative=False)
+ until_ts = parse_integer(request, "until_ts", negative=False)
- from_ts = parse_integer(request, "from_ts", default=0)
- if from_ts < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter from_ts must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
-
- until_ts = parse_integer(request, "until_ts")
if until_ts is not None:
- if until_ts < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter until_ts must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
if until_ts <= from_ts:
raise SynapseError(
HTTPStatus.BAD_REQUEST,
diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py
index 4e34e46512..5bf12c4979 100644
--- a/synapse/rest/admin/users.py
+++ b/synapse/rest/admin/users.py
@@ -90,22 +90,8 @@ class UsersRestServletV2(RestServlet):
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
await assert_requester_is_admin(self.auth, request)
- start = parse_integer(request, "from", default=0)
- limit = parse_integer(request, "limit", default=100)
-
- if start < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter from must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
-
- if limit < 0:
- raise SynapseError(
- HTTPStatus.BAD_REQUEST,
- "Query parameter limit must be a string representing a positive integer.",
- errcode=Codes.INVALID_PARAM,
- )
+ start = parse_integer(request, "from", default=0, negative=False)
+ limit = parse_integer(request, "limit", default=100, negative=False)
user_id = parse_string(request, "user_id")
name = parse_string(request, "name", encoding="utf-8")
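
Note: the deleted blocks above (and the matching ones in the media and statistics servlets) are all replaced by passing `negative=False` to `parse_integer`. The keyword's implementation is not part of this diff; the following is a rough sketch of the check it is assumed to perform, with the message mirroring what the updated assertion in tests/rest/admin/test_media.py expects.

    from http import HTTPStatus

    from synapse.api.errors import Codes, SynapseError


    def reject_negative(name: str, value: int) -> int:
        # Hypothetical distillation of parse_integer(..., negative=False):
        # values below zero are rejected centrally with INVALID_PARAM, instead
        # of each servlet raising its own SynapseError as before.
        if value < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                f"Query parameter {name} must be a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )
        return value
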
diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py
index b6d9ee074a..86c9515854 100644
--- a/synapse/rest/client/keys.py
+++ b/synapse/rest/client/keys.py
@@ -409,7 +409,18 @@ class SigningKeyUploadServlet(RestServlet):
# But first-time setup is fine
elif self.hs.config.experimental.msc3967_enabled:
- # If we already have a master key then cross signing is set up and we require UIA to reset
+ # MSC3967 allows this endpoint to return 200 OK for idempotency. Resending exactly the
+ # same keys should just return 200 OK without prompting for UIA.
+ keys_are_different = await self.e2e_keys_handler.has_different_keys(
+ user_id, body
+ )
+ if not keys_are_different:
+ # FIXME: we do not fall through to upload_signing_keys_for_user because, confusingly,
+ # if we do we 500, as it appears to try to INSERT the same key twice, causing a
+ # unique key constraint violation. This sounds like a bug?
+ return 200, {}
+ # The keys are different: is cross-signing set up? If not, the keys don't exist, which is
+ # why they differ. If so, we require UIA to change them.
if is_cross_signing_setup:
await self.auth_handler.validate_user_via_ui_auth(
requester,
@@ -420,7 +431,6 @@ class SigningKeyUploadServlet(RestServlet):
can_skip_ui_auth=False,
)
# Otherwise we don't require UIA since we are setting up cross signing for first time
-
else:
# Previous behaviour is to always require UIA but allow it to be skipped
await self.auth_handler.validate_user_via_ui_auth(
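
Note: the MSC3967 branch above depends on a new `e2e_keys_handler.has_different_keys` method whose implementation is not included in this diff. The comparison semantics sketched below are inferred from the test added in tests/handlers/test_e2e_keys.py (identical re-upload -> False; changed `usage` or `keys` -> True); the real method takes a user ID and reads the stored keys from the database, so this pure-function form is illustrative only.

    from typing import Any, Mapping


    def keys_differ_sketch(
        stored_keys: Mapping[str, Any], uploaded_body: Mapping[str, Any]
    ) -> bool:
        # For each cross-signing key type present in the upload, report a
        # difference if the stored copy is missing or not equal to it.
        for key_type in ("master_key", "self_signing_key", "user_signing_key"):
            if key_type in uploaded_body:
                if stored_keys.get(key_type) != uploaded_body[key_type]:
                    return True
        return False

This is what lets the servlet return 200 OK early for a byte-identical re-upload instead of prompting for UIA a second time.
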
diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py
index 634ebed2be..5dddbc69be 100644
--- a/synapse/rest/client/register.py
+++ b/synapse/rest/client/register.py
@@ -86,12 +86,18 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
self.config = hs.config
if self.hs.config.email.can_verify_email:
- self.mailer = Mailer(
+ self.registration_mailer = Mailer(
hs=self.hs,
app_name=self.config.email.email_app_name,
template_html=self.config.email.email_registration_template_html,
template_text=self.config.email.email_registration_template_text,
)
+ self.already_in_use_mailer = Mailer(
+ hs=self.hs,
+ app_name=self.config.email.email_app_name,
+ template_html=self.config.email.email_already_in_use_template_html,
+ template_text=self.config.email.email_already_in_use_template_text,
+ )
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
if not self.hs.config.email.can_verify_email:
@@ -139,8 +145,10 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
if self.hs.config.server.request_token_inhibit_3pid_errors:
# Make the client think the operation succeeded. See the rationale in the
# comments for request_token_inhibit_3pid_errors.
+ # Still send an email to warn the user that an account already exists.
# Also wait for some random amount of time between 100ms and 1s to make it
# look like we did something.
+ await self.already_in_use_mailer.send_already_in_use_mail(email)
await self.hs.get_clock().sleep(random.randint(1, 10) / 10)
return 200, {"sid": random_string(16)}
@@ -151,7 +159,7 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
email,
client_secret,
send_attempt,
- self.mailer.send_registration_mail,
+ self.registration_mailer.send_registration_mail,
next_link,
)
diff --git a/synapse/rest/client/rendezvous.py b/synapse/rest/client/rendezvous.py
index dee7c37ec5..ed06a29987 100644
--- a/synapse/rest/client/rendezvous.py
+++ b/synapse/rest/client/rendezvous.py
@@ -2,7 +2,7 @@
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2022 The Matrix.org Foundation C.I.C.
-# Copyright (C) 2023 New Vector, Ltd
+# Copyright (C) 2023-2024 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
@@ -34,7 +34,7 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
-class RendezvousServlet(RestServlet):
+class MSC3886RendezvousServlet(RestServlet):
"""
This is a placeholder implementation of [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886)
simple client rendezvous capability that is used by the "Sign in with QR" functionality.
@@ -76,6 +76,30 @@ class RendezvousServlet(RestServlet):
# PUT, GET and DELETE are not implemented as they should be fulfilled by the redirect target.
+class MSC4108DelegationRendezvousServlet(RestServlet):
+ PATTERNS = client_patterns(
+ "/org.matrix.msc4108/rendezvous$", releases=[], v1=False, unstable=True
+ )
+
+ def __init__(self, hs: "HomeServer"):
+ super().__init__()
+ redirection_target: Optional[str] = (
+ hs.config.experimental.msc4108_delegation_endpoint
+ )
+ assert (
+ redirection_target is not None
+ ), "Servlet is only registered if there is a delegation target"
+ self.endpoint = redirection_target.encode("utf-8")
+
+ async def on_POST(self, request: SynapseRequest) -> None:
+ respond_with_redirect(
+ request, self.endpoint, statusCode=TEMPORARY_REDIRECT, cors=True
+ )
+
+
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
if hs.config.experimental.msc3886_endpoint is not None:
- RendezvousServlet(hs).register(http_server)
+ MSC3886RendezvousServlet(hs).register(http_server)
+
+ if hs.config.experimental.msc4108_delegation_endpoint is not None:
+ MSC4108DelegationRendezvousServlet(hs).register(http_server)
diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py
index 65dedb8b92..e4c7dd1a58 100644
--- a/synapse/rest/client/room.py
+++ b/synapse/rest/client/room.py
@@ -52,6 +52,7 @@ from synapse.http.servlet import (
parse_boolean,
parse_enum,
parse_integer,
+ parse_json,
parse_json_object_from_request,
parse_string,
parse_strings_from_args,
@@ -65,7 +66,6 @@ from synapse.rest.client.transactions import HttpTransactionCache
from synapse.streams.config import PaginationConfig
from synapse.types import JsonDict, Requester, StreamToken, ThirdPartyInstanceID, UserID
from synapse.types.state import StateFilter
-from synapse.util import json_decoder
from synapse.util.cancellation import cancellable
from synapse.util.stringutils import parse_and_validate_server_name, random_string
@@ -499,7 +499,7 @@ class PublicRoomListRestServlet(RestServlet):
if server:
raise e
- limit: Optional[int] = parse_integer(request, "limit", 0)
+ limit: Optional[int] = parse_integer(request, "limit", 0, negative=False)
since_token = parse_string(request, "since")
if limit == 0:
@@ -703,21 +703,16 @@ class RoomMessageListRestServlet(RestServlet):
)
# Twisted will have processed the args by now.
assert request.args is not None
+
+ filter_json = parse_json(request, "filter", encoding="utf-8")
+ event_filter = Filter(self._hs, filter_json) if filter_json else None
+
as_client_event = b"raw" not in request.args
- filter_str = parse_string(request, "filter", encoding="utf-8")
- if filter_str:
- filter_json = urlparse.unquote(filter_str)
- event_filter: Optional[Filter] = Filter(
- self._hs, json_decoder.decode(filter_json)
- )
- if (
- event_filter
- and event_filter.filter_json.get("event_format", "client")
- == "federation"
- ):
- as_client_event = False
- else:
- event_filter = None
+ if (
+ event_filter
+ and event_filter.filter_json.get("event_format", "client") == "federation"
+ ):
+ as_client_event = False
msgs = await self.pagination_handler.get_messages(
room_id=room_id,
@@ -898,14 +893,8 @@ class RoomEventContextServlet(RestServlet):
limit = parse_integer(request, "limit", default=10)
# picking the API shape for symmetry with /messages
- filter_str = parse_string(request, "filter", encoding="utf-8")
- if filter_str:
- filter_json = urlparse.unquote(filter_str)
- event_filter: Optional[Filter] = Filter(
- self._hs, json_decoder.decode(filter_json)
- )
- else:
- event_filter = None
+ filter_json = parse_json(request, "filter", encoding="utf-8")
+ event_filter = Filter(self._hs, filter_json) if filter_json else None
event_context = await self.room_context_handler.get_event_context(
requester, room_id, event_id, limit, event_filter
diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py
index c46d4fe8cf..638d4c45ae 100644
--- a/synapse/rest/client/versions.py
+++ b/synapse/rest/client/versions.py
@@ -140,6 +140,9 @@ class VersionsRestServlet(RestServlet):
"org.matrix.msc4069": self.config.experimental.msc4069_profile_inhibit_propagation,
# Allows clients to handle push for encrypted events.
"org.matrix.msc4028": self.config.experimental.msc4028_push_encrypted_events,
+ # MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code
+ "org.matrix.msc4108": self.config.experimental.msc4108_delegation_endpoint
+ is not None,
},
},
)
diff --git a/synapse/rest/media/preview_url_resource.py b/synapse/rest/media/preview_url_resource.py
index 6724986fcc..bfeff2179b 100644
--- a/synapse/rest/media/preview_url_resource.py
+++ b/synapse/rest/media/preview_url_resource.py
@@ -72,9 +72,6 @@ class PreviewUrlResource(RestServlet):
# XXX: if get_user_by_req fails, what should we do in an async render?
requester = await self.auth.get_user_by_req(request)
url = parse_string(request, "url", required=True)
- ts = parse_integer(request, "ts")
- if ts is None:
- ts = self.clock.time_msec()
-
+ ts = parse_integer(request, "ts", default=self.clock.time_msec())
og = await self.url_previewer.preview(url, requester.user, ts)
respond_with_json_bytes(request, 200, og, send_cors=True)
diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py
index 40bf000e9c..bdd0781c48 100644
--- a/synapse/storage/databases/main/event_push_actions.py
+++ b/synapse/storage/databases/main/event_push_actions.py
@@ -385,7 +385,6 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
WITH all_receipts AS (
SELECT room_id, thread_id, MAX(event_stream_ordering) AS max_receipt_stream_ordering
FROM receipts_linearized
- LEFT JOIN events USING (room_id, event_id)
WHERE
{receipt_types_clause}
AND user_id = ?
@@ -621,13 +620,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
SELECT notif_count, COALESCE(unread_count, 0), thread_id
FROM event_push_summary
LEFT JOIN (
- SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
+ SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
FROM receipts_linearized
- LEFT JOIN events USING (room_id, event_id)
WHERE
user_id = ?
AND room_id = ?
- AND stream_ordering > ?
+ AND event_stream_ordering > ?
AND {receipt_types_clause}
GROUP BY thread_id
) AS receipts USING (thread_id)
@@ -659,13 +657,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
sql = f"""
SELECT COUNT(*), thread_id FROM event_push_actions
LEFT JOIN (
- SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
+ SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
FROM receipts_linearized
- LEFT JOIN events USING (room_id, event_id)
WHERE
user_id = ?
AND room_id = ?
- AND stream_ordering > ?
+ AND event_stream_ordering > ?
AND {receipt_types_clause}
GROUP BY thread_id
) AS receipts USING (thread_id)
@@ -738,13 +735,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
thread_id
FROM event_push_actions
LEFT JOIN (
- SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
+ SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
FROM receipts_linearized
- LEFT JOIN events USING (room_id, event_id)
WHERE
user_id = ?
AND room_id = ?
- AND stream_ordering > ?
+ AND event_stream_ordering > ?
AND {receipt_types_clause}
GROUP BY thread_id
) AS receipts USING (thread_id)
@@ -910,9 +906,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
# given this function generally gets called with only one room and
# thread ID.
sql = f"""
- SELECT room_id, thread_id, MAX(stream_ordering)
+ SELECT room_id, thread_id, MAX(event_stream_ordering)
FROM receipts_linearized
- INNER JOIN events USING (room_id, event_id)
WHERE {receipt_types_clause}
AND {thread_ids_clause}
AND {room_ids_clause}
@@ -1442,9 +1437,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
)
sql = """
- SELECT r.stream_id, r.room_id, r.user_id, r.thread_id, e.stream_ordering
+ SELECT r.stream_id, r.room_id, r.user_id, r.thread_id, r.event_stream_ordering
FROM receipts_linearized AS r
- INNER JOIN events AS e USING (event_id)
WHERE ? < r.stream_id AND r.stream_id <= ? AND user_id LIKE ?
ORDER BY r.stream_id ASC
LIMIT ?
diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py
index d513c42530..13387a3839 100644
--- a/synapse/storage/databases/main/receipts.py
+++ b/synapse/storage/databases/main/receipts.py
@@ -178,14 +178,13 @@ class ReceiptsWorkerStore(SQLBaseStore):
)
sql = f"""
- SELECT event_id, stream_ordering
+ SELECT event_id, event_stream_ordering
FROM receipts_linearized
- INNER JOIN events USING (room_id, event_id)
WHERE {clause}
AND user_id = ?
AND room_id = ?
AND thread_id IS NULL
- ORDER BY stream_ordering DESC
+ ORDER BY event_stream_ordering DESC
LIMIT 1
"""
@@ -735,10 +734,13 @@ class ReceiptsWorkerStore(SQLBaseStore):
thread_clause = "r.thread_id = ?"
thread_args = (thread_id,)
+ # If the receipt doesn't have a stream ordering it is because we
+ # don't have the associated event, and so must be a remote receipt.
+ # Hence it's safe to just allow new receipts to clobber it.
sql = f"""
- SELECT stream_ordering, event_id FROM events
- INNER JOIN receipts_linearized AS r USING (event_id, room_id)
- WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ? AND {thread_clause}
+ SELECT r.event_stream_ordering, r.event_id FROM receipts_linearized AS r
+ WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ?
+ AND r.event_stream_ordering IS NOT NULL AND {thread_clause}
"""
txn.execute(
sql,
diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py
index 3d931abb06..0e6352ff4b 100644
--- a/tests/handlers/test_e2e_keys.py
+++ b/tests/handlers/test_e2e_keys.py
@@ -1101,6 +1101,56 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase):
},
)
+ def test_has_different_keys(self) -> None:
+ """Check that has_different_keys returns True when the keys provided differ from what
+ is in the database."""
+ local_user = "@boris:" + self.hs.hostname
+ keys1 = {
+ "master_key": {
+ # private key: 2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0
+ "user_id": local_user,
+ "usage": ["master"],
+ "keys": {
+ "ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk"
+ },
+ }
+ }
+ self.get_success(self.handler.upload_signing_keys_for_user(local_user, keys1))
+ is_different = self.get_success(
+ self.handler.has_different_keys(
+ local_user,
+ {
+ "master_key": keys1["master_key"],
+ },
+ )
+ )
+ self.assertEqual(is_different, False)
+ # change the usage => different keys
+ keys1["master_key"]["usage"] = ["develop"]
+ is_different = self.get_success(
+ self.handler.has_different_keys(
+ local_user,
+ {
+ "master_key": keys1["master_key"],
+ },
+ )
+ )
+ self.assertEqual(is_different, True)
+ keys1["master_key"]["usage"] = ["master"] # reset
+ # change the key => different keys
+ keys1["master_key"]["keys"] = {
+ "ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unIc0rncs": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unIc0rncs"
+ }
+ is_different = self.get_success(
+ self.handler.has_different_keys(
+ local_user,
+ {
+ "master_key": keys1["master_key"],
+ },
+ )
+ )
+ self.assertEqual(is_different, True)
+
def test_query_devices_remote_sync(self) -> None:
"""Tests that querying keys for a remote user that we share a room with,
but haven't yet fetched the keys for, returns the cross signing keys
diff --git a/tests/rest/admin/test_media.py b/tests/rest/admin/test_media.py
index 493e1d1919..f378165513 100644
--- a/tests/rest/admin/test_media.py
+++ b/tests/rest/admin/test_media.py
@@ -277,7 +277,8 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests):
self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual(Codes.MISSING_PARAM, channel.json_body["errcode"])
self.assertEqual(
- "Missing integer query parameter 'before_ts'", channel.json_body["error"]
+ "Missing required integer query parameter before_ts",
+ channel.json_body["error"],
)
def test_invalid_parameter(self) -> None:
@@ -320,7 +321,7 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests):
self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
self.assertEqual(
- "Query parameter size_gt must be a string representing a positive integer.",
+ "Query parameter size_gt must be a positive integer.",
channel.json_body["error"],
)
diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py
index 0b669b6ee7..7562747260 100644
--- a/tests/rest/admin/test_room.py
+++ b/tests/rest/admin/test_room.py
@@ -21,6 +21,7 @@
import json
import time
import urllib.parse
+from http import HTTPStatus
from typing import List, Optional
from unittest.mock import AsyncMock, Mock
@@ -2190,6 +2191,33 @@ class RoomMessagesTestCase(unittest.HomeserverTestCase):
chunk = channel.json_body["chunk"]
self.assertEqual(len(chunk), 0, [event["content"] for event in chunk])
+ def test_room_message_filter_query_validation(self) -> None:
+ # Test JSON validation of the `filter` query parameter.
+ # Does not test the validity of the filter, only the JSON validation.
+
+ # Check Get with valid json filter parameter, expect 200.
+ valid_filter_str = '{"types": ["m.room.message"]}'
+ channel = self.make_request(
+ "GET",
+ f"/_synapse/admin/v1/rooms/{self.room_id}/messages?dir=b&filter={valid_filter_str}",
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
+
+ # Check Get with invalid json filter parameter, expect 400 NOT_JSON.
+ invalid_filter_str = "}}}{}"
+ channel = self.make_request(
+ "GET",
+ f"/_synapse/admin/v1/rooms/{self.room_id}/messages?dir=b&filter={invalid_filter_str}",
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
+ self.assertEqual(
+ channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
+ )
+
class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
servlets = [
@@ -2522,6 +2550,39 @@ class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
else:
self.fail("Event %s from events_after not found" % j)
+ def test_room_event_context_filter_query_validation(self) -> None:
+ # Test JSON validation of the `filter` query parameter.
+ # Does not test the validity of the filter, only the JSON validation.
+
+ # Create a user, a room, and an event whose context we can query.
+ user_id = self.register_user("test", "test")
+ user_tok = self.login("test", "test")
+ room_id = self.helper.create_room_as(user_id, tok=user_tok)
+ event_id = self.helper.send(room_id, "message 1", tok=user_tok)["event_id"]
+
+ # Check Get with valid json filter parameter, expect 200.
+ valid_filter_str = '{"types": ["m.room.message"]}'
+ channel = self.make_request(
+ "GET",
+ f"/_synapse/admin/v1/rooms/{room_id}/context/{event_id}?filter={valid_filter_str}",
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
+
+ # Check Get with invalid json filter parameter, expect 400 NOT_JSON.
+ invalid_filter_str = "}}}{}"
+ channel = self.make_request(
+ "GET",
+ f"/_synapse/admin/v1/rooms/{room_id}/context/{event_id}?filter={invalid_filter_str}",
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
+ self.assertEqual(
+ channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
+ )
+
class MakeRoomAdminTestCase(unittest.HomeserverTestCase):
servlets = [
diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py
index 859051cdda..694f143eff 100644
--- a/tests/rest/client/test_register.py
+++ b/tests/rest/client/test_register.py
@@ -22,6 +22,7 @@
import datetime
import os
from typing import Any, Dict, List, Tuple
+from unittest.mock import AsyncMock
import pkg_resources
@@ -42,6 +43,7 @@ from synapse.types import JsonDict
from synapse.util import Clock
from tests import unittest
+from tests.server import ThreadedMemoryReactorClock
from tests.unittest import override_config
@@ -58,6 +60,13 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase):
config["allow_guest_access"] = True
return config
+ def make_homeserver(
+ self, reactor: ThreadedMemoryReactorClock, clock: Clock
+ ) -> HomeServer:
+ hs = super().make_homeserver(reactor, clock)
+ hs.get_send_email_handler()._sendmail = AsyncMock()
+ return hs
+
def test_POST_appservice_registration_valid(self) -> None:
user_id = "@as_user_kermit:test"
as_token = "i_am_an_app_service"
diff --git a/tests/rest/client/test_rendezvous.py b/tests/rest/client/test_rendezvous.py
index 294b39f179..c84704c090 100644
--- a/tests/rest/client/test_rendezvous.py
+++ b/tests/rest/client/test_rendezvous.py
@@ -27,8 +27,10 @@ from synapse.util import Clock
from tests import unittest
from tests.unittest import override_config
+from tests.utils import HAS_AUTHLIB
-endpoint = "/_matrix/client/unstable/org.matrix.msc3886/rendezvous"
+msc3886_endpoint = "/_matrix/client/unstable/org.matrix.msc3886/rendezvous"
+msc4108_endpoint = "/_matrix/client/unstable/org.matrix.msc4108/rendezvous"
class RendezvousServletTestCase(unittest.HomeserverTestCase):
@@ -41,11 +43,35 @@ class RendezvousServletTestCase(unittest.HomeserverTestCase):
return self.hs
def test_disabled(self) -> None:
- channel = self.make_request("POST", endpoint, {}, access_token=None)
+ channel = self.make_request("POST", msc3886_endpoint, {}, access_token=None)
+ self.assertEqual(channel.code, 404)
+ channel = self.make_request("POST", msc4108_endpoint, {}, access_token=None)
self.assertEqual(channel.code, 404)
@override_config({"experimental_features": {"msc3886_endpoint": "/asd"}})
- def test_redirect(self) -> None:
- channel = self.make_request("POST", endpoint, {}, access_token=None)
+ def test_msc3886_redirect(self) -> None:
+ channel = self.make_request("POST", msc3886_endpoint, {}, access_token=None)
self.assertEqual(channel.code, 307)
self.assertEqual(channel.headers.getRawHeaders("Location"), ["/asd"])
+
+ @unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
+ @override_config(
+ {
+ "disable_registration": True,
+ "experimental_features": {
+ "msc4108_delegation_endpoint": "https://asd",
+ "msc3861": {
+ "enabled": True,
+ "issuer": "https://issuer",
+ "client_id": "client_id",
+ "client_auth_method": "client_secret_post",
+ "client_secret": "client_secret",
+ "admin_token": "admin_token_value",
+ },
+ },
+ }
+ )
+ def test_msc4108_delegation(self) -> None:
+ channel = self.make_request("POST", msc4108_endpoint, {}, access_token=None)
+ self.assertEqual(channel.code, 307)
+ self.assertEqual(channel.headers.getRawHeaders("Location"), ["https://asd"])
diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py
index 1364615085..b796163dcb 100644
--- a/tests/rest/client/test_rooms.py
+++ b/tests/rest/client/test_rooms.py
@@ -2175,6 +2175,31 @@ class RoomMessageListTestCase(RoomBase):
chunk = channel.json_body["chunk"]
self.assertEqual(len(chunk), 0, [event["content"] for event in chunk])
+ def test_room_message_filter_query_validation(self) -> None:
+ # Test JSON validation of the `filter` query parameter.
+ # Does not test the validity of the filter, only the JSON validation.
+
+ # Check Get with valid json filter parameter, expect 200.
+ valid_filter_str = '{"types": ["m.room.message"]}'
+ channel = self.make_request(
+ "GET",
+ f"/rooms/{self.room_id}/messages?access_token=x&dir=b&filter={valid_filter_str}",
+ )
+
+ self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
+
+ # Check Get with invalid json filter parameter, expect 400 NOT_JSON.
+ invalid_filter_str = "}}}{}"
+ channel = self.make_request(
+ "GET",
+ f"/rooms/{self.room_id}/messages?access_token=x&dir=b&filter={invalid_filter_str}",
+ )
+
+ self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
+ self.assertEqual(
+ channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
+ )
+
class RoomMessageFilterTestCase(RoomBase):
"""Tests /rooms/$room_id/messages REST events."""
@@ -3213,6 +3238,33 @@ class ContextTestCase(unittest.HomeserverTestCase):
self.assertDictEqual(events_after[0].get("content"), {}, events_after[0])
self.assertEqual(events_after[1].get("content"), {}, events_after[1])
+ def test_room_event_context_filter_query_validation(self) -> None:
+ # Test JSON validation of the `filter` query parameter.
+ # Does not test the validity of the filter, only the JSON validation.
+ event_id = self.helper.send(self.room_id, "message 7", tok=self.tok)["event_id"]
+
+ # Check Get with valid json filter parameter, expect 200.
+ valid_filter_str = '{"types": ["m.room.message"]}'
+ channel = self.make_request(
+ "GET",
+ f"/rooms/{self.room_id}/context/{event_id}?filter={valid_filter_str}",
+ access_token=self.tok,
+ )
+ self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
+
+ # Check Get with invalid json filter parameter, expect 400 NOT_JSON.
+ invalid_filter_str = "}}}{}"
+ channel = self.make_request(
+ "GET",
+ f"/rooms/{self.room_id}/context/{event_id}?filter={invalid_filter_str}",
+ access_token=self.tok,
+ )
+
+ self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
+ self.assertEqual(
+ channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
+ )
+
class RoomAliasListTestCase(unittest.HomeserverTestCase):
servlets = [
|