diff --git a/packages/overlays/matrix-synapse/patches/0001-Bump-mypy-zope-from-1.0.9-to-1.0.11-18428.patch b/packages/overlays/matrix-synapse/patches/0001-Bump-mypy-zope-from-1.0.9-to-1.0.11-18428.patch
deleted file mode 100644
index 22757b6..0000000
--- a/packages/overlays/matrix-synapse/patches/0001-Bump-mypy-zope-from-1.0.9-to-1.0.11-18428.patch
+++ /dev/null
@@ -1,39 +0,0 @@
-From c626d54cea3a99200c162a2578550e56242e8213 Mon Sep 17 00:00:00 2001
-From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
-Date: Tue, 13 May 2025 15:12:22 +0100
-Subject: [PATCH 01/34] Bump mypy-zope from 1.0.9 to 1.0.11 (#18428)
-
----
- poetry.lock | 8 ++++----
- 1 file changed, 4 insertions(+), 4 deletions(-)
-
-diff --git a/poetry.lock b/poetry.lock
-index 1935df638a..b6e32f43b8 100644
---- a/poetry.lock
-+++ b/poetry.lock
-@@ -1511,18 +1511,18 @@ files = [
-
- [[package]]
- name = "mypy-zope"
--version = "1.0.9"
-+version = "1.0.11"
- description = "Plugin for mypy to support zope interfaces"
- optional = false
- python-versions = "*"
- groups = ["dev"]
- files = [
-- {file = "mypy_zope-1.0.9-py3-none-any.whl", hash = "sha256:6666c1556891a3cb186137519dbd7a58cb30fb72b2504798cad47b35391921ba"},
-- {file = "mypy_zope-1.0.9.tar.gz", hash = "sha256:37d6985dfb05a4c27b35cff47577fd5bad878db4893ddedf54d165f7389a1cdb"},
-+ {file = "mypy_zope-1.0.11-py3-none-any.whl", hash = "sha256:4395d716b43ab89916edf6d0b5761655b4d4a43b2692fce806bbd733829977ee"},
-+ {file = "mypy_zope-1.0.11.tar.gz", hash = "sha256:1c95e49e9dcdf070a0858f067dac55e8e4e47519fdc15dfdab9b7eee273a0e01"},
- ]
-
- [package.dependencies]
--mypy = ">=1.0.0,<1.14.0"
-+mypy = ">=1.0.0,<1.16.0"
- "zope.interface" = "*"
- "zope.schema" = "*"
-
---
-2.49.0
-
diff --git a/packages/overlays/matrix-synapse/patches/0029-Hotfix-ignore-rejected-events-in-delayed_events.patch b/packages/overlays/matrix-synapse/patches/0001-Hotfix-ignore-rejected-events-in-delayed_events.patch
index f31ce15..b4a7b1a 100644
--- a/packages/overlays/matrix-synapse/patches/0029-Hotfix-ignore-rejected-events-in-delayed_events.patch
+++ b/packages/overlays/matrix-synapse/patches/0001-Hotfix-ignore-rejected-events-in-delayed_events.patch
@@ -1,7 +1,7 @@
-From 0c61ad92eee86fb135de689db54860f6b346a9da Mon Sep 17 00:00:00 2001
+From bd7ef5a0cd885a56741a908842d21efd1a8be96e Mon Sep 17 00:00:00 2001
From: Rory& <root@rory.gay>
Date: Sun, 20 Apr 2025 00:30:29 +0200
-Subject: [PATCH 29/34] Hotfix: ignore rejected events in delayed_events
+Subject: [PATCH 01/10] Hotfix: ignore rejected events in delayed_events
---
synapse/handlers/delayed_events.py | 7 ++++++-
diff --git a/packages/overlays/matrix-synapse/patches/0030-Add-too-much-logging-to-room-summary-over-federation.patch b/packages/overlays/matrix-synapse/patches/0002-Add-too-much-logging-to-room-summary-over-federation.patch
index c6aa256..edb9c17 100644
--- a/packages/overlays/matrix-synapse/patches/0030-Add-too-much-logging-to-room-summary-over-federation.patch
+++ b/packages/overlays/matrix-synapse/patches/0002-Add-too-much-logging-to-room-summary-over-federation.patch
@@ -1,7 +1,7 @@
-From 70c7508b48f0550f59cb2d3a534da524557c166f Mon Sep 17 00:00:00 2001
+From 2ee6ab89ac2cffd375d5c6cf21eb397f27a28018 Mon Sep 17 00:00:00 2001
From: Rory& <root@rory.gay>
Date: Wed, 23 Apr 2025 17:53:52 +0200
-Subject: [PATCH 30/34] Add too much logging to room summary over federation
+Subject: [PATCH 02/10] Add too much logging to room summary over federation
Signed-off-by: Rory& <root@rory.gay>
---
diff --git a/packages/overlays/matrix-synapse/patches/0002-Bump-types-requests-from-2.32.0.20241016-to-2.32.0.2.patch b/packages/overlays/matrix-synapse/patches/0002-Bump-types-requests-from-2.32.0.20241016-to-2.32.0.2.patch
deleted file mode 100644
index 363c861..0000000
--- a/packages/overlays/matrix-synapse/patches/0002-Bump-types-requests-from-2.32.0.20241016-to-2.32.0.2.patch
+++ /dev/null
@@ -1,36 +0,0 @@
-From ba2f1be891a4dbc2fe55af968dd72a146a8c9068 Mon Sep 17 00:00:00 2001
-From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
-Date: Tue, 13 May 2025 15:12:34 +0100
-Subject: [PATCH 02/34] Bump types-requests from 2.32.0.20241016 to
- 2.32.0.20250328 (#18427)
-
----
- poetry.lock | 8 ++++----
- 1 file changed, 4 insertions(+), 4 deletions(-)
-
-diff --git a/poetry.lock b/poetry.lock
-index b6e32f43b8..7d7868d37f 100644
---- a/poetry.lock
-+++ b/poetry.lock
-@@ -3059,14 +3059,14 @@ files = [
-
- [[package]]
- name = "types-requests"
--version = "2.32.0.20241016"
-+version = "2.32.0.20250328"
- description = "Typing stubs for requests"
- optional = false
--python-versions = ">=3.8"
-+python-versions = ">=3.9"
- groups = ["dev"]
- files = [
-- {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"},
-- {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"},
-+ {file = "types_requests-2.32.0.20250328-py3-none-any.whl", hash = "sha256:72ff80f84b15eb3aa7a8e2625fffb6a93f2ad5a0c20215fc1dcfa61117bcb2a2"},
-+ {file = "types_requests-2.32.0.20250328.tar.gz", hash = "sha256:c9e67228ea103bd811c96984fac36ed2ae8da87a36a633964a21f199d60baf32"},
- ]
-
- [package.dependencies]
---
-2.49.0
-
diff --git a/packages/overlays/matrix-synapse/patches/0031-Log-entire-room-if-accessibility-check-fails.patch b/packages/overlays/matrix-synapse/patches/0003-Log-entire-room-if-accessibility-check-fails.patch
index a3dbcad..267ed45 100644
--- a/packages/overlays/matrix-synapse/patches/0031-Log-entire-room-if-accessibility-check-fails.patch
+++ b/packages/overlays/matrix-synapse/patches/0003-Log-entire-room-if-accessibility-check-fails.patch
@@ -1,7 +1,7 @@
-From 5951a67a191cb54a9b9b801ca5faf994ec106143 Mon Sep 17 00:00:00 2001
+From 3a1af2e91a7ca9a9b7de3d1bcd6bab9d16745d84 Mon Sep 17 00:00:00 2001
From: Rory& <root@rory.gay>
Date: Wed, 23 Apr 2025 18:24:57 +0200
-Subject: [PATCH 31/34] Log entire room if accessibility check fails
+Subject: [PATCH 03/10] Log entire room if accessibility check fails
Signed-off-by: Rory& <root@rory.gay>
---
diff --git a/packages/overlays/matrix-synapse/patches/0003-Remove-newline-from-final-bullet-point-of-PR-templat.patch b/packages/overlays/matrix-synapse/patches/0003-Remove-newline-from-final-bullet-point-of-PR-templat.patch
deleted file mode 100644
index 2fdf9be..0000000
--- a/packages/overlays/matrix-synapse/patches/0003-Remove-newline-from-final-bullet-point-of-PR-templat.patch
+++ /dev/null
@@ -1,34 +0,0 @@
-From 480d4faa38401f37b0b5608356ee1959aa5829c8 Mon Sep 17 00:00:00 2001
-From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
-Date: Tue, 13 May 2025 15:14:00 +0100
-Subject: [PATCH 03/34] Remove newline from final bullet point of PR template
- (#18419)
-
----
- .github/PULL_REQUEST_TEMPLATE.md | 3 +--
- changelog.d/18419.misc | 1 +
- 2 files changed, 2 insertions(+), 2 deletions(-)
- create mode 100644 changelog.d/18419.misc
-
-diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
-index 07d4f6dfce..f8e60815fa 100644
---- a/.github/PULL_REQUEST_TEMPLATE.md
-+++ b/.github/PULL_REQUEST_TEMPLATE.md
-@@ -9,5 +9,4 @@
- - End with either a period (.) or an exclamation mark (!).
- - Start with a capital letter.
- - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
--* [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct
-- (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
-+* [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
-diff --git a/changelog.d/18419.misc b/changelog.d/18419.misc
-new file mode 100644
-index 0000000000..0ff36e27b8
---- /dev/null
-+++ b/changelog.d/18419.misc
-@@ -0,0 +1 @@
-+Update the PR review template to remove an erroneous line break from the final bullet point.
-\ No newline at end of file
---
-2.49.0
-
diff --git a/packages/overlays/matrix-synapse/patches/0004-Explicitly-enable-pypy-for-cibuildwheel-18417.patch b/packages/overlays/matrix-synapse/patches/0004-Explicitly-enable-pypy-for-cibuildwheel-18417.patch
deleted file mode 100644
index a85de14..0000000
--- a/packages/overlays/matrix-synapse/patches/0004-Explicitly-enable-pypy-for-cibuildwheel-18417.patch
+++ /dev/null
@@ -1,262 +0,0 @@
-From 2db54c88ff54a5377d96088c23ac1f4dfef8faf3 Mon Sep 17 00:00:00 2001
-From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
-Date: Tue, 13 May 2025 15:19:30 +0100
-Subject: [PATCH 04/34] Explicitly enable pypy for cibuildwheel (#18417)
-
----
- changelog.d/18417.misc | 1 +
- poetry.lock | 49 +++++++++++++++++++++---------------------
- pyproject.toml | 3 +++
- 3 files changed, 28 insertions(+), 25 deletions(-)
- create mode 100644 changelog.d/18417.misc
-
-diff --git a/changelog.d/18417.misc b/changelog.d/18417.misc
-new file mode 100644
-index 0000000000..5f650a202a
---- /dev/null
-+++ b/changelog.d/18417.misc
-@@ -0,0 +1 @@
-+Explicitly enable PyPy builds in `cibuildwheel`s config to avoid it being disabled on a future upgrade to `cibuildwheel` v3.
-\ No newline at end of file
-diff --git a/poetry.lock b/poetry.lock
-index 7d7868d37f..7190d0f788 100644
---- a/poetry.lock
-+++ b/poetry.lock
-@@ -1,4 +1,4 @@
--# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
-+# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
-
- [[package]]
- name = "annotated-types"
-@@ -39,7 +39,7 @@ description = "The ultimate Python library in building OAuth and OpenID Connect
- optional = true
- python-versions = ">=3.9"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\""
-+markers = "extra == \"oidc\" or extra == \"jwt\" or extra == \"all\""
- files = [
- {file = "authlib-1.5.1-py2.py3-none-any.whl", hash = "sha256:8408861cbd9b4ea2ff759b00b6f02fd7d81ac5a56d0b2b22c08606c6049aae11"},
- {file = "authlib-1.5.1.tar.gz", hash = "sha256:5cbc85ecb0667312c1cdc2f9095680bb735883b123fb509fde1e65b1c5df972e"},
-@@ -451,7 +451,7 @@ description = "XML bomb protection for Python stdlib modules"
- optional = true
- python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"saml2\""
-+markers = "extra == \"saml2\" or extra == \"all\""
- files = [
- {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
- {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
-@@ -494,7 +494,7 @@ description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and l
- optional = true
- python-versions = ">=3.7"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"saml2\""
-+markers = "extra == \"saml2\" or extra == \"all\""
- files = [
- {file = "elementpath-4.1.5-py3-none-any.whl", hash = "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55"},
- {file = "elementpath-4.1.5.tar.gz", hash = "sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"},
-@@ -544,7 +544,7 @@ description = "Python wrapper for hiredis"
- optional = true
- python-versions = ">=3.8"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"redis\""
-+markers = "extra == \"redis\" or extra == \"all\""
- files = [
- {file = "hiredis-3.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:2892db9db21f0cf7cc298d09f85d3e1f6dc4c4c24463ab67f79bc7a006d51867"},
- {file = "hiredis-3.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:93cfa6cc25ee2ceb0be81dc61eca9995160b9e16bdb7cca4a00607d57e998918"},
-@@ -890,7 +890,7 @@ description = "Jaeger Python OpenTracing Tracer implementation"
- optional = true
- python-versions = ">=3.7"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"opentracing\""
-+markers = "extra == \"opentracing\" or extra == \"all\""
- files = [
- {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"},
- ]
-@@ -1028,7 +1028,7 @@ description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
-+markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
- files = [
- {file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"},
- {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"},
-@@ -1044,7 +1044,7 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li
- optional = true
- python-versions = ">=3.6"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"url-preview\""
-+markers = "extra == \"url-preview\" or extra == \"all\""
- files = [
- {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"},
- {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"},
-@@ -1330,7 +1330,7 @@ description = "An LDAP3 auth provider for Synapse"
- optional = true
- python-versions = ">=3.7"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
-+markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
- files = [
- {file = "matrix-synapse-ldap3-0.3.0.tar.gz", hash = "sha256:8bb6517173164d4b9cc44f49de411d8cebdb2e705d5dd1ea1f38733c4a009e1d"},
- {file = "matrix_synapse_ldap3-0.3.0-py3-none-any.whl", hash = "sha256:8b4d701f8702551e98cc1d8c20dbed532de5613584c08d0df22de376ba99159d"},
-@@ -1551,7 +1551,7 @@ description = "OpenTracing API for Python. See documentation at http://opentraci
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"opentracing\""
-+markers = "extra == \"opentracing\" or extra == \"all\""
- files = [
- {file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"},
- ]
-@@ -1720,7 +1720,7 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter"
- optional = true
- python-versions = ">=3.8"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"postgres\""
-+markers = "extra == \"postgres\" or extra == \"all\""
- files = [
- {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"},
- {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"},
-@@ -1728,7 +1728,6 @@ files = [
- {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"},
- {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"},
- {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"},
-- {file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"},
- {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"},
- {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"},
- {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"},
-@@ -1741,7 +1740,7 @@ description = ".. image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=mas
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
-+markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
- files = [
- {file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"},
- ]
-@@ -1757,7 +1756,7 @@ description = "A Simple library to enable psycopg2 compatability"
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
-+markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
- files = [
- {file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"},
- ]
-@@ -1980,7 +1979,7 @@ description = "Python extension wrapping the ICU C++ API"
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"user-search\""
-+markers = "extra == \"user-search\" or extra == \"all\""
- files = [
- {file = "PyICU-2.14.tar.gz", hash = "sha256:acc7eb92bd5c554ed577249c6978450a4feda0aa6f01470152b3a7b382a02132"},
- ]
-@@ -2029,7 +2028,7 @@ description = "A development tool to measure, monitor and analyze the memory beh
- optional = true
- python-versions = ">=3.6"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"cache-memory\""
-+markers = "extra == \"cache-memory\" or extra == \"all\""
- files = [
- {file = "Pympler-1.0.1-py3-none-any.whl", hash = "sha256:d260dda9ae781e1eab6ea15bacb84015849833ba5555f141d2d9b7b7473b307d"},
- {file = "Pympler-1.0.1.tar.gz", hash = "sha256:993f1a3599ca3f4fcd7160c7545ad06310c9e12f70174ae7ae8d4e25f6c5d3fa"},
-@@ -2089,7 +2088,7 @@ description = "Python implementation of SAML Version 2 Standard"
- optional = true
- python-versions = ">=3.9,<4.0"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"saml2\""
-+markers = "extra == \"saml2\" or extra == \"all\""
- files = [
- {file = "pysaml2-7.5.0-py3-none-any.whl", hash = "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318"},
- {file = "pysaml2-7.5.0.tar.gz", hash = "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"},
-@@ -2114,7 +2113,7 @@ description = "Extensions to the standard Python datetime module"
- optional = true
- python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"saml2\""
-+markers = "extra == \"saml2\" or extra == \"all\""
- files = [
- {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
- {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
-@@ -2142,7 +2141,7 @@ description = "World timezone definitions, modern and historical"
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"saml2\""
-+markers = "extra == \"saml2\" or extra == \"all\""
- files = [
- {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
- {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},
-@@ -2506,7 +2505,7 @@ description = "Python client for Sentry (https://sentry.io)"
- optional = true
- python-versions = ">=3.6"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"sentry\""
-+markers = "extra == \"sentry\" or extra == \"all\""
- files = [
- {file = "sentry_sdk-2.22.0-py2.py3-none-any.whl", hash = "sha256:3d791d631a6c97aad4da7074081a57073126c69487560c6f8bffcf586461de66"},
- {file = "sentry_sdk-2.22.0.tar.gz", hash = "sha256:b4bf43bb38f547c84b2eadcefbe389b36ef75f3f38253d7a74d6b928c07ae944"},
-@@ -2690,7 +2689,7 @@ description = "Tornado IOLoop Backed Concurrent Futures"
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"opentracing\""
-+markers = "extra == \"opentracing\" or extra == \"all\""
- files = [
- {file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"},
- {file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"},
-@@ -2706,7 +2705,7 @@ description = "Python bindings for the Apache Thrift RPC system"
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"opentracing\""
-+markers = "extra == \"opentracing\" or extra == \"all\""
- files = [
- {file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"},
- ]
-@@ -2768,7 +2767,7 @@ description = "Tornado is a Python web framework and asynchronous networking lib
- optional = true
- python-versions = ">=3.8"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"opentracing\""
-+markers = "extra == \"opentracing\" or extra == \"all\""
- files = [
- {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"},
- {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"},
-@@ -2902,7 +2901,7 @@ description = "non-blocking redis client for python"
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"redis\""
-+markers = "extra == \"redis\" or extra == \"all\""
- files = [
- {file = "txredisapi-1.4.11-py3-none-any.whl", hash = "sha256:ac64d7a9342b58edca13ef267d4fa7637c1aa63f8595e066801c1e8b56b22d0b"},
- {file = "txredisapi-1.4.11.tar.gz", hash = "sha256:3eb1af99aefdefb59eb877b1dd08861efad60915e30ad5bf3d5bf6c5cedcdbc6"},
-@@ -3245,7 +3244,7 @@ description = "An XML Schema validator and decoder"
- optional = true
- python-versions = ">=3.7"
- groups = ["main"]
--markers = "extra == \"all\" or extra == \"saml2\""
-+markers = "extra == \"saml2\" or extra == \"all\""
- files = [
- {file = "xmlschema-2.4.0-py3-none-any.whl", hash = "sha256:dc87be0caaa61f42649899189aab2fd8e0d567f2cf548433ba7b79278d231a4a"},
- {file = "xmlschema-2.4.0.tar.gz", hash = "sha256:d74cd0c10866ac609e1ef94a5a69b018ad16e39077bc6393408b40c6babee793"},
-diff --git a/pyproject.toml b/pyproject.toml
-index 5f80d28344..914a5804aa 100644
---- a/pyproject.toml
-+++ b/pyproject.toml
-@@ -385,6 +385,9 @@ build-backend = "poetry.core.masonry.api"
- # - PyPy on Aarch64 and musllinux on aarch64: too slow to build.
- # c.f. https://github.com/matrix-org/synapse/pull/14259
- skip = "cp36* cp37* cp38* pp37* pp38* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"
-+# Enable non-default builds.
-+# "pypy" used to be included by default up until cibuildwheel 3.
-+enable = "pypy"
-
- # We need a rust compiler.
- #
---
-2.49.0
-
diff --git a/packages/overlays/matrix-synapse/patches/0032-Log-policy-server-rejected-events.patch b/packages/overlays/matrix-synapse/patches/0004-Log-policy-server-rejected-events.patch
index 66c0250..f2893ac 100644
--- a/packages/overlays/matrix-synapse/patches/0032-Log-policy-server-rejected-events.patch
+++ b/packages/overlays/matrix-synapse/patches/0004-Log-policy-server-rejected-events.patch
@@ -1,7 +1,7 @@
-From abdbd4952722a7bf816e12d0a70192f88ec6041e Mon Sep 17 00:00:00 2001
+From 2180ad7da49e89a3a46ce569534abb3b43fe2d7e Mon Sep 17 00:00:00 2001
From: Rory& <root@rory.gay>
Date: Tue, 27 May 2025 05:21:46 +0200
-Subject: [PATCH 32/34] Log policy server rejected events
+Subject: [PATCH 04/10] Log policy server rejected events
---
synapse/handlers/room_policy.py | 7 +++++++
diff --git a/packages/overlays/matrix-synapse/patches/0005-Fix-a-couple-type-annotations-in-the-RootConfig-Conf.patch b/packages/overlays/matrix-synapse/patches/0005-Fix-a-couple-type-annotations-in-the-RootConfig-Conf.patch
deleted file mode 100644
index 5b1ec8e..0000000
--- a/packages/overlays/matrix-synapse/patches/0005-Fix-a-couple-type-annotations-in-the-RootConfig-Conf.patch
+++ /dev/null
@@ -1,360 +0,0 @@
-From 6e910e2b2c5cef393473dcc6bf957a8671a1186e Mon Sep 17 00:00:00 2001
-From: Eric Eastwood <erice@element.io>
-Date: Tue, 13 May 2025 10:22:15 -0500
-Subject: [PATCH 05/34] Fix a couple type annotations in the
- `RootConfig`/`Config` (#18409)
-
-Fix a couple type annotations in the `RootConfig`/`Config`. Discovered
-while cribbing this code for another project.
-
-It's really sucks that `mypy` type checking doesn't catch this. I assume
-this is because we also have a `synapse/config/_base.pyi` that overrides
-all of this. Still unclear to me why the `Iterable[str]` vs
-`StrSequence` issue wasn't caught as that's what `ConfigError` expects.
----
- changelog.d/18409.misc | 1 +
- synapse/config/_base.py | 6 +++---
- synapse/config/_base.pyi | 4 ++--
- synapse/config/experimental.py | 6 +++---
- synapse/config/key.py | 6 ++++--
- synapse/config/workers.py | 2 +-
- tests/config/test_api.py | 3 ++-
- tests/config/test_appservice.py | 7 ++++---
- tests/config/test_cache.py | 3 ++-
- tests/config/test_database.py | 5 ++++-
- tests/config/test_room_directory.py | 5 +++--
- tests/config/test_server.py | 10 +++++-----
- tests/events/test_auto_accept_invites.py | 5 +++--
- 13 files changed, 37 insertions(+), 26 deletions(-)
- create mode 100644 changelog.d/18409.misc
-
-diff --git a/changelog.d/18409.misc b/changelog.d/18409.misc
-new file mode 100644
-index 0000000000..bbb9bdbb1b
---- /dev/null
-+++ b/changelog.d/18409.misc
-@@ -0,0 +1 @@
-+Fix a couple type annotations in the `RootConfig`/`Config`.
-diff --git a/synapse/config/_base.py b/synapse/config/_base.py
-index 132ba26af9..d367d45fea 100644
---- a/synapse/config/_base.py
-+++ b/synapse/config/_base.py
-@@ -170,7 +170,7 @@ class Config:
-
- section: ClassVar[str]
-
-- def __init__(self, root_config: "RootConfig" = None):
-+ def __init__(self, root_config: "RootConfig"):
- self.root = root_config
-
- # Get the path to the default Synapse template directory
-@@ -445,7 +445,7 @@ class RootConfig:
- return res
-
- @classmethod
-- def invoke_all_static(cls, func_name: str, *args: Any, **kwargs: any) -> None:
-+ def invoke_all_static(cls, func_name: str, *args: Any, **kwargs: Any) -> None:
- """
- Invoke a static function on config objects this RootConfig is
- configured to use.
-@@ -1047,7 +1047,7 @@ class RoutableShardedWorkerHandlingConfig(ShardedWorkerHandlingConfig):
- return self._get_instance(key)
-
-
--def read_file(file_path: Any, config_path: Iterable[str]) -> str:
-+def read_file(file_path: Any, config_path: StrSequence) -> str:
- """Check the given file exists, and read it into a string
-
- If it does not, emit an error indicating the problem
-diff --git a/synapse/config/_base.pyi b/synapse/config/_base.pyi
-index 55b0e2cbf4..9c4ec8f713 100644
---- a/synapse/config/_base.pyi
-+++ b/synapse/config/_base.pyi
-@@ -179,7 +179,7 @@ class RootConfig:
- class Config:
- root: RootConfig
- default_template_dir: str
-- def __init__(self, root_config: Optional[RootConfig] = ...) -> None: ...
-+ def __init__(self, root_config: RootConfig = ...) -> None: ...
- @staticmethod
- def parse_size(value: Union[str, int]) -> int: ...
- @staticmethod
-@@ -212,4 +212,4 @@ class ShardedWorkerHandlingConfig:
- class RoutableShardedWorkerHandlingConfig(ShardedWorkerHandlingConfig):
- def get_instance(self, key: str) -> str: ... # noqa: F811
-
--def read_file(file_path: Any, config_path: Iterable[str]) -> str: ...
-+def read_file(file_path: Any, config_path: StrSequence) -> str: ...
-diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py
-index 0a963b121a..1226eaa58a 100644
---- a/synapse/config/experimental.py
-+++ b/synapse/config/experimental.py
-@@ -21,7 +21,7 @@
-
- import enum
- from functools import cache
--from typing import TYPE_CHECKING, Any, Iterable, Optional
-+from typing import TYPE_CHECKING, Any, Optional
-
- import attr
- import attr.validators
-@@ -29,7 +29,7 @@ import attr.validators
- from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions
- from synapse.config import ConfigError
- from synapse.config._base import Config, RootConfig, read_file
--from synapse.types import JsonDict
-+from synapse.types import JsonDict, StrSequence
-
- # Determine whether authlib is installed.
- try:
-@@ -45,7 +45,7 @@ if TYPE_CHECKING:
-
-
- @cache
--def read_secret_from_file_once(file_path: Any, config_path: Iterable[str]) -> str:
-+def read_secret_from_file_once(file_path: Any, config_path: StrSequence) -> str:
- """Returns the memoized secret read from file."""
- return read_file(file_path, config_path).strip()
-
-diff --git a/synapse/config/key.py b/synapse/config/key.py
-index 337f98dbc1..29c558448b 100644
---- a/synapse/config/key.py
-+++ b/synapse/config/key.py
-@@ -191,7 +191,7 @@ class KeyConfig(Config):
- if macaroon_secret_key:
- raise ConfigError(CONFLICTING_MACAROON_SECRET_KEY_OPTS_ERROR)
- macaroon_secret_key = read_file(
-- macaroon_secret_key_path, "macaroon_secret_key_path"
-+ macaroon_secret_key_path, ("macaroon_secret_key_path",)
- ).strip()
- if not macaroon_secret_key:
- macaroon_secret_key = self.root.registration.registration_shared_secret
-@@ -216,7 +216,9 @@ class KeyConfig(Config):
- if form_secret_path:
- if form_secret:
- raise ConfigError(CONFLICTING_FORM_SECRET_OPTS_ERROR)
-- self.form_secret = read_file(form_secret_path, "form_secret_path").strip()
-+ self.form_secret = read_file(
-+ form_secret_path, ("form_secret_path",)
-+ ).strip()
- else:
- self.form_secret = form_secret
-
-diff --git a/synapse/config/workers.py b/synapse/config/workers.py
-index 5af50ee952..2dfeb47c2e 100644
---- a/synapse/config/workers.py
-+++ b/synapse/config/workers.py
-@@ -263,7 +263,7 @@ class WorkerConfig(Config):
- if worker_replication_secret:
- raise ConfigError(CONFLICTING_WORKER_REPLICATION_SECRET_OPTS_ERROR)
- self.worker_replication_secret = read_file(
-- worker_replication_secret_path, "worker_replication_secret_path"
-+ worker_replication_secret_path, ("worker_replication_secret_path",)
- ).strip()
- else:
- self.worker_replication_secret = worker_replication_secret
-diff --git a/tests/config/test_api.py b/tests/config/test_api.py
-index 6773c9a277..e6cc3e21ed 100644
---- a/tests/config/test_api.py
-+++ b/tests/config/test_api.py
-@@ -3,6 +3,7 @@ from unittest import TestCase as StdlibTestCase
- import yaml
-
- from synapse.config import ConfigError
-+from synapse.config._base import RootConfig
- from synapse.config.api import ApiConfig
- from synapse.types.state import StateFilter
-
-@@ -19,7 +20,7 @@ DEFAULT_PREJOIN_STATE_PAIRS = {
-
- class TestRoomPrejoinState(StdlibTestCase):
- def read_config(self, source: str) -> ApiConfig:
-- config = ApiConfig()
-+ config = ApiConfig(RootConfig())
- config.read_config(yaml.safe_load(source))
- return config
-
-diff --git a/tests/config/test_appservice.py b/tests/config/test_appservice.py
-index e3021b59d8..2572681224 100644
---- a/tests/config/test_appservice.py
-+++ b/tests/config/test_appservice.py
-@@ -19,6 +19,7 @@
- #
- #
-
-+from synapse.config._base import RootConfig
- from synapse.config.appservice import AppServiceConfig, ConfigError
-
- from tests.unittest import TestCase
-@@ -36,12 +37,12 @@ class AppServiceConfigTest(TestCase):
- ["foo", "bar", False],
- ]:
- with self.assertRaises(ConfigError):
-- AppServiceConfig().read_config(
-+ AppServiceConfig(RootConfig()).read_config(
- {"app_service_config_files": invalid_value}
- )
-
- def test_valid_app_service_config_files(self) -> None:
-- AppServiceConfig().read_config({"app_service_config_files": []})
-- AppServiceConfig().read_config(
-+ AppServiceConfig(RootConfig()).read_config({"app_service_config_files": []})
-+ AppServiceConfig(RootConfig()).read_config(
- {"app_service_config_files": ["/not/a/real/path", "/not/a/real/path/2"]}
- )
-diff --git a/tests/config/test_cache.py b/tests/config/test_cache.py
-index 631263b5ca..aead73e059 100644
---- a/tests/config/test_cache.py
-+++ b/tests/config/test_cache.py
-@@ -19,6 +19,7 @@
- #
- #
-
-+from synapse.config._base import RootConfig
- from synapse.config.cache import CacheConfig, add_resizable_cache
- from synapse.types import JsonDict
- from synapse.util.caches.lrucache import LruCache
-@@ -29,7 +30,7 @@ from tests.unittest import TestCase
- class CacheConfigTests(TestCase):
- def setUp(self) -> None:
- # Reset caches before each test since there's global state involved.
-- self.config = CacheConfig()
-+ self.config = CacheConfig(RootConfig())
- self.config.reset()
-
- def tearDown(self) -> None:
-diff --git a/tests/config/test_database.py b/tests/config/test_database.py
-index b46519f84a..3fa5fff2b2 100644
---- a/tests/config/test_database.py
-+++ b/tests/config/test_database.py
-@@ -20,6 +20,7 @@
-
- import yaml
-
-+from synapse.config._base import RootConfig
- from synapse.config.database import DatabaseConfig
-
- from tests import unittest
-@@ -28,7 +29,9 @@ from tests import unittest
- class DatabaseConfigTestCase(unittest.TestCase):
- def test_database_configured_correctly(self) -> None:
- conf = yaml.safe_load(
-- DatabaseConfig().generate_config_section(data_dir_path="/data_dir_path")
-+ DatabaseConfig(RootConfig()).generate_config_section(
-+ data_dir_path="/data_dir_path"
-+ )
- )
-
- expected_database_conf = {
-diff --git a/tests/config/test_room_directory.py b/tests/config/test_room_directory.py
-index e25f7787f4..5208381279 100644
---- a/tests/config/test_room_directory.py
-+++ b/tests/config/test_room_directory.py
-@@ -24,6 +24,7 @@ from twisted.test.proto_helpers import MemoryReactor
- import synapse.rest.admin
- import synapse.rest.client.login
- import synapse.rest.client.room
-+from synapse.config._base import RootConfig
- from synapse.config.room_directory import RoomDirectoryConfig
- from synapse.server import HomeServer
- from synapse.util import Clock
-@@ -63,7 +64,7 @@ class RoomDirectoryConfigTestCase(unittest.HomeserverTestCase):
- """
- )
-
-- rd_config = RoomDirectoryConfig()
-+ rd_config = RoomDirectoryConfig(RootConfig())
- rd_config.read_config(config)
-
- self.assertFalse(
-@@ -123,7 +124,7 @@ class RoomDirectoryConfigTestCase(unittest.HomeserverTestCase):
- """
- )
-
-- rd_config = RoomDirectoryConfig()
-+ rd_config = RoomDirectoryConfig(RootConfig())
- rd_config.read_config(config)
-
- self.assertFalse(
-diff --git a/tests/config/test_server.py b/tests/config/test_server.py
-index 74073cfdc5..5eb2540439 100644
---- a/tests/config/test_server.py
-+++ b/tests/config/test_server.py
-@@ -20,7 +20,7 @@
-
- import yaml
-
--from synapse.config._base import ConfigError
-+from synapse.config._base import ConfigError, RootConfig
- from synapse.config.server import ServerConfig, generate_ip_set, is_threepid_reserved
-
- from tests import unittest
-@@ -40,7 +40,7 @@ class ServerConfigTestCase(unittest.TestCase):
-
- def test_unsecure_listener_no_listeners_open_private_ports_false(self) -> None:
- conf = yaml.safe_load(
-- ServerConfig().generate_config_section(
-+ ServerConfig(RootConfig()).generate_config_section(
- "CONFDIR", "/data_dir_path", "che.org", False, None
- )
- )
-@@ -60,7 +60,7 @@ class ServerConfigTestCase(unittest.TestCase):
-
- def test_unsecure_listener_no_listeners_open_private_ports_true(self) -> None:
- conf = yaml.safe_load(
-- ServerConfig().generate_config_section(
-+ ServerConfig(RootConfig()).generate_config_section(
- "CONFDIR", "/data_dir_path", "che.org", True, None
- )
- )
-@@ -94,7 +94,7 @@ class ServerConfigTestCase(unittest.TestCase):
- ]
-
- conf = yaml.safe_load(
-- ServerConfig().generate_config_section(
-+ ServerConfig(RootConfig()).generate_config_section(
- "CONFDIR", "/data_dir_path", "this.one.listens", True, listeners
- )
- )
-@@ -128,7 +128,7 @@ class ServerConfigTestCase(unittest.TestCase):
- expected_listeners[1]["bind_addresses"] = ["::1", "127.0.0.1"]
-
- conf = yaml.safe_load(
-- ServerConfig().generate_config_section(
-+ ServerConfig(RootConfig()).generate_config_section(
- "CONFDIR", "/data_dir_path", "this.one.listens", True, listeners
- )
- )
-diff --git a/tests/events/test_auto_accept_invites.py b/tests/events/test_auto_accept_invites.py
-index d4e87b1b7f..d2100e9903 100644
---- a/tests/events/test_auto_accept_invites.py
-+++ b/tests/events/test_auto_accept_invites.py
-@@ -31,6 +31,7 @@ from twisted.test.proto_helpers import MemoryReactor
-
- from synapse.api.constants import EventTypes
- from synapse.api.errors import SynapseError
-+from synapse.config._base import RootConfig
- from synapse.config.auto_accept_invites import AutoAcceptInvitesConfig
- from synapse.events.auto_accept_invites import InviteAutoAccepter
- from synapse.federation.federation_base import event_from_pdu_json
-@@ -690,7 +691,7 @@ class InviteAutoAccepterInternalTestCase(TestCase):
- "only_from_local_users": True,
- }
- }
-- parsed_config = AutoAcceptInvitesConfig()
-+ parsed_config = AutoAcceptInvitesConfig(RootConfig())
- parsed_config.read_config(config)
-
- self.assertTrue(parsed_config.enabled)
-@@ -830,7 +831,7 @@ def create_module(
- if config_override is None:
- config_override = {}
-
-- config = AutoAcceptInvitesConfig()
-+ config = AutoAcceptInvitesConfig(RootConfig())
- config.read_config(config_override)
-
- return InviteAutoAccepter(config, module_api)
---
-2.49.0
-
diff --git a/packages/overlays/matrix-synapse/patches/0033-Use-parse_boolean-for-unredacted-content.patch b/packages/overlays/matrix-synapse/patches/0005-Use-parse_boolean-for-unredacted-content.patch
index 93567d4..57efd58 100644
--- a/packages/overlays/matrix-synapse/patches/0033-Use-parse_boolean-for-unredacted-content.patch
+++ b/packages/overlays/matrix-synapse/patches/0005-Use-parse_boolean-for-unredacted-content.patch
@@ -1,7 +1,7 @@
-From 22d37c310c09dcc65f118cc3d39bda0a65507759 Mon Sep 17 00:00:00 2001
+From 7dcc4610727538d7fa0e29765c0a9755b3043cf6 Mon Sep 17 00:00:00 2001
From: Rory& <root@rory.gay>
Date: Tue, 27 May 2025 06:14:26 +0200
-Subject: [PATCH 33/34] Use parse_boolean for unredacted content
+Subject: [PATCH 05/10] Use parse_boolean for unredacted content
---
synapse/rest/client/room.py | 5 ++---
diff --git a/packages/overlays/matrix-synapse/patches/0006-Explain-why-we-flush_buffer-for-Python-print-.-outpu.patch b/packages/overlays/matrix-synapse/patches/0006-Explain-why-we-flush_buffer-for-Python-print-.-outpu.patch
deleted file mode 100644
index 39fc2fe..0000000
--- a/packages/overlays/matrix-synapse/patches/0006-Explain-why-we-flush_buffer-for-Python-print-.-outpu.patch
+++ /dev/null
@@ -1,75 +0,0 @@
-From a3bbd7eeabee7c6b229e95e0e04af5b430ea32db Mon Sep 17 00:00:00 2001
-From: Eric Eastwood <erice@element.io>
-Date: Tue, 13 May 2025 10:40:49 -0500
-Subject: [PATCH 06/34] Explain why we `flush_buffer()` for Python `print(...)`
- output (#18420)
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-Spawning from using this code elsewhere and not knowing why it's there.
-
-Based on this article and @reivilibre's experience mentioning
-`PYTHONUNBUFFERED=1`,
-
-> #### programming languages where the default “print” statement buffers
->
-> Also, here are a few programming language where the default print
-statement will buffer output when writing to a pipe, and some ways to
-disable buffering if you want:
->
-> - Python (disable with `python -u`, or `PYTHONUNBUFFERED=1`, or
-`sys.stdout.reconfigure(line_buffering=False)`, or `print(x,
-flush=True)`)
->
-> _--
-https://jvns.ca/blog/2024/11/29/why-pipes-get-stuck-buffering/#programming-languages-where-the-default-print-statement-buffers_
----
- changelog.d/18420.misc | 1 +
- docker/configure_workers_and_start.py | 5 +++++
- docker/start.py | 5 +++++
- 3 files changed, 11 insertions(+)
- create mode 100644 changelog.d/18420.misc
-
-diff --git a/changelog.d/18420.misc b/changelog.d/18420.misc
-new file mode 100644
-index 0000000000..d52175af91
---- /dev/null
-+++ b/changelog.d/18420.misc
-@@ -0,0 +1 @@
-+Explain why we `flush_buffer()` for Python `print(...)` output.
-diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py
-index df34d51f77..102a88fad1 100755
---- a/docker/configure_workers_and_start.py
-+++ b/docker/configure_workers_and_start.py
-@@ -352,6 +352,11 @@ def error(txt: str) -> NoReturn:
-
-
- def flush_buffers() -> None:
-+ """
-+ Python's `print()` buffers output by default, typically waiting until ~8KB
-+ accumulates. This method can be used to flush the buffers so we can see the output
-+ of any print statements so far.
-+ """
- sys.stdout.flush()
- sys.stderr.flush()
-
-diff --git a/docker/start.py b/docker/start.py
-index 818a5355ca..0be9976a0c 100755
---- a/docker/start.py
-+++ b/docker/start.py
-@@ -22,6 +22,11 @@ def error(txt: str) -> NoReturn:
-
-
- def flush_buffers() -> None:
-+ """
-+ Python's `print()` buffers output by default, typically waiting until ~8KB
-+ accumulates. This method can be used to flush the buffers so we can see the output
-+ of any print statements so far.
-+ """
- sys.stdout.flush()
- sys.stderr.flush()
-
---
-2.49.0
-
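
The removed patch above is upstream's explanation of why the Docker entrypoints call `flush_buffers()`. As a standalone illustration of the behaviour it documents (plain Python, nothing Synapse-specific assumed):

```python
import sys

# When stdout is a pipe rather than a TTY (as with `docker logs`), Python
# block-buffers print() output, typically flushing only every ~8KB, so
# messages can lag far behind the code that printed them.
print("starting worker configuration...")  # may sit in the buffer


def flush_buffers() -> None:
    """Flush stdout/stderr so everything printed so far becomes visible."""
    sys.stdout.flush()
    sys.stderr.flush()


flush_buffers()

# Equivalent fixes: print(..., flush=True), running `python -u`, or
# setting PYTHONUNBUFFERED=1 in the environment.
```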
diff --git a/packages/overlays/matrix-synapse/patches/0006-Expose-tombstone-in-room-admin-api.patch b/packages/overlays/matrix-synapse/patches/0006-Expose-tombstone-in-room-admin-api.patch
new file mode 100644
index 0000000..904cff5
--- /dev/null
+++ b/packages/overlays/matrix-synapse/patches/0006-Expose-tombstone-in-room-admin-api.patch
@@ -0,0 +1,114 @@
+From e2ccfffacfe7b889c1529bbf1b984ad2015d0509 Mon Sep 17 00:00:00 2001
+From: Rory& <root@rory.gay>
+Date: Tue, 27 May 2025 06:37:52 +0200
+Subject: [PATCH 06/10] Expose tombstone in room admin api
+
+---
+ synapse/rest/admin/rooms.py | 5 ++++
+ synapse/rest/client/room.py | 1 -
+ synapse/storage/databases/main/room.py | 36 +++++++++++++++++++++++++-
+ 3 files changed, 40 insertions(+), 2 deletions(-)
+
+diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py
+index f8c5bf18d4..adac1f0362 100644
+--- a/synapse/rest/admin/rooms.py
++++ b/synapse/rest/admin/rooms.py
+@@ -251,6 +251,10 @@ class ListRoomRestServlet(RestServlet):
+ direction = parse_enum(request, "dir", Direction, default=Direction.FORWARDS)
+ reverse_order = True if direction == Direction.BACKWARDS else False
+
++ emma_include_tombstone = parse_boolean(
++ request, "emma_include_tombstone", default=False
++ )
++
+ # Return list of rooms according to parameters
+ rooms, total_rooms = await self.store.get_rooms_paginate(
+ start,
+@@ -260,6 +264,7 @@ class ListRoomRestServlet(RestServlet):
+ search_term,
+ public_rooms,
+ empty_rooms,
++ emma_include_tombstone=emma_include_tombstone,
+ )
+
+ response = {
+diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py
+index 725b2162fd..8408c687cc 100644
+--- a/synapse/rest/client/room.py
++++ b/synapse/rest/client/room.py
+@@ -898,7 +898,6 @@ class RoomEventServlet(RestServlet):
+ request,
+ "fi.mau.msc2815.include_unredacted_content"
+ )
+- == "true"
+ )
+ if include_unredacted_content and not await self.auth.is_server_admin(
+ requester
+diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py
+index 56217fccdf..9ecad17567 100644
+--- a/synapse/storage/databases/main/room.py
++++ b/synapse/storage/databases/main/room.py
+@@ -608,6 +608,7 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
+ search_term: Optional[str],
+ public_rooms: Optional[bool],
+ empty_rooms: Optional[bool],
++ emma_include_tombstone: bool = False,
+ ) -> Tuple[List[Dict[str, Any]], int]:
+ """Function to retrieve a paginated list of rooms as json.
+
+@@ -627,6 +628,7 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
+ If true, empty rooms are queried.
+ if false, empty rooms are excluded from the query. When it is
+ none (the default), both empty rooms and none-empty rooms are queried.
++ emma_include_tombstone: If true, include tombstone events in the results.
+ Returns:
+ A list of room dicts and an integer representing the total number of
+ rooms that exist given this query
+@@ -795,11 +797,43 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
+ room_count = cast(Tuple[int], txn.fetchone())
+ return rooms, room_count[0]
+
+- return await self.db_pool.runInteraction(
++ result = await self.db_pool.runInteraction(
+ "get_rooms_paginate",
+ _get_rooms_paginate_txn,
+ )
+
++ if emma_include_tombstone:
++ room_id_sql, room_id_args = make_in_list_sql_clause(
++ self.database_engine, "cse.room_id", [r["room_id"] for r in result[0]]
++ )
++
++ tombstone_sql = """
++ SELECT cse.room_id, cse.event_id, ej.json
++ FROM current_state_events cse
++ JOIN event_json ej USING (event_id)
++ WHERE cse.type = 'm.room.tombstone'
++ AND {room_id_sql}
++ """.format(
++ room_id_sql=room_id_sql
++ )
++
++ def _get_tombstones_txn(
++ txn: LoggingTransaction,
++ ) -> Tuple[List[Dict[str, Any]], int]:
++ txn.execute(tombstone_sql, room_id_args)
++ for room_id, event_id, json in txn:
++ for result_room in result[0]:
++ if result_room["room_id"] == room_id:
++ result_room["gay.rory.synapse_admin_extensions.tombstone"] = db_to_json(json)
++ break
++ return result[0], result[1]
++
++ result = await self.db_pool.runInteraction(
++ "get_rooms_tombstones", _get_tombstones_txn,
++ )
++
++ return result
++
+ @cached(max_entries=10000)
+ async def get_ratelimit_for_user(self, user_id: str) -> Optional[RatelimitOverride]:
+ """Check if there are any overrides for ratelimiting for the given user
+--
+2.49.0
+
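
The patch above threads a new `emma_include_tombstone` query parameter through the room list admin API and attaches each room's `m.room.tombstone` event under a `gay.rory.synapse_admin_extensions.tombstone` key. A usage sketch, assuming the standard `GET /_synapse/admin/v1/rooms` endpoint that `ListRoomRestServlet` serves and a valid admin access token; `requests` is used purely for illustration:

```python
import requests

BASE_URL = "https://synapse.example.com"  # hypothetical homeserver
ADMIN_TOKEN = "syt_..."                   # hypothetical admin access token

# Ask the List Room admin API to also return each room's tombstone event,
# as enabled by the patch above.
resp = requests.get(
    f"{BASE_URL}/_synapse/admin/v1/rooms",
    params={"emma_include_tombstone": "true"},
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    timeout=10,
)
resp.raise_for_status()

for room in resp.json()["rooms"]:
    tombstone = room.get("gay.rory.synapse_admin_extensions.tombstone")
    if tombstone is not None:
        # An m.room.tombstone event's content points at the successor room.
        print(room["room_id"], "->", tombstone["content"].get("replacement_room"))
```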
diff --git a/packages/overlays/matrix-synapse/patches/0007-Fix-room_list_publication_rules-docs-for-v1.126.0-18.patch b/packages/overlays/matrix-synapse/patches/0007-Fix-room_list_publication_rules-docs-for-v1.126.0-18.patch
deleted file mode 100644
index 3e21694..0000000
--- a/packages/overlays/matrix-synapse/patches/0007-Fix-room_list_publication_rules-docs-for-v1.126.0-18.patch
+++ /dev/null
@@ -1,64 +0,0 @@
-From 194b923a6e625af6ca90bbbdc1f8a85a9215797e Mon Sep 17 00:00:00 2001
-From: Kim Brose <2803622+HarHarLinks@users.noreply.github.com>
-Date: Wed, 14 May 2025 10:36:54 +0000
-Subject: [PATCH 07/34] Fix room_list_publication_rules docs for v1.126.0
- (#18286)
-
-Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
----
- changelog.d/18286.doc | 1 +
- docs/usage/configuration/config_documentation.md | 15 ++++++++-------
- 2 files changed, 9 insertions(+), 7 deletions(-)
- create mode 100644 changelog.d/18286.doc
-
-diff --git a/changelog.d/18286.doc b/changelog.d/18286.doc
-new file mode 100644
-index 0000000000..37728351c5
---- /dev/null
-+++ b/changelog.d/18286.doc
-@@ -0,0 +1 @@
-+Update `room_list_publication_rules` docs to consider defaults that changed in v1.126.0. Contributed by @HarHarLinks.
-diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
-index 5351bef83a..2228c18a6c 100644
---- a/docs/usage/configuration/config_documentation.md
-+++ b/docs/usage/configuration/config_documentation.md
-@@ -4331,28 +4331,29 @@ room list by default_
- Example configuration:
-
- ```yaml
--# No rule list specified. Anyone may publish any room to the public list.
-+# No rule list specified. No one may publish any room to the public list, except server admins.
- # This is the default behaviour.
- room_list_publication_rules:
- ```
-
- ```yaml
--# A list of one rule which allows everything.
-+# A list of one rule which denies everything.
- # This has the same effect as the previous example.
- room_list_publication_rules:
-- - "action": "allow"
-+ - "action": "deny"
- ```
-
- ```yaml
--# An empty list of rules. No-one may publish to the room list.
-+# An empty list of rules.
-+# This has the same effect as the previous example.
- room_list_publication_rules: []
- ```
-
- ```yaml
--# A list of one rule which denies everything.
--# This has the same effect as the previous example.
-+# A list of one rule which allows everything.
-+# This was the default behaviour pre v1.126.0.
- room_list_publication_rules:
-- - "action": "deny"
-+ - "action": "allow"
- ```
-
- ```yaml
---
-2.49.0
-
diff --git a/packages/overlays/matrix-synapse/patches/0007-fix-Always-recheck-messages-pagination-data-if-a-bac.patch b/packages/overlays/matrix-synapse/patches/0007-fix-Always-recheck-messages-pagination-data-if-a-bac.patch
new file mode 100644
index 0000000..6943969
--- /dev/null
+++ b/packages/overlays/matrix-synapse/patches/0007-fix-Always-recheck-messages-pagination-data-if-a-bac.patch
@@ -0,0 +1,204 @@
+From 9e7ab16a878bef50a777833f8401a14e6f07effd Mon Sep 17 00:00:00 2001
+From: Jason Little <j.little@famedly.com>
+Date: Wed, 30 Apr 2025 09:29:42 -0500
+Subject: [PATCH 07/10] fix: Always recheck `/messages` pagination data if a
+ backfill might have been needed (#28)
+
+---
+ synapse/handlers/federation.py | 35 +++++++++++++--------------------
+ synapse/handlers/pagination.py | 36 +++++++++++++++++++---------------
+ 2 files changed, 34 insertions(+), 37 deletions(-)
+
+diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
+index b1640e3246..926be5e415 100644
+--- a/synapse/handlers/federation.py
++++ b/synapse/handlers/federation.py
+@@ -210,7 +210,7 @@ class FederationHandler:
+ @tag_args
+ async def maybe_backfill(
+ self, room_id: str, current_depth: int, limit: int, record_time: bool = True
+- ) -> bool:
++ ) -> None:
+ """Checks the database to see if we should backfill before paginating,
+ and if so do.
+
+@@ -224,8 +224,6 @@ class FederationHandler:
+ should back paginate.
+ record_time: Whether to record the time it takes to backfill.
+
+- Returns:
+- True if we actually tried to backfill something, otherwise False.
+ """
+ # Starting the processing time here so we can include the room backfill
+ # linearizer lock queue in the timing
+@@ -251,7 +249,7 @@ class FederationHandler:
+ limit: int,
+ *,
+ processing_start_time: Optional[int],
+- ) -> bool:
++ ) -> None:
+ """
+ Checks whether the `current_depth` is at or approaching any backfill
+ points in the room and if so, will backfill. We only care about
+@@ -325,7 +323,7 @@ class FederationHandler:
+ limit=1,
+ )
+ if not have_later_backfill_points:
+- return False
++ return None
+
+ logger.debug(
+ "_maybe_backfill_inner: all backfill points are *after* current depth. Trying again with later backfill points."
+@@ -345,15 +343,15 @@ class FederationHandler:
+ )
+ # We return `False` because we're backfilling in the background and there is
+ # no new events immediately for the caller to know about yet.
+- return False
++ return None
+
+ # Even after recursing with `MAX_DEPTH`, we didn't find any
+ # backward extremities to backfill from.
+ if not sorted_backfill_points:
+ logger.debug(
+- "_maybe_backfill_inner: Not backfilling as no backward extremeties found."
++ "_maybe_backfill_inner: Not backfilling as no backward extremities found."
+ )
+- return False
++ return None
+
+ # If we're approaching an extremity we trigger a backfill, otherwise we
+ # no-op.
+@@ -372,7 +370,7 @@ class FederationHandler:
+ current_depth,
+ limit,
+ )
+- return False
++ return None
+
+ # For performance's sake, we only want to paginate from a particular extremity
+ # if we can actually see the events we'll get. Otherwise, we'd just spend a lot
+@@ -440,7 +438,7 @@ class FederationHandler:
+ logger.debug(
+ "_maybe_backfill_inner: found no extremities which would be visible"
+ )
+- return False
++ return None
+
+ logger.debug(
+ "_maybe_backfill_inner: extremities_to_request %s", extremities_to_request
+@@ -463,7 +461,7 @@ class FederationHandler:
+ )
+ )
+
+- async def try_backfill(domains: StrCollection) -> bool:
++ async def try_backfill(domains: StrCollection) -> None:
+ # TODO: Should we try multiple of these at a time?
+
+ # Number of contacted remote homeservers that have denied our backfill
+@@ -486,7 +484,7 @@ class FederationHandler:
+ # If this succeeded then we probably already have the
+ # appropriate stuff.
+ # TODO: We can probably do something more intelligent here.
+- return True
++ return None
+ except NotRetryingDestination as e:
+ logger.info("_maybe_backfill_inner: %s", e)
+ continue
+@@ -510,7 +508,7 @@ class FederationHandler:
+ )
+ denied_count += 1
+ if denied_count >= max_denied_count:
+- return False
++ return None
+ continue
+
+ logger.info("Failed to backfill from %s because %s", dom, e)
+@@ -526,7 +524,7 @@ class FederationHandler:
+ )
+ denied_count += 1
+ if denied_count >= max_denied_count:
+- return False
++ return None
+ continue
+
+ logger.info("Failed to backfill from %s because %s", dom, e)
+@@ -538,7 +536,7 @@ class FederationHandler:
+ logger.exception("Failed to backfill from %s because %s", dom, e)
+ continue
+
+- return False
++ return None
+
+ # If we have the `processing_start_time`, then we can make an
+ # observation. We wouldn't have the `processing_start_time` in the case
+@@ -550,14 +548,9 @@ class FederationHandler:
+ (processing_end_time - processing_start_time) / 1000
+ )
+
+- success = await try_backfill(likely_domains)
+- if success:
+- return True
+-
+ # TODO: we could also try servers which were previously in the room, but
+ # are no longer.
+-
+- return False
++ return await try_backfill(likely_domains)
+
+ async def send_invite(self, target_host: str, event: EventBase) -> EventBase:
+ """Sends the invite to the remote server for signing.
+diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
+index 4070b74b7a..81cda38549 100644
+--- a/synapse/handlers/pagination.py
++++ b/synapse/handlers/pagination.py
+@@ -577,27 +577,31 @@ class PaginationHandler:
+ or missing_too_many_events
+ or not_enough_events_to_fill_response
+ ):
+- did_backfill = await self.hs.get_federation_handler().maybe_backfill(
++ # Historical note: there used to be a check here for whether the
++ # backfill was successful or not.
++ await self.hs.get_federation_handler().maybe_backfill(
+ room_id,
+ curr_topo,
+ limit=pagin_config.limit,
+ )
+
+- # If we did backfill something, refetch the events from the database to
+- # catch anything new that might have been added since we last fetched.
+- if did_backfill:
+- (
+- events,
+- next_key,
+- _,
+- ) = await self.store.paginate_room_events_by_topological_ordering(
+- room_id=room_id,
+- from_key=from_token.room_key,
+- to_key=to_room_key,
+- direction=pagin_config.direction,
+- limit=pagin_config.limit,
+- event_filter=event_filter,
+- )
++ # Regardless of whether we backfilled or not, another worker or even a
++ # simultaneous request may have backfilled for us while we were held
++ # behind the linearizer. This should not cause too much additional
++ # database load, as it will only be triggered if a backfill *might*
++ # have been needed.
++ (
++ events,
++ next_key,
++ _,
++ ) = await self.store.paginate_room_events_by_topological_ordering(
++ room_id=room_id,
++ from_key=from_token.room_key,
++ to_key=to_room_key,
++ direction=pagin_config.direction,
++ limit=pagin_config.limit,
++ event_filter=event_filter,
++ )
+ else:
+ # Otherwise, we can backfill in the background for eventual
+ # consistency's sake but we don't need to block the client waiting
+--
+2.49.0
+
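
The core behavioural change in the patch above: `maybe_backfill` no longer reports success, and `/messages` re-reads from the database whenever a backfill *might* have been needed, since another worker or a simultaneous request may have backfilled while this one waited on the linearizer. A minimal sketch of that pattern, with hypothetical `fetch_events`/`maybe_backfill` stand-ins for the Synapse internals:

```python
from typing import Awaitable, Callable, List


async def paginate(
    room_id: str,
    limit: int,
    fetch_events: Callable[[str, int], Awaitable[List[dict]]],
    maybe_backfill: Callable[[str, int], Awaitable[None]],
) -> List[dict]:
    events = await fetch_events(room_id, limit)
    if len(events) < limit:
        # A backfill *might* be needed; kick it off, but don't trust any
        # success flag: a concurrent request may have backfilled for us
        # in the meantime.
        await maybe_backfill(room_id, limit)
        # Unconditionally re-read so we pick up whatever was backfilled,
        # by us or by anyone else.
        events = await fetch_events(room_id, limit)
    return events
```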
diff --git a/packages/overlays/matrix-synapse/patches/0008-Add-option-to-allow-registrations-that-begin-with-_-.patch b/packages/overlays/matrix-synapse/patches/0008-Add-option-to-allow-registrations-that-begin-with-_-.patch
deleted file mode 100644
index fa37022..0000000
--- a/packages/overlays/matrix-synapse/patches/0008-Add-option-to-allow-registrations-that-begin-with-_-.patch
+++ /dev/null
@@ -1,116 +0,0 @@
-From 44ae5362fd952dbb209f4b52ee9c96641163f032 Mon Sep 17 00:00:00 2001
-From: _ <x5f@fastmail.com>
-Date: Thu, 15 May 2025 04:31:52 -0700
-Subject: [PATCH 08/34] Add option to allow registrations that begin with '_'
- (#18262)
-
-Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
----
- changelog.d/18262.feature | 1 +
- .../configuration/config_documentation.md | 14 +++++++++++
- synapse/config/registration.py | 4 ++++
- synapse/handlers/register.py | 5 +++-
- tests/handlers/test_register.py | 23 +++++++++++++++++++
- 5 files changed, 46 insertions(+), 1 deletion(-)
- create mode 100644 changelog.d/18262.feature
-
-diff --git a/changelog.d/18262.feature b/changelog.d/18262.feature
-new file mode 100644
-index 0000000000..c8249faa76
---- /dev/null
-+++ b/changelog.d/18262.feature
-@@ -0,0 +1 @@
-+Add option to allow registrations that begin with `_`. Contributed by `_` (@hex5f).
-diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
-index 2228c18a6c..e688bc5cd8 100644
---- a/docs/usage/configuration/config_documentation.md
-+++ b/docs/usage/configuration/config_documentation.md
-@@ -2887,6 +2887,20 @@ Example configuration:
- inhibit_user_in_use_error: true
- ```
- ---
-+### `allow_underscore_prefixed_localpart`
-+
-+Whether users are allowed to register with an underscore-prefixed localpart.
-+By default, AppServices use prefixes like `_example` to namespace their
-+associated ghost users, so turning this option on may result in clashes or
-+confusion. Useful when provisioning users from an external identity provider.
-+
-+Defaults to false.
-+
-+Example configuration:
-+```yaml
-+allow_underscore_prefixed_localpart: false
-+```
-+---
- ## User session management
- ---
- ### `session_lifetime`
-diff --git a/synapse/config/registration.py b/synapse/config/registration.py
-index 3cf7031656..8adf21079e 100644
---- a/synapse/config/registration.py
-+++ b/synapse/config/registration.py
-@@ -162,6 +162,10 @@ class RegistrationConfig(Config):
- "disable_msisdn_registration", False
- )
-
-+ self.allow_underscore_prefixed_localpart = config.get(
-+ "allow_underscore_prefixed_localpart", False
-+ )
-+
- session_lifetime = config.get("session_lifetime")
- if session_lifetime is not None:
- session_lifetime = self.parse_duration(session_lifetime)
-diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
-index ecfea175c7..3e86349981 100644
---- a/synapse/handlers/register.py
-+++ b/synapse/handlers/register.py
-@@ -159,7 +159,10 @@ class RegistrationHandler:
- if not localpart:
- raise SynapseError(400, "User ID cannot be empty", Codes.INVALID_USERNAME)
-
-- if localpart[0] == "_":
-+ if (
-+ localpart[0] == "_"
-+ and not self.hs.config.registration.allow_underscore_prefixed_localpart
-+ ):
- raise SynapseError(
- 400, "User ID may not begin with _", Codes.INVALID_USERNAME
- )
-diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py
-index 92487692db..dda389c08b 100644
---- a/tests/handlers/test_register.py
-+++ b/tests/handlers/test_register.py
-@@ -588,6 +588,29 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
- d = self.store.is_support_user(user_id)
- self.assertFalse(self.get_success(d))
-
-+ def test_underscore_localpart_rejected_by_default(self) -> None:
-+ for invalid_user_id in ("_", "_prefixed"):
-+ with self.subTest(invalid_user_id=invalid_user_id):
-+ self.get_failure(
-+ self.handler.register_user(localpart=invalid_user_id),
-+ SynapseError,
-+ )
-+
-+ @override_config(
-+ {
-+ "allow_underscore_prefixed_localpart": True,
-+ }
-+ )
-+ def test_underscore_localpart_allowed_if_configured(self) -> None:
-+ for valid_user_id in ("_", "_prefixed"):
-+ with self.subTest(valid_user_id=valid_user_id):
-+ user_id = self.get_success(
-+ self.handler.register_user(
-+ localpart=valid_user_id,
-+ ),
-+ )
-+ self.assertEqual(user_id, f"@{valid_user_id}:test")
-+
- def test_invalid_user_id(self) -> None:
- invalid_user_id = "^abcd"
- self.get_failure(
---
-2.49.0
-
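The behaviour this deleted patch introduced boils down to a config-gated validity check. A standalone sketch, not Synapse's code: the flag is a plain parameter here rather than `hs.config.registration`, and `SynapseError` is a simplified stand-in:

```python
class SynapseError(Exception):
    """Simplified stand-in for synapse.api.errors.SynapseError."""

    def __init__(self, code: int, msg: str):
        super().__init__(msg)
        self.code = code


def check_localpart(localpart: str, allow_underscore_prefixed: bool = False) -> None:
    """Reject underscore-prefixed localparts unless explicitly allowed."""
    if not localpart:
        raise SynapseError(400, "User ID cannot be empty")
    if localpart.startswith("_") and not allow_underscore_prefixed:
        # By default, '_'-prefixed localparts are reserved for AppService
        # ghost users, so plain registration rejects them.
        raise SynapseError(400, "User ID may not begin with _")


check_localpart("alice")                                    # accepted
check_localpart("_bridge", allow_underscore_prefixed=True)  # accepted with the flag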
diff --git a/packages/overlays/matrix-synapse/patches/0008-Fix-pagination-with-large-gaps-of-rejected-events.patch b/packages/overlays/matrix-synapse/patches/0008-Fix-pagination-with-large-gaps-of-rejected-events.patch
new file mode 100644
index 0000000..ecbdd62
--- /dev/null
+++ b/packages/overlays/matrix-synapse/patches/0008-Fix-pagination-with-large-gaps-of-rejected-events.patch
@@ -0,0 +1,50 @@
+From 209e561ab2a299892e00250fe10d8c31e1714f1e Mon Sep 17 00:00:00 2001
+From: Nicolas Werner <nicolas.werner@hotmail.de>
+Date: Sun, 8 Jun 2025 23:14:31 +0200
+Subject: [PATCH 08/10] Fix pagination with large gaps of rejected events
+
+---
+ synapse/handlers/pagination.py | 13 +++++++++++--
+ 1 file changed, 11 insertions(+), 2 deletions(-)
+
+diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
+index 81cda38549..365c9cabcb 100644
+--- a/synapse/handlers/pagination.py
++++ b/synapse/handlers/pagination.py
+@@ -510,7 +510,7 @@ class PaginationHandler:
+ (
+ events,
+ next_key,
+- _,
++ limited,
+ ) = await self.store.paginate_room_events_by_topological_ordering(
+ room_id=room_id,
+ from_key=from_token.room_key,
+@@ -593,7 +593,7 @@ class PaginationHandler:
+ (
+ events,
+ next_key,
+- _,
++ limited,
+ ) = await self.store.paginate_room_events_by_topological_ordering(
+ room_id=room_id,
+ from_key=from_token.room_key,
+@@ -616,6 +616,15 @@ class PaginationHandler:
+
+ next_token = from_token.copy_and_replace(StreamKeyType.ROOM, next_key)
+
++        # We might have hit some internal filtering first, for example rejected
++        # events. In that case, still return a pagination token to the client.
++ if not events and limited:
++ return {
++ "chunk": [],
++ "start": await from_token.to_string(self.store),
++ "end": await next_token.to_string(self.store),
++ }
++
+ # if no events are returned from pagination, that implies
+ # we have reached the end of the available events.
+ # In that case we do not return end, to tell the client
+--
+2.49.0
+
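The intent of the `limited` check is easier to see in isolation. A sketch with simplified shapes (plain-string tokens instead of `StreamToken.to_string`, and a freestanding function instead of the handler method):

```python
from typing import Any, Dict, List


def build_pagination_response(
    events: List[Any], limited: bool, start_token: str, end_token: str
) -> Dict[str, Any]:
    """Sketch of the token logic above. `limited` means the storage layer hit
    its fetch limit before running out of history, e.g. because every event
    in the window was rejected and filtered out."""
    if not events and limited:
        # Nothing survived filtering but more history exists: hand back an
        # "end" token so the client can step over the gap of rejected events.
        return {"chunk": [], "start": start_token, "end": end_token}
    if not events:
        # Genuinely out of events: omit "end" to signal the end of history.
        return {"chunk": [], "start": start_token}
    return {"chunk": events, "start": start_token, "end": end_token}
```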
diff --git a/packages/overlays/matrix-synapse/patches/0009-Fix-nix-flake.patch b/packages/overlays/matrix-synapse/patches/0009-Fix-nix-flake.patch
new file mode 100644
index 0000000..ecd6866
--- /dev/null
+++ b/packages/overlays/matrix-synapse/patches/0009-Fix-nix-flake.patch
@@ -0,0 +1,189 @@
+From b010181b70ef40f0259c561be50c307f2b651298 Mon Sep 17 00:00:00 2001
+From: Rory& <root@rory.gay>
+Date: Mon, 9 Jun 2025 17:38:34 +0200
+Subject: [PATCH 09/10] Fix nix flake
+
+---
+ flake.lock | 58 +++++++++++++++++++-----------------------------------
+ flake.nix | 10 +++++++++-
+ 2 files changed, 29 insertions(+), 39 deletions(-)
+
+diff --git a/flake.lock b/flake.lock
+index a6a2aea328..4e2f01153b 100644
+--- a/flake.lock
++++ b/flake.lock
+@@ -39,15 +39,12 @@
+ }
+ },
+ "flake-utils": {
+- "inputs": {
+- "systems": "systems"
+- },
+ "locked": {
+- "lastModified": 1685518550,
+- "narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=",
++ "lastModified": 1667395993,
++ "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=",
+ "owner": "numtide",
+ "repo": "flake-utils",
+- "rev": "a1720a10a6cfe8234c0e93907ffe81be440f4cef",
++ "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f",
+ "type": "github"
+ },
+ "original": {
+@@ -152,27 +149,27 @@
+ },
+ "nixpkgs-stable": {
+ "locked": {
+- "lastModified": 1685801374,
+- "narHash": "sha256-otaSUoFEMM+LjBI1XL/xGB5ao6IwnZOXc47qhIgJe8U=",
++ "lastModified": 1678872516,
++ "narHash": "sha256-/E1YwtMtFAu2KUQKV/1+KFuReYPANM2Rzehk84VxVoc=",
+ "owner": "NixOS",
+ "repo": "nixpkgs",
+- "rev": "c37ca420157f4abc31e26f436c1145f8951ff373",
++ "rev": "9b8e5abb18324c7fe9f07cb100c3cd4a29cda8b8",
+ "type": "github"
+ },
+ "original": {
+ "owner": "NixOS",
+- "ref": "nixos-23.05",
++ "ref": "nixos-22.11",
+ "repo": "nixpkgs",
+ "type": "github"
+ }
+ },
+ "nixpkgs_2": {
+ "locked": {
+- "lastModified": 1729265718,
+- "narHash": "sha256-4HQI+6LsO3kpWTYuVGIzhJs1cetFcwT7quWCk/6rqeo=",
++ "lastModified": 1748217807,
++ "narHash": "sha256-P3u2PXxMlo49PutQLnk2PhI/imC69hFl1yY4aT5Nax8=",
+ "owner": "NixOS",
+ "repo": "nixpkgs",
+- "rev": "ccc0c2126893dd20963580b6478d1a10a4512185",
++ "rev": "3108eaa516ae22c2360928589731a4f1581526ef",
+ "type": "github"
+ },
+ "original": {
+@@ -184,11 +181,11 @@
+ },
+ "nixpkgs_3": {
+ "locked": {
+- "lastModified": 1728538411,
+- "narHash": "sha256-f0SBJz1eZ2yOuKUr5CA9BHULGXVSn6miBuUWdTyhUhU=",
++ "lastModified": 1744536153,
++ "narHash": "sha256-awS2zRgF4uTwrOKwwiJcByDzDOdo3Q1rPZbiHQg/N38=",
+ "owner": "NixOS",
+ "repo": "nixpkgs",
+- "rev": "b69de56fac8c2b6f8fd27f2eca01dcda8e0a4221",
++ "rev": "18dd725c29603f582cf1900e0d25f9f1063dbf11",
+ "type": "github"
+ },
+ "original": {
+@@ -213,11 +210,11 @@
+ "nixpkgs-stable": "nixpkgs-stable"
+ },
+ "locked": {
+- "lastModified": 1688056373,
+- "narHash": "sha256-2+SDlNRTKsgo3LBRiMUcoEUb6sDViRNQhzJquZ4koOI=",
++ "lastModified": 1686050334,
++ "narHash": "sha256-R0mczWjDzBpIvM3XXhO908X5e2CQqjyh/gFbwZk/7/Q=",
+ "owner": "cachix",
+ "repo": "pre-commit-hooks.nix",
+- "rev": "5843cf069272d92b60c3ed9e55b7a8989c01d4c7",
++ "rev": "6881eb2ae5d8a3516e34714e7a90d9d95914c4dc",
+ "type": "github"
+ },
+ "original": {
+@@ -231,7 +228,7 @@
+ "devenv": "devenv",
+ "nixpkgs": "nixpkgs_2",
+ "rust-overlay": "rust-overlay",
+- "systems": "systems_2"
++ "systems": "systems"
+ }
+ },
+ "rust-overlay": {
+@@ -239,11 +236,11 @@
+ "nixpkgs": "nixpkgs_3"
+ },
+ "locked": {
+- "lastModified": 1731897198,
+- "narHash": "sha256-Ou7vLETSKwmE/HRQz4cImXXJBr/k9gp4J4z/PF8LzTE=",
++ "lastModified": 1748313401,
++ "narHash": "sha256-x5UuDKP2Ui/TresAngUo9U4Ss9xfOmN8dAXU8OrkZmA=",
+ "owner": "oxalica",
+ "repo": "rust-overlay",
+- "rev": "0be641045af6d8666c11c2c40e45ffc9667839b5",
++ "rev": "9c8ea175cf9af29edbcff121512e44092a8f37e4",
+ "type": "github"
+ },
+ "original": {
+@@ -266,21 +263,6 @@
+ "repo": "default",
+ "type": "github"
+ }
+- },
+- "systems_2": {
+- "locked": {
+- "lastModified": 1681028828,
+- "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+- "owner": "nix-systems",
+- "repo": "default",
+- "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+- "type": "github"
+- },
+- "original": {
+- "owner": "nix-systems",
+- "repo": "default",
+- "type": "github"
+- }
+ }
+ },
+ "root": "root",
+diff --git a/flake.nix b/flake.nix
+index 749c10da1d..e33b233ece 100644
+--- a/flake.nix
++++ b/flake.nix
+@@ -82,7 +82,7 @@
+ #
+ # NOTE: We currently need to set the Rust version unnecessarily high
+ # in order to work around https://github.com/matrix-org/synapse/issues/15939
+- (rust-bin.stable."1.82.0".default.override {
++ (rust-bin.stable."1.87.0".default.override {
+ # Additionally install the "rust-src" extension to allow diving into the
+ # Rust source code in an IDE (rust-analyzer will also make use of it).
+ extensions = [ "rust-src" ];
+@@ -118,6 +118,8 @@
+ # For releasing Synapse
+ debian-devscripts # (`dch` for manipulating the Debian changelog)
+ libnotify # (the release script uses `notify-send` to tell you when CI jobs are done)
++
++ postgresql.pg_config
+ ];
+
+ # Install Python and manage a virtualenv with Poetry.
+@@ -140,6 +142,9 @@
+ # force compiling those binaries locally instead.
+ env.POETRY_INSTALLER_NO_BINARY = "ruff";
+
++      # Required to make cargo's git-based dependency fetches work
++ env.CARGO_NET_GIT_FETCH_WITH_CLI = "true";
++
+ # Install dependencies for the additional programming languages
+ # involved with Synapse development.
+ #
+@@ -160,6 +165,9 @@
+ services.postgres.initialDatabases = [
+ { name = "synapse"; }
+ ];
++
++ services.postgres.port = 5433;
++
+ # Create a postgres user called 'synapse_user' which has ownership
+ # over the 'synapse' database.
+ services.postgres.initialScript = ''
+--
+2.49.0
+
diff --git a/packages/overlays/matrix-synapse/patches/0009-remove-room-without-listeners-from-Notifier.room_to_.patch b/packages/overlays/matrix-synapse/patches/0009-remove-room-without-listeners-from-Notifier.room_to_.patch
deleted file mode 100644
index dfced13..0000000
--- a/packages/overlays/matrix-synapse/patches/0009-remove-room-without-listeners-from-Notifier.room_to_.patch
+++ /dev/null
@@ -1,38 +0,0 @@
-From 0afdc0fc7ffe2cb7a2fa6d47f22b685cbacc7223 Mon Sep 17 00:00:00 2001
-From: Stanislav Kazantsev <stas.kazancev54@gmail.com>
-Date: Thu, 15 May 2025 23:18:17 +0600
-Subject: [PATCH 09/34] remove room without listeners from
- Notifier.room_to_user_streams (#18380)
-
-Co-authored-by: Andrew Morgan <andrew@amorgan.xyz>
----
- changelog.d/18380.misc | 1 +
- synapse/notifier.py | 3 +++
- 2 files changed, 4 insertions(+)
- create mode 100644 changelog.d/18380.misc
-
-diff --git a/changelog.d/18380.misc b/changelog.d/18380.misc
-new file mode 100644
-index 0000000000..5d6017be26
---- /dev/null
-+++ b/changelog.d/18380.misc
-@@ -0,0 +1 @@
-+Fix a memory leak in `_NotifierUserStream`.
-\ No newline at end of file
-diff --git a/synapse/notifier.py b/synapse/notifier.py
-index 1914d0c914..6190432b87 100644
---- a/synapse/notifier.py
-+++ b/synapse/notifier.py
-@@ -158,6 +158,9 @@ class _NotifierUserStream:
- lst = notifier.room_to_user_streams.get(room, set())
- lst.discard(self)
-
-+ if not lst:
-+ notifier.room_to_user_streams.pop(room, None)
-+
- notifier.user_to_user_stream.pop(self.user_id)
-
- def count_listeners(self) -> int:
---
-2.49.0
-
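The leak this deleted patch fixed is easy to reproduce in miniature: the map grows one key per room, and discarding members of a set without ever dropping the key leaves empty sets behind. A sketch with plain objects instead of `_NotifierUserStream`:

```python
from typing import Any, Dict, Set

room_to_user_streams: Dict[str, Set[Any]] = {}


def add_stream(room: str, stream: Any) -> None:
    room_to_user_streams.setdefault(room, set()).add(stream)


def remove_stream(room: str, stream: Any) -> None:
    lst = room_to_user_streams.get(room, set())
    lst.discard(stream)
    if not lst:
        # Without this pop, every room ever listened to keeps an empty set
        # alive in the dict forever -- the leak fixed above.
        room_to_user_streams.pop(room, None)


add_stream("!room:example.org", "stream-1")
remove_stream("!room:example.org", "stream-1")
assert "!room:example.org" not in room_to_user_streams
```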
diff --git a/packages/overlays/matrix-synapse/patches/0010-Fix-admin-redaction-endpoint-not-redacting-encrypted.patch b/packages/overlays/matrix-synapse/patches/0010-Fix-admin-redaction-endpoint-not-redacting-encrypted.patch
deleted file mode 100644
index 203103d..0000000
--- a/packages/overlays/matrix-synapse/patches/0010-Fix-admin-redaction-endpoint-not-redacting-encrypted.patch
+++ /dev/null
@@ -1,110 +0,0 @@
-From 74e2f028bbcaeb2a572d03e66334f3c671bffae2 Mon Sep 17 00:00:00 2001
-From: Shay <hillerys@element.io>
-Date: Mon, 19 May 2025 01:48:46 -0700
-Subject: [PATCH 10/34] Fix admin redaction endpoint not redacting encrypted
- messages (#18434)
-
----
- changelog.d/18434.bugfix | 1 +
- synapse/handlers/admin.py | 2 +-
- tests/rest/admin/test_user.py | 55 ++++++++++++++++++++++++++++++++++-
- 3 files changed, 56 insertions(+), 2 deletions(-)
- create mode 100644 changelog.d/18434.bugfix
-
-diff --git a/changelog.d/18434.bugfix b/changelog.d/18434.bugfix
-new file mode 100644
-index 0000000000..dd094c83e8
---- /dev/null
-+++ b/changelog.d/18434.bugfix
-@@ -0,0 +1 @@
-+Fix admin redaction endpoint not redacting encrypted messages.
-\ No newline at end of file
-diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py
-index f3e7790d43..971a74244f 100644
---- a/synapse/handlers/admin.py
-+++ b/synapse/handlers/admin.py
-@@ -445,7 +445,7 @@ class AdminHandler:
- user_id,
- room,
- limit,
-- ["m.room.member", "m.room.message"],
-+ ["m.room.member", "m.room.message", "m.room.encrypted"],
- )
- if not event_ids:
- # nothing to redact in this room
-diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py
-index a35a250975..874c29c935 100644
---- a/tests/rest/admin/test_user.py
-+++ b/tests/rest/admin/test_user.py
-@@ -36,7 +36,13 @@ from twisted.test.proto_helpers import MemoryReactor
- from twisted.web.resource import Resource
-
- import synapse.rest.admin
--from synapse.api.constants import ApprovalNoticeMedium, EventTypes, LoginType, UserTypes
-+from synapse.api.constants import (
-+ ApprovalNoticeMedium,
-+ EventContentFields,
-+ EventTypes,
-+ LoginType,
-+ UserTypes,
-+)
- from synapse.api.errors import Codes, HttpResponseException, ResourceLimitError
- from synapse.api.room_versions import RoomVersions
- from synapse.media.filepath import MediaFilePaths
-@@ -5467,6 +5473,53 @@ class UserRedactionTestCase(unittest.HomeserverTestCase):
- # we originally sent 5 messages so 5 should be redacted
- self.assertEqual(len(original_message_ids), 0)
-
-+ def test_redact_redacts_encrypted_messages(self) -> None:
-+ """
-+ Test that user's encrypted messages are redacted
-+ """
-+ encrypted_room = self.helper.create_room_as(
-+ self.admin, tok=self.admin_tok, room_version="7"
-+ )
-+ self.helper.send_state(
-+ encrypted_room,
-+ EventTypes.RoomEncryption,
-+ {EventContentFields.ENCRYPTION_ALGORITHM: "m.megolm.v1.aes-sha2"},
-+ tok=self.admin_tok,
-+ )
-+        # join the room and send some messages
-+ originals = []
-+ join = self.helper.join(encrypted_room, self.bad_user, tok=self.bad_user_tok)
-+ originals.append(join["event_id"])
-+ for _ in range(15):
-+ res = self.helper.send_event(
-+ encrypted_room, "m.room.encrypted", {}, tok=self.bad_user_tok
-+ )
-+ originals.append(res["event_id"])
-+
-+ # redact user's events
-+ channel = self.make_request(
-+ "POST",
-+ f"/_synapse/admin/v1/user/{self.bad_user}/redact",
-+ content={"rooms": []},
-+ access_token=self.admin_tok,
-+ )
-+ self.assertEqual(channel.code, 200)
-+
-+ matched = []
-+ filter = json.dumps({"types": [EventTypes.Redaction]})
-+ channel = self.make_request(
-+ "GET",
-+ f"rooms/{encrypted_room}/messages?filter={filter}&limit=50",
-+ access_token=self.admin_tok,
-+ )
-+ self.assertEqual(channel.code, 200)
-+
-+ for event in channel.json_body["chunk"]:
-+ for event_id in originals:
-+ if event["type"] == "m.room.redaction" and event["redacts"] == event_id:
-+ matched.append(event_id)
-+ self.assertEqual(len(matched), len(originals))
-+
-
- class UserRedactionBackgroundTaskTestCase(BaseMultiWorkerStreamTestCase):
- servlets = [
---
-2.49.0
-
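The one-line handler fix above widens the event-type filter the admin redaction task uses. In isolation (list contents taken from the patch; the helper name is illustrative):

```python
# Event types the admin redaction endpoint sweeps up. Before the fix the
# list stopped at m.room.message, so in encrypted rooms -- where a user's
# messages are m.room.encrypted events -- nothing was redacted.
REDACTABLE_EVENT_TYPES = ("m.room.member", "m.room.message", "m.room.encrypted")


def should_redact(event_type: str) -> bool:
    return event_type in REDACTABLE_EVENT_TYPES


assert should_redact("m.room.encrypted")
```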
diff --git a/packages/overlays/matrix-synapse/patches/0010-Fix-gitignore-to-ignore-.venv.patch b/packages/overlays/matrix-synapse/patches/0010-Fix-gitignore-to-ignore-.venv.patch
new file mode 100644
index 0000000..eb3a553
--- /dev/null
+++ b/packages/overlays/matrix-synapse/patches/0010-Fix-gitignore-to-ignore-.venv.patch
@@ -0,0 +1,24 @@
+From 6a4413abf22aaf7d10bc81de5f46fe83bcd52a7d Mon Sep 17 00:00:00 2001
+From: Rory& <root@rory.gay>
+Date: Mon, 9 Jun 2025 17:46:10 +0200
+Subject: [PATCH 10/10] Fix gitignore to ignore .venv
+
+---
+ .gitignore | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/.gitignore b/.gitignore
+index a89f149ec1..0567934c4e 100644
+--- a/.gitignore
++++ b/.gitignore
+@@ -30,6 +30,7 @@ __pycache__/
+ /*.signing.key
+ /env/
+ /.venv*/
++/.venv
+ /homeserver*.yaml
+ /logs
+ /media_store/
+--
+2.49.0
+
diff --git a/packages/overlays/matrix-synapse/patches/0011-Bump-actions-setup-python-from-5.5.0-to-5.6.0-18398.patch b/packages/overlays/matrix-synapse/patches/0011-Bump-actions-setup-python-from-5.5.0-to-5.6.0-18398.patch
deleted file mode 100644
index bb31453..0000000
--- a/packages/overlays/matrix-synapse/patches/0011-Bump-actions-setup-python-from-5.5.0-to-5.6.0-18398.patch
+++ /dev/null
@@ -1,280 +0,0 @@
-From 078cefd014806a67249ddb59b5976c7e93227f37 Mon Sep 17 00:00:00 2001
-From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
-Date: Mon, 19 May 2025 09:51:08 +0100
-Subject: [PATCH 11/34] Bump actions/setup-python from 5.5.0 to 5.6.0 (#18398)
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-Bumps [actions/setup-python](https://github.com/actions/setup-python)
-from 5.5.0 to 5.6.0.
-<details>
-<summary>Release notes</summary>
-<p><em>Sourced from <a
-href="https://github.com/actions/setup-python/releases">actions/setup-python's
-releases</a>.</em></p>
-<blockquote>
-<h2>v5.6.0</h2>
-<h2>What's Changed</h2>
-<ul>
-<li>Workflow updates related to Ubuntu 20.04 by <a
-href="https://github.com/aparnajyothi-y"><code>@aparnajyothi-y</code></a>
-in <a
-href="https://redirect.github.com/actions/setup-python/pull/1065">actions/setup-python#1065</a></li>
-<li>Fix for Candidate Not Iterable Error by <a
-href="https://github.com/aparnajyothi-y"><code>@aparnajyothi-y</code></a>
-in <a
-href="https://redirect.github.com/actions/setup-python/pull/1082">actions/setup-python#1082</a></li>
-<li>Upgrade semver and <code>@types/semver</code> by <a
-href="https://github.com/dependabot"><code>@dependabot</code></a> in <a
-href="https://redirect.github.com/actions/setup-python/pull/1091">actions/setup-python#1091</a></li>
-<li>Upgrade prettier from 2.8.8 to 3.5.3 by <a
-href="https://github.com/dependabot"><code>@dependabot</code></a> in <a
-href="https://redirect.github.com/actions/setup-python/pull/1046">actions/setup-python#1046</a></li>
-<li>Upgrade ts-jest from 29.1.2 to 29.3.2 by <a
-href="https://github.com/dependabot"><code>@dependabot</code></a> in <a
-href="https://redirect.github.com/actions/setup-python/pull/1081">actions/setup-python#1081</a></li>
-</ul>
-<p><strong>Full Changelog</strong>: <a
-href="https://github.com/actions/setup-python/compare/v5...v5.6.0">https://github.com/actions/setup-python/compare/v5...v5.6.0</a></p>
-</blockquote>
-</details>
-<details>
-<summary>Commits</summary>
-<ul>
-<li><a
-href="https://github.com/actions/setup-python/commit/a26af69be951a213d495a4c3e4e4022e16d87065"><code>a26af69</code></a>
-Bump ts-jest from 29.1.2 to 29.3.2 (<a
-href="https://redirect.github.com/actions/setup-python/issues/1081">#1081</a>)</li>
-<li><a
-href="https://github.com/actions/setup-python/commit/30eafe95483bd95135b7eda0c66a0369af9afdf1"><code>30eafe9</code></a>
-Bump prettier from 2.8.8 to 3.5.3 (<a
-href="https://redirect.github.com/actions/setup-python/issues/1046">#1046</a>)</li>
-<li><a
-href="https://github.com/actions/setup-python/commit/5d95bc16d4bc83bb56202da9630d84c6f8a2d8f5"><code>5d95bc1</code></a>
-Bump semver and <code>@types/semver</code> (<a
-href="https://redirect.github.com/actions/setup-python/issues/1091">#1091</a>)</li>
-<li><a
-href="https://github.com/actions/setup-python/commit/6ed2c67c8abe7646815dbd50364eea862d396fd9"><code>6ed2c67</code></a>
-Fix for Candidate Not Iterable Error (<a
-href="https://redirect.github.com/actions/setup-python/issues/1082">#1082</a>)</li>
-<li><a
-href="https://github.com/actions/setup-python/commit/e348410e00f449ece8581cb8e88be8f0e7712da6"><code>e348410</code></a>
-Remove Ubuntu 20.04 from workflows due to deprecation from 2025-04-15
-(<a
-href="https://redirect.github.com/actions/setup-python/issues/1065">#1065</a>)</li>
-<li>See full diff in <a
-href="https://github.com/actions/setup-python/compare/8d9ed9ac5c53483de85588cdf95a591a75ab9f55...a26af69be951a213d495a4c3e4e4022e16d87065">compare
-view</a></li>
-</ul>
-</details>
-<br />
-
-
-[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
-
-Dependabot will resolve any conflicts with this PR as long as you don't
-alter it yourself. You can also trigger a rebase manually by commenting
-`@dependabot rebase`.
-
-[//]: # (dependabot-automerge-start)
-[//]: # (dependabot-automerge-end)
-
----
-
-<details>
-<summary>Dependabot commands and options</summary>
-<br />
-
-You can trigger Dependabot actions by commenting on this PR:
-- `@dependabot rebase` will rebase this PR
-- `@dependabot recreate` will recreate this PR, overwriting any edits
-that have been made to it
-- `@dependabot merge` will merge this PR after your CI passes on it
-- `@dependabot squash and merge` will squash and merge this PR after
-your CI passes on it
-- `@dependabot cancel merge` will cancel a previously requested merge
-and block automerging
-- `@dependabot reopen` will reopen this PR if it is closed
-- `@dependabot close` will close this PR and stop Dependabot recreating
-it. You can achieve the same result by closing it manually
-- `@dependabot show <dependency name> ignore conditions` will show all
-of the ignore conditions of the specified dependency
-- `@dependabot ignore this major version` will close this PR and stop
-Dependabot creating any more for this major version (unless you reopen
-the PR or upgrade to it yourself)
-- `@dependabot ignore this minor version` will close this PR and stop
-Dependabot creating any more for this minor version (unless you reopen
-the PR or upgrade to it yourself)
-- `@dependabot ignore this dependency` will close this PR and stop
-Dependabot creating any more for this dependency (unless you reopen the
-PR or upgrade to it yourself)
-
-
-</details>
-
-Signed-off-by: dependabot[bot] <support@github.com>
-Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
----
- .github/workflows/docs-pr.yaml | 2 +-
- .github/workflows/docs.yaml | 2 +-
- .github/workflows/latest_deps.yml | 2 +-
- .github/workflows/poetry_lockfile.yaml | 2 +-
- .github/workflows/release-artifacts.yml | 8 ++++----
- .github/workflows/tests.yml | 12 ++++++------
- 6 files changed, 14 insertions(+), 14 deletions(-)
-
-diff --git a/.github/workflows/docs-pr.yaml b/.github/workflows/docs-pr.yaml
-index 616ef0f9cf..1f4f79598a 100644
---- a/.github/workflows/docs-pr.yaml
-+++ b/.github/workflows/docs-pr.yaml
-@@ -24,7 +24,7 @@ jobs:
- mdbook-version: '0.4.17'
-
- - name: Setup python
-- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
-+ uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- with:
- python-version: "3.x"
-
-diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
-index 05ae608d06..930c71a8b4 100644
---- a/.github/workflows/docs.yaml
-+++ b/.github/workflows/docs.yaml
-@@ -64,7 +64,7 @@ jobs:
- run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
-
- - name: Setup python
-- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
-+ uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- with:
- python-version: "3.x"
-
-diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml
-index 366bb4cddb..ee0dac3beb 100644
---- a/.github/workflows/latest_deps.yml
-+++ b/.github/workflows/latest_deps.yml
-@@ -86,7 +86,7 @@ jobs:
- -e POSTGRES_PASSWORD=postgres \
- -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
- postgres:${{ matrix.postgres-version }}
-- - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
-+ - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- with:
- python-version: "3.x"
- - run: pip install .[all,test]
-diff --git a/.github/workflows/poetry_lockfile.yaml b/.github/workflows/poetry_lockfile.yaml
-index 31b9147e98..1668ad81d2 100644
---- a/.github/workflows/poetry_lockfile.yaml
-+++ b/.github/workflows/poetry_lockfile.yaml
-@@ -17,7 +17,7 @@ jobs:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-- - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
-+ - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- with:
- python-version: '3.x'
- - run: pip install tomli
-diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml
-index e03c9d2bd5..572d73e6ad 100644
---- a/.github/workflows/release-artifacts.yml
-+++ b/.github/workflows/release-artifacts.yml
-@@ -28,7 +28,7 @@ jobs:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-- - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
-+ - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- with:
- python-version: '3.x'
- - id: set-distros
-@@ -74,7 +74,7 @@ jobs:
- ${{ runner.os }}-buildx-
-
- - name: Set up python
-- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
-+ uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- with:
- python-version: '3.x'
-
-@@ -132,7 +132,7 @@ jobs:
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
-- - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
-+ - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- with:
- # setup-python@v4 doesn't impose a default python version. Need to use 3.x
- # here, because `python` on osx points to Python 2.7.
-@@ -177,7 +177,7 @@ jobs:
-
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-- - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
-+ - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- with:
- python-version: '3.10'
-
-diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
-index a7e35a0ece..848240f68e 100644
---- a/.github/workflows/tests.yml
-+++ b/.github/workflows/tests.yml
-@@ -102,7 +102,7 @@ jobs:
-
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-- - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
-+ - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- with:
- python-version: "3.x"
- - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
-@@ -112,7 +112,7 @@ jobs:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-- - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
-+ - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- with:
- python-version: "3.x"
- - run: .ci/scripts/check_lockfile.py
-@@ -192,7 +192,7 @@ jobs:
- with:
- ref: ${{ github.event.pull_request.head.sha }}
- fetch-depth: 0
-- - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
-+ - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- with:
- python-version: "3.x"
- - run: "pip install 'towncrier>=18.6.0rc1'"
-@@ -279,7 +279,7 @@ jobs:
- if: ${{ needs.changes.outputs.linting_readme == 'true' }}
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-- - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
-+ - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- with:
- python-version: "3.x"
- - run: "pip install rstcheck"
-@@ -327,7 +327,7 @@ jobs:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-- - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
-+ - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- with:
- python-version: "3.x"
- - id: get-matrix
-@@ -414,7 +414,7 @@ jobs:
- sudo apt-get -qq install build-essential libffi-dev python3-dev \
- libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
-
-- - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
-+ - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- with:
- python-version: '3.9'
-
---
-2.49.0
-
diff --git a/packages/overlays/matrix-synapse/patches/0012-Bump-docker-build-push-action-from-6.15.0-to-6.16.0-.patch b/packages/overlays/matrix-synapse/patches/0012-Bump-docker-build-push-action-from-6.15.0-to-6.16.0-.patch
deleted file mode 100644
index 6b1a48c..0000000
--- a/packages/overlays/matrix-synapse/patches/0012-Bump-docker-build-push-action-from-6.15.0-to-6.16.0-.patch
+++ /dev/null
@@ -1,150 +0,0 @@
-From 7d4c3b64e34571f3ace10fa7e33d07853bf16d67 Mon Sep 17 00:00:00 2001
-From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
-Date: Mon, 19 May 2025 09:51:52 +0100
-Subject: [PATCH 12/34] Bump docker/build-push-action from 6.15.0 to 6.16.0
- (#18397)
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-Bumps
-[docker/build-push-action](https://github.com/docker/build-push-action)
-from 6.15.0 to 6.16.0.
-<details>
-<summary>Release notes</summary>
-<p><em>Sourced from <a
-href="https://github.com/docker/build-push-action/releases">docker/build-push-action's
-releases</a>.</em></p>
-<blockquote>
-<h2>v6.16.0</h2>
-<ul>
-<li>Handle no default attestations env var by <a
-href="https://github.com/crazy-max"><code>@crazy-max</code></a> in <a
-href="https://redirect.github.com/docker/build-push-action/pull/1343">docker/build-push-action#1343</a></li>
-<li>Only print secret keys in build summary output by <a
-href="https://github.com/crazy-max"><code>@crazy-max</code></a> in <a
-href="https://redirect.github.com/docker/build-push-action/pull/1353">docker/build-push-action#1353</a></li>
-<li>Bump <code>@docker/actions-toolkit</code> from 0.56.0 to 0.59.0 in
-<a
-href="https://redirect.github.com/docker/build-push-action/pull/1352">docker/build-push-action#1352</a></li>
-</ul>
-<p><strong>Full Changelog</strong>: <a
-href="https://github.com/docker/build-push-action/compare/v6.15.0...v6.16.0">https://github.com/docker/build-push-action/compare/v6.15.0...v6.16.0</a></p>
-</blockquote>
-</details>
-<details>
-<summary>Commits</summary>
-<ul>
-<li><a
-href="https://github.com/docker/build-push-action/commit/14487ce63c7a62a4a324b0bfb37086795e31c6c1"><code>14487ce</code></a>
-Merge pull request <a
-href="https://redirect.github.com/docker/build-push-action/issues/1343">#1343</a>
-from crazy-max/fix-no-default-attest</li>
-<li><a
-href="https://github.com/docker/build-push-action/commit/0ec91264d895acf7dfe05d54d8a3cc28f95b6346"><code>0ec9126</code></a>
-Merge pull request <a
-href="https://redirect.github.com/docker/build-push-action/issues/1366">#1366</a>
-from crazy-max/pr-assign-author</li>
-<li><a
-href="https://github.com/docker/build-push-action/commit/b749522b90af1b517f52d8c1e67b2a965cea5eae"><code>b749522</code></a>
-pr-assign-author workflow</li>
-<li><a
-href="https://github.com/docker/build-push-action/commit/c566248492c912e39910ac79e2f05a82260233a8"><code>c566248</code></a>
-Merge pull request <a
-href="https://redirect.github.com/docker/build-push-action/issues/1363">#1363</a>
-from crazy-max/fix-codecov</li>
-<li><a
-href="https://github.com/docker/build-push-action/commit/13275dd76e44afdffdd61da8b8ae8e26ee11671f"><code>13275dd</code></a>
-ci: fix missing source for codecov</li>
-<li><a
-href="https://github.com/docker/build-push-action/commit/67dc78bbaf388b3265f7e1c880e681f4b90d5f48"><code>67dc78b</code></a>
-Merge pull request <a
-href="https://redirect.github.com/docker/build-push-action/issues/1361">#1361</a>
-from mschoettle/patch-1</li>
-<li><a
-href="https://github.com/docker/build-push-action/commit/0760504437ba8d0d98e7d5b625560bdede11b3b5"><code>0760504</code></a>
-docs: add validating build configuration example</li>
-<li><a
-href="https://github.com/docker/build-push-action/commit/1c198f4467ce458288d816cabd773cd574f16977"><code>1c198f4</code></a>
-chore: update generated content</li>
-<li><a
-href="https://github.com/docker/build-push-action/commit/288d9e2e4a70c24711ba959b94c2209b9205347e"><code>288d9e2</code></a>
-handle no default attestations env var</li>
-<li><a
-href="https://github.com/docker/build-push-action/commit/88844b95d8cbbb41035fa9c94e5967a33b92db78"><code>88844b9</code></a>
-Merge pull request <a
-href="https://redirect.github.com/docker/build-push-action/issues/1353">#1353</a>
-from crazy-max/summary-secret-keys</li>
-<li>Additional commits viewable in <a
-href="https://github.com/docker/build-push-action/compare/471d1dc4e07e5cdedd4c2171150001c434f0b7a4...14487ce63c7a62a4a324b0bfb37086795e31c6c1">compare
-view</a></li>
-</ul>
-</details>
-<br />
-
-
-[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
-
-Dependabot will resolve any conflicts with this PR as long as you don't
-alter it yourself. You can also trigger a rebase manually by commenting
-`@dependabot rebase`.
-
-[//]: # (dependabot-automerge-start)
-[//]: # (dependabot-automerge-end)
-
----
-
-<details>
-<summary>Dependabot commands and options</summary>
-<br />
-
-You can trigger Dependabot actions by commenting on this PR:
-- `@dependabot rebase` will rebase this PR
-- `@dependabot recreate` will recreate this PR, overwriting any edits
-that have been made to it
-- `@dependabot merge` will merge this PR after your CI passes on it
-- `@dependabot squash and merge` will squash and merge this PR after
-your CI passes on it
-- `@dependabot cancel merge` will cancel a previously requested merge
-and block automerging
-- `@dependabot reopen` will reopen this PR if it is closed
-- `@dependabot close` will close this PR and stop Dependabot recreating
-it. You can achieve the same result by closing it manually
-- `@dependabot show <dependency name> ignore conditions` will show all
-of the ignore conditions of the specified dependency
-- `@dependabot ignore this major version` will close this PR and stop
-Dependabot creating any more for this major version (unless you reopen
-the PR or upgrade to it yourself)
-- `@dependabot ignore this minor version` will close this PR and stop
-Dependabot creating any more for this minor version (unless you reopen
-the PR or upgrade to it yourself)
-- `@dependabot ignore this dependency` will close this PR and stop
-Dependabot creating any more for this dependency (unless you reopen the
-PR or upgrade to it yourself)
-
-
-</details>
-
-Signed-off-by: dependabot[bot] <support@github.com>
-Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
----
- .github/workflows/docker.yml | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
-index c617753c7a..009089db3a 100644
---- a/.github/workflows/docker.yml
-+++ b/.github/workflows/docker.yml
-@@ -72,7 +72,7 @@ jobs:
-
- - name: Build and push all platforms
- id: build-and-push
-- uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
-+ uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
- with:
- push: true
- labels: |
---
-2.49.0
-
diff --git a/packages/overlays/matrix-synapse/patches/0013-Check-for-CREATE-DROP-INDEX-in-schema-deltas-18440.patch b/packages/overlays/matrix-synapse/patches/0013-Check-for-CREATE-DROP-INDEX-in-schema-deltas-18440.patch
deleted file mode 100644
index ce18e13..0000000
--- a/packages/overlays/matrix-synapse/patches/0013-Check-for-CREATE-DROP-INDEX-in-schema-deltas-18440.patch
+++ /dev/null
@@ -1,235 +0,0 @@
-From fa4a00a2da753a52dde582c0f56e3ea6567bd53b Mon Sep 17 00:00:00 2001
-From: Erik Johnston <erikj@element.io>
-Date: Mon, 19 May 2025 11:52:05 +0100
-Subject: [PATCH 13/34] Check for `CREATE/DROP INDEX` in schema deltas (#18440)
-
-As these should be background updates.
----
- changelog.d/18440.misc | 1 +
- scripts-dev/check_schema_delta.py | 127 +++++++++++++++++++++++-------
- 2 files changed, 99 insertions(+), 29 deletions(-)
- create mode 100644 changelog.d/18440.misc
-
-diff --git a/changelog.d/18440.misc b/changelog.d/18440.misc
-new file mode 100644
-index 0000000000..6aaa6dde5c
---- /dev/null
-+++ b/changelog.d/18440.misc
-@@ -0,0 +1 @@
-+Add lint to ensure we don't add a `CREATE/DROP INDEX` in a schema delta.
-diff --git a/scripts-dev/check_schema_delta.py b/scripts-dev/check_schema_delta.py
-index 467be96fdf..454784c3ae 100755
---- a/scripts-dev/check_schema_delta.py
-+++ b/scripts-dev/check_schema_delta.py
-@@ -1,6 +1,8 @@
- #!/usr/bin/env python3
-
- # Check that no schema deltas have been added to the wrong version.
-+#
-+# Also checks that schema deltas do not try to create or drop indices.
-
- import re
- from typing import Any, Dict, List
-@@ -9,6 +11,13 @@ import click
- import git
-
- SCHEMA_FILE_REGEX = re.compile(r"^synapse/storage/schema/(.*)/delta/(.*)/(.*)$")
-+INDEX_CREATION_REGEX = re.compile(r"CREATE .*INDEX .*ON ([a-z_]+)", flags=re.IGNORECASE)
-+INDEX_DELETION_REGEX = re.compile(r"DROP .*INDEX ([a-z_]+)", flags=re.IGNORECASE)
-+TABLE_CREATION_REGEX = re.compile(r"CREATE .*TABLE ([a-z_]+)", flags=re.IGNORECASE)
-+
-+# The base branch we want to check against. We use the main development branch
-+# on the assumption that is what we are developing against.
-+DEVELOP_BRANCH = "develop"
-
-
- @click.command()
-@@ -20,6 +29,9 @@ SCHEMA_FILE_REGEX = re.compile(r"^synapse/storage/schema/(.*)/delta/(.*)/(.*)$")
- help="Always output ANSI colours",
- )
- def main(force_colors: bool) -> None:
-+ # Return code. Set to non-zero when we encounter an error
-+ return_code = 0
-+
- click.secho(
- "+++ Checking schema deltas are in the right folder",
- fg="green",
-@@ -30,17 +42,17 @@ def main(force_colors: bool) -> None:
- click.secho("Updating repo...")
-
- repo = git.Repo()
-- repo.remote().fetch()
-+ repo.remote().fetch(refspec=DEVELOP_BRANCH)
-
- click.secho("Getting current schema version...")
-
-- r = repo.git.show("origin/develop:synapse/storage/schema/__init__.py")
-+ r = repo.git.show(f"origin/{DEVELOP_BRANCH}:synapse/storage/schema/__init__.py")
-
- locals: Dict[str, Any] = {}
- exec(r, locals)
- current_schema_version = locals["SCHEMA_VERSION"]
-
-- diffs: List[git.Diff] = repo.remote().refs.develop.commit.diff(None)
-+ diffs: List[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None)
-
- # Get the schema version of the local file to check against current schema on develop
- with open("synapse/storage/schema/__init__.py") as file:
-@@ -53,7 +65,7 @@ def main(force_colors: bool) -> None:
- # local schema version must be +/-1 the current schema version on develop
- if abs(local_schema_version - current_schema_version) != 1:
- click.secho(
-- "The proposed schema version has diverged more than one version from develop, please fix!",
-+ f"The proposed schema version has diverged more than one version from {DEVELOP_BRANCH}, please fix!",
- fg="red",
- bold=True,
- color=force_colors,
-@@ -67,21 +79,28 @@ def main(force_colors: bool) -> None:
- click.secho(f"Current schema version: {current_schema_version}")
-
- seen_deltas = False
-- bad_files = []
-+ bad_delta_files = []
-+ changed_delta_files = []
- for diff in diffs:
-- if not diff.new_file or diff.b_path is None:
-+ if diff.b_path is None:
-+ # We don't lint deleted files.
- continue
-
- match = SCHEMA_FILE_REGEX.match(diff.b_path)
- if not match:
- continue
-
-+ changed_delta_files.append(diff.b_path)
-+
-+ if not diff.new_file:
-+ continue
-+
- seen_deltas = True
-
- _, delta_version, _ = match.groups()
-
- if delta_version != str(current_schema_version):
-- bad_files.append(diff.b_path)
-+ bad_delta_files.append(diff.b_path)
-
- if not seen_deltas:
- click.secho(
-@@ -92,41 +111,91 @@ def main(force_colors: bool) -> None:
- )
- return
-
-- if not bad_files:
-+ if bad_delta_files:
-+ bad_delta_files.sort()
-+
- click.secho(
-- f"All deltas are in the correct folder: {current_schema_version}!",
-- fg="green",
-+ "Found deltas in the wrong folder!",
-+ fg="red",
- bold=True,
- color=force_colors,
- )
-- return
-
-- bad_files.sort()
--
-- click.secho(
-- "Found deltas in the wrong folder!",
-- fg="red",
-- bold=True,
-- color=force_colors,
-- )
-+ for f in bad_delta_files:
-+ click.secho(
-+ f"\t{f}",
-+ fg="red",
-+ bold=True,
-+ color=force_colors,
-+ )
-
-- for f in bad_files:
-+ click.secho()
- click.secho(
-- f"\t{f}",
-+ f"Please move these files to delta/{current_schema_version}/",
- fg="red",
- bold=True,
- color=force_colors,
- )
-
-- click.secho()
-- click.secho(
-- f"Please move these files to delta/{current_schema_version}/",
-- fg="red",
-- bold=True,
-- color=force_colors,
-- )
-+ else:
-+ click.secho(
-+ f"All deltas are in the correct folder: {current_schema_version}!",
-+ fg="green",
-+ bold=True,
-+ color=force_colors,
-+ )
-
-- click.get_current_context().exit(1)
-+ # Make sure we process them in order. This sort works because deltas are numbered
-+ # and delta files are also numbered in order.
-+ changed_delta_files.sort()
-+
-+ # Now check that we're not trying to create or drop indices. If we want to
-+ # do that they should be in background updates. The exception is when we
-+ # create indices on tables we've just created.
-+ created_tables = set()
-+ for delta_file in changed_delta_files:
-+ with open(delta_file) as fd:
-+ delta_lines = fd.readlines()
-+
-+ for line in delta_lines:
-+ # Strip SQL comments
-+ line = line.split("--", maxsplit=1)[0]
-+
-+ # Check and track any tables we create
-+ match = TABLE_CREATION_REGEX.search(line)
-+ if match:
-+ table_name = match.group(1)
-+ created_tables.add(table_name)
-+
-+ # Check for dropping indices, these are always banned
-+ match = INDEX_DELETION_REGEX.search(line)
-+ if match:
-+ clause = match.group()
-+
-+ click.secho(
-+ f"Found delta with index deletion: '{clause}' in {delta_file}\nThese should be in background updates.",
-+ fg="red",
-+ bold=True,
-+ color=force_colors,
-+ )
-+ return_code = 1
-+
-+ # Check for index creation, which is only allowed for tables we've
-+ # created.
-+ match = INDEX_CREATION_REGEX.search(line)
-+ if match:
-+ clause = match.group()
-+ table_name = match.group(1)
-+ if table_name not in created_tables:
-+ click.secho(
-+ f"Found delta with index creation: '{clause}' in {delta_file}\nThese should be in background updates.",
-+ fg="red",
-+ bold=True,
-+ color=force_colors,
-+ )
-+ return_code = 1
-+
-+ click.get_current_context().exit(return_code)
-
-
- if __name__ == "__main__":
---
-2.49.0
-
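The core of this deleted lint is three regexes plus a set of tables created within the same delta. A self-contained demo (patterns copied from the script; the sample SQL is made up):

```python
import re

INDEX_CREATION_REGEX = re.compile(r"CREATE .*INDEX .*ON ([a-z_]+)", flags=re.IGNORECASE)
INDEX_DELETION_REGEX = re.compile(r"DROP .*INDEX ([a-z_]+)", flags=re.IGNORECASE)
TABLE_CREATION_REGEX = re.compile(r"CREATE .*TABLE ([a-z_]+)", flags=re.IGNORECASE)

delta = [
    "CREATE TABLE foo_bar (id BIGINT);",
    "CREATE INDEX foo_bar_idx ON foo_bar (id);",     # allowed: foo_bar is new
    "CREATE INDEX event_idx ON events (event_id);",  # flagged: existing table
    "DROP INDEX old_idx;  -- SQL comments are stripped before matching",
]

created_tables = set()
for line in delta:
    line = line.split("--", maxsplit=1)[0]  # strip SQL comments, as the lint does
    if (m := TABLE_CREATION_REGEX.search(line)):
        created_tables.add(m.group(1))
    if (m := INDEX_DELETION_REGEX.search(line)):
        print(f"banned index deletion: {m.group()!r}")
    if (m := INDEX_CREATION_REGEX.search(line)):
        if m.group(1) not in created_tables:
            print(f"banned index creation: {m.group()!r}")
```

Running this flags only the third and fourth statements, mirroring the script's rule that index creation is fine on tables introduced by the same delta while index drops are always deferred to background updates.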
diff --git a/packages/overlays/matrix-synapse/patches/0014-Bump-pyo3-log-from-0.12.3-to-0.12.4-18453.patch b/packages/overlays/matrix-synapse/patches/0014-Bump-pyo3-log-from-0.12.3-to-0.12.4-18453.patch
deleted file mode 100644
index 57dcac6..0000000
--- a/packages/overlays/matrix-synapse/patches/0014-Bump-pyo3-log-from-0.12.3-to-0.12.4-18453.patch
+++ /dev/null
@@ -1,29 +0,0 @@
-From b3b24c69fcbdb67de04b0388aa104d43780ba88f Mon Sep 17 00:00:00 2001
-From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
-Date: Mon, 19 May 2025 13:04:15 +0100
-Subject: [PATCH 14/34] Bump pyo3-log from 0.12.3 to 0.12.4 (#18453)
-
-Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
----
- Cargo.lock | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/Cargo.lock b/Cargo.lock
-index 27a2e26be5..13156e67b5 100644
---- a/Cargo.lock
-+++ b/Cargo.lock
-@@ -316,9 +316,9 @@ dependencies = [
-
- [[package]]
- name = "pyo3-log"
--version = "0.12.3"
-+version = "0.12.4"
- source = "registry+https://github.com/rust-lang/crates.io-index"
--checksum = "7079e412e909af5d6be7c04a7f29f6a2837a080410e1c529c9dee2c367383db4"
-+checksum = "45192e5e4a4d2505587e27806c7b710c231c40c56f3bfc19535d0bb25df52264"
- dependencies = [
- "arc-swap",
- "log",
---
-2.49.0
-
diff --git a/packages/overlays/matrix-synapse/patches/0015-Bump-authlib-from-1.5.1-to-1.5.2-18452.patch b/packages/overlays/matrix-synapse/patches/0015-Bump-authlib-from-1.5.1-to-1.5.2-18452.patch
deleted file mode 100644
index a0dda80..0000000
--- a/packages/overlays/matrix-synapse/patches/0015-Bump-authlib-from-1.5.1-to-1.5.2-18452.patch
+++ /dev/null
@@ -1,249 +0,0 @@
-From cd1a3ac584d9a353e24e42354ae71028654f7f61 Mon Sep 17 00:00:00 2001
-From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
-Date: Mon, 19 May 2025 13:06:11 +0100
-Subject: [PATCH 15/34] Bump authlib from 1.5.1 to 1.5.2 (#18452)
-
-Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
----
- poetry.lock | 55 +++++++++++++++++++++++++++--------------------------
- 1 file changed, 28 insertions(+), 27 deletions(-)
-
-diff --git a/poetry.lock b/poetry.lock
-index 7190d0f788..cf3ca18611 100644
---- a/poetry.lock
-+++ b/poetry.lock
-@@ -1,4 +1,4 @@
--# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
-+# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
-
- [[package]]
- name = "annotated-types"
-@@ -34,15 +34,15 @@ tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" a
-
- [[package]]
- name = "authlib"
--version = "1.5.1"
-+version = "1.5.2"
- description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
- optional = true
- python-versions = ">=3.9"
- groups = ["main"]
--markers = "extra == \"oidc\" or extra == \"jwt\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\""
- files = [
-- {file = "authlib-1.5.1-py2.py3-none-any.whl", hash = "sha256:8408861cbd9b4ea2ff759b00b6f02fd7d81ac5a56d0b2b22c08606c6049aae11"},
-- {file = "authlib-1.5.1.tar.gz", hash = "sha256:5cbc85ecb0667312c1cdc2f9095680bb735883b123fb509fde1e65b1c5df972e"},
-+ {file = "authlib-1.5.2-py2.py3-none-any.whl", hash = "sha256:8804dd4402ac5e4a0435ac49e0b6e19e395357cfa632a3f624dcb4f6df13b4b1"},
-+ {file = "authlib-1.5.2.tar.gz", hash = "sha256:fe85ec7e50c5f86f1e2603518bb3b4f632985eb4a355e52256530790e326c512"},
- ]
-
- [package.dependencies]
-@@ -451,7 +451,7 @@ description = "XML bomb protection for Python stdlib modules"
- optional = true
- python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
- groups = ["main"]
--markers = "extra == \"saml2\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"saml2\""
- files = [
- {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
- {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
-@@ -494,7 +494,7 @@ description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and l
- optional = true
- python-versions = ">=3.7"
- groups = ["main"]
--markers = "extra == \"saml2\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"saml2\""
- files = [
- {file = "elementpath-4.1.5-py3-none-any.whl", hash = "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55"},
- {file = "elementpath-4.1.5.tar.gz", hash = "sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"},
-@@ -544,7 +544,7 @@ description = "Python wrapper for hiredis"
- optional = true
- python-versions = ">=3.8"
- groups = ["main"]
--markers = "extra == \"redis\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"redis\""
- files = [
- {file = "hiredis-3.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:2892db9db21f0cf7cc298d09f85d3e1f6dc4c4c24463ab67f79bc7a006d51867"},
- {file = "hiredis-3.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:93cfa6cc25ee2ceb0be81dc61eca9995160b9e16bdb7cca4a00607d57e998918"},
-@@ -890,7 +890,7 @@ description = "Jaeger Python OpenTracing Tracer implementation"
- optional = true
- python-versions = ">=3.7"
- groups = ["main"]
--markers = "extra == \"opentracing\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"opentracing\""
- files = [
- {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"},
- ]
-@@ -1028,7 +1028,7 @@ description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
- files = [
- {file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"},
- {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"},
-@@ -1044,7 +1044,7 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li
- optional = true
- python-versions = ">=3.6"
- groups = ["main"]
--markers = "extra == \"url-preview\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"url-preview\""
- files = [
- {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"},
- {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"},
-@@ -1330,7 +1330,7 @@ description = "An LDAP3 auth provider for Synapse"
- optional = true
- python-versions = ">=3.7"
- groups = ["main"]
--markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
- files = [
- {file = "matrix-synapse-ldap3-0.3.0.tar.gz", hash = "sha256:8bb6517173164d4b9cc44f49de411d8cebdb2e705d5dd1ea1f38733c4a009e1d"},
- {file = "matrix_synapse_ldap3-0.3.0-py3-none-any.whl", hash = "sha256:8b4d701f8702551e98cc1d8c20dbed532de5613584c08d0df22de376ba99159d"},
-@@ -1551,7 +1551,7 @@ description = "OpenTracing API for Python. See documentation at http://opentraci
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "extra == \"opentracing\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"opentracing\""
- files = [
- {file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"},
- ]
-@@ -1720,7 +1720,7 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter"
- optional = true
- python-versions = ">=3.8"
- groups = ["main"]
--markers = "extra == \"postgres\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"postgres\""
- files = [
- {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"},
- {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"},
-@@ -1728,6 +1728,7 @@ files = [
- {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"},
- {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"},
- {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"},
-+ {file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"},
- {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"},
- {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"},
- {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"},
-@@ -1740,7 +1741,7 @@ description = ".. image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=mas
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
-+markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
- files = [
- {file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"},
- ]
-@@ -1756,7 +1757,7 @@ description = "A Simple library to enable psycopg2 compatability"
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
-+markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
- files = [
- {file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"},
- ]
-@@ -1979,7 +1980,7 @@ description = "Python extension wrapping the ICU C++ API"
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "extra == \"user-search\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"user-search\""
- files = [
- {file = "PyICU-2.14.tar.gz", hash = "sha256:acc7eb92bd5c554ed577249c6978450a4feda0aa6f01470152b3a7b382a02132"},
- ]
-@@ -2028,7 +2029,7 @@ description = "A development tool to measure, monitor and analyze the memory beh
- optional = true
- python-versions = ">=3.6"
- groups = ["main"]
--markers = "extra == \"cache-memory\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"cache-memory\""
- files = [
- {file = "Pympler-1.0.1-py3-none-any.whl", hash = "sha256:d260dda9ae781e1eab6ea15bacb84015849833ba5555f141d2d9b7b7473b307d"},
- {file = "Pympler-1.0.1.tar.gz", hash = "sha256:993f1a3599ca3f4fcd7160c7545ad06310c9e12f70174ae7ae8d4e25f6c5d3fa"},
-@@ -2088,7 +2089,7 @@ description = "Python implementation of SAML Version 2 Standard"
- optional = true
- python-versions = ">=3.9,<4.0"
- groups = ["main"]
--markers = "extra == \"saml2\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"saml2\""
- files = [
- {file = "pysaml2-7.5.0-py3-none-any.whl", hash = "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318"},
- {file = "pysaml2-7.5.0.tar.gz", hash = "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"},
-@@ -2113,7 +2114,7 @@ description = "Extensions to the standard Python datetime module"
- optional = true
- python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
- groups = ["main"]
--markers = "extra == \"saml2\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"saml2\""
- files = [
- {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
- {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
-@@ -2141,7 +2142,7 @@ description = "World timezone definitions, modern and historical"
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "extra == \"saml2\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"saml2\""
- files = [
- {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
- {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},
-@@ -2505,7 +2506,7 @@ description = "Python client for Sentry (https://sentry.io)"
- optional = true
- python-versions = ">=3.6"
- groups = ["main"]
--markers = "extra == \"sentry\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"sentry\""
- files = [
- {file = "sentry_sdk-2.22.0-py2.py3-none-any.whl", hash = "sha256:3d791d631a6c97aad4da7074081a57073126c69487560c6f8bffcf586461de66"},
- {file = "sentry_sdk-2.22.0.tar.gz", hash = "sha256:b4bf43bb38f547c84b2eadcefbe389b36ef75f3f38253d7a74d6b928c07ae944"},
-@@ -2689,7 +2690,7 @@ description = "Tornado IOLoop Backed Concurrent Futures"
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "extra == \"opentracing\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"opentracing\""
- files = [
- {file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"},
- {file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"},
-@@ -2705,7 +2706,7 @@ description = "Python bindings for the Apache Thrift RPC system"
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "extra == \"opentracing\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"opentracing\""
- files = [
- {file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"},
- ]
-@@ -2767,7 +2768,7 @@ description = "Tornado is a Python web framework and asynchronous networking lib
- optional = true
- python-versions = ">=3.8"
- groups = ["main"]
--markers = "extra == \"opentracing\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"opentracing\""
- files = [
- {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"},
- {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"},
-@@ -2901,7 +2902,7 @@ description = "non-blocking redis client for python"
- optional = true
- python-versions = "*"
- groups = ["main"]
--markers = "extra == \"redis\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"redis\""
- files = [
- {file = "txredisapi-1.4.11-py3-none-any.whl", hash = "sha256:ac64d7a9342b58edca13ef267d4fa7637c1aa63f8595e066801c1e8b56b22d0b"},
- {file = "txredisapi-1.4.11.tar.gz", hash = "sha256:3eb1af99aefdefb59eb877b1dd08861efad60915e30ad5bf3d5bf6c5cedcdbc6"},
-@@ -3244,7 +3245,7 @@ description = "An XML Schema validator and decoder"
- optional = true
- python-versions = ">=3.7"
- groups = ["main"]
--markers = "extra == \"saml2\" or extra == \"all\""
-+markers = "extra == \"all\" or extra == \"saml2\""
- files = [
- {file = "xmlschema-2.4.0-py3-none-any.whl", hash = "sha256:dc87be0caaa61f42649899189aab2fd8e0d567f2cf548433ba7b79278d231a4a"},
- {file = "xmlschema-2.4.0.tar.gz", hash = "sha256:d74cd0c10866ac609e1ef94a5a69b018ad16e39077bc6393408b40c6babee793"},
---
-2.49.0
-
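The hunks above are pure lockfile churn: each one only swaps the operands of an `or` inside an environment marker, so the old and new markers select exactly the same installs. A minimal equivalence check, assuming the `packaging` library (not a dependency of this overlay) is available:

```python
# Verify that the reordered poetry.lock markers above are logically
# equivalent -- only the operand order of `or` changed, not the meaning.
from itertools import product

from packaging.markers import Marker

old = Marker('platform_python_implementation == "PyPy" and (extra == "postgres" or extra == "all")')
new = Marker('platform_python_implementation == "PyPy" and (extra == "all" or extra == "postgres")')

for impl, extra in product(["PyPy", "CPython"], ["postgres", "all", "saml2"]):
    env = {"platform_python_implementation": impl, "extra": extra}
    assert old.evaluate(env) == new.evaluate(env)
```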
diff --git a/packages/overlays/matrix-synapse/patches/0016-Bump-pyopenssl-from-25.0.0-to-25.1.0-18450.patch b/packages/overlays/matrix-synapse/patches/0016-Bump-pyopenssl-from-25.0.0-to-25.1.0-18450.patch
deleted file mode 100644
index f78bb13..0000000
--- a/packages/overlays/matrix-synapse/patches/0016-Bump-pyopenssl-from-25.0.0-to-25.1.0-18450.patch
+++ /dev/null
@@ -1,40 +0,0 @@
-From afeb0e01c552216d0d987cd504aab440b07bdb10 Mon Sep 17 00:00:00 2001
-From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
-Date: Mon, 19 May 2025 13:06:45 +0100
-Subject: [PATCH 16/34] Bump pyopenssl from 25.0.0 to 25.1.0 (#18450)
-
-Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
----
- poetry.lock | 8 ++++----
- 1 file changed, 4 insertions(+), 4 deletions(-)
-
-diff --git a/poetry.lock b/poetry.lock
-index cf3ca18611..54ddad3bdd 100644
---- a/poetry.lock
-+++ b/poetry.lock
-@@ -2064,18 +2064,18 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
-
- [[package]]
- name = "pyopenssl"
--version = "25.0.0"
-+version = "25.1.0"
- description = "Python wrapper module around the OpenSSL library"
- optional = false
- python-versions = ">=3.7"
- groups = ["main"]
- files = [
-- {file = "pyOpenSSL-25.0.0-py3-none-any.whl", hash = "sha256:424c247065e46e76a37411b9ab1782541c23bb658bf003772c3405fbaa128e90"},
-- {file = "pyopenssl-25.0.0.tar.gz", hash = "sha256:cd2cef799efa3936bb08e8ccb9433a575722b9dd986023f1cabc4ae64e9dac16"},
-+ {file = "pyopenssl-25.1.0-py3-none-any.whl", hash = "sha256:2b11f239acc47ac2e5aca04fd7fa829800aeee22a2eb30d744572a157bd8a1ab"},
-+ {file = "pyopenssl-25.1.0.tar.gz", hash = "sha256:8d031884482e0c67ee92bf9a4d8cceb08d92aba7136432ffb0703c5280fc205b"},
- ]
-
- [package.dependencies]
--cryptography = ">=41.0.5,<45"
-+cryptography = ">=41.0.5,<46"
- typing-extensions = {version = ">=4.9", markers = "python_version < \"3.13\" and python_version >= \"3.8\""}
-
- [package.extras]
---
-2.49.0
-
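Besides the pyopenssl bump itself, the patch above widens the transitive pin on cryptography from `<45` to `<46`. A quick sketch of what that widening admits, again assuming `packaging` is available:

```python
# Show which cryptography versions the old and new pins accept.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

before = SpecifierSet(">=41.0.5,<45")
after = SpecifierSet(">=41.0.5,<46")

candidate = Version("45.0.0")   # a hypothetical cryptography 45.x release
assert candidate not in before  # rejected by the old pin
assert candidate in after       # accepted after the bump
```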
diff --git a/packages/overlays/matrix-synapse/patches/0017-Bump-docker-build-push-action-from-6.16.0-to-6.17.0-.patch b/packages/overlays/matrix-synapse/patches/0017-Bump-docker-build-push-action-from-6.16.0-to-6.17.0-.patch
deleted file mode 100644
index 46ea888..0000000
--- a/packages/overlays/matrix-synapse/patches/0017-Bump-docker-build-push-action-from-6.16.0-to-6.17.0-.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From 17e6b32966670550c5fb4f232b390dd25ec77759 Mon Sep 17 00:00:00 2001
-From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
-Date: Mon, 19 May 2025 13:07:24 +0100
-Subject: [PATCH 17/34] Bump docker/build-push-action from 6.16.0 to 6.17.0
- (#18449)
-
-Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
----
- .github/workflows/docker.yml | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
-index 009089db3a..feeadf170d 100644
---- a/.github/workflows/docker.yml
-+++ b/.github/workflows/docker.yml
-@@ -72,7 +72,7 @@ jobs:
-
- - name: Build and push all platforms
- id: build-and-push
-- uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
-+ uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
- with:
- push: true
- labels: |
---
-2.49.0
-
diff --git a/packages/overlays/matrix-synapse/patches/0018-Allow-only-requiring-a-field-be-present-in-an-SSO-re.patch b/packages/overlays/matrix-synapse/patches/0018-Allow-only-requiring-a-field-be-present-in-an-SSO-re.patch
deleted file mode 100644
index 9aba9cb..0000000
--- a/packages/overlays/matrix-synapse/patches/0018-Allow-only-requiring-a-field-be-present-in-an-SSO-re.patch
+++ /dev/null
@@ -1,175 +0,0 @@
-From 1f4ae2f9eb94808f651b683b4650092015ec39e1 Mon Sep 17 00:00:00 2001
-From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
-Date: Mon, 19 May 2025 17:50:02 +0100
-Subject: [PATCH 18/34] Allow only requiring a field be present in an SSO
- response, rather than specifying a required value (#18454)
-
----
- changelog.d/18454.misc | 1 +
- .../configuration/config_documentation.md | 10 ++-
- synapse/config/sso.py | 7 +-
- tests/handlers/test_oidc.py | 77 ++++++++++++++++++-
- 4 files changed, 86 insertions(+), 9 deletions(-)
- create mode 100644 changelog.d/18454.misc
-
-diff --git a/changelog.d/18454.misc b/changelog.d/18454.misc
-new file mode 100644
-index 0000000000..892fbd1d94
---- /dev/null
-+++ b/changelog.d/18454.misc
-@@ -0,0 +1 @@
-+Allow checking only for the existence of a field in an SSO provider's response, rather than requiring the value(s) to check.
-\ No newline at end of file
-diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
-index e688bc5cd8..3927b9ca14 100644
---- a/docs/usage/configuration/config_documentation.md
-+++ b/docs/usage/configuration/config_documentation.md
-@@ -3782,17 +3782,23 @@ match particular values in the OIDC userinfo. The requirements can be listed und
- ```yaml
- attribute_requirements:
- - attribute: family_name
-- value: "Stephensson"
-+ one_of: ["Stephensson", "Smith"]
- - attribute: groups
- value: "admin"
-+ # If `value` or `one_of` are not specified, the attribute only needs
-+ # to exist, regardless of value.
-+ - attribute: picture
- ```
-+
-+`attribute` is a required field, while `value` and `one_of` are optional.
-+
- All of the listed attributes must match for the login to be permitted. Additional attributes can be added to
- userinfo by expanding the `scopes` section of the OIDC config to retrieve
- additional information from the OIDC provider.
-
- If the OIDC claim is a list, then the attribute must match any value in the list.
- Otherwise, it must exactly match the value of the claim. Using the example
--above, the `family_name` claim MUST be "Stephensson", but the `groups`
-+above, the `family_name` claim MUST be either "Stephensson" or "Smith", but the `groups`
- claim MUST contain "admin".
-
- Example configuration:
-diff --git a/synapse/config/sso.py b/synapse/config/sso.py
-index 97b85e47ea..cf27a7ee13 100644
---- a/synapse/config/sso.py
-+++ b/synapse/config/sso.py
-@@ -43,8 +43,7 @@ class SsoAttributeRequirement:
- """Object describing a single requirement for SSO attributes."""
-
- attribute: str
-- # If neither value nor one_of is given, the attribute must simply exist. This is
-- # only true for CAS configs which use a different JSON schema than the one below.
-+ # If neither `value` nor `one_of` is given, the attribute must simply exist.
- value: Optional[str] = None
- one_of: Optional[List[str]] = None
-
-@@ -56,10 +55,6 @@ class SsoAttributeRequirement:
- "one_of": {"type": "array", "items": {"type": "string"}},
- },
- "required": ["attribute"],
-- "oneOf": [
-- {"required": ["value"]},
-- {"required": ["one_of"]},
-- ],
- }
-
-
-diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py
-index e5f31d57ca..ff8e3c5cb6 100644
---- a/tests/handlers/test_oidc.py
-+++ b/tests/handlers/test_oidc.py
-@@ -1453,7 +1453,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
- }
- }
- )
-- def test_attribute_requirements_one_of(self) -> None:
-+ def test_attribute_requirements_one_of_succeeds(self) -> None:
- """Test that auth succeeds if userinfo attribute has multiple values and CONTAINS required value"""
- # userinfo with "test": ["bar"] attribute should succeed.
- userinfo = {
-@@ -1475,6 +1475,81 @@ class OidcHandlerTestCase(HomeserverTestCase):
- auth_provider_session_id=None,
- )
-
-+ @override_config(
-+ {
-+ "oidc_config": {
-+ **DEFAULT_CONFIG,
-+ "attribute_requirements": [
-+ {"attribute": "test", "one_of": ["foo", "bar"]}
-+ ],
-+ }
-+ }
-+ )
-+ def test_attribute_requirements_one_of_fails(self) -> None:
-+ """Test that auth fails if userinfo attribute has multiple values yet
-+ DOES NOT CONTAIN a required value
-+ """
-+ # userinfo with "test": ["something else"] attribute should fail.
-+ userinfo = {
-+ "sub": "tester",
-+ "username": "tester",
-+ "test": ["something else"],
-+ }
-+ request, _ = self.start_authorization(userinfo)
-+ self.get_success(self.handler.handle_oidc_callback(request))
-+ self.complete_sso_login.assert_not_called()
-+
-+ @override_config(
-+ {
-+ "oidc_config": {
-+ **DEFAULT_CONFIG,
-+ "attribute_requirements": [{"attribute": "test"}],
-+ }
-+ }
-+ )
-+ def test_attribute_requirements_does_not_exist(self) -> None:
-+ """OIDC login fails if the required attribute does not exist in the OIDC userinfo response."""
-+ # userinfo lacking "test" attribute should fail.
-+ userinfo = {
-+ "sub": "tester",
-+ "username": "tester",
-+ }
-+ request, _ = self.start_authorization(userinfo)
-+ self.get_success(self.handler.handle_oidc_callback(request))
-+ self.complete_sso_login.assert_not_called()
-+
-+ @override_config(
-+ {
-+ "oidc_config": {
-+ **DEFAULT_CONFIG,
-+ "attribute_requirements": [{"attribute": "test"}],
-+ }
-+ }
-+ )
-+ def test_attribute_requirements_exist(self) -> None:
-+    """OIDC login succeeds if the required attribute exists (regardless of value)
-+ in the OIDC userinfo response.
-+ """
-+ # userinfo with "test" attribute and random value should succeed.
-+ userinfo = {
-+ "sub": "tester",
-+ "username": "tester",
-+ "test": random_string(5), # value does not matter
-+ }
-+ request, _ = self.start_authorization(userinfo)
-+ self.get_success(self.handler.handle_oidc_callback(request))
-+
-+ # check that the auth handler got called as expected
-+ self.complete_sso_login.assert_called_once_with(
-+ "@tester:test",
-+ self.provider.idp_id,
-+ request,
-+ ANY,
-+ None,
-+ new_user=True,
-+ auth_provider_session_id=None,
-+ )
-+
- @override_config(
- {
- "oidc_config": {
---
-2.49.0
-
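The SSO patch above reduces each attribute requirement to three cases: `one_of` matches any listed value, `value` must equal the claim (or be contained in a list-valued claim), and a bare `attribute` only has to exist. A minimal sketch of those documented rules -- not Synapse's actual implementation:

```python
from typing import Any, List, Optional

def attribute_matches(
    userinfo: dict,
    attribute: str,
    value: Optional[str] = None,
    one_of: Optional[List[str]] = None,
) -> bool:
    """Mirror the documented matching rules: list-valued claims match on
    containment, scalar claims on equality; no constraint means existence."""
    if attribute not in userinfo:
        return False
    claim: Any = userinfo[attribute]
    values = claim if isinstance(claim, list) else [claim]
    if one_of is not None:
        return any(candidate in values for candidate in one_of)
    if value is not None:
        return value in values
    return True  # attribute exists; no value constraint

assert attribute_matches({"picture": "https://example.test/p.png"}, "picture")
assert attribute_matches({"family_name": "Smith"}, "family_name",
                         one_of=["Stephensson", "Smith"])
assert not attribute_matches({"sub": "tester"}, "test")
```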
diff --git a/packages/overlays/matrix-synapse/patches/0019-Bump-setuptools-from-72.1.0-to-78.1.1-18461.patch b/packages/overlays/matrix-synapse/patches/0019-Bump-setuptools-from-72.1.0-to-78.1.1-18461.patch
deleted file mode 100644
index 1905b13..0000000
--- a/packages/overlays/matrix-synapse/patches/0019-Bump-setuptools-from-72.1.0-to-78.1.1-18461.patch
+++ /dev/null
@@ -1,49 +0,0 @@
-From 303c5c4daa6986a91ab4632bd4df0448199b1813 Mon Sep 17 00:00:00 2001
-From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
-Date: Tue, 20 May 2025 12:03:10 +0100
-Subject: [PATCH 19/34] Bump setuptools from 72.1.0 to 78.1.1 (#18461)
-
-Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
----
- poetry.lock | 18 +++++++++++-------
- 1 file changed, 11 insertions(+), 7 deletions(-)
-
-diff --git a/poetry.lock b/poetry.lock
-index 54ddad3bdd..3c53dfb376 100644
---- a/poetry.lock
-+++ b/poetry.lock
-@@ -2584,20 +2584,24 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"]
-
- [[package]]
- name = "setuptools"
--version = "72.1.0"
-+version = "78.1.1"
- description = "Easily download, build, install, upgrade, and uninstall Python packages"
- optional = false
--python-versions = ">=3.8"
-+python-versions = ">=3.9"
- groups = ["main", "dev"]
- files = [
-- {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"},
-- {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"},
-+ {file = "setuptools-78.1.1-py3-none-any.whl", hash = "sha256:c3a9c4211ff4c309edb8b8c4f1cbfa7ae324c4ba9f91ff254e3d305b9fd54561"},
-+ {file = "setuptools-78.1.1.tar.gz", hash = "sha256:fcc17fd9cd898242f6b4adfaca46137a9edef687f43e6f78469692a5e70d851d"},
- ]
-
- [package.extras]
--core = ["importlib-metadata (>=6) ; python_version < \"3.10\"", "importlib-resources (>=5.10.2) ; python_version < \"3.9\"", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"]
--doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
--test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-ruff (<0.4) ; platform_system == \"Windows\"", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "pytest-ruff (>=0.3.2) ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
-+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""]
-+core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"]
-+cover = ["pytest-cov"]
-+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
-+enabler = ["pytest-enabler (>=2.2)"]
-+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
-+type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"]
-
- [[package]]
- name = "setuptools-rust"
---
-2.49.0
-
diff --git a/packages/overlays/matrix-synapse/patches/0020-Update-postgres.md-18445.patch b/packages/overlays/matrix-synapse/patches/0020-Update-postgres.md-18445.patch
deleted file mode 100644
index 62b2710..0000000
--- a/packages/overlays/matrix-synapse/patches/0020-Update-postgres.md-18445.patch
+++ /dev/null
@@ -1,40 +0,0 @@
-From a6cb3533db77ebeb6b7ed86fb3d3dd86c046f4a4 Mon Sep 17 00:00:00 2001
-From: Strac Consulting Engineers Pty Ltd <preminik@preminik.com>
-Date: Tue, 20 May 2025 23:31:05 +1000
-Subject: [PATCH 20/34] Update postgres.md (#18445)
-
----
- changelog.d/18445.doc | 1 +
- docs/postgres.md | 8 ++++++++
- 2 files changed, 9 insertions(+)
- create mode 100644 changelog.d/18445.doc
-
-diff --git a/changelog.d/18445.doc b/changelog.d/18445.doc
-new file mode 100644
-index 0000000000..1e05a791b2
---- /dev/null
-+++ b/changelog.d/18445.doc
-@@ -0,0 +1 @@
-+Add advice for upgrading between major PostgreSQL versions to the database documentation.
-diff --git a/docs/postgres.md b/docs/postgres.md
-index 51670667e8..d51f54c722 100644
---- a/docs/postgres.md
-+++ b/docs/postgres.md
-@@ -100,6 +100,14 @@ database:
- keepalives_count: 3
- ```
-
-+## Postgresql major version upgrades
-+
-+Postgres uses separate directories for database locations between major versions (typically `/var/lib/postgresql/<version>/main`).
-+
-+Therefore, it is recommended to stop Synapse and other services (MAS, etc) before upgrading Postgres major versions.
-+
-+It is also strongly recommended to [back up](./usage/administration/backups.md#database) your database beforehand to ensure no data loss arising from a failed upgrade.
-+
- ## Backups
-
- Don't forget to [back up](./usage/administration/backups.md#database) your database!
---
-2.49.0
-
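The upgrade advice in the patch above amounts to: stop Synapse (and MAS etc.), back up, upgrade PostgreSQL, then confirm the server you reconnect to is the expected major version. A small verification sketch using psycopg2; the connection details are placeholders:

```python
import psycopg2

# Placeholder DSN -- substitute your own database/user/host.
conn = psycopg2.connect("dbname=synapse user=synapse_user host=localhost")
# For PostgreSQL >= 10, server_version is an integer such as 150004,
# where the leading digits are the major version.
major = conn.server_version // 10000
print(f"Connected to PostgreSQL {major}.x")
conn.close()
```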
diff --git a/packages/overlays/matrix-synapse/patches/0021-Bump-ruff-from-0.7.3-to-0.11.10-18451.patch b/packages/overlays/matrix-synapse/patches/0021-Bump-ruff-from-0.7.3-to-0.11.10-18451.patch
deleted file mode 100644
index f2c0d5c..0000000
--- a/packages/overlays/matrix-synapse/patches/0021-Bump-ruff-from-0.7.3-to-0.11.10-18451.patch
+++ /dev/null
@@ -1,1259 +0,0 @@
-From 9d43bec3268d9a454fe992f25edfc013a50fb9cc Mon Sep 17 00:00:00 2001
-From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
-Date: Tue, 20 May 2025 15:23:30 +0100
-Subject: [PATCH 21/34] Bump ruff from 0.7.3 to 0.11.10 (#18451)
-
-Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
-Co-authored-by: Andrew Morgan <andrew@amorgan.xyz>
-Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
----
- changelog.d/18451.misc | 1 +
- poetry.lock | 40 +++++++++----------
- pyproject.toml | 2 +-
- synapse/_scripts/synapse_port_db.py | 2 +-
- synapse/_scripts/synctl.py | 6 +--
- synapse/app/generic_worker.py | 3 +-
- synapse/app/homeserver.py | 3 +-
- synapse/config/tls.py | 3 +-
- synapse/event_auth.py | 3 +-
- synapse/handlers/e2e_keys.py | 12 ++----
- synapse/handlers/federation.py | 6 +--
- synapse/handlers/message.py | 22 +++++-----
- synapse/handlers/sso.py | 6 +--
- synapse/http/matrixfederationclient.py | 6 +--
- synapse/http/proxyagent.py | 12 +++---
- synapse/http/servlet.py | 6 +--
- synapse/module_api/__init__.py | 6 +--
- synapse/replication/http/_base.py | 6 +--
- synapse/replication/tcp/streams/events.py | 6 +--
- synapse/rest/admin/__init__.py | 3 +-
- synapse/rest/client/receipts.py | 4 +-
- synapse/rest/client/rendezvous.py | 6 +--
- synapse/rest/client/transactions.py | 6 +--
- synapse/storage/background_updates.py | 12 +++---
- synapse/storage/controllers/persist_events.py | 3 +-
- synapse/storage/databases/main/client_ips.py | 12 +++---
- synapse/storage/databases/main/deviceinbox.py | 6 +--
- synapse/storage/databases/main/devices.py | 2 +-
- synapse/storage/databases/main/events.py | 27 ++++++-------
- .../storage/databases/main/events_worker.py | 6 +--
- .../databases/main/monthly_active_users.py | 24 +++++------
- .../storage/databases/main/purge_events.py | 3 +-
- .../storage/databases/main/state_deltas.py | 6 +--
- synapse/storage/databases/main/tags.py | 5 +--
- .../storage/databases/main/user_directory.py | 6 +--
- synapse/storage/databases/state/bg_updates.py | 3 +-
- synapse/storage/schema/main/delta/25/fts.py | 3 +-
- synapse/storage/schema/main/delta/27/ts.py | 3 +-
- .../schema/main/delta/31/search_update.py | 3 +-
- .../schema/main/delta/33/event_fields.py | 3 +-
- synapse/types/__init__.py | 3 +-
- synapse/types/state.py | 2 +-
- synapse/util/iterutils.py | 4 +-
- .../test_federation_out_of_band_membership.py | 18 ++++-----
- tests/handlers/test_user_directory.py | 4 +-
- tests/http/test_matrixfederationclient.py | 8 +---
- tests/media/test_media_storage.py | 4 +-
- tests/replication/tcp/streams/test_events.py | 2 +-
- tests/rest/admin/test_room.py | 2 +-
- tests/rest/admin/test_user.py | 4 +-
- .../sliding_sync/test_rooms_timeline.py | 6 +--
- tests/rest/client/test_media.py | 2 +-
- tests/rest/client/utils.py | 6 +--
- tests/rest/media/test_url_preview.py | 2 +-
- tests/server.py | 6 +--
- tests/storage/test_base.py | 2 +-
- tests/storage/test_devices.py | 6 +--
- tests/storage/test_event_federation.py | 2 +-
- tests/test_state.py | 2 +-
- tests/test_utils/logging_setup.py | 2 +-
- 60 files changed, 178 insertions(+), 206 deletions(-)
- create mode 100644 changelog.d/18451.misc
-
-diff --git a/changelog.d/18451.misc b/changelog.d/18451.misc
-new file mode 100644
-index 0000000000..593e83eb7f
---- /dev/null
-+++ b/changelog.d/18451.misc
-@@ -0,0 +1 @@
-+Bump ruff from 0.7.3 to 0.11.10.
-\ No newline at end of file
-diff --git a/poetry.lock b/poetry.lock
-index 3c53dfb376..ada0646215 100644
---- a/poetry.lock
-+++ b/poetry.lock
-@@ -2440,30 +2440,30 @@ files = [
-
- [[package]]
- name = "ruff"
--version = "0.7.3"
-+version = "0.11.10"
- description = "An extremely fast Python linter and code formatter, written in Rust."
- optional = false
- python-versions = ">=3.7"
- groups = ["dev"]
- files = [
-- {file = "ruff-0.7.3-py3-none-linux_armv6l.whl", hash = "sha256:34f2339dc22687ec7e7002792d1f50712bf84a13d5152e75712ac08be565d344"},
-- {file = "ruff-0.7.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:fb397332a1879b9764a3455a0bb1087bda876c2db8aca3a3cbb67b3dbce8cda0"},
-- {file = "ruff-0.7.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:37d0b619546103274e7f62643d14e1adcbccb242efda4e4bdb9544d7764782e9"},
-- {file = "ruff-0.7.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59f0c3ee4d1a6787614e7135b72e21024875266101142a09a61439cb6e38a5"},
-- {file = "ruff-0.7.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:44eb93c2499a169d49fafd07bc62ac89b1bc800b197e50ff4633aed212569299"},
-- {file = "ruff-0.7.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d0242ce53f3a576c35ee32d907475a8d569944c0407f91d207c8af5be5dae4e"},
-- {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6b6224af8b5e09772c2ecb8dc9f3f344c1aa48201c7f07e7315367f6dd90ac29"},
-- {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c50f95a82b94421c964fae4c27c0242890a20fe67d203d127e84fbb8013855f5"},
-- {file = "ruff-0.7.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f3eff9961b5d2644bcf1616c606e93baa2d6b349e8aa8b035f654df252c8c67"},
-- {file = "ruff-0.7.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8963cab06d130c4df2fd52c84e9f10d297826d2e8169ae0c798b6221be1d1d2"},
-- {file = "ruff-0.7.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:61b46049d6edc0e4317fb14b33bd693245281a3007288b68a3f5b74a22a0746d"},
-- {file = "ruff-0.7.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:10ebce7696afe4644e8c1a23b3cf8c0f2193a310c18387c06e583ae9ef284de2"},
-- {file = "ruff-0.7.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3f36d56326b3aef8eeee150b700e519880d1aab92f471eefdef656fd57492aa2"},
-- {file = "ruff-0.7.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5d024301109a0007b78d57ab0ba190087b43dce852e552734ebf0b0b85e4fb16"},
-- {file = "ruff-0.7.3-py3-none-win32.whl", hash = "sha256:4ba81a5f0c5478aa61674c5a2194de8b02652f17addf8dfc40c8937e6e7d79fc"},
-- {file = "ruff-0.7.3-py3-none-win_amd64.whl", hash = "sha256:588a9ff2fecf01025ed065fe28809cd5a53b43505f48b69a1ac7707b1b7e4088"},
-- {file = "ruff-0.7.3-py3-none-win_arm64.whl", hash = "sha256:1713e2c5545863cdbfe2cbce21f69ffaf37b813bfd1fb3b90dc9a6f1963f5a8c"},
-- {file = "ruff-0.7.3.tar.gz", hash = "sha256:e1d1ba2e40b6e71a61b063354d04be669ab0d39c352461f3d789cac68b54a313"},
-+ {file = "ruff-0.11.10-py3-none-linux_armv6l.whl", hash = "sha256:859a7bfa7bc8888abbea31ef8a2b411714e6a80f0d173c2a82f9041ed6b50f58"},
-+ {file = "ruff-0.11.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:968220a57e09ea5e4fd48ed1c646419961a0570727c7e069842edd018ee8afed"},
-+ {file = "ruff-0.11.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1067245bad978e7aa7b22f67113ecc6eb241dca0d9b696144256c3a879663bca"},
-+ {file = "ruff-0.11.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4854fd09c7aed5b1590e996a81aeff0c9ff51378b084eb5a0b9cd9518e6cff2"},
-+ {file = "ruff-0.11.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b4564e9f99168c0f9195a0fd5fa5928004b33b377137f978055e40008a082c5"},
-+ {file = "ruff-0.11.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b6a9cc5b62c03cc1fea0044ed8576379dbaf751d5503d718c973d5418483641"},
-+ {file = "ruff-0.11.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:607ecbb6f03e44c9e0a93aedacb17b4eb4f3563d00e8b474298a201622677947"},
-+ {file = "ruff-0.11.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b3a522fa389402cd2137df9ddefe848f727250535c70dafa840badffb56b7a4"},
-+ {file = "ruff-0.11.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f071b0deed7e9245d5820dac235cbdd4ef99d7b12ff04c330a241ad3534319f"},
-+ {file = "ruff-0.11.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a60e3a0a617eafba1f2e4186d827759d65348fa53708ca547e384db28406a0b"},
-+ {file = "ruff-0.11.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:da8ec977eaa4b7bf75470fb575bea2cb41a0e07c7ea9d5a0a97d13dbca697bf2"},
-+ {file = "ruff-0.11.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ddf8967e08227d1bd95cc0851ef80d2ad9c7c0c5aab1eba31db49cf0a7b99523"},
-+ {file = "ruff-0.11.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5a94acf798a82db188f6f36575d80609072b032105d114b0f98661e1679c9125"},
-+ {file = "ruff-0.11.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3afead355f1d16d95630df28d4ba17fb2cb9c8dfac8d21ced14984121f639bad"},
-+ {file = "ruff-0.11.10-py3-none-win32.whl", hash = "sha256:dc061a98d32a97211af7e7f3fa1d4ca2fcf919fb96c28f39551f35fc55bdbc19"},
-+ {file = "ruff-0.11.10-py3-none-win_amd64.whl", hash = "sha256:5cc725fbb4d25b0f185cb42df07ab6b76c4489b4bfb740a175f3a59c70e8a224"},
-+ {file = "ruff-0.11.10-py3-none-win_arm64.whl", hash = "sha256:ef69637b35fb8b210743926778d0e45e1bffa850a7c61e428c6b971549b5f5d1"},
-+ {file = "ruff-0.11.10.tar.gz", hash = "sha256:d522fb204b4959909ecac47da02830daec102eeb100fb50ea9554818d47a5fa6"},
- ]
-
- [[package]]
-@@ -3394,4 +3394,4 @@ user-search = ["pyicu"]
- [metadata]
- lock-version = "2.1"
- python-versions = "^3.9.0"
--content-hash = "d71159b19349fdc0b7cd8e06e8c8778b603fc37b941c6df34ddc31746783d94d"
-+content-hash = "522f5bacf5610646876452e0e397038dd5c959692d2ab76214431bff78562d01"
-diff --git a/pyproject.toml b/pyproject.toml
-index 914a5804aa..6ce05805a9 100644
---- a/pyproject.toml
-+++ b/pyproject.toml
-@@ -320,7 +320,7 @@ all = [
- # failing on new releases. Keeping lower bounds loose here means that dependabot
- # can bump versions without having to update the content-hash in the lockfile.
- # This helps prevents merge conflicts when running a batch of dependabot updates.
--ruff = "0.7.3"
-+ruff = "0.11.10"
- # Type checking only works with the pydantic.v1 compat module from pydantic v2
- pydantic = "^2"
-
-diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py
-index 438b2ff8a0..573c70696e 100755
---- a/synapse/_scripts/synapse_port_db.py
-+++ b/synapse/_scripts/synapse_port_db.py
-@@ -1065,7 +1065,7 @@ class Porter:
-
- def get_sent_table_size(txn: LoggingTransaction) -> int:
- txn.execute(
-- "SELECT count(*) FROM sent_transactions" " WHERE ts >= ?", (yesterday,)
-+ "SELECT count(*) FROM sent_transactions WHERE ts >= ?", (yesterday,)
- )
- result = txn.fetchone()
- assert result is not None
-diff --git a/synapse/_scripts/synctl.py b/synapse/_scripts/synctl.py
-index 688df9485c..2e2aa27a17 100755
---- a/synapse/_scripts/synctl.py
-+++ b/synapse/_scripts/synctl.py
-@@ -292,9 +292,9 @@ def main() -> None:
- for key in worker_config:
- if key == "worker_app": # But we allow worker_app
- continue
-- assert not key.startswith(
-- "worker_"
-- ), "Main process cannot use worker_* config"
-+ assert not key.startswith("worker_"), (
-+ "Main process cannot use worker_* config"
-+ )
- else:
- worker_pidfile = worker_config["worker_pid_file"]
- worker_cache_factor = worker_config.get("synctl_cache_factor")
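Most of this ruff bump is mechanical reformatting, as in the synctl.py hunk above: the newer formatter keeps a long assert's condition on one line and parenthesizes the message, where the old style wrapped the condition instead. The two spellings are semantically identical:

```python
key = "database"  # any non-worker_* key passes the check

# Old wrapping: the condition was split across lines.
assert not key.startswith(
    "worker_"
), "Main process cannot use worker_* config"

# New wrapping: condition on one line, message parenthesized.
assert not key.startswith("worker_"), (
    "Main process cannot use worker_* config"
)
```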
-diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py
-index f495d5b7e4..75c65ccc0d 100644
---- a/synapse/app/generic_worker.py
-+++ b/synapse/app/generic_worker.py
-@@ -287,8 +287,7 @@ class GenericWorkerServer(HomeServer):
- elif listener.type == "metrics":
- if not self.config.metrics.enable_metrics:
- logger.warning(
-- "Metrics listener configured, but "
-- "enable_metrics is not True!"
-+ "Metrics listener configured, but enable_metrics is not True!"
- )
- else:
- if isinstance(listener, TCPListenerConfig):
-diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
-index 6da2194cf7..e027b5eaea 100644
---- a/synapse/app/homeserver.py
-+++ b/synapse/app/homeserver.py
-@@ -289,8 +289,7 @@ class SynapseHomeServer(HomeServer):
- elif listener.type == "metrics":
- if not self.config.metrics.enable_metrics:
- logger.warning(
-- "Metrics listener configured, but "
-- "enable_metrics is not True!"
-+ "Metrics listener configured, but enable_metrics is not True!"
- )
- else:
- if isinstance(listener, TCPListenerConfig):
-diff --git a/synapse/config/tls.py b/synapse/config/tls.py
-index 51dc15eb61..a48d81fdc3 100644
---- a/synapse/config/tls.py
-+++ b/synapse/config/tls.py
-@@ -108,8 +108,7 @@ class TlsConfig(Config):
- # Raise an error if this option has been specified without any
- # corresponding certificates.
- raise ConfigError(
-- "federation_custom_ca_list specified without "
-- "any certificate files"
-+ "federation_custom_ca_list specified without any certificate files"
- )
-
- certs = []
-diff --git a/synapse/event_auth.py b/synapse/event_auth.py
-index 5ecf493f98..5999c264dc 100644
---- a/synapse/event_auth.py
-+++ b/synapse/event_auth.py
-@@ -986,8 +986,7 @@ def _check_power_levels(
- if old_level == user_level:
- raise AuthError(
- 403,
-- "You don't have permission to remove ops level equal "
-- "to your own",
-+ "You don't have permission to remove ops level equal to your own",
- )
-
- # Check if the old and new levels are greater than the user level
-diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
-index 540995e062..f2b2e30bf4 100644
---- a/synapse/handlers/e2e_keys.py
-+++ b/synapse/handlers/e2e_keys.py
-@@ -1163,7 +1163,7 @@ class E2eKeysHandler:
- devices = devices[user_id]
- except SynapseError as e:
- failure = _exception_to_failure(e)
-- failures[user_id] = {device: failure for device in signatures.keys()}
-+ failures[user_id] = dict.fromkeys(signatures.keys(), failure)
- return signature_list, failures
-
- for device_id, device in signatures.items():
-@@ -1303,7 +1303,7 @@ class E2eKeysHandler:
- except SynapseError as e:
- failure = _exception_to_failure(e)
- for user, devicemap in signatures.items():
-- failures[user] = {device_id: failure for device_id in devicemap.keys()}
-+ failures[user] = dict.fromkeys(devicemap.keys(), failure)
- return signature_list, failures
-
- for target_user, devicemap in signatures.items():
-@@ -1344,9 +1344,7 @@ class E2eKeysHandler:
- # other devices were signed -- mark those as failures
- logger.debug("upload signature: too many devices specified")
- failure = _exception_to_failure(NotFoundError("Unknown device"))
-- failures[target_user] = {
-- device: failure for device in other_devices
-- }
-+ failures[target_user] = dict.fromkeys(other_devices, failure)
-
- if user_signing_key_id in master_key.get("signatures", {}).get(
- user_id, {}
-@@ -1367,9 +1365,7 @@ class E2eKeysHandler:
- except SynapseError as e:
- failure = _exception_to_failure(e)
- if device_id is None:
-- failures[target_user] = {
-- device_id: failure for device_id in devicemap.keys()
-- }
-+ failures[target_user] = dict.fromkeys(devicemap.keys(), failure)
- else:
- failures.setdefault(target_user, {})[device_id] = failure
-
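Another recurring rewrite in this patch, visible in the e2e_keys.py hunks above, swaps same-value dict comprehensions for `dict.fromkeys`, which is equivalent whenever every key maps to one shared value:

```python
signatures = {"DEV1": "sig1", "DEV2": "sig2"}
failure = {"status": 403, "errcode": "M_FORBIDDEN"}

comprehension = {device: failure for device in signatures.keys()}
via_fromkeys = dict.fromkeys(signatures.keys(), failure)
assert comprehension == via_fromkeys

# Both forms share a single `failure` object across all keys, which is only
# safe because the failure dict is never mutated per-device afterwards.
assert all(value is failure for value in via_fromkeys.values())
```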
-diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
-index 17dd4af13e..b1640e3246 100644
---- a/synapse/handlers/federation.py
-+++ b/synapse/handlers/federation.py
-@@ -1312,9 +1312,9 @@ class FederationHandler:
- if state_key is not None:
- # the event was not rejected (get_event raises a NotFoundError for rejected
- # events) so the state at the event should include the event itself.
-- assert (
-- state_map.get((event.type, state_key)) == event.event_id
-- ), "State at event did not include event itself"
-+ assert state_map.get((event.type, state_key)) == event.event_id, (
-+ "State at event did not include event itself"
-+ )
-
- # ... but we need the state *before* that event
- if "replaces_state" in event.unsigned:
-diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
-index 52c61cfa54..ff6eb5a514 100644
---- a/synapse/handlers/message.py
-+++ b/synapse/handlers/message.py
-@@ -143,9 +143,9 @@ class MessageHandler:
- elif membership == Membership.LEAVE:
- key = (event_type, state_key)
- # If the membership is not JOIN, then the event ID should exist.
-- assert (
-- membership_event_id is not None
-- ), "check_user_in_room_or_world_readable returned invalid data"
-+ assert membership_event_id is not None, (
-+ "check_user_in_room_or_world_readable returned invalid data"
-+ )
- room_state = await self._state_storage_controller.get_state_for_events(
- [membership_event_id], StateFilter.from_types([key])
- )
-@@ -242,9 +242,9 @@ class MessageHandler:
- room_state = await self.store.get_events(state_ids.values())
- elif membership == Membership.LEAVE:
- # If the membership is not JOIN, then the event ID should exist.
-- assert (
-- membership_event_id is not None
-- ), "check_user_in_room_or_world_readable returned invalid data"
-+ assert membership_event_id is not None, (
-+ "check_user_in_room_or_world_readable returned invalid data"
-+ )
- room_state_events = (
- await self._state_storage_controller.get_state_for_events(
- [membership_event_id], state_filter=state_filter
-@@ -1266,12 +1266,14 @@ class EventCreationHandler:
- # Allow an event to have empty list of prev_event_ids
- # only if it has auth_event_ids.
- or auth_event_ids
-- ), "Attempting to create a non-m.room.create event with no prev_events or auth_event_ids"
-+ ), (
-+ "Attempting to create a non-m.room.create event with no prev_events or auth_event_ids"
-+ )
- else:
- # we now ought to have some prev_events (unless it's a create event).
-- assert (
-- builder.type == EventTypes.Create or prev_event_ids
-- ), "Attempting to create a non-m.room.create event with no prev_events"
-+ assert builder.type == EventTypes.Create or prev_event_ids, (
-+ "Attempting to create a non-m.room.create event with no prev_events"
-+ )
-
- if for_batch:
- assert prev_event_ids is not None
-diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py
-index 9c0d665461..07827cf95b 100644
---- a/synapse/handlers/sso.py
-+++ b/synapse/handlers/sso.py
-@@ -1192,9 +1192,9 @@ class SsoHandler:
- """
-
- # It is expected that this is the main process.
-- assert isinstance(
-- self._device_handler, DeviceHandler
-- ), "revoking SSO sessions can only be called on the main process"
-+ assert isinstance(self._device_handler, DeviceHandler), (
-+ "revoking SSO sessions can only be called on the main process"
-+ )
-
- # Invalidate any running user-mapping sessions
- to_delete = []
-diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
-index f6d2536957..88bf98045c 100644
---- a/synapse/http/matrixfederationclient.py
-+++ b/synapse/http/matrixfederationclient.py
-@@ -425,9 +425,9 @@ class MatrixFederationHttpClient:
- )
- else:
- proxy_authorization_secret = hs.config.worker.worker_replication_secret
-- assert (
-- proxy_authorization_secret is not None
-- ), "`worker_replication_secret` must be set when using `outbound_federation_restricted_to` (used to authenticate requests across workers)"
-+ assert proxy_authorization_secret is not None, (
-+ "`worker_replication_secret` must be set when using `outbound_federation_restricted_to` (used to authenticate requests across workers)"
-+ )
- federation_proxy_credentials = BearerProxyCredentials(
- proxy_authorization_secret.encode("ascii")
- )
-diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py
-index fd16ee42dd..6817199035 100644
---- a/synapse/http/proxyagent.py
-+++ b/synapse/http/proxyagent.py
-@@ -173,9 +173,9 @@ class ProxyAgent(_AgentBase):
- self._federation_proxy_endpoint: Optional[IStreamClientEndpoint] = None
- self._federation_proxy_credentials: Optional[ProxyCredentials] = None
- if federation_proxy_locations:
-- assert (
-- federation_proxy_credentials is not None
-- ), "`federation_proxy_credentials` are required when using `federation_proxy_locations`"
-+ assert federation_proxy_credentials is not None, (
-+ "`federation_proxy_credentials` are required when using `federation_proxy_locations`"
-+ )
-
- endpoints: List[IStreamClientEndpoint] = []
- for federation_proxy_location in federation_proxy_locations:
-@@ -302,9 +302,9 @@ class ProxyAgent(_AgentBase):
- parsed_uri.scheme == b"matrix-federation"
- and self._federation_proxy_endpoint
- ):
-- assert (
-- self._federation_proxy_credentials is not None
-- ), "`federation_proxy_credentials` are required when using `federation_proxy_locations`"
-+ assert self._federation_proxy_credentials is not None, (
-+ "`federation_proxy_credentials` are required when using `federation_proxy_locations`"
-+ )
-
- # Set a Proxy-Authorization header
- if headers is None:
-diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py
-index ed6ab08336..47d8bd5eaf 100644
---- a/synapse/http/servlet.py
-+++ b/synapse/http/servlet.py
-@@ -582,9 +582,9 @@ def parse_enum(
- is not one of those allowed values.
- """
- # Assert the enum values are strings.
-- assert all(
-- isinstance(e.value, str) for e in E
-- ), "parse_enum only works with string values"
-+ assert all(isinstance(e.value, str) for e in E), (
-+ "parse_enum only works with string values"
-+ )
- str_value = parse_string(
- request,
- name,
-diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py
-index bf9532e891..7834da759c 100644
---- a/synapse/module_api/__init__.py
-+++ b/synapse/module_api/__init__.py
-@@ -894,9 +894,9 @@ class ModuleApi:
- Raises:
- synapse.api.errors.AuthError: the access token is invalid
- """
-- assert isinstance(
-- self._device_handler, DeviceHandler
-- ), "invalidate_access_token can only be called on the main process"
-+ assert isinstance(self._device_handler, DeviceHandler), (
-+ "invalidate_access_token can only be called on the main process"
-+ )
-
- # see if the access token corresponds to a device
- user_info = yield defer.ensureDeferred(
-diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py
-index 9aa8d90bfe..0002538680 100644
---- a/synapse/replication/http/_base.py
-+++ b/synapse/replication/http/_base.py
-@@ -128,9 +128,9 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
-
- # We reserve `instance_name` as a parameter to sending requests, so we
- # assert here that sub classes don't try and use the name.
-- assert (
-- "instance_name" not in self.PATH_ARGS
-- ), "`instance_name` is a reserved parameter name"
-+ assert "instance_name" not in self.PATH_ARGS, (
-+ "`instance_name` is a reserved parameter name"
-+ )
- assert (
- "instance_name"
- not in signature(self.__class__._serialize_payload).parameters
-diff --git a/synapse/replication/tcp/streams/events.py b/synapse/replication/tcp/streams/events.py
-index ea0803dfc2..05b55fb033 100644
---- a/synapse/replication/tcp/streams/events.py
-+++ b/synapse/replication/tcp/streams/events.py
-@@ -200,9 +200,9 @@ class EventsStream(_StreamFromIdGen):
-
- # we rely on get_all_new_forward_event_rows strictly honouring the limit, so
- # that we know it is safe to just take upper_limit = event_rows[-1][0].
-- assert (
-- len(event_rows) <= target_row_count
-- ), "get_all_new_forward_event_rows did not honour row limit"
-+ assert len(event_rows) <= target_row_count, (
-+ "get_all_new_forward_event_rows did not honour row limit"
-+ )
-
- # if we hit the limit on event_updates, there's no point in going beyond the
- # last stream_id in the batch for the other sources.
-diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py
-index b1335fed66..e55cdc0470 100644
---- a/synapse/rest/admin/__init__.py
-+++ b/synapse/rest/admin/__init__.py
-@@ -207,8 +207,7 @@ class PurgeHistoryRestServlet(RestServlet):
- (stream, topo, _event_id) = r
- token = "t%d-%d" % (topo, stream)
- logger.info(
-- "[purge] purging up to token %s (received_ts %i => "
-- "stream_ordering %i)",
-+ "[purge] purging up to token %s (received_ts %i => stream_ordering %i)",
- token,
- ts,
- stream_ordering,
-diff --git a/synapse/rest/client/receipts.py b/synapse/rest/client/receipts.py
-index 89203dc45a..4bf93f485c 100644
---- a/synapse/rest/client/receipts.py
-+++ b/synapse/rest/client/receipts.py
-@@ -39,9 +39,7 @@ logger = logging.getLogger(__name__)
-
- class ReceiptRestServlet(RestServlet):
- PATTERNS = client_patterns(
-- "/rooms/(?P<room_id>[^/]*)"
-- "/receipt/(?P<receipt_type>[^/]*)"
-- "/(?P<event_id>[^/]*)$"
-+ "/rooms/(?P<room_id>[^/]*)/receipt/(?P<receipt_type>[^/]*)/(?P<event_id>[^/]*)$"
- )
- CATEGORY = "Receipts requests"
-
-diff --git a/synapse/rest/client/rendezvous.py b/synapse/rest/client/rendezvous.py
-index 02f166b4ea..a1808847f0 100644
---- a/synapse/rest/client/rendezvous.py
-+++ b/synapse/rest/client/rendezvous.py
-@@ -44,9 +44,9 @@ class MSC4108DelegationRendezvousServlet(RestServlet):
- redirection_target: Optional[str] = (
- hs.config.experimental.msc4108_delegation_endpoint
- )
-- assert (
-- redirection_target is not None
-- ), "Servlet is only registered if there is a delegation target"
-+ assert redirection_target is not None, (
-+ "Servlet is only registered if there is a delegation target"
-+ )
- self.endpoint = redirection_target.encode("utf-8")
-
- async def on_POST(self, request: SynapseRequest) -> None:
-diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py
-index f791904168..1a57996aec 100644
---- a/synapse/rest/client/transactions.py
-+++ b/synapse/rest/client/transactions.py
-@@ -94,9 +94,9 @@ class HttpTransactionCache:
- # (appservice and guest users), but does not cover access tokens minted
- # by the admin API. Use the access token ID instead.
- else:
-- assert (
-- requester.access_token_id is not None
-- ), "Requester must have an access_token_id"
-+ assert requester.access_token_id is not None, (
-+ "Requester must have an access_token_id"
-+ )
- return (path, "user_admin", requester.access_token_id)
-
- def fetch_or_execute_request(
-diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py
-index a02b4cc9ce..d170bbddaa 100644
---- a/synapse/storage/background_updates.py
-+++ b/synapse/storage/background_updates.py
-@@ -739,9 +739,9 @@ class BackgroundUpdater:
- c.execute(sql)
-
- async def updater(progress: JsonDict, batch_size: int) -> int:
-- assert isinstance(
-- self.db_pool.engine, engines.PostgresEngine
-- ), "validate constraint background update registered for non-Postres database"
-+ assert isinstance(self.db_pool.engine, engines.PostgresEngine), (
-+ "validate constraint background update registered for non-Postres database"
-+ )
-
- logger.info("Validating constraint %s to %s", constraint_name, table)
- await self.db_pool.runWithConnection(runner)
-@@ -900,9 +900,9 @@ class BackgroundUpdater:
- on the table. Used to iterate over the table.
- """
-
-- assert isinstance(
-- self.db_pool.engine, engines.PostgresEngine
-- ), "validate constraint background update registered for non-Postres database"
-+ assert isinstance(self.db_pool.engine, engines.PostgresEngine), (
-+ "validate constraint background update registered for non-Postres database"
-+ )
-
- async def updater(progress: JsonDict, batch_size: int) -> int:
- return await self.validate_constraint_and_delete_in_background(
-diff --git a/synapse/storage/controllers/persist_events.py b/synapse/storage/controllers/persist_events.py
-index 7963905479..f5131fe291 100644
---- a/synapse/storage/controllers/persist_events.py
-+++ b/synapse/storage/controllers/persist_events.py
-@@ -870,8 +870,7 @@ class EventsPersistenceStorageController:
- # This should only happen for outlier events.
- if not ev.internal_metadata.is_outlier():
- raise Exception(
-- "Context for new event %s has no state "
-- "group" % (ev.event_id,)
-+ "Context for new event %s has no state group" % (ev.event_id,)
- )
- continue
- if ctx.state_group_deltas:
-diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py
-index e8c322ab5c..69008804bd 100644
---- a/synapse/storage/databases/main/client_ips.py
-+++ b/synapse/storage/databases/main/client_ips.py
-@@ -650,9 +650,9 @@ class ClientIpWorkerStore(ClientIpBackgroundUpdateStore, MonthlyActiveUsersWorke
-
- @wrap_as_background_process("update_client_ips")
- async def _update_client_ips_batch(self) -> None:
-- assert (
-- self._update_on_this_worker
-- ), "This worker is not designated to update client IPs"
-+ assert self._update_on_this_worker, (
-+ "This worker is not designated to update client IPs"
-+ )
-
- # If the DB pool has already terminated, don't try updating
- if not self.db_pool.is_running():
-@@ -671,9 +671,9 @@ class ClientIpWorkerStore(ClientIpBackgroundUpdateStore, MonthlyActiveUsersWorke
- txn: LoggingTransaction,
- to_update: Mapping[Tuple[str, str, str], Tuple[str, Optional[str], int]],
- ) -> None:
-- assert (
-- self._update_on_this_worker
-- ), "This worker is not designated to update client IPs"
-+ assert self._update_on_this_worker, (
-+ "This worker is not designated to update client IPs"
-+ )
-
- # Keys and values for the `user_ips` upsert.
- user_ips_keys = []
-diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py
-index 0612b82b9b..d47833655d 100644
---- a/synapse/storage/databases/main/deviceinbox.py
-+++ b/synapse/storage/databases/main/deviceinbox.py
-@@ -200,9 +200,9 @@ class DeviceInboxWorkerStore(SQLBaseStore):
- to_stream_id=to_stream_id,
- )
-
-- assert (
-- last_processed_stream_id == to_stream_id
-- ), "Expected _get_device_messages to process all to-device messages up to `to_stream_id`"
-+ assert last_processed_stream_id == to_stream_id, (
-+ "Expected _get_device_messages to process all to-device messages up to `to_stream_id`"
-+ )
-
- return user_id_device_id_to_messages
-
-diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py
-index 3f0b2f5d84..6191f22cd6 100644
---- a/synapse/storage/databases/main/devices.py
-+++ b/synapse/storage/databases/main/devices.py
-@@ -1092,7 +1092,7 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
- ),
- )
-
-- results: Dict[str, Optional[str]] = {user_id: None for user_id in user_ids}
-+ results: Dict[str, Optional[str]] = dict.fromkeys(user_ids)
- results.update(rows)
-
- return results
-diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py
-index 26fbc1a483..b7cc0433e7 100644
---- a/synapse/storage/databases/main/events.py
-+++ b/synapse/storage/databases/main/events.py
-@@ -246,9 +246,9 @@ class PersistEventsStore:
- self.is_mine_id = hs.is_mine_id
-
- # This should only exist on instances that are configured to write
-- assert (
-- hs.get_instance_name() in hs.config.worker.writers.events
-- ), "Can only instantiate EventsStore on master"
-+ assert hs.get_instance_name() in hs.config.worker.writers.events, (
-+ "Can only instantiate EventsStore on master"
-+ )
-
- # Since we have been configured to write, we ought to have id generators,
- # rather than id trackers.
-@@ -465,9 +465,9 @@ class PersistEventsStore:
- missing_membership_event_ids
- )
- # There shouldn't be any missing events
-- assert (
-- remaining_events.keys() == missing_membership_event_ids
-- ), missing_membership_event_ids.difference(remaining_events.keys())
-+ assert remaining_events.keys() == missing_membership_event_ids, (
-+ missing_membership_event_ids.difference(remaining_events.keys())
-+ )
- membership_event_map.update(remaining_events)
-
- for (
-@@ -534,9 +534,9 @@ class PersistEventsStore:
- missing_state_event_ids
- )
- # There shouldn't be any missing events
-- assert (
-- remaining_events.keys() == missing_state_event_ids
-- ), missing_state_event_ids.difference(remaining_events.keys())
-+ assert remaining_events.keys() == missing_state_event_ids, (
-+ missing_state_event_ids.difference(remaining_events.keys())
-+ )
- for event in remaining_events.values():
- current_state_map[(event.type, event.state_key)] = event
-
-@@ -644,9 +644,9 @@ class PersistEventsStore:
- if missing_event_ids:
- remaining_events = await self.store.get_events(missing_event_ids)
- # There shouldn't be any missing events
-- assert (
-- remaining_events.keys() == missing_event_ids
-- ), missing_event_ids.difference(remaining_events.keys())
-+ assert remaining_events.keys() == missing_event_ids, (
-+ missing_event_ids.difference(remaining_events.keys())
-+ )
- for event in remaining_events.values():
- current_state_map[(event.type, event.state_key)] = event
-
-@@ -3448,8 +3448,7 @@ class PersistEventsStore:
- # Delete all these events that we've already fetched and now know that their
- # prev events are the new backwards extremeties.
- query = (
-- "DELETE FROM event_backward_extremities"
-- " WHERE event_id = ? AND room_id = ?"
-+ "DELETE FROM event_backward_extremities WHERE event_id = ? AND room_id = ?"
- )
- backward_extremity_tuples_to_remove = [
- (ev.event_id, ev.room_id)
-diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
-index 82b2ad4408..3db4460f57 100644
---- a/synapse/storage/databases/main/events_worker.py
-+++ b/synapse/storage/databases/main/events_worker.py
-@@ -824,9 +824,9 @@ class EventsWorkerStore(SQLBaseStore):
-
- if missing_events_ids:
-
-- async def get_missing_events_from_cache_or_db() -> (
-- Dict[str, EventCacheEntry]
-- ):
-+ async def get_missing_events_from_cache_or_db() -> Dict[
-+ str, EventCacheEntry
-+ ]:
- """Fetches the events in `missing_event_ids` from the database.
-
- Also creates entries in `self._current_event_fetches` to allow
-diff --git a/synapse/storage/databases/main/monthly_active_users.py b/synapse/storage/databases/main/monthly_active_users.py
-index 8e948c5e8d..659ee13d71 100644
---- a/synapse/storage/databases/main/monthly_active_users.py
-+++ b/synapse/storage/databases/main/monthly_active_users.py
-@@ -304,9 +304,9 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
- txn:
- threepids: List of threepid dicts to reserve
- """
-- assert (
-- self._update_on_this_worker
-- ), "This worker is not designated to update MAUs"
-+ assert self._update_on_this_worker, (
-+ "This worker is not designated to update MAUs"
-+ )
-
- # XXX what is this function trying to achieve? It upserts into
- # monthly_active_users for each *registered* reserved mau user, but why?
-@@ -340,9 +340,9 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
- Args:
- user_id: user to add/update
- """
-- assert (
-- self._update_on_this_worker
-- ), "This worker is not designated to update MAUs"
-+ assert self._update_on_this_worker, (
-+ "This worker is not designated to update MAUs"
-+ )
-
- # Support user never to be included in MAU stats. Note I can't easily call this
- # from upsert_monthly_active_user_txn because then I need a _txn form of
-@@ -379,9 +379,9 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
- txn:
- user_id: user to add/update
- """
-- assert (
-- self._update_on_this_worker
-- ), "This worker is not designated to update MAUs"
-+ assert self._update_on_this_worker, (
-+ "This worker is not designated to update MAUs"
-+ )
-
- # Am consciously deciding to lock the table on the basis that is ought
- # never be a big table and alternative approaches (batching multiple
-@@ -409,9 +409,9 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
- Args:
- user_id: the user_id to query
- """
-- assert (
-- self._update_on_this_worker
-- ), "This worker is not designated to update MAUs"
-+ assert self._update_on_this_worker, (
-+ "This worker is not designated to update MAUs"
-+ )
-
- if self._limit_usage_by_mau or self._mau_stats_only:
- # Trial users and guests should not be included as part of MAU group
-diff --git a/synapse/storage/databases/main/purge_events.py b/synapse/storage/databases/main/purge_events.py
-index ebdeb8fbd7..a11f522f03 100644
---- a/synapse/storage/databases/main/purge_events.py
-+++ b/synapse/storage/databases/main/purge_events.py
-@@ -199,8 +199,7 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore):
-
- # Update backward extremeties
- txn.execute_batch(
-- "INSERT INTO event_backward_extremities (room_id, event_id)"
-- " VALUES (?, ?)",
-+ "INSERT INTO event_backward_extremities (room_id, event_id) VALUES (?, ?)",
- [(room_id, event_id) for (event_id,) in new_backwards_extrems],
- )
-
-diff --git a/synapse/storage/databases/main/state_deltas.py b/synapse/storage/databases/main/state_deltas.py
-index b90f667da8..00f87cc3a1 100644
---- a/synapse/storage/databases/main/state_deltas.py
-+++ b/synapse/storage/databases/main/state_deltas.py
-@@ -98,9 +98,9 @@ class StateDeltasStore(SQLBaseStore):
- prev_stream_id = int(prev_stream_id)
-
- # check we're not going backwards
-- assert (
-- prev_stream_id <= max_stream_id
-- ), f"New stream id {max_stream_id} is smaller than prev stream id {prev_stream_id}"
-+ assert prev_stream_id <= max_stream_id, (
-+ f"New stream id {max_stream_id} is smaller than prev stream id {prev_stream_id}"
-+ )
-
- if not self._curr_state_delta_stream_cache.has_any_entity_changed(
- prev_stream_id
-diff --git a/synapse/storage/databases/main/tags.py b/synapse/storage/databases/main/tags.py
-index 44f395f315..97b190bccc 100644
---- a/synapse/storage/databases/main/tags.py
-+++ b/synapse/storage/databases/main/tags.py
-@@ -274,10 +274,7 @@ class TagsWorkerStore(AccountDataWorkerStore):
- assert isinstance(self._account_data_id_gen, AbstractStreamIdGenerator)
-
- def remove_tag_txn(txn: LoggingTransaction, next_id: int) -> None:
-- sql = (
-- "DELETE FROM room_tags "
-- " WHERE user_id = ? AND room_id = ? AND tag = ?"
-- )
-+ sql = "DELETE FROM room_tags WHERE user_id = ? AND room_id = ? AND tag = ?"
- txn.execute(sql, (user_id, room_id, tag))
- self._update_revision_txn(txn, user_id, room_id, next_id)
-
-diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py
-index 391f0dd638..2b867cdb6e 100644
---- a/synapse/storage/databases/main/user_directory.py
-+++ b/synapse/storage/databases/main/user_directory.py
-@@ -582,9 +582,9 @@ class UserDirectoryBackgroundUpdateStore(StateDeltasStore):
- retry_counter: number of failures in refreshing the profile so far. Used for
- exponential backoff calculations.
- """
-- assert not self.hs.is_mine_id(
-- user_id
-- ), "Can't mark a local user as a stale remote user."
-+ assert not self.hs.is_mine_id(user_id), (
-+ "Can't mark a local user as a stale remote user."
-+ )
-
- server_name = UserID.from_string(user_id).domain
-
-diff --git a/synapse/storage/databases/state/bg_updates.py b/synapse/storage/databases/state/bg_updates.py
-index 95fd0ae73a..5b594fe8dd 100644
---- a/synapse/storage/databases/state/bg_updates.py
-+++ b/synapse/storage/databases/state/bg_updates.py
-@@ -396,8 +396,7 @@ class StateBackgroundUpdateStore(StateGroupBackgroundUpdateStore):
- return True, count
-
- txn.execute(
-- "SELECT state_group FROM state_group_edges"
-- " WHERE state_group = ?",
-+ "SELECT state_group FROM state_group_edges WHERE state_group = ?",
- (state_group,),
- )
-
-diff --git a/synapse/storage/schema/main/delta/25/fts.py b/synapse/storage/schema/main/delta/25/fts.py
-index b050cc16a7..c01c1325cb 100644
---- a/synapse/storage/schema/main/delta/25/fts.py
-+++ b/synapse/storage/schema/main/delta/25/fts.py
-@@ -75,8 +75,7 @@ def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) ->
- progress_json = json.dumps(progress)
-
- sql = (
-- "INSERT into background_updates (update_name, progress_json)"
-- " VALUES (?, ?)"
-+ "INSERT into background_updates (update_name, progress_json) VALUES (?, ?)"
- )
-
- cur.execute(sql, ("event_search", progress_json))
-diff --git a/synapse/storage/schema/main/delta/27/ts.py b/synapse/storage/schema/main/delta/27/ts.py
-index d7f360b6e6..e6e73e1b77 100644
---- a/synapse/storage/schema/main/delta/27/ts.py
-+++ b/synapse/storage/schema/main/delta/27/ts.py
-@@ -55,8 +55,7 @@ def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) ->
- progress_json = json.dumps(progress)
-
- sql = (
-- "INSERT into background_updates (update_name, progress_json)"
-- " VALUES (?, ?)"
-+ "INSERT into background_updates (update_name, progress_json) VALUES (?, ?)"
- )
-
- cur.execute(sql, ("event_origin_server_ts", progress_json))
-diff --git a/synapse/storage/schema/main/delta/31/search_update.py b/synapse/storage/schema/main/delta/31/search_update.py
-index 0e65c9a841..46355122bb 100644
---- a/synapse/storage/schema/main/delta/31/search_update.py
-+++ b/synapse/storage/schema/main/delta/31/search_update.py
-@@ -59,8 +59,7 @@ def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) ->
- progress_json = json.dumps(progress)
-
- sql = (
-- "INSERT into background_updates (update_name, progress_json)"
-- " VALUES (?, ?)"
-+ "INSERT into background_updates (update_name, progress_json) VALUES (?, ?)"
- )
-
- cur.execute(sql, ("event_search_order", progress_json))
-diff --git a/synapse/storage/schema/main/delta/33/event_fields.py b/synapse/storage/schema/main/delta/33/event_fields.py
-index 9c02aeda88..53d215337e 100644
---- a/synapse/storage/schema/main/delta/33/event_fields.py
-+++ b/synapse/storage/schema/main/delta/33/event_fields.py
-@@ -55,8 +55,7 @@ def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) ->
- progress_json = json.dumps(progress)
-
- sql = (
-- "INSERT into background_updates (update_name, progress_json)"
-- " VALUES (?, ?)"
-+ "INSERT into background_updates (update_name, progress_json) VALUES (?, ?)"
- )
-
- cur.execute(sql, ("event_fields_sender_url", progress_json))
-diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py
-index e9cdd19868..5549f3c9f8 100644
---- a/synapse/types/__init__.py
-+++ b/synapse/types/__init__.py
-@@ -889,8 +889,7 @@ class MultiWriterStreamToken(AbstractMultiWriterStreamToken):
- def __str__(self) -> str:
- instances = ", ".join(f"{k}: {v}" for k, v in sorted(self.instance_map.items()))
- return (
-- f"MultiWriterStreamToken(stream: {self.stream}, "
-- f"instances: {{{instances}}})"
-+ f"MultiWriterStreamToken(stream: {self.stream}, instances: {{{instances}}})"
- )
-
-
-diff --git a/synapse/types/state.py b/synapse/types/state.py
-index e641215f18..6420e050a5 100644
---- a/synapse/types/state.py
-+++ b/synapse/types/state.py
-@@ -462,7 +462,7 @@ class StateFilter:
- new_types.update({state_type: set() for state_type in minus_wildcards})
-
- # insert the plus wildcards
-- new_types.update({state_type: None for state_type in plus_wildcards})
-+ new_types.update(dict.fromkeys(plus_wildcards))
-
- # insert the specific state keys
- for state_type, state_key in plus_state_keys:
-diff --git a/synapse/util/iterutils.py b/synapse/util/iterutils.py
-index ff6adeb716..0a6a30aab2 100644
---- a/synapse/util/iterutils.py
-+++ b/synapse/util/iterutils.py
-@@ -114,7 +114,7 @@ def sorted_topologically(
-
- # This is implemented by Kahn's algorithm.
-
-- degree_map = {node: 0 for node in nodes}
-+ degree_map = dict.fromkeys(nodes, 0)
- reverse_graph: Dict[T, Set[T]] = {}
-
- for node, edges in graph.items():
-@@ -164,7 +164,7 @@ def sorted_topologically_batched(
- persisted.
- """
-
-- degree_map = {node: 0 for node in nodes}
-+ degree_map = dict.fromkeys(nodes, 0)
- reverse_graph: Dict[T, Set[T]] = {}
-
- for node, edges in graph.items():
-diff --git a/tests/federation/test_federation_out_of_band_membership.py b/tests/federation/test_federation_out_of_band_membership.py
-index a4a266cf06..f77b8fe300 100644
---- a/tests/federation/test_federation_out_of_band_membership.py
-+++ b/tests/federation/test_federation_out_of_band_membership.py
-@@ -65,20 +65,20 @@ def required_state_json_to_state_map(required_state: Any) -> StateMap[EventBase]
- if isinstance(required_state, list):
- for state_event_dict in required_state:
- # Yell because we're in a test and this is unexpected
-- assert isinstance(
-- state_event_dict, dict
-- ), "`required_state` should be a list of event dicts"
-+ assert isinstance(state_event_dict, dict), (
-+ "`required_state` should be a list of event dicts"
-+ )
-
- event_type = state_event_dict["type"]
- event_state_key = state_event_dict["state_key"]
-
- # Yell because we're in a test and this is unexpected
-- assert isinstance(
-- event_type, str
-- ), "Each event in `required_state` should have a string `type`"
-- assert isinstance(
-- event_state_key, str
-- ), "Each event in `required_state` should have a string `state_key`"
-+ assert isinstance(event_type, str), (
-+ "Each event in `required_state` should have a string `type`"
-+ )
-+ assert isinstance(event_state_key, str), (
-+ "Each event in `required_state` should have a string `state_key`"
-+ )
-
- state_map[(event_type, event_state_key)] = make_event_from_dict(
- state_event_dict
-diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py
-index a9e9d7d7ea..b12ffc3665 100644
---- a/tests/handlers/test_user_directory.py
-+++ b/tests/handlers/test_user_directory.py
-@@ -1178,10 +1178,10 @@ class UserDirectoryTestCase(unittest.HomeserverTestCase):
- for use_numeric in [False, True]:
- if use_numeric:
- prefix1 = f"{i}"
-- prefix2 = f"{i+1}"
-+ prefix2 = f"{i + 1}"
- else:
- prefix1 = f"a{i}"
-- prefix2 = f"a{i+1}"
-+ prefix2 = f"a{i + 1}"
-
- local_user_1 = self.register_user(f"user{char}{prefix1}", "password")
- local_user_2 = self.register_user(f"user{char}{prefix2}", "password")
-diff --git a/tests/http/test_matrixfederationclient.py b/tests/http/test_matrixfederationclient.py
-index e34df54e13..d5ebf10eac 100644
---- a/tests/http/test_matrixfederationclient.py
-+++ b/tests/http/test_matrixfederationclient.py
-@@ -436,8 +436,7 @@ class FederationClientTests(HomeserverTestCase):
-
- # Send it the HTTP response
- client.dataReceived(
-- b"HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n"
-- b"Server: Fake\r\n\r\n"
-+ b"HTTP/1.1 200 OK\r\nContent-Type: application/json\r\nServer: Fake\r\n\r\n"
- )
-
- # Push by enough to time it out
-@@ -691,10 +690,7 @@ class FederationClientTests(HomeserverTestCase):
-
- # Send it a huge HTTP response
- protocol.dataReceived(
-- b"HTTP/1.1 200 OK\r\n"
-- b"Server: Fake\r\n"
-- b"Content-Type: application/json\r\n"
-- b"\r\n"
-+ b"HTTP/1.1 200 OK\r\nServer: Fake\r\nContent-Type: application/json\r\n\r\n"
- )
-
- self.pump()
-diff --git a/tests/media/test_media_storage.py b/tests/media/test_media_storage.py
-index 35e16a99ba..31dc32d67e 100644
---- a/tests/media/test_media_storage.py
-+++ b/tests/media/test_media_storage.py
-@@ -250,9 +250,7 @@ small_cmyk_jpeg = TestImage(
- )
-
- small_lossless_webp = TestImage(
-- unhexlify(
-- b"524946461a000000574542505650384c0d0000002f0000001007" b"1011118888fe0700"
-- ),
-+ unhexlify(b"524946461a000000574542505650384c0d0000002f00000010071011118888fe0700"),
- b"image/webp",
- b".webp",
- )
-diff --git a/tests/replication/tcp/streams/test_events.py b/tests/replication/tcp/streams/test_events.py
-index fdc74efb5a..2a0189a4e1 100644
---- a/tests/replication/tcp/streams/test_events.py
-+++ b/tests/replication/tcp/streams/test_events.py
-@@ -324,7 +324,7 @@ class EventsStreamTestCase(BaseStreamTestCase):
- pls = self.helper.get_state(
- self.room_id, EventTypes.PowerLevels, tok=self.user_tok
- )
-- pls["users"].update({u: 50 for u in user_ids})
-+ pls["users"].update(dict.fromkeys(user_ids, 50))
- self.helper.send_state(
- self.room_id,
- EventTypes.PowerLevels,
-diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py
-index 1d44106bd7..165d175ab2 100644
---- a/tests/rest/admin/test_room.py
-+++ b/tests/rest/admin/test_room.py
-@@ -1312,7 +1312,7 @@ class RoomTestCase(unittest.HomeserverTestCase):
- # Check that response json body contains a "rooms" key
- self.assertTrue(
- "rooms" in channel.json_body,
-- msg="Response body does not " "contain a 'rooms' key",
-+ msg="Response body does not contain a 'rooms' key",
- )
-
- # Check that 3 rooms were returned
-diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py
-index 874c29c935..f09f66da00 100644
---- a/tests/rest/admin/test_user.py
-+++ b/tests/rest/admin/test_user.py
-@@ -3901,9 +3901,7 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
- image_data1 = SMALL_PNG
- # Resolution: 1×1, MIME type: image/gif, Extension: gif, Size: 35 B
- image_data2 = unhexlify(
-- b"47494638376101000100800100000000"
-- b"ffffff2c00000000010001000002024c"
-- b"01003b"
-+ b"47494638376101000100800100000000ffffff2c00000000010001000002024c01003b"
- )
- # Resolution: 1×1, MIME type: image/bmp, Extension: bmp, Size: 54 B
- image_data3 = unhexlify(
-diff --git a/tests/rest/client/sliding_sync/test_rooms_timeline.py b/tests/rest/client/sliding_sync/test_rooms_timeline.py
-index 2293994793..535420209b 100644
---- a/tests/rest/client/sliding_sync/test_rooms_timeline.py
-+++ b/tests/rest/client/sliding_sync/test_rooms_timeline.py
-@@ -309,8 +309,8 @@ class SlidingSyncRoomsTimelineTestCase(SlidingSyncBase):
- self.assertEqual(
- response_body["rooms"][room_id1]["limited"],
- False,
-- f'Our `timeline_limit` was {sync_body["lists"]["foo-list"]["timeline_limit"]} '
-- + f'and {len(response_body["rooms"][room_id1]["timeline"])} events were returned in the timeline. '
-+ f"Our `timeline_limit` was {sync_body['lists']['foo-list']['timeline_limit']} "
-+ + f"and {len(response_body['rooms'][room_id1]['timeline'])} events were returned in the timeline. "
- + str(response_body["rooms"][room_id1]),
- )
- # Check to make sure the latest events are returned
-@@ -387,7 +387,7 @@ class SlidingSyncRoomsTimelineTestCase(SlidingSyncBase):
- response_body["rooms"][room_id1]["limited"],
- True,
- f"Our `timeline_limit` was {timeline_limit} "
-- + f'and {len(response_body["rooms"][room_id1]["timeline"])} events were returned in the timeline. '
-+ + f"and {len(response_body['rooms'][room_id1]['timeline'])} events were returned in the timeline. "
- + str(response_body["rooms"][room_id1]),
- )
- # Check to make sure that the "live" and historical events are returned
-diff --git a/tests/rest/client/test_media.py b/tests/rest/client/test_media.py
-index 1ea2a5c884..9ad8ecf1cd 100644
---- a/tests/rest/client/test_media.py
-+++ b/tests/rest/client/test_media.py
-@@ -1006,7 +1006,7 @@ class URLPreviewTests(unittest.HomeserverTestCase):
- data = base64.b64encode(SMALL_PNG)
-
- end_content = (
-- b"<html><head>" b'<img src="data:image/png;base64,%s" />' b"</head></html>"
-+ b'<html><head><img src="data:image/png;base64,%s" /></head></html>'
- ) % (data,)
-
- channel = self.make_request(
-diff --git a/tests/rest/client/utils.py b/tests/rest/client/utils.py
-index 53f1782d59..280486da08 100644
---- a/tests/rest/client/utils.py
-+++ b/tests/rest/client/utils.py
-@@ -716,9 +716,9 @@ class RestHelper:
- "/login",
- content={"type": "m.login.token", "token": login_token},
- )
-- assert (
-- channel.code == expected_status
-- ), f"unexpected status in response: {channel.code}"
-+ assert channel.code == expected_status, (
-+ f"unexpected status in response: {channel.code}"
-+ )
- return channel.json_body
-
- def auth_via_oidc(
-diff --git a/tests/rest/media/test_url_preview.py b/tests/rest/media/test_url_preview.py
-index 103d7662d9..2a7bee19f9 100644
---- a/tests/rest/media/test_url_preview.py
-+++ b/tests/rest/media/test_url_preview.py
-@@ -878,7 +878,7 @@ class URLPreviewTests(unittest.HomeserverTestCase):
- data = base64.b64encode(SMALL_PNG)
-
- end_content = (
-- b"<html><head>" b'<img src="data:image/png;base64,%s" />' b"</head></html>"
-+ b'<html><head><img src="data:image/png;base64,%s" /></head></html>'
- ) % (data,)
-
- channel = self.make_request(
-diff --git a/tests/server.py b/tests/server.py
-index 84ed9f68eb..f01708b77f 100644
---- a/tests/server.py
-+++ b/tests/server.py
-@@ -225,9 +225,9 @@ class FakeChannel:
- new_headers.addRawHeader(k, v)
- headers = new_headers
-
-- assert isinstance(
-- headers, Headers
-- ), f"headers are of the wrong type: {headers!r}"
-+ assert isinstance(headers, Headers), (
-+ f"headers are of the wrong type: {headers!r}"
-+ )
-
- self.result["headers"] = headers
-
-diff --git a/tests/storage/test_base.py b/tests/storage/test_base.py
-index 9420d03841..11313fc933 100644
---- a/tests/storage/test_base.py
-+++ b/tests/storage/test_base.py
-@@ -349,7 +349,7 @@ class SQLBaseStoreTestCase(unittest.TestCase):
- )
-
- self.mock_txn.execute.assert_called_once_with(
-- "UPDATE tablename SET colC = ?, colD = ? WHERE" " colA = ? AND colB = ?",
-+ "UPDATE tablename SET colC = ?, colD = ? WHERE colA = ? AND colB = ?",
- [3, 4, 1, 2],
- )
-
-diff --git a/tests/storage/test_devices.py b/tests/storage/test_devices.py
-index ba01b038ab..74edca7523 100644
---- a/tests/storage/test_devices.py
-+++ b/tests/storage/test_devices.py
-@@ -211,9 +211,9 @@ class DeviceStoreTestCase(HomeserverTestCase):
- even if that means leaving an earlier batch one EDU short of the limit.
- """
-
-- assert self.hs.is_mine_id(
-- "@user_id:test"
-- ), "Test not valid: this MXID should be considered local"
-+ assert self.hs.is_mine_id("@user_id:test"), (
-+ "Test not valid: this MXID should be considered local"
-+ )
-
- self.get_success(
- self.store.set_e2e_cross_signing_key(
-diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py
-index 088f0d24f9..0500c68e9d 100644
---- a/tests/storage/test_event_federation.py
-+++ b/tests/storage/test_event_federation.py
-@@ -114,7 +114,7 @@ def get_all_topologically_sorted_orders(
- # This is implemented by Kahn's algorithm, and forking execution each time
- # we have a choice over which node to consider next.
-
-- degree_map = {node: 0 for node in nodes}
-+ degree_map = dict.fromkeys(nodes, 0)
- reverse_graph: Dict[T, Set[T]] = {}
-
- for node, edges in graph.items():
-diff --git a/tests/test_state.py b/tests/test_state.py
-index dce56fe78a..adb72b0730 100644
---- a/tests/test_state.py
-+++ b/tests/test_state.py
-@@ -149,7 +149,7 @@ class _DummyStore:
- async def get_partial_state_events(
- self, event_ids: Collection[str]
- ) -> Dict[str, bool]:
-- return {e: False for e in event_ids}
-+ return dict.fromkeys(event_ids, False)
-
- async def get_state_group_delta(
- self, name: str
-diff --git a/tests/test_utils/logging_setup.py b/tests/test_utils/logging_setup.py
-index dd40c338d6..d58222a9f6 100644
---- a/tests/test_utils/logging_setup.py
-+++ b/tests/test_utils/logging_setup.py
-@@ -48,7 +48,7 @@ def setup_logging() -> None:
-
- # We exclude `%(asctime)s` from this format because the Twisted logger adds its own
- # timestamp
-- log_format = "%(name)s - %(lineno)d - " "%(levelname)s - %(request)s - %(message)s"
-+ log_format = "%(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s"
-
- handler = ToTwistedHandler()
- formatter = logging.Formatter(log_format)
---
-2.49.0
-
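The patch above is a mechanical restyle from a formatter pass: `assert cond, msg` gets the message parenthesised instead of the condition, implicitly concatenated string literals are merged, f-string expressions gain spaces (`{i + 1}`), and constant-valued dict comprehensions become `dict.fromkeys`. A minimal sketch, not part of any patch in this series, of why the `dict.fromkeys` rewrite is behaviour-preserving here, and when it would not be:

```python
# The rewrites above are safe because every shared default is immutable
# (0, False, None, 50): dict.fromkeys(keys, v) == {k: v for k in keys}.
nodes = ["a", "b", "c"]
assert {n: 0 for n in nodes} == dict.fromkeys(nodes, 0)
assert {n: None for n in nodes} == dict.fromkeys(nodes)

# Caveat: with a mutable default, every key shares ONE object, so the
# rewrite would change behaviour for e.g. {n: [] for n in nodes}.
shared = dict.fromkeys(nodes, [])
shared["a"].append(1)
assert shared["b"] == [1]  # the same list sits behind all three keys
```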
diff --git a/packages/overlays/matrix-synapse/patches/0022-Add-a-unit-test-for-the-phone-home-stats-18463.patch b/packages/overlays/matrix-synapse/patches/0022-Add-a-unit-test-for-the-phone-home-stats-18463.patch
deleted file mode 100644
index b00e1c4..0000000
--- a/packages/overlays/matrix-synapse/patches/0022-Add-a-unit-test-for-the-phone-home-stats-18463.patch
+++ /dev/null
@@ -1,384 +0,0 @@
-From 4b1d9d5d0e3df7a3151c07f9d42b02dad13a27bf Mon Sep 17 00:00:00 2001
-From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
-Date: Tue, 20 May 2025 16:26:45 +0100
-Subject: [PATCH 22/34] Add a unit test for the phone home stats (#18463)
-
----
- changelog.d/18463.misc | 1 +
- .../reporting_homeserver_usage_statistics.md | 6 +-
- synapse/app/phone_stats_home.py | 33 ++-
- tests/metrics/test_phone_home_stats.py | 263 ++++++++++++++++++
- 4 files changed, 296 insertions(+), 7 deletions(-)
- create mode 100644 changelog.d/18463.misc
- create mode 100644 tests/metrics/test_phone_home_stats.py
-
-diff --git a/changelog.d/18463.misc b/changelog.d/18463.misc
-new file mode 100644
-index 0000000000..1264758d7c
---- /dev/null
-+++ b/changelog.d/18463.misc
-@@ -0,0 +1 @@
-+Add unit tests for homeserver usage statistics.
-\ No newline at end of file
-diff --git a/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md b/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md
-index 4c0dbb5acd..a8a717e2a2 100644
---- a/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md
-+++ b/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md
-@@ -30,7 +30,7 @@ The following statistics are sent to the configured reporting endpoint:
- | `python_version` | string | The Python version number in use (e.g "3.7.1"). Taken from `sys.version_info`. |
- | `total_users` | int | The number of registered users on the homeserver. |
- | `total_nonbridged_users` | int | The number of users, excluding those created by an Application Service. |
--| `daily_user_type_native` | int | The number of native users created in the last 24 hours. |
-+| `daily_user_type_native` | int | The number of native, non-guest users created in the last 24 hours. |
- | `daily_user_type_guest` | int | The number of guest users created in the last 24 hours. |
- | `daily_user_type_bridged` | int | The number of users created by Application Services in the last 24 hours. |
- | `total_room_count` | int | The total number of rooms present on the homeserver. |
-@@ -50,8 +50,8 @@ The following statistics are sent to the configured reporting endpoint:
- | `cache_factor` | int | The configured [`global factor`](../../configuration/config_documentation.md#caching) value for caching. |
- | `event_cache_size` | int | The configured [`event_cache_size`](../../configuration/config_documentation.md#caching) value for caching. |
- | `database_engine` | string | The database engine that is in use. Either "psycopg2" meaning PostgreSQL is in use, or "sqlite3" for SQLite3. |
--| `database_server_version` | string | The version of the database server. Examples being "10.10" for PostgreSQL server version 10.0, and "3.38.5" for SQLite 3.38.5 installed on the system. |
--| `log_level` | string | The log level in use. Examples are "INFO", "WARNING", "ERROR", "DEBUG", etc. |
-+| `database_server_version` | string | The version of the database server. Examples being "10.10" for PostgreSQL server version 10.10, and "3.38.5" for SQLite 3.38.5 installed on the system. |
-+| `log_level` | string | The log level in use. Examples are "INFO", "WARNING", "ERROR", "DEBUG", etc. |
-
-
- [^1]: Native matrix users and guests are always counted. If the
-diff --git a/synapse/app/phone_stats_home.py b/synapse/app/phone_stats_home.py
-index f602bbbeea..bb450a235c 100644
---- a/synapse/app/phone_stats_home.py
-+++ b/synapse/app/phone_stats_home.py
-@@ -34,6 +34,22 @@ if TYPE_CHECKING:
-
- logger = logging.getLogger("synapse.app.homeserver")
-
-+ONE_MINUTE_SECONDS = 60
-+ONE_HOUR_SECONDS = 60 * ONE_MINUTE_SECONDS
-+
-+MILLISECONDS_PER_SECOND = 1000
-+
-+INITIAL_DELAY_BEFORE_FIRST_PHONE_HOME_SECONDS = 5 * ONE_MINUTE_SECONDS
-+"""
-+We wait 5 minutes to send the first set of stats as the server can be quite busy the
-+first few minutes
-+"""
-+
-+PHONE_HOME_INTERVAL_SECONDS = 3 * ONE_HOUR_SECONDS
-+"""
-+Phone home stats are sent every 3 hours
-+"""
-+
- # Contains the list of processes we will be monitoring
- # currently either 0 or 1
- _stats_process: List[Tuple[int, "resource.struct_rusage"]] = []
-@@ -185,12 +201,14 @@ def start_phone_stats_home(hs: "HomeServer") -> None:
- # If you increase the loop period, the accuracy of user_daily_visits
- # table will decrease
- clock.looping_call(
-- hs.get_datastores().main.generate_user_daily_visits, 5 * 60 * 1000
-+ hs.get_datastores().main.generate_user_daily_visits,
-+ 5 * ONE_MINUTE_SECONDS * MILLISECONDS_PER_SECOND,
- )
-
- # monthly active user limiting functionality
- clock.looping_call(
-- hs.get_datastores().main.reap_monthly_active_users, 1000 * 60 * 60
-+ hs.get_datastores().main.reap_monthly_active_users,
-+ ONE_HOUR_SECONDS * MILLISECONDS_PER_SECOND,
- )
- hs.get_datastores().main.reap_monthly_active_users()
-
-@@ -221,7 +239,12 @@ def start_phone_stats_home(hs: "HomeServer") -> None:
-
- if hs.config.metrics.report_stats:
- logger.info("Scheduling stats reporting for 3 hour intervals")
-- clock.looping_call(phone_stats_home, 3 * 60 * 60 * 1000, hs, stats)
-+ clock.looping_call(
-+ phone_stats_home,
-+ PHONE_HOME_INTERVAL_SECONDS * MILLISECONDS_PER_SECOND,
-+ hs,
-+ stats,
-+ )
-
- # We need to defer this init for the cases that we daemonize
- # otherwise the process ID we get is that of the non-daemon process
-@@ -229,4 +252,6 @@ def start_phone_stats_home(hs: "HomeServer") -> None:
-
- # We wait 5 minutes to send the first set of stats as the server can
- # be quite busy the first few minutes
-- clock.call_later(5 * 60, phone_stats_home, hs, stats)
-+ clock.call_later(
-+ INITIAL_DELAY_BEFORE_FIRST_PHONE_HOME_SECONDS, phone_stats_home, hs, stats
-+ )
-diff --git a/tests/metrics/test_phone_home_stats.py b/tests/metrics/test_phone_home_stats.py
-new file mode 100644
-index 0000000000..5339d649df
---- /dev/null
-+++ b/tests/metrics/test_phone_home_stats.py
-@@ -0,0 +1,263 @@
-+#
-+# This file is licensed under the Affero General Public License (AGPL) version 3.
-+#
-+# Copyright (C) 2025 New Vector, Ltd
-+#
-+# This program is free software: you can redistribute it and/or modify
-+# it under the terms of the GNU Affero General Public License as
-+# published by the Free Software Foundation, either version 3 of the
-+# License, or (at your option) any later version.
-+#
-+# See the GNU Affero General Public License for more details:
-+# <https://www.gnu.org/licenses/agpl-3.0.html>.
-+
-+import logging
-+from unittest.mock import AsyncMock
-+
-+from twisted.test.proto_helpers import MemoryReactor
-+
-+from synapse.app.phone_stats_home import (
-+ PHONE_HOME_INTERVAL_SECONDS,
-+ start_phone_stats_home,
-+)
-+from synapse.rest import admin, login, register, room
-+from synapse.server import HomeServer
-+from synapse.types import JsonDict
-+from synapse.util import Clock
-+
-+from tests import unittest
-+from tests.server import ThreadedMemoryReactorClock
-+
-+TEST_REPORT_STATS_ENDPOINT = "https://fake.endpoint/stats"
-+TEST_SERVER_CONTEXT = "test-server-context"
-+
-+
-+class PhoneHomeStatsTestCase(unittest.HomeserverTestCase):
-+ servlets = [
-+ admin.register_servlets_for_client_rest_resource,
-+ room.register_servlets,
-+ register.register_servlets,
-+ login.register_servlets,
-+ ]
-+
-+ def make_homeserver(
-+ self, reactor: ThreadedMemoryReactorClock, clock: Clock
-+ ) -> HomeServer:
-+ # Configure the homeserver to enable stats reporting.
-+ config = self.default_config()
-+ config["report_stats"] = True
-+ config["report_stats_endpoint"] = TEST_REPORT_STATS_ENDPOINT
-+
-+ # Configure the server context so we can check it ends up being reported
-+ config["server_context"] = TEST_SERVER_CONTEXT
-+
-+ # Allow guests to be registered
-+ config["allow_guest_access"] = True
-+
-+ hs = self.setup_test_homeserver(config=config)
-+
-+ # Replace the proxied http client with a mock, so we can inspect outbound requests to
-+ # the configured stats endpoint.
-+ self.put_json_mock = AsyncMock(return_value={})
-+ hs.get_proxied_http_client().put_json = self.put_json_mock # type: ignore[method-assign]
-+ return hs
-+
-+ def prepare(
-+ self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer
-+ ) -> None:
-+ self.store = homeserver.get_datastores().main
-+
-+ # Wait for the background updates to add the database triggers that keep the
-+ # `event_stats` table up-to-date.
-+ self.wait_for_background_updates()
-+
-+ # Force stats reporting to occur
-+ start_phone_stats_home(hs=homeserver)
-+
-+ super().prepare(reactor, clock, homeserver)
-+
-+ def _get_latest_phone_home_stats(self) -> JsonDict:
-+ # Wait for `phone_stats_home` to be called again + a healthy margin (50s).
-+ self.reactor.advance(2 * PHONE_HOME_INTERVAL_SECONDS + 50)
-+
-+ # Extract the reported stats from our http client mock
-+ mock_calls = self.put_json_mock.call_args_list
-+ report_stats_calls = []
-+ for call in mock_calls:
-+ if call.args[0] == TEST_REPORT_STATS_ENDPOINT:
-+ report_stats_calls.append(call)
-+
-+ self.assertGreaterEqual(
-+            len(report_stats_calls),
-+            1,
-+            "Expected at least one call to the report_stats endpoint",
-+ )
-+
-+ # Extract the phone home stats from the call
-+ phone_home_stats = report_stats_calls[0].args[1]
-+
-+ return phone_home_stats
-+
-+ def _perform_user_actions(self) -> None:
-+ """
-+ Perform some actions on the homeserver that would bump the phone home
-+ stats.
-+
-+ This creates a few users (including a guest), a room, and sends some messages.
-+ Expected number of events:
-+ - 10 unencrypted messages
-+ - 5 encrypted messages
-+ - 24 total events (including room state, etc)
-+ """
-+
-+ # Create some users
-+ user_1_mxid = self.register_user(
-+ username="test_user_1",
-+ password="test",
-+ )
-+ user_2_mxid = self.register_user(
-+ username="test_user_2",
-+ password="test",
-+ )
-+ # Note: `self.register_user` does not support guest registration, and updating the
-+ # Admin API it calls to add a new parameter would cause the `mac` parameter to fail
-+ # in a backwards-incompatible manner. Hence, we make a manual request here.
-+ _guest_user_mxid = self.make_request(
-+ method="POST",
-+ path="/_matrix/client/v3/register?kind=guest",
-+ content={
-+ "username": "guest_user",
-+ "password": "test",
-+ },
-+ shorthand=False,
-+ )
-+
-+ # Log in to each user
-+ user_1_token = self.login(username=user_1_mxid, password="test")
-+ user_2_token = self.login(username=user_2_mxid, password="test")
-+
-+ # Create a room between the two users
-+ room_1_id = self.helper.create_room_as(
-+ is_public=False,
-+ tok=user_1_token,
-+ )
-+
-+ # Mark this room as end-to-end encrypted
-+ self.helper.send_state(
-+ room_id=room_1_id,
-+ event_type="m.room.encryption",
-+ body={
-+ "algorithm": "m.megolm.v1.aes-sha2",
-+ "rotation_period_ms": 604800000,
-+ "rotation_period_msgs": 100,
-+ },
-+ state_key="",
-+ tok=user_1_token,
-+ )
-+
-+ # User 1 invites user 2
-+ self.helper.invite(
-+ room=room_1_id,
-+ src=user_1_mxid,
-+ targ=user_2_mxid,
-+ tok=user_1_token,
-+ )
-+
-+ # User 2 joins
-+ self.helper.join(
-+ room=room_1_id,
-+ user=user_2_mxid,
-+ tok=user_2_token,
-+ )
-+
-+ # User 1 sends 10 unencrypted messages
-+ for _ in range(10):
-+ self.helper.send(
-+ room_id=room_1_id,
-+ body="Zoinks Scoob! A message!",
-+ tok=user_1_token,
-+ )
-+
-+ # User 2 sends 5 encrypted "messages"
-+ for _ in range(5):
-+ self.helper.send_event(
-+ room_id=room_1_id,
-+ type="m.room.encrypted",
-+ content={
-+ "algorithm": "m.olm.v1.curve25519-aes-sha2",
-+ "sender_key": "some_key",
-+ "ciphertext": {
-+ "some_key": {
-+ "type": 0,
-+ "body": "encrypted_payload",
-+ },
-+ },
-+ },
-+ tok=user_2_token,
-+ )
-+
-+ def test_phone_home_stats(self) -> None:
-+ """
-+ Test that the phone home stats contain the stats we expect based on
-+        the scenario carried out in `_perform_user_actions`.
-+ """
-+ # Do things to bump the stats
-+ self._perform_user_actions()
-+
-+ # Wait for the stats to be reported
-+ phone_home_stats = self._get_latest_phone_home_stats()
-+
-+ self.assertEqual(
-+ phone_home_stats["homeserver"], self.hs.config.server.server_name
-+ )
-+
-+ self.assertTrue(isinstance(phone_home_stats["memory_rss"], int))
-+ self.assertTrue(isinstance(phone_home_stats["cpu_average"], int))
-+
-+ self.assertEqual(phone_home_stats["server_context"], TEST_SERVER_CONTEXT)
-+
-+ self.assertTrue(isinstance(phone_home_stats["timestamp"], int))
-+ self.assertTrue(isinstance(phone_home_stats["uptime_seconds"], int))
-+ self.assertTrue(isinstance(phone_home_stats["python_version"], str))
-+
-+ # We expect only our test users to exist on the homeserver
-+ self.assertEqual(phone_home_stats["total_users"], 3)
-+ self.assertEqual(phone_home_stats["total_nonbridged_users"], 3)
-+ self.assertEqual(phone_home_stats["daily_user_type_native"], 2)
-+ self.assertEqual(phone_home_stats["daily_user_type_guest"], 1)
-+ self.assertEqual(phone_home_stats["daily_user_type_bridged"], 0)
-+ self.assertEqual(phone_home_stats["total_room_count"], 1)
-+ self.assertEqual(phone_home_stats["daily_active_users"], 2)
-+ self.assertEqual(phone_home_stats["monthly_active_users"], 2)
-+ self.assertEqual(phone_home_stats["daily_active_rooms"], 1)
-+ self.assertEqual(phone_home_stats["daily_active_e2ee_rooms"], 1)
-+ self.assertEqual(phone_home_stats["daily_messages"], 10)
-+ self.assertEqual(phone_home_stats["daily_e2ee_messages"], 5)
-+ self.assertEqual(phone_home_stats["daily_sent_messages"], 10)
-+ self.assertEqual(phone_home_stats["daily_sent_e2ee_messages"], 5)
-+
-+ # Our users have not been around for >30 days, hence these are all 0.
-+ self.assertEqual(phone_home_stats["r30v2_users_all"], 0)
-+ self.assertEqual(phone_home_stats["r30v2_users_android"], 0)
-+ self.assertEqual(phone_home_stats["r30v2_users_ios"], 0)
-+ self.assertEqual(phone_home_stats["r30v2_users_electron"], 0)
-+ self.assertEqual(phone_home_stats["r30v2_users_web"], 0)
-+ self.assertEqual(
-+ phone_home_stats["cache_factor"], self.hs.config.caches.global_factor
-+ )
-+ self.assertEqual(
-+ phone_home_stats["event_cache_size"],
-+ self.hs.config.caches.event_cache_size,
-+ )
-+ self.assertEqual(
-+ phone_home_stats["database_engine"],
-+ self.hs.config.database.databases[0].config["name"],
-+ )
-+ self.assertEqual(
-+ phone_home_stats["database_server_version"],
-+ self.hs.get_datastores().main.database_engine.server_version,
-+ )
-+
-+ synapse_logger = logging.getLogger("synapse")
-+ log_level = synapse_logger.getEffectiveLevel()
-+ self.assertEqual(phone_home_stats["log_level"], logging.getLevelName(log_level))
---
-2.49.0
-
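The test above never sleeps for real: it schedules the reporting loop, advances a fake reactor past two reporting intervals plus a margin, and then inspects the mocked `put_json` for calls to the stats endpoint. A self-contained sketch of that pattern, with a hypothetical `FakeClock` standing in for the test reactor:

```python
from typing import Callable, List, Tuple

class FakeClock:
    """Hypothetical stand-in for the memory reactor used by the test."""

    def __init__(self) -> None:
        self.now = 0.0
        self._pending: List[Tuple[float, Callable[[], None]]] = []

    def call_later(self, delay: float, fn: Callable[[], None]) -> None:
        self._pending.append((self.now + delay, fn))

    def advance(self, seconds: float) -> None:
        # Jump time forward and run everything that came due.
        self.now += seconds
        due = [c for c in self._pending if c[0] <= self.now]
        self._pending = [c for c in self._pending if c[0] > self.now]
        for _, fn in due:
            fn()

reports: List[int] = []
clock = FakeClock()

def phone_home() -> None:
    reports.append(1)
    clock.call_later(3 * 60 * 60, phone_home)  # a looping call reschedules itself

clock.call_later(5 * 60, phone_home)  # the initial 5-minute delay
clock.advance(2 * 3 * 60 * 60 + 50)   # two intervals + margin, as in the test
assert len(reports) >= 1
```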
diff --git a/packages/overlays/matrix-synapse/patches/0023-Include-room-ID-in-room-deletion-status-response-183.patch b/packages/overlays/matrix-synapse/patches/0023-Include-room-ID-in-room-deletion-status-response-183.patch
deleted file mode 100644
index f14e7ed..0000000
--- a/packages/overlays/matrix-synapse/patches/0023-Include-room-ID-in-room-deletion-status-response-183.patch
+++ /dev/null
@@ -1,116 +0,0 @@
-From 553e124f766584456fbdb6d1aa37fdd12ad54dad Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Dagfinn=20Ilmari=20Manns=C3=A5ker?= <ilmari@ilmari.org>
-Date: Tue, 20 May 2025 17:53:30 +0100
-Subject: [PATCH 23/34] Include room ID in room deletion status response
- (#18318)
-
-When querying by `delete_id` it's handy to see which room the delete
-pertains to.
----
- changelog.d/18318.feature | 1 +
- docs/admin_api/rooms.md | 7 ++++++-
- synapse/rest/admin/rooms.py | 1 +
- tests/rest/admin/test_room.py | 7 +++++++
- 4 files changed, 15 insertions(+), 1 deletion(-)
- create mode 100644 changelog.d/18318.feature
-
-diff --git a/changelog.d/18318.feature b/changelog.d/18318.feature
-new file mode 100644
-index 0000000000..fba0e83577
---- /dev/null
-+++ b/changelog.d/18318.feature
-@@ -0,0 +1 @@
-+Include room ID in room deletion status response.
-diff --git a/docs/admin_api/rooms.md b/docs/admin_api/rooms.md
-index bfc2cd4376..bdda9b47ad 100644
---- a/docs/admin_api/rooms.md
-+++ b/docs/admin_api/rooms.md
-@@ -794,6 +794,7 @@ A response body like the following is returned:
- "results": [
- {
- "delete_id": "delete_id1",
-+ "room_id": "!roomid:example.com",
- "status": "failed",
- "error": "error message",
- "shutdown_room": {
-@@ -804,6 +805,7 @@ A response body like the following is returned:
- }
- }, {
- "delete_id": "delete_id2",
-+ "room_id": "!roomid:example.com",
- "status": "purging",
- "shutdown_room": {
- "kicked_users": [
-@@ -842,6 +844,8 @@ A response body like the following is returned:
- ```json
- {
- "status": "purging",
-+ "delete_id": "bHkCNQpHqOaFhPtK",
-+ "room_id": "!roomid:example.com",
- "shutdown_room": {
- "kicked_users": [
- "@foobar:example.com"
-@@ -869,7 +873,8 @@ The following fields are returned in the JSON response body:
- - `results` - An array of objects, each containing information about one task.
- This field is omitted from the result when you query by `delete_id`.
- Task objects contain the following fields:
-- - `delete_id` - The ID for this purge if you query by `room_id`.
-+  - `delete_id` - The ID for this purge.
-+  - `room_id` - The ID of the room being deleted.
- - `status` - The status will be one of:
- - `shutting_down` - The process is removing users from the room.
- - `purging` - The process is purging the room and event data from database.
-diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py
-index 3097cb1a9d..f8c5bf18d4 100644
---- a/synapse/rest/admin/rooms.py
-+++ b/synapse/rest/admin/rooms.py
-@@ -150,6 +150,7 @@ class RoomRestV2Servlet(RestServlet):
- def _convert_delete_task_to_response(task: ScheduledTask) -> JsonDict:
- return {
- "delete_id": task.id,
-+ "room_id": task.resource_id,
- "status": task.status,
- "shutdown_room": task.result,
- }
-diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py
-index 165d175ab2..8d806082aa 100644
---- a/tests/rest/admin/test_room.py
-+++ b/tests/rest/admin/test_room.py
-@@ -758,6 +758,8 @@ class DeleteRoomV2TestCase(unittest.HomeserverTestCase):
- self.assertEqual(2, len(channel.json_body["results"]))
- self.assertEqual("complete", channel.json_body["results"][0]["status"])
- self.assertEqual("complete", channel.json_body["results"][1]["status"])
-+ self.assertEqual(self.room_id, channel.json_body["results"][0]["room_id"])
-+ self.assertEqual(self.room_id, channel.json_body["results"][1]["room_id"])
- delete_ids = {delete_id1, delete_id2}
- self.assertTrue(channel.json_body["results"][0]["delete_id"] in delete_ids)
- delete_ids.remove(channel.json_body["results"][0]["delete_id"])
-@@ -777,6 +779,7 @@ class DeleteRoomV2TestCase(unittest.HomeserverTestCase):
- self.assertEqual(1, len(channel.json_body["results"]))
- self.assertEqual("complete", channel.json_body["results"][0]["status"])
- self.assertEqual(delete_id2, channel.json_body["results"][0]["delete_id"])
-+ self.assertEqual(self.room_id, channel.json_body["results"][0]["room_id"])
-
- # get status after more than clearing time for all tasks
- self.reactor.advance(TaskScheduler.KEEP_TASKS_FOR_MS / 1000 / 2)
-@@ -1237,6 +1240,9 @@ class DeleteRoomV2TestCase(unittest.HomeserverTestCase):
- self.assertEqual(
- delete_id, channel_room_id.json_body["results"][0]["delete_id"]
- )
-+ self.assertEqual(
-+ self.room_id, channel_room_id.json_body["results"][0]["room_id"]
-+ )
-
- # get information by delete_id
- channel_delete_id = self.make_request(
-@@ -1249,6 +1255,7 @@ class DeleteRoomV2TestCase(unittest.HomeserverTestCase):
- channel_delete_id.code,
- msg=channel_delete_id.json_body,
- )
-+ self.assertEqual(self.room_id, channel_delete_id.json_body["room_id"])
-
- # test values that are the same in both responses
- for content in [
---
-2.49.0
-
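Surfacing the room ID is a one-key change because the task scheduler already stores the room as each deletion task's `resource_id`. A trimmed, self-contained sketch of the mapping the patch adds, with a simplified `ScheduledTask` stand-in:

```python
from dataclasses import dataclass
from typing import Any, Dict, Optional

@dataclass
class ScheduledTask:
    """Simplified stand-in for synapse.types.ScheduledTask."""
    id: str
    resource_id: str  # for deletion tasks this is the room ID
    status: str
    result: Optional[Dict[str, Any]]

def convert_delete_task_to_response(task: ScheduledTask) -> Dict[str, Any]:
    return {
        "delete_id": task.id,
        "room_id": task.resource_id,  # the field this patch adds
        "status": task.status,
        "shutdown_room": task.result,
    }

task = ScheduledTask("bHkCNQpHqOaFhPtK", "!roomid:example.com", "purging", None)
assert convert_delete_task_to_response(task)["room_id"] == "!roomid:example.com"
```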
diff --git a/packages/overlays/matrix-synapse/patches/0024-Policy-server-part-1-Actually-call-the-policy-server.patch b/packages/overlays/matrix-synapse/patches/0024-Policy-server-part-1-Actually-call-the-policy-server.patch
deleted file mode 100644
index 528c970..0000000
--- a/packages/overlays/matrix-synapse/patches/0024-Policy-server-part-1-Actually-call-the-policy-server.patch
+++ /dev/null
@@ -1,666 +0,0 @@
-From b7d48419476f70e54dc24ecd986562ba22be52ec Mon Sep 17 00:00:00 2001
-From: Travis Ralston <travisr@element.io>
-Date: Wed, 21 May 2025 16:09:09 -0600
-Subject: [PATCH 24/34] Policy server part 1: Actually call the policy server
- (#18387)
-
-Roughly reviewable commit-by-commit.
-
-This is the first part of adding policy server support to Synapse. Other
-parts (unordered), which may or may not be bundled into fewer PRs,
-include:
-
-* Implementation of a bulk API
-* Supporting a moderation server config (the `fallback_*` options of
-https://github.com/element-hq/policyserv_spam_checker )
-* Adding an "early event hook" for appservices to receive federation
-transactions *before* events are processed formally
-* Performance and stability improvements
-
-### Pull Request Checklist
-
-<!-- Please read
-https://element-hq.github.io/synapse/latest/development/contributing_guide.html
-before submitting your pull request -->
-
-* [x] Pull request is based on the develop branch
-* [x] Pull request includes a [changelog
-file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog).
-The entry should:
-- Be a short description of your change which makes sense to users.
-"Fixed a bug that prevented receiving messages from other servers."
-instead of "Moved X method from `EventStore` to `EventWorkerStore`.".
- - Use markdown where necessary, mostly for `code blocks`.
- - End with either a period (.) or an exclamation mark (!).
- - Start with a capital letter.
-- Feel free to credit yourself, by adding a sentence "Contributed by
-@github_username." or "Contributed by [Your Name]." to the end of the
-entry.
-* [x] [Code
-style](https://element-hq.github.io/synapse/latest/code_style.html) is
-correct
-(run the
-[linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
-
----------
-
-Co-authored-by: turt2live <1190097+turt2live@users.noreply.github.com>
-Co-authored-by: Devon Hudson <devon.dmytro@gmail.com>
----
- changelog.d/18387.feature | 1 +
- synapse/federation/federation_base.py | 34 ++++
- synapse/federation/federation_client.py | 57 ++++++
- synapse/federation/transport/client.py | 27 +++
- synapse/handlers/message.py | 15 +-
- synapse/handlers/room_policy.py | 89 ++++++++++
- synapse/server.py | 5 +
- synapse/types/handlers/policy_server.py | 16 ++
- tests/handlers/test_room_policy.py | 226 ++++++++++++++++++++++++
- 9 files changed, 469 insertions(+), 1 deletion(-)
- create mode 100644 changelog.d/18387.feature
- create mode 100644 synapse/handlers/room_policy.py
- create mode 100644 synapse/types/handlers/policy_server.py
- create mode 100644 tests/handlers/test_room_policy.py
-
-diff --git a/changelog.d/18387.feature b/changelog.d/18387.feature
-new file mode 100644
-index 0000000000..2d9ff2cea2
---- /dev/null
-+++ b/changelog.d/18387.feature
-@@ -0,0 +1 @@
-+Add support for calling Policy Servers ([MSC4284](https://github.com/matrix-org/matrix-spec-proposals/pull/4284)) to mark events as spam.
-\ No newline at end of file
-diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py
-index 3796bff5e7..45593430e8 100644
---- a/synapse/federation/federation_base.py
-+++ b/synapse/federation/federation_base.py
-@@ -30,6 +30,7 @@ from synapse.crypto.keyring import Keyring
- from synapse.events import EventBase, make_event_from_dict
- from synapse.events.utils import prune_event, validate_canonicaljson
- from synapse.federation.units import filter_pdus_for_valid_depth
-+from synapse.handlers.room_policy import RoomPolicyHandler
- from synapse.http.servlet import assert_params_in_dict
- from synapse.logging.opentracing import log_kv, trace
- from synapse.types import JsonDict, get_domain_from_id
-@@ -64,6 +65,24 @@ class FederationBase:
- self._clock = hs.get_clock()
- self._storage_controllers = hs.get_storage_controllers()
-
-+ # We need to define this lazily otherwise we get a cyclic dependency.
-+ # self._policy_handler = hs.get_room_policy_handler()
-+ self._policy_handler: Optional[RoomPolicyHandler] = None
-+
-+ def _lazily_get_policy_handler(self) -> RoomPolicyHandler:
-+ """Lazily get the room policy handler.
-+
-+ This is required to avoid an import cycle: RoomPolicyHandler requires a
-+ FederationClient, which requires a FederationBase, which requires a
-+ RoomPolicyHandler.
-+
-+ Returns:
-+ RoomPolicyHandler: The room policy handler.
-+ """
-+ if self._policy_handler is None:
-+ self._policy_handler = self.hs.get_room_policy_handler()
-+ return self._policy_handler
-+
- @trace
- async def _check_sigs_and_hash(
- self,
-@@ -80,6 +99,10 @@ class FederationBase:
- Also runs the event through the spam checker; if it fails, redacts the event
- and flags it as soft-failed.
-
-+ Also checks that the event is allowed by the policy server, if the room uses
-+ a policy server. If the event is not allowed, the event is flagged as
-+ soft-failed but not redacted.
-+
- Args:
- room_version: The room version of the PDU
- pdu: the event to be checked
-@@ -145,6 +168,17 @@ class FederationBase:
- )
- return redacted_event
-
-+ policy_allowed = await self._lazily_get_policy_handler().is_event_allowed(pdu)
-+ if not policy_allowed:
-+ logger.warning(
-+ "Event not allowed by policy server, soft-failing %s", pdu.event_id
-+ )
-+ pdu.internal_metadata.soft_failed = True
-+ # Note: we don't redact the event so admins can inspect the event after the
-+ # fact. Other processes may redact the event, but that won't be applied to
-+ # the database copy of the event until the server's config requires it.
-+ return pdu
-+
- spam_check = await self._spam_checker_module_callbacks.check_event_for_spam(pdu)
-
- if spam_check != self._spam_checker_module_callbacks.NOT_SPAM:
-diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
-index 9fc5b70e9a..7c485aa7e0 100644
---- a/synapse/federation/federation_client.py
-+++ b/synapse/federation/federation_client.py
-@@ -75,6 +75,7 @@ from synapse.http.client import is_unknown_endpoint
- from synapse.http.types import QueryParams
- from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, tag_args, trace
- from synapse.types import JsonDict, StrCollection, UserID, get_domain_from_id
-+from synapse.types.handlers.policy_server import RECOMMENDATION_OK, RECOMMENDATION_SPAM
- from synapse.util.async_helpers import concurrently_execute
- from synapse.util.caches.expiringcache import ExpiringCache
- from synapse.util.retryutils import NotRetryingDestination
-@@ -421,6 +422,62 @@ class FederationClient(FederationBase):
-
- return None
-
-+ @trace
-+ @tag_args
-+ async def get_pdu_policy_recommendation(
-+ self, destination: str, pdu: EventBase, timeout: Optional[int] = None
-+ ) -> str:
-+ """Requests that the destination server (typically a policy server)
-+ check the event and return its recommendation on how to handle the
-+ event.
-+
-+ If the policy server could not be contacted or the policy server
-+ returned an unknown recommendation, this returns an OK recommendation.
-+        This type-fixing behaviour is done because the typical caller will be
-+ in a critical call path and would generally interpret a `None` or similar
-+ response as "weird value; don't care; move on without taking action". We
-+ just frontload that logic here.
-+
-+
-+ Args:
-+ destination: The remote homeserver to ask (a policy server)
-+ pdu: The event to check
-+ timeout: How long to try (in ms) the destination for before
-+ giving up. None indicates no timeout.
-+
-+ Returns:
-+ The policy recommendation, or RECOMMENDATION_OK if the policy server was
-+ uncontactable or returned an unknown recommendation.
-+ """
-+
-+ logger.debug(
-+ "get_pdu_policy_recommendation for event_id=%s from %s",
-+ pdu.event_id,
-+ destination,
-+ )
-+
-+ try:
-+ res = await self.transport_layer.get_policy_recommendation_for_pdu(
-+ destination, pdu, timeout=timeout
-+ )
-+ recommendation = res.get("recommendation")
-+ if not isinstance(recommendation, str):
-+ raise InvalidResponseError("recommendation is not a string")
-+ if recommendation not in (RECOMMENDATION_OK, RECOMMENDATION_SPAM):
-+ logger.warning(
-+ "get_pdu_policy_recommendation: unknown recommendation: %s",
-+ recommendation,
-+ )
-+ return RECOMMENDATION_OK
-+ return recommendation
-+ except Exception as e:
-+ logger.warning(
-+ "get_pdu_policy_recommendation: server %s responded with error, assuming OK recommendation: %s",
-+ destination,
-+ e,
-+ )
-+ return RECOMMENDATION_OK
-+
- @trace
- @tag_args
- async def get_pdu(
-diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
-index 206e91ed14..62bf96ce91 100644
---- a/synapse/federation/transport/client.py
-+++ b/synapse/federation/transport/client.py
-@@ -143,6 +143,33 @@ class TransportLayerClient:
- destination, path=path, timeout=timeout, try_trailing_slash_on_400=True
- )
-
-+ async def get_policy_recommendation_for_pdu(
-+ self, destination: str, event: EventBase, timeout: Optional[int] = None
-+ ) -> JsonDict:
-+ """Requests the policy recommendation for the given pdu from the given policy server.
-+
-+ Args:
-+ destination: The host name of the remote homeserver checking the event.
-+ event: The event to check.
-+ timeout: How long to try (in ms) the destination for before giving up.
-+ None indicates no timeout.
-+
-+ Returns:
-+ The full recommendation object from the remote server.
-+ """
-+ logger.debug(
-+ "get_policy_recommendation_for_pdu dest=%s, event_id=%s",
-+ destination,
-+ event.event_id,
-+ )
-+ return await self.client.post_json(
-+ destination=destination,
-+ path=f"/_matrix/policy/unstable/org.matrix.msc4284/event/{event.event_id}/check",
-+ data=event.get_pdu_json(),
-+ ignore_backoff=True,
-+ timeout=timeout,
-+ )
-+
- async def backfill(
- self, destination: str, room_id: str, event_tuples: Collection[str], limit: int
- ) -> Optional[Union[JsonDict, list]]:
-diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
-index ff6eb5a514..cb6de02309 100644
---- a/synapse/handlers/message.py
-+++ b/synapse/handlers/message.py
-@@ -495,6 +495,7 @@ class EventCreationHandler:
- self._instance_name = hs.get_instance_name()
- self._notifier = hs.get_notifier()
- self._worker_lock_handler = hs.get_worker_locks_handler()
-+ self._policy_handler = hs.get_room_policy_handler()
-
- self.room_prejoin_state_types = self.hs.config.api.room_prejoin_state
-
-@@ -1108,6 +1109,18 @@ class EventCreationHandler:
- event.sender,
- )
-
-+ policy_allowed = await self._policy_handler.is_event_allowed(event)
-+ if not policy_allowed:
-+ logger.warning(
-+ "Event not allowed by policy server, rejecting %s",
-+ event.event_id,
-+ )
-+ raise SynapseError(
-+ 403,
-+ "This message has been rejected as probable spam",
-+ Codes.FORBIDDEN,
-+ )
-+
- spam_check_result = (
- await self._spam_checker_module_callbacks.check_event_for_spam(
- event
-@@ -1119,7 +1132,7 @@ class EventCreationHandler:
- [code, dict] = spam_check_result
- raise SynapseError(
- 403,
-- "This message had been rejected as probable spam",
-+ "This message has been rejected as probable spam",
- code,
- dict,
- )
-diff --git a/synapse/handlers/room_policy.py b/synapse/handlers/room_policy.py
-new file mode 100644
-index 0000000000..dcfebb128c
---- /dev/null
-+++ b/synapse/handlers/room_policy.py
-@@ -0,0 +1,89 @@
-+#
-+# This file is licensed under the Affero General Public License (AGPL) version 3.
-+#
-+# Copyright 2016-2021 The Matrix.org Foundation C.I.C.
-+# Copyright (C) 2023 New Vector, Ltd
-+#
-+# This program is free software: you can redistribute it and/or modify
-+# it under the terms of the GNU Affero General Public License as
-+# published by the Free Software Foundation, either version 3 of the
-+# License, or (at your option) any later version.
-+#
-+# See the GNU Affero General Public License for more details:
-+# <https://www.gnu.org/licenses/agpl-3.0.html>.
-+#
-+#
-+
-+import logging
-+from typing import TYPE_CHECKING
-+
-+from synapse.events import EventBase
-+from synapse.types.handlers.policy_server import RECOMMENDATION_OK
-+from synapse.util.stringutils import parse_and_validate_server_name
-+
-+if TYPE_CHECKING:
-+ from synapse.server import HomeServer
-+
-+logger = logging.getLogger(__name__)
-+
-+
-+class RoomPolicyHandler:
-+ def __init__(self, hs: "HomeServer"):
-+ self._hs = hs
-+ self._store = hs.get_datastores().main
-+ self._storage_controllers = hs.get_storage_controllers()
-+ self._event_auth_handler = hs.get_event_auth_handler()
-+ self._federation_client = hs.get_federation_client()
-+
-+ async def is_event_allowed(self, event: EventBase) -> bool:
-+ """Check if the given event is allowed in the room by the policy server.
-+
-+ Note: This will *always* return True if the room's policy server is Synapse
-+ itself. This is because Synapse can't be a policy server (currently).
-+
-+ If no policy server is configured in the room, this returns True. Similarly, if
-+ the policy server is invalid in any way (not joined, not a server, etc), this
-+ returns True.
-+
-+ If a valid and contactable policy server is configured in the room, this returns
-+ True if that server suggests the event is not spammy, and False otherwise.
-+
-+ Args:
-+ event: The event to check. This should be a fully-formed PDU.
-+
-+ Returns:
-+ bool: True if the event is allowed in the room, False otherwise.
-+ """
-+ policy_event = await self._storage_controllers.state.get_current_state_event(
-+ event.room_id, "org.matrix.msc4284.policy", ""
-+ )
-+ if not policy_event:
-+ return True # no policy server == default allow
-+
-+ policy_server = policy_event.content.get("via", "")
-+ if policy_server is None or not isinstance(policy_server, str):
-+ return True # no policy server == default allow
-+
-+ if policy_server == self._hs.hostname:
-+ return True # Synapse itself can't be a policy server (currently)
-+
-+ try:
-+ parse_and_validate_server_name(policy_server)
-+ except ValueError:
-+ return True # invalid policy server == default allow
-+
-+ is_in_room = await self._event_auth_handler.is_host_in_room(
-+ event.room_id, policy_server
-+ )
-+ if not is_in_room:
-+ return True # policy server not in room == default allow
-+
-+ # At this point, the server appears valid and is in the room, so ask it to check
-+ # the event.
-+ recommendation = await self._federation_client.get_pdu_policy_recommendation(
-+ policy_server, event
-+ )
-+ if recommendation != RECOMMENDATION_OK:
-+ return False
-+
-+ return True # default allow
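Every early return in `is_event_allowed` above fails open: a missing, malformed, self-pointing, or non-joined policy server leaves the event allowed, and only an explicit non-OK recommendation from a valid, in-room server blocks it. A compressed, hypothetical model of that decision table (illustrative names, not Synapse's API):

```python
RECOMMENDATION_OK = "ok"

def is_allowed(
    via: object,          # content of the policy state event's "via" key
    local_server: str,    # this homeserver's name
    host_in_room: bool,   # is the policy server joined to the room?
    recommendation: str,  # what the policy server would answer
) -> bool:
    if not isinstance(via, str) or not via:
        return True       # no/invalid policy server: default allow
    if via == local_server:
        return True       # Synapse can't be its own policy server
    if not host_in_room:
        return True       # policy server not in the room: default allow
    return recommendation == RECOMMENDATION_OK

assert is_allowed(42, "example.org", True, "spam")                # bad "via"
assert is_allowed("ps.example", "example.org", False, "spam")     # not joined
assert not is_allowed("ps.example", "example.org", True, "spam")  # blocked
```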
-diff --git a/synapse/server.py b/synapse/server.py
-index bd2faa61b9..2add4d4e6e 100644
---- a/synapse/server.py
-+++ b/synapse/server.py
-@@ -107,6 +107,7 @@ from synapse.handlers.room_member import (
- RoomMemberMasterHandler,
- )
- from synapse.handlers.room_member_worker import RoomMemberWorkerHandler
-+from synapse.handlers.room_policy import RoomPolicyHandler
- from synapse.handlers.room_summary import RoomSummaryHandler
- from synapse.handlers.search import SearchHandler
- from synapse.handlers.send_email import SendEmailHandler
-@@ -807,6 +808,10 @@ class HomeServer(metaclass=abc.ABCMeta):
-
- return OidcHandler(self)
-
-+ @cache_in_self
-+ def get_room_policy_handler(self) -> RoomPolicyHandler:
-+ return RoomPolicyHandler(self)
-+
- @cache_in_self
- def get_event_client_serializer(self) -> EventClientSerializer:
- return EventClientSerializer(self)
-diff --git a/synapse/types/handlers/policy_server.py b/synapse/types/handlers/policy_server.py
-new file mode 100644
-index 0000000000..bfc09dabf4
---- /dev/null
-+++ b/synapse/types/handlers/policy_server.py
-@@ -0,0 +1,16 @@
-+#
-+# This file is licensed under the Affero General Public License (AGPL) version 3.
-+#
-+# Copyright (C) 2025 New Vector, Ltd
-+#
-+# This program is free software: you can redistribute it and/or modify
-+# it under the terms of the GNU Affero General Public License as
-+# published by the Free Software Foundation, either version 3 of the
-+# License, or (at your option) any later version.
-+#
-+# See the GNU Affero General Public License for more details:
-+# <https://www.gnu.org/licenses/agpl-3.0.html>.
-+#
-+
-+RECOMMENDATION_OK = "ok"
-+RECOMMENDATION_SPAM = "spam"
-diff --git a/tests/handlers/test_room_policy.py b/tests/handlers/test_room_policy.py
-new file mode 100644
-index 0000000000..26642c18ea
---- /dev/null
-+++ b/tests/handlers/test_room_policy.py
-@@ -0,0 +1,226 @@
-+#
-+# This file is licensed under the Affero General Public License (AGPL) version 3.
-+#
-+# Copyright (C) 2025 New Vector, Ltd
-+#
-+# This program is free software: you can redistribute it and/or modify
-+# it under the terms of the GNU Affero General Public License as
-+# published by the Free Software Foundation, either version 3 of the
-+# License, or (at your option) any later version.
-+#
-+# See the GNU Affero General Public License for more details:
-+# <https://www.gnu.org/licenses/agpl-3.0.html>.
-+#
-+#
-+from typing import Optional
-+from unittest import mock
-+
-+from twisted.test.proto_helpers import MemoryReactor
-+
-+from synapse.events import EventBase, make_event_from_dict
-+from synapse.rest import admin
-+from synapse.rest.client import login, room
-+from synapse.server import HomeServer
-+from synapse.types import JsonDict, UserID
-+from synapse.types.handlers.policy_server import RECOMMENDATION_OK, RECOMMENDATION_SPAM
-+from synapse.util import Clock
-+
-+from tests import unittest
-+from tests.test_utils import event_injection
-+
-+
-+class RoomPolicyTestCase(unittest.FederatingHomeserverTestCase):
-+ """Tests room policy handler."""
-+
-+ servlets = [
-+ admin.register_servlets,
-+ login.register_servlets,
-+ room.register_servlets,
-+ ]
-+
-+ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
-+ # mock out the federation transport client
-+ self.mock_federation_transport_client = mock.Mock(
-+ spec=["get_policy_recommendation_for_pdu"]
-+ )
-+ self.mock_federation_transport_client.get_policy_recommendation_for_pdu = (
-+ mock.AsyncMock()
-+ )
-+ return super().setup_test_homeserver(
-+ federation_transport_client=self.mock_federation_transport_client
-+ )
-+
-+ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
-+ self.hs = hs
-+ self.handler = hs.get_room_policy_handler()
-+ main_store = self.hs.get_datastores().main
-+
-+ # Create a room
-+ self.creator = self.register_user("creator", "test1234")
-+ self.creator_token = self.login("creator", "test1234")
-+ self.room_id = self.helper.create_room_as(
-+ room_creator=self.creator, tok=self.creator_token
-+ )
-+ room_version = self.get_success(main_store.get_room_version(self.room_id))
-+
-+ # Create some sample events
-+ self.spammy_event = make_event_from_dict(
-+ room_version=room_version,
-+ internal_metadata_dict={},
-+ event_dict={
-+ "room_id": self.room_id,
-+ "type": "m.room.message",
-+ "sender": "@spammy:example.org",
-+ "content": {
-+ "msgtype": "m.text",
-+ "body": "This is a spammy event.",
-+ },
-+ },
-+ )
-+ self.not_spammy_event = make_event_from_dict(
-+ room_version=room_version,
-+ internal_metadata_dict={},
-+ event_dict={
-+ "room_id": self.room_id,
-+ "type": "m.room.message",
-+ "sender": "@not_spammy:example.org",
-+ "content": {
-+ "msgtype": "m.text",
-+ "body": "This is a NOT spammy event.",
-+ },
-+ },
-+ )
-+
-+ # Prepare the policy server mock to decide spam vs not spam on those events
-+ self.call_count = 0
-+
-+ async def get_policy_recommendation_for_pdu(
-+ destination: str,
-+ pdu: EventBase,
-+ timeout: Optional[int] = None,
-+ ) -> JsonDict:
-+ self.call_count += 1
-+ self.assertEqual(destination, self.OTHER_SERVER_NAME)
-+ if pdu.event_id == self.spammy_event.event_id:
-+ return {"recommendation": RECOMMENDATION_SPAM}
-+ elif pdu.event_id == self.not_spammy_event.event_id:
-+ return {"recommendation": RECOMMENDATION_OK}
-+ else:
-+ self.fail("Unexpected event ID")
-+
-+ self.mock_federation_transport_client.get_policy_recommendation_for_pdu.side_effect = get_policy_recommendation_for_pdu
-+
-+ def _add_policy_server_to_room(self) -> None:
-+ # Inject a member event into the room
-+ policy_user_id = f"@policy:{self.OTHER_SERVER_NAME}"
-+ self.get_success(
-+ event_injection.inject_member_event(
-+ self.hs, self.room_id, policy_user_id, "join"
-+ )
-+ )
-+ self.helper.send_state(
-+ self.room_id,
-+ "org.matrix.msc4284.policy",
-+ {
-+ "via": self.OTHER_SERVER_NAME,
-+ },
-+ tok=self.creator_token,
-+ state_key="",
-+ )
-+
-+ def test_no_policy_event_set(self) -> None:
-+ # We don't need to modify the room state at all - we're testing the default
-+ # case where a room doesn't use a policy server.
-+ ok = self.get_success(self.handler.is_event_allowed(self.spammy_event))
-+ self.assertEqual(ok, True)
-+ self.assertEqual(self.call_count, 0)
-+
-+ def test_empty_policy_event_set(self) -> None:
-+ self.helper.send_state(
-+ self.room_id,
-+ "org.matrix.msc4284.policy",
-+ {
-+ # empty content (no `via`)
-+ },
-+ tok=self.creator_token,
-+ state_key="",
-+ )
-+
-+ ok = self.get_success(self.handler.is_event_allowed(self.spammy_event))
-+ self.assertEqual(ok, True)
-+ self.assertEqual(self.call_count, 0)
-+
-+ def test_nonstring_policy_event_set(self) -> None:
-+ self.helper.send_state(
-+ self.room_id,
-+ "org.matrix.msc4284.policy",
-+ {
-+ "via": 42, # should be a server name
-+ },
-+ tok=self.creator_token,
-+ state_key="",
-+ )
-+
-+ ok = self.get_success(self.handler.is_event_allowed(self.spammy_event))
-+ self.assertEqual(ok, True)
-+ self.assertEqual(self.call_count, 0)
-+
-+ def test_self_policy_event_set(self) -> None:
-+ self.helper.send_state(
-+ self.room_id,
-+ "org.matrix.msc4284.policy",
-+ {
-+ # We ignore events when the policy server is ourselves (for now?)
-+ "via": (UserID.from_string(self.creator)).domain,
-+ },
-+ tok=self.creator_token,
-+ state_key="",
-+ )
-+
-+ ok = self.get_success(self.handler.is_event_allowed(self.spammy_event))
-+ self.assertEqual(ok, True)
-+ self.assertEqual(self.call_count, 0)
-+
-+ def test_invalid_server_policy_event_set(self) -> None:
-+ self.helper.send_state(
-+ self.room_id,
-+ "org.matrix.msc4284.policy",
-+ {
-+ "via": "|this| is *not* a (valid) server name.com",
-+ },
-+ tok=self.creator_token,
-+ state_key="",
-+ )
-+
-+ ok = self.get_success(self.handler.is_event_allowed(self.spammy_event))
-+ self.assertEqual(ok, True)
-+ self.assertEqual(self.call_count, 0)
-+
-+ def test_not_in_room_policy_event_set(self) -> None:
-+ self.helper.send_state(
-+ self.room_id,
-+ "org.matrix.msc4284.policy",
-+ {
-+ "via": f"x.{self.OTHER_SERVER_NAME}",
-+ },
-+ tok=self.creator_token,
-+ state_key="",
-+ )
-+
-+ ok = self.get_success(self.handler.is_event_allowed(self.spammy_event))
-+ self.assertEqual(ok, True)
-+ self.assertEqual(self.call_count, 0)
-+
-+ def test_spammy_event_is_spam(self) -> None:
-+ self._add_policy_server_to_room()
-+
-+ ok = self.get_success(self.handler.is_event_allowed(self.spammy_event))
-+ self.assertEqual(ok, False)
-+ self.assertEqual(self.call_count, 1)
-+
-+ def test_not_spammy_event_is_not_spam(self) -> None:
-+ self._add_policy_server_to_room()
-+
-+ ok = self.get_success(self.handler.is_event_allowed(self.not_spammy_event))
-+ self.assertEqual(ok, True)
-+ self.assertEqual(self.call_count, 1)
---
-2.49.0
-
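The tests above pin down the MSC4284 decision flow: consult the server named in the `via` field of the room's `org.matrix.msc4284.policy` state event, and fail open (allow the event) whenever no usable policy server is configured. A minimal sketch of that flow follows; `get_policy_state` and `get_recommendation` are hypothetical stand-ins for Synapse's storage and federation layers, not real Synapse APIs.

```python
from typing import Any, Awaitable, Callable, Mapping, Optional

RECOMMENDATION_OK = "ok"
RECOMMENDATION_SPAM = "spam"

async def is_event_allowed(
    event: Any,
    get_policy_state: Callable[[str], Awaitable[Optional[Mapping[str, Any]]]],
    get_recommendation: Callable[[str, Any], Awaitable[Mapping[str, Any]]],
) -> bool:
    # No policy state event, empty content, or a non-string `via`:
    # fail open and allow the event (matching the tests above).
    state = await get_policy_state(event.room_id)
    via = state.get("via") if state else None
    if not isinstance(via, str) or not via:
        return True
    # Otherwise ask the policy server; only an explicit "spam"
    # recommendation blocks the event.
    response = await get_recommendation(via, event)
    return response.get("recommendation") != RECOMMENDATION_SPAM
```

The real handler also skips policy servers that are ourselves, are not valid server names, or are not joined to the room; those checks are elided in this sketch.
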
diff --git a/packages/overlays/matrix-synapse/patches/0025-Bump-pyo3-from-0.23.5-to-0.24.2-18460.patch b/packages/overlays/matrix-synapse/patches/0025-Bump-pyo3-from-0.23.5-to-0.24.2-18460.patch
deleted file mode 100644
index 7d1ee5d..0000000
--- a/packages/overlays/matrix-synapse/patches/0025-Bump-pyo3-from-0.23.5-to-0.24.2-18460.patch
+++ /dev/null
@@ -1,166 +0,0 @@
-From ed6b7ba9c3da3add7a1551069411fa3697b0efc4 Mon Sep 17 00:00:00 2001
-From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
-Date: Wed, 21 May 2025 23:12:01 +0100
-Subject: [PATCH 25/34] Bump pyo3 from 0.23.5 to 0.24.2 (#18460)
-
-Also bump pythonize from 0.23.0 to 0.24.0; otherwise we couldn't compile,
-as pythonize 0.23.0 requires pyo3 "^0.23.0".
-
-Addresses
-[RUSTSEC-2025-0020](https://rustsec.org/advisories/RUSTSEC-2025-0020),
-although Synapse is not affected as we don't make use of
-`PyString::from_object`.
-
-[pyo3 0.24.x](https://github.com/PyO3/pyo3/releases/tag/v0.24.0) reportedly
-includes some performance optimisations, and no breaking changes.
-
----
- Cargo.lock | 28 ++++++++++++++--------------
- changelog.d/18460.misc | 1 +
- rust/Cargo.toml | 4 ++--
- 3 files changed, 17 insertions(+), 16 deletions(-)
- create mode 100644 changelog.d/18460.misc
-
-diff --git a/Cargo.lock b/Cargo.lock
-index 13156e67b5..980dff6987 100644
---- a/Cargo.lock
-+++ b/Cargo.lock
-@@ -277,9 +277,9 @@ dependencies = [
-
- [[package]]
- name = "pyo3"
--version = "0.23.5"
-+version = "0.24.2"
- source = "registry+https://github.com/rust-lang/crates.io-index"
--checksum = "7778bffd85cf38175ac1f545509665d0b9b92a198ca7941f131f85f7a4f9a872"
-+checksum = "e5203598f366b11a02b13aa20cab591229ff0a89fd121a308a5df751d5fc9219"
- dependencies = [
- "anyhow",
- "cfg-if",
-@@ -296,9 +296,9 @@ dependencies = [
-
- [[package]]
- name = "pyo3-build-config"
--version = "0.23.5"
-+version = "0.24.2"
- source = "registry+https://github.com/rust-lang/crates.io-index"
--checksum = "94f6cbe86ef3bf18998d9df6e0f3fc1050a8c5efa409bf712e661a4366e010fb"
-+checksum = "99636d423fa2ca130fa5acde3059308006d46f98caac629418e53f7ebb1e9999"
- dependencies = [
- "once_cell",
- "target-lexicon",
-@@ -306,9 +306,9 @@ dependencies = [
-
- [[package]]
- name = "pyo3-ffi"
--version = "0.23.5"
-+version = "0.24.2"
- source = "registry+https://github.com/rust-lang/crates.io-index"
--checksum = "e9f1b4c431c0bb1c8fb0a338709859eed0d030ff6daa34368d3b152a63dfdd8d"
-+checksum = "78f9cf92ba9c409279bc3305b5409d90db2d2c22392d443a87df3a1adad59e33"
- dependencies = [
- "libc",
- "pyo3-build-config",
-@@ -327,9 +327,9 @@ dependencies = [
-
- [[package]]
- name = "pyo3-macros"
--version = "0.23.5"
-+version = "0.24.2"
- source = "registry+https://github.com/rust-lang/crates.io-index"
--checksum = "fbc2201328f63c4710f68abdf653c89d8dbc2858b88c5d88b0ff38a75288a9da"
-+checksum = "0b999cb1a6ce21f9a6b147dcf1be9ffedf02e0043aec74dc390f3007047cecd9"
- dependencies = [
- "proc-macro2",
- "pyo3-macros-backend",
-@@ -339,9 +339,9 @@ dependencies = [
-
- [[package]]
- name = "pyo3-macros-backend"
--version = "0.23.5"
-+version = "0.24.2"
- source = "registry+https://github.com/rust-lang/crates.io-index"
--checksum = "fca6726ad0f3da9c9de093d6f116a93c1a38e417ed73bf138472cf4064f72028"
-+checksum = "822ece1c7e1012745607d5cf0bcb2874769f0f7cb34c4cde03b9358eb9ef911a"
- dependencies = [
- "heck",
- "proc-macro2",
-@@ -352,9 +352,9 @@ dependencies = [
-
- [[package]]
- name = "pythonize"
--version = "0.23.0"
-+version = "0.24.0"
- source = "registry+https://github.com/rust-lang/crates.io-index"
--checksum = "91a6ee7a084f913f98d70cdc3ebec07e852b735ae3059a1500db2661265da9ff"
-+checksum = "d5bcac0d0b71821f0d69e42654f1e15e5c94b85196446c4de9588951a2117e7b"
- dependencies = [
- "pyo3",
- "serde",
-@@ -532,9 +532,9 @@ dependencies = [
-
- [[package]]
- name = "target-lexicon"
--version = "0.12.14"
-+version = "0.13.2"
- source = "registry+https://github.com/rust-lang/crates.io-index"
--checksum = "e1fc403891a21bcfb7c37834ba66a547a8f402146eba7265b5a6d88059c9ff2f"
-+checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a"
-
- [[package]]
- name = "typenum"
-diff --git a/changelog.d/18460.misc b/changelog.d/18460.misc
-new file mode 100644
-index 0000000000..5aa19683eb
---- /dev/null
-+++ b/changelog.d/18460.misc
-@@ -0,0 +1 @@
-+Bump pyo3 from 0.23.5 to 0.24.2.
-\ No newline at end of file
-diff --git a/rust/Cargo.toml b/rust/Cargo.toml
-index 651b268f86..840988e74e 100644
---- a/rust/Cargo.toml
-+++ b/rust/Cargo.toml
-@@ -30,14 +30,14 @@ http = "1.1.0"
- lazy_static = "1.4.0"
- log = "0.4.17"
- mime = "0.3.17"
--pyo3 = { version = "0.23.5", features = [
-+pyo3 = { version = "0.24.2", features = [
- "macros",
- "anyhow",
- "abi3",
- "abi3-py39",
- ] }
- pyo3-log = "0.12.0"
--pythonize = "0.23.0"
-+pythonize = "0.24.0"
- regex = "1.6.0"
- sha2 = "0.10.8"
- serde = { version = "1.0.144", features = ["derive"] }
---
-2.49.0
-
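The coupling described in the commit message follows from cargo's caret semantics for 0.x crates: `^0.23.0` means `>=0.23.0, <0.24.0`, so pythonize 0.23.0 can never build against pyo3 0.24.2. A quick illustration using Python's `packaging` library; translating cargo's caret into that specifier is the assumption here.

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

# Cargo's "^0.23.0" on a 0.x crate pins the minor version:
caret_0_23 = SpecifierSet(">=0.23.0,<0.24.0")

print(Version("0.23.5") in caret_0_23)  # True: old pyo3 satisfies pythonize 0.23
print(Version("0.24.2") in caret_0_23)  # False: hence pythonize must move to 0.24
```
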
diff --git a/packages/overlays/matrix-synapse/patches/0026-Bump-Tornado-from-6.4.2-to-6.5.0-18459.patch b/packages/overlays/matrix-synapse/patches/0026-Bump-Tornado-from-6.4.2-to-6.5.0-18459.patch
deleted file mode 100644
index f4f4784..0000000
--- a/packages/overlays/matrix-synapse/patches/0026-Bump-Tornado-from-6.4.2-to-6.5.0-18459.patch
+++ /dev/null
@@ -1,93 +0,0 @@
-From 162407319103a9f553225a925017cf3f951a4644 Mon Sep 17 00:00:00 2001
-From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
-Date: Wed, 21 May 2025 23:24:12 +0100
-Subject: [PATCH 26/34] Bump Tornado from 6.4.2 to 6.5.0 (#18459)
-
-Bumps tornado to 6.5.0 to mitigate
-[CVE-2025-47287](https://nvd.nist.gov/vuln/detail/CVE-2025-47287).
-
-This dependency is only used indirectly through our sentry dependency.
-
----
- changelog.d/18459.misc | 1 +
- poetry.lock | 27 ++++++++++++++-------------
- 2 files changed, 15 insertions(+), 13 deletions(-)
- create mode 100644 changelog.d/18459.misc
-
-diff --git a/changelog.d/18459.misc b/changelog.d/18459.misc
-new file mode 100644
-index 0000000000..e148825696
---- /dev/null
-+++ b/changelog.d/18459.misc
-@@ -0,0 +1 @@
-+Bump tornado from 6.4.2 to 6.5.0.
-\ No newline at end of file
-diff --git a/poetry.lock b/poetry.lock
-index ada0646215..9938e46780 100644
---- a/poetry.lock
-+++ b/poetry.lock
-@@ -2767,24 +2767,25 @@ files = [
-
- [[package]]
- name = "tornado"
--version = "6.4.2"
-+version = "6.5"
- description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed."
- optional = true
--python-versions = ">=3.8"
-+python-versions = ">=3.9"
- groups = ["main"]
- markers = "extra == \"all\" or extra == \"opentracing\""
- files = [
-- {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"},
-- {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"},
-- {file = "tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec"},
-- {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946"},
-- {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf"},
-- {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634"},
-- {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73"},
-- {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c"},
-- {file = "tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482"},
-- {file = "tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38"},
-- {file = "tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b"},
-+ {file = "tornado-6.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:f81067dad2e4443b015368b24e802d0083fecada4f0a4572fdb72fc06e54a9a6"},
-+ {file = "tornado-6.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9ac1cbe1db860b3cbb251e795c701c41d343f06a96049d6274e7c77559117e41"},
-+ {file = "tornado-6.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c625b9d03f1fb4d64149c47d0135227f0434ebb803e2008040eb92906b0105a"},
-+ {file = "tornado-6.5-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a0d8d2309faf015903080fb5bdd969ecf9aa5ff893290845cf3fd5b2dd101bc"},
-+ {file = "tornado-6.5-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03576ab51e9b1677e4cdaae620d6700d9823568b7939277e4690fe4085886c55"},
-+ {file = "tornado-6.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ab75fe43d0e1b3a5e3ceddb2a611cb40090dd116a84fc216a07a298d9e000471"},
-+ {file = "tornado-6.5-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:119c03f440a832128820e87add8a175d211b7f36e7ee161c631780877c28f4fb"},
-+ {file = "tornado-6.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:231f2193bb4c28db2bdee9e57bc6ca0cd491f345cd307c57d79613b058e807e0"},
-+ {file = "tornado-6.5-cp39-abi3-win32.whl", hash = "sha256:fd20c816e31be1bbff1f7681f970bbbd0bb241c364220140228ba24242bcdc59"},
-+ {file = "tornado-6.5-cp39-abi3-win_amd64.whl", hash = "sha256:007f036f7b661e899bd9ef3fa5f87eb2cb4d1b2e7d67368e778e140a2f101a7a"},
-+ {file = "tornado-6.5-cp39-abi3-win_arm64.whl", hash = "sha256:542e380658dcec911215c4820654662810c06ad872eefe10def6a5e9b20e9633"},
-+ {file = "tornado-6.5.tar.gz", hash = "sha256:c70c0a26d5b2d85440e4debd14a8d0b463a0cf35d92d3af05f5f1ffa8675c826"},
- ]
-
- [[package]]
---
-2.49.0
-
diff --git a/packages/overlays/matrix-synapse/patches/0027-Don-t-move-invited-users-to-new-room-when-shutting-d.patch b/packages/overlays/matrix-synapse/patches/0027-Don-t-move-invited-users-to-new-room-when-shutting-d.patch
deleted file mode 100644
index bbec2e2..0000000
--- a/packages/overlays/matrix-synapse/patches/0027-Don-t-move-invited-users-to-new-room-when-shutting-d.patch
+++ /dev/null
@@ -1,118 +0,0 @@
-From 24e849e483820762fa2d231ad702e6aeaa23829c Mon Sep 17 00:00:00 2001
-From: Shay <hillerys@element.io>
-Date: Fri, 23 May 2025 01:59:40 -0700
-Subject: [PATCH 27/34] Don't move invited users to new room when shutting down
- room (#18471)
-
-This is confusing to users who received unwanted invites.
----
- changelog.d/18471.misc | 1 +
- synapse/handlers/room.py | 23 ++++++++++----------
- tests/rest/admin/test_room.py | 41 +++++++++++++++++++++++++++++++++++
- 3 files changed, 54 insertions(+), 11 deletions(-)
- create mode 100644 changelog.d/18471.misc
-
-diff --git a/changelog.d/18471.misc b/changelog.d/18471.misc
-new file mode 100644
-index 0000000000..b36712bea3
---- /dev/null
-+++ b/changelog.d/18471.misc
-@@ -0,0 +1 @@
-+Don't move invited users to new room when shutting down room.
-\ No newline at end of file
-diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
-index 386375d64b..763f99e028 100644
---- a/synapse/handlers/room.py
-+++ b/synapse/handlers/room.py
-@@ -1806,7 +1806,7 @@ class RoomShutdownHandler:
- ] = None,
- ) -> Optional[ShutdownRoomResponse]:
- """
-- Shuts down a room. Moves all local users and room aliases automatically
-+ Shuts down a room. Moves all joined local users and room aliases automatically
- to a new room if `new_room_user_id` is set. Otherwise local users only
- leave the room without any information.
-
-@@ -1949,16 +1949,17 @@ class RoomShutdownHandler:
-
- # Join users to new room
- if new_room_user_id:
-- assert new_room_id is not None
-- await self.room_member_handler.update_membership(
-- requester=target_requester,
-- target=target_requester.user,
-- room_id=new_room_id,
-- action=Membership.JOIN,
-- content={},
-- ratelimit=False,
-- require_consent=False,
-- )
-+ if membership == Membership.JOIN:
-+ assert new_room_id is not None
-+ await self.room_member_handler.update_membership(
-+ requester=target_requester,
-+ target=target_requester.user,
-+ room_id=new_room_id,
-+ action=Membership.JOIN,
-+ content={},
-+ ratelimit=False,
-+ require_consent=False,
-+ )
-
- result["kicked_users"].append(user_id)
- if update_result_fct:
-diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py
-index 8d806082aa..e22dfcba1b 100644
---- a/tests/rest/admin/test_room.py
-+++ b/tests/rest/admin/test_room.py
-@@ -369,6 +369,47 @@ class DeleteRoomTestCase(unittest.HomeserverTestCase):
- self.assertEqual(200, channel.code, msg=channel.json_body)
- self._is_blocked(room_id)
-
-+ def test_invited_users_not_joined_to_new_room(self) -> None:
-+ """
-+        Test that when a new room ID is provided, users who are only invited
-+        but have not joined the original room are not moved to the new room.
-+ """
-+ invitee = self.register_user("invitee", "pass")
-+
-+ self.helper.invite(
-+ self.room_id, self.other_user, invitee, tok=self.other_user_tok
-+ )
-+
-+ # verify that user is invited
-+ channel = self.make_request(
-+ "GET",
-+ f"/_matrix/client/v3/rooms/{self.room_id}/members?membership=invite",
-+ access_token=self.other_user_tok,
-+ )
-+ self.assertEqual(channel.code, 200)
-+ self.assertEqual(len(channel.json_body["chunk"]), 1)
-+ invite = channel.json_body["chunk"][0]
-+ self.assertEqual(invite["state_key"], invitee)
-+
-+ # shutdown room
-+ channel = self.make_request(
-+ "DELETE",
-+ self.url,
-+ {"new_room_user_id": self.admin_user},
-+ access_token=self.admin_user_tok,
-+ )
-+ self.assertEqual(200, channel.code, msg=channel.json_body)
-+ self.assertEqual(len(channel.json_body["kicked_users"]), 2)
-+
-+ # joined member is moved to new room but invited user is not
-+ users_in_room = self.get_success(
-+ self.store.get_users_in_room(channel.json_body["new_room_id"])
-+ )
-+ self.assertNotIn(invitee, users_in_room)
-+ self.assertIn(self.other_user, users_in_room)
-+ self._is_purged(self.room_id)
-+ self._has_no_members(self.room_id)
-+
- def test_shutdown_room_consent(self) -> None:
- """Test that we can shutdown rooms with local users who have not
- yet accepted the privacy policy. This used to fail when we tried to
---
-2.49.0
-
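For reference, a hedged sketch of driving this behaviour through the room shutdown admin API, mirroring the test above. The homeserver URL, token, and room ID are placeholders; the v1 delete-room endpoint shape matches what the test exercises.

```python
import requests

HOMESERVER = "https://synapse.example.org"   # hypothetical homeserver
ADMIN_TOKEN = "syt_admin_token"              # hypothetical admin access token
ROOM_ID = "!abcdef:example.org"

resp = requests.delete(
    f"{HOMESERVER}/_synapse/admin/v1/rooms/{ROOM_ID}",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={"new_room_user_id": "@admin:example.org"},
)
resp.raise_for_status()
body = resp.json()

# After this patch, invited-but-never-joined users still show up in
# `kicked_users` but are not members of `new_room_id`.
print(body["new_room_id"], body["kicked_users"])
```
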
diff --git a/packages/overlays/matrix-synapse/patches/0028-fix-device-handler-make-_maybe_retry_device_resync-t.patch b/packages/overlays/matrix-synapse/patches/0028-fix-device-handler-make-_maybe_retry_device_resync-t.patch
deleted file mode 100644
index 87859bf..0000000
--- a/packages/overlays/matrix-synapse/patches/0028-fix-device-handler-make-_maybe_retry_device_resync-t.patch
+++ /dev/null
@@ -1,94 +0,0 @@
-From 33ba8860c43d4770ea119a09a4fcbbf366f3b32e Mon Sep 17 00:00:00 2001
-From: 3nprob <74199244+3nprob@users.noreply.github.com>
-Date: Mon, 26 May 2025 14:21:43 +0000
-Subject: [PATCH 28/34] fix(device-handler): make _maybe_retry_device_resync
- thread-safe (#18391)
-
-A race condition may result in concurrent retry loops running at once.
-
-Use an actual `Lock` to ensure only one device-resync retry loop runs at
-a time.
-
----
- changelog.d/18391.bugfix | 1 +
- synapse/handlers/device.py | 13 +++++--------
- 2 files changed, 6 insertions(+), 8 deletions(-)
- create mode 100644 changelog.d/18391.bugfix
-
-diff --git a/changelog.d/18391.bugfix b/changelog.d/18391.bugfix
-new file mode 100644
-index 0000000000..bbcb7b7a28
---- /dev/null
-+++ b/changelog.d/18391.bugfix
-@@ -0,0 +1 @@
-+Prevent a race condition when entering `_maybe_retry_device_resync`.
-diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
-index 1efd039f22..f8b547bbed 100644
---- a/synapse/handlers/device.py
-+++ b/synapse/handlers/device.py
-@@ -20,6 +20,7 @@
- #
- #
- import logging
-+from threading import Lock
- from typing import (
- TYPE_CHECKING,
- AbstractSet,
-@@ -1237,7 +1238,7 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
- )
-
- # Attempt to resync out of sync device lists every 30s.
-- self._resync_retry_in_progress = False
-+ self._resync_retry_lock = Lock()
- self.clock.looping_call(
- run_as_background_process,
- 30 * 1000,
-@@ -1419,13 +1420,10 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
- """Retry to resync device lists that are out of sync, except if another retry is
- in progress.
- """
-- if self._resync_retry_in_progress:
-+        # If the lock cannot be acquired, return immediately instead of blocking here.
-+ if not self._resync_retry_lock.acquire(blocking=False):
- return
--
- try:
-- # Prevent another call of this function to retry resyncing device lists so
-- # we don't send too many requests.
-- self._resync_retry_in_progress = True
- # Get all of the users that need resyncing.
- need_resync = await self.store.get_user_ids_requiring_device_list_resync()
-
-@@ -1466,8 +1464,7 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
- e,
- )
- finally:
-- # Allow future calls to retry resyncinc out of sync device lists.
-- self._resync_retry_in_progress = False
-+ self._resync_retry_lock.release()
-
- async def multi_user_device_resync(
- self, user_ids: List[str], mark_failed_as_stale: bool = True
---
-2.49.0
-
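The pattern the patch switches to is plain non-blocking lock acquisition: a boolean flag has a check-then-set gap between concurrent callers, while `Lock.acquire(blocking=False)` makes the check and the claim atomic. A self-contained sketch of the shape, outside any Synapse types:

```python
from threading import Lock

_retry_lock = Lock()

def maybe_retry() -> None:
    # Atomically claim the lock; if another retry loop already holds it,
    # return immediately instead of blocking behind it.
    if not _retry_lock.acquire(blocking=False):
        return
    try:
        ...  # the actual resync work goes here
    finally:
        # Release even if the work raises, so later calls can run.
        _retry_lock.release()
```
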
diff --git a/packages/overlays/matrix-synapse/patches/0034-Expose-tombstone-in-room-admin-api.patch b/packages/overlays/matrix-synapse/patches/0034-Expose-tombstone-in-room-admin-api.patch
deleted file mode 100644
index 06a5789..0000000
--- a/packages/overlays/matrix-synapse/patches/0034-Expose-tombstone-in-room-admin-api.patch
+++ /dev/null
@@ -1,139 +0,0 @@
-From 8da5632efc85ad4043fd81e49b4a68fd8bab226e Mon Sep 17 00:00:00 2001
-From: Rory& <root@rory.gay>
-Date: Tue, 27 May 2025 06:37:52 +0200
-Subject: [PATCH 34/34] Expose tombstone in room admin api
-
----
- synapse/rest/admin/rooms.py | 1 +
- synapse/rest/client/room.py | 1 -
- synapse/storage/databases/main/room.py | 68 +++++++++++++++++++-------
- 3 files changed, 50 insertions(+), 20 deletions(-)
-
-diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py
-index f8c5bf18d4..60a28abd18 100644
---- a/synapse/rest/admin/rooms.py
-+++ b/synapse/rest/admin/rooms.py
-@@ -260,6 +260,7 @@ class ListRoomRestServlet(RestServlet):
- search_term,
- public_rooms,
- empty_rooms,
-+                emma_include_tombstone=True,
- )
-
- response = {
-diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py
-index 725b2162fd..8408c687cc 100644
---- a/synapse/rest/client/room.py
-+++ b/synapse/rest/client/room.py
-@@ -898,7 +898,6 @@ class RoomEventServlet(RestServlet):
- request,
- "fi.mau.msc2815.include_unredacted_content"
- )
-- == "true"
- )
- if include_unredacted_content and not await self.auth.is_server_admin(
- requester
-diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py
-index 56217fccdf..5f4d024fce 100644
---- a/synapse/storage/databases/main/room.py
-+++ b/synapse/storage/databases/main/room.py
-@@ -608,6 +608,7 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
- search_term: Optional[str],
- public_rooms: Optional[bool],
- empty_rooms: Optional[bool],
-+ emma_include_tombstone: bool = False,
- ) -> Tuple[List[Dict[str, Any]], int]:
- """Function to retrieve a paginated list of rooms as json.
-
-@@ -627,6 +628,7 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
- If true, empty rooms are queried.
- if false, empty rooms are excluded from the query. When it is
- none (the default), both empty rooms and none-empty rooms are queried.
-+ emma_include_tombstone: If true, include tombstone events in the results.
- Returns:
- A list of room dicts and an integer representing the total number of
- rooms that exist given this query
-@@ -755,6 +757,17 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
- where=where_clause,
- )
-
-+ # Emma: we're assuming this is the same db...
-+ get_current_state_event_id_sql = """
-+ SELECT event_id FROM current_state_events
-+ WHERE room_id = ? AND type = ? AND state_key = ?
-+ """
-+
-+ get_event_json_sql = """
-+ SELECT json FROM event_json
-+ WHERE event_id = ?
-+ """
-+
- def _get_rooms_paginate_txn(
- txn: LoggingTransaction,
- ) -> Tuple[List[Dict[str, Any]], int]:
-@@ -765,26 +778,43 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
- # Refactor room query data into a structured dictionary
- rooms = []
- for room in txn:
-+ roomData = {
-+ "room_id": room[0],
-+ "name": room[1],
-+ "canonical_alias": room[2],
-+ "joined_members": room[3],
-+ "joined_local_members": room[4],
-+ "version": room[5],
-+ "creator": room[6],
-+ "encryption": room[7],
-+ # room_stats_state.federatable is an integer on sqlite.
-+ "federatable": bool(room[8]),
-+ # rooms.is_public is an integer on sqlite.
-+ "public": bool(room[9]),
-+ "join_rules": room[10],
-+ "guest_access": room[11],
-+ "history_visibility": room[12],
-+ "state_events": room[13],
-+ "room_type": room[14],
-+ }
-+
-+                if emma_include_tombstone:
-+                    # We're inside the transaction function, so query through
-+                    # the txn cursor: db_pool.execute returns an awaitable and
-+                    # has no fetchone().
-+                    txn.execute(
-+                        get_current_state_event_id_sql,
-+                        (room[0], EventTypes.Tombstone, ""),
-+                    )
-+                    tombstone_id = txn.fetchone()
-+                    if tombstone_id:
-+                        # Get the tombstone event JSON
-+                        txn.execute(get_event_json_sql, (tombstone_id[0],))
-+                        event_json = txn.fetchone()
-+                        roomData["gay.rory.synapse_extensions.tombstone"] = (
-+                            db_to_json(event_json[0]) if event_json else None
-+                        )
-+                    else:
-+                        roomData["gay.rory.synapse_extensions.tombstone"] = None
-+
- rooms.append(
-- {
-- "room_id": room[0],
-- "name": room[1],
-- "canonical_alias": room[2],
-- "joined_members": room[3],
-- "joined_local_members": room[4],
-- "version": room[5],
-- "creator": room[6],
-- "encryption": room[7],
-- # room_stats_state.federatable is an integer on sqlite.
-- "federatable": bool(room[8]),
-- # rooms.is_public is an integer on sqlite.
-- "public": bool(room[9]),
-- "join_rules": room[10],
-- "guest_access": room[11],
-- "history_visibility": room[12],
-- "state_events": room[13],
-- "room_type": room[14],
-- }
-+ roomData
- )
-
- # Execute the count query
---
-2.49.0
-
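Outside of Synapse's `db_pool` machinery, the two-step lookup the patch performs reduces to two queries: resolve the room's current `m.room.tombstone` state event ID, then load that event's JSON. A standalone sketch against the same tables using `sqlite3`; the connection and schema availability are assumptions.

```python
import json
import sqlite3
from typing import Optional

def get_tombstone_json(conn: sqlite3.Connection, room_id: str) -> Optional[dict]:
    # Step 1: current state event ID for (m.room.tombstone, "").
    row = conn.execute(
        "SELECT event_id FROM current_state_events"
        " WHERE room_id = ? AND type = ? AND state_key = ?",
        (room_id, "m.room.tombstone", ""),
    ).fetchone()
    if row is None:
        return None  # room was never tombstoned
    # Step 2: the stored event JSON for that event ID.
    event_row = conn.execute(
        "SELECT json FROM event_json WHERE event_id = ?", (row[0],)
    ).fetchone()
    return json.loads(event_row[0]) if event_row else None
```
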