diff --git a/.ci/scripts/calculate_jobs.py b/.ci/scripts/calculate_jobs.py
index 15f2d94a81..ea278173db 100755
--- a/.ci/scripts/calculate_jobs.py
+++ b/.ci/scripts/calculate_jobs.py
@@ -36,11 +36,11 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
# First calculate the various trial jobs.
#
# For PRs, we only run each type of test with the oldest Python version supported (which
-# is Python 3.8 right now)
+# is Python 3.9 right now)
trial_sqlite_tests = [
{
- "python-version": "3.8",
+ "python-version": "3.9",
"database": "sqlite",
"extras": "all",
}
@@ -53,12 +53,12 @@ if not IS_PR:
"database": "sqlite",
"extras": "all",
}
- for version in ("3.9", "3.10", "3.11", "3.12", "3.13")
+ for version in ("3.10", "3.11", "3.12", "3.13")
)
trial_postgres_tests = [
{
- "python-version": "3.8",
+ "python-version": "3.9",
"database": "postgres",
"postgres-version": "11",
"extras": "all",
@@ -77,7 +77,7 @@ if not IS_PR:
trial_no_extra_tests = [
{
- "python-version": "3.8",
+ "python-version": "3.9",
"database": "sqlite",
"extras": "",
}
@@ -99,24 +99,24 @@ set_output("trial_test_matrix", test_matrix)
# First calculate the various sytest jobs.
#
-# For each type of test we only run on focal on PRs
+# For PRs, we only run each type of test on bullseye
sytest_tests = [
{
- "sytest-tag": "focal",
+ "sytest-tag": "bullseye",
},
{
- "sytest-tag": "focal",
+ "sytest-tag": "bullseye",
"postgres": "postgres",
},
{
- "sytest-tag": "focal",
+ "sytest-tag": "bullseye",
"postgres": "multi-postgres",
"workers": "workers",
},
{
- "sytest-tag": "focal",
+ "sytest-tag": "bullseye",
"postgres": "multi-postgres",
"workers": "workers",
"reactor": "asyncio",
@@ -127,11 +127,11 @@ if not IS_PR:
sytest_tests.extend(
[
{
- "sytest-tag": "focal",
+ "sytest-tag": "bullseye",
"reactor": "asyncio",
},
{
- "sytest-tag": "focal",
+ "sytest-tag": "bullseye",
"postgres": "postgres",
"reactor": "asyncio",
},
diff --git a/.ci/scripts/prepare_old_deps.sh b/.ci/scripts/prepare_old_deps.sh
index 580f87bbdf..3589be26f8 100755
--- a/.ci/scripts/prepare_old_deps.sh
+++ b/.ci/scripts/prepare_old_deps.sh
@@ -1,5 +1,5 @@
#!/usr/bin/env bash
-# this script is run by GitHub Actions in a plain `focal` container; it
+# this script is run by GitHub Actions in a plain `jammy` container; it
# - installs the minimal system requirements, and poetry;
# - patches the project definition file to refer to old versions only;
# - creates a venv with these old versions using poetry; and finally
diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml
index b9e9a401b9..3884b6d402 100644
--- a/.github/workflows/latest_deps.yml
+++ b/.github/workflows/latest_deps.yml
@@ -132,9 +132,9 @@ jobs:
fail-fast: false
matrix:
include:
- - sytest-tag: focal
+ - sytest-tag: bullseye
- - sytest-tag: focal
+ - sytest-tag: bullseye
postgres: postgres
workers: workers
redis: redis
diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml
index 1e2513b289..d77d7792f0 100644
--- a/.github/workflows/release-artifacts.yml
+++ b/.github/workflows/release-artifacts.yml
@@ -102,7 +102,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- os: [ubuntu-20.04, macos-12]
+ os: [ubuntu-22.04, macos-12]
arch: [x86_64, aarch64]
# is_pr is a flag used to exclude certain jobs from the matrix on PRs.
# It is not read by the rest of the workflow.
@@ -144,7 +144,7 @@ jobs:
- name: Only build a single wheel on PR
if: startsWith(github.ref, 'refs/pull/')
- run: echo "CIBW_BUILD="cp38-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
+ run: echo "CIBW_BUILD="cp39-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
- name: Build wheels
run: python -m cibuildwheel --output-dir wheelhouse
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 5586bd6d94..27dac89220 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -397,7 +397,7 @@ jobs:
needs:
- linting-done
- changes
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
@@ -409,12 +409,12 @@ jobs:
# their build dependencies
- run: |
sudo apt-get -qq update
- sudo apt-get -qq install build-essential libffi-dev python-dev \
+ sudo apt-get -qq install build-essential libffi-dev python3-dev \
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
- uses: actions/setup-python@v5
with:
- python-version: '3.8'
+ python-version: '3.9'
- name: Prepare old deps
if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
@@ -458,7 +458,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python-version: ["pypy-3.8"]
+ python-version: ["pypy-3.9"]
extras: ["all"]
steps:
@@ -580,7 +580,7 @@ jobs:
strategy:
matrix:
include:
- - python-version: "3.8"
+ - python-version: "3.9"
postgres-version: "11"
- python-version: "3.11"
diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml
index 76609c2118..cdaa00ef90 100644
--- a/.github/workflows/twisted_trunk.yml
+++ b/.github/workflows/twisted_trunk.yml
@@ -99,11 +99,11 @@ jobs:
if: needs.check_repo.outputs.should_run_workflow == 'true'
runs-on: ubuntu-latest
container:
- # We're using ubuntu:focal because it uses Python 3.8 which is our minimum supported Python version.
+      # We're using debian:bullseye because it uses Python 3.9, which is our minimum supported Python version.
# This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
# they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
# version, assuming that any incompatibilities on newer versions would also be present on the oldest.
- image: matrixdotorg/sytest-synapse:focal
+ image: matrixdotorg/sytest-synapse:bullseye
volumes:
- ${{ github.workspace }}:/src
diff --git a/changelog.d/17908.misc b/changelog.d/17908.misc
new file mode 100644
index 0000000000..8f17729148
--- /dev/null
+++ b/changelog.d/17908.misc
@@ -0,0 +1 @@
+Remove support for Python 3.8.
diff --git a/docs/development/contributing_guide.md b/docs/development/contributing_guide.md
index f079f61b48..d6efab96cf 100644
--- a/docs/development/contributing_guide.md
+++ b/docs/development/contributing_guide.md
@@ -322,7 +322,7 @@ The following command will let you run the integration test with the most common
configuration:
```sh
-$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:focal
+$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bullseye
```
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)
diff --git a/docs/setup/installation.md b/docs/setup/installation.md
index 9cebb89b4d..d717880aa5 100644
--- a/docs/setup/installation.md
+++ b/docs/setup/installation.md
@@ -208,7 +208,7 @@ When following this route please make sure that the [Platform-specific prerequis
System requirements:
- POSIX-compliant system (tested on Linux & OS X)
-- Python 3.8 or later, up to Python 3.11.
+- Python 3.9 or later, up to Python 3.13.
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
If building on an uncommon architecture for which pre-built wheels are
diff --git a/docs/upgrade.md b/docs/upgrade.md
index 52b1adbe90..ea9824a5ee 100644
--- a/docs/upgrade.md
+++ b/docs/upgrade.md
@@ -117,6 +117,17 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).
+# Upgrading to v1.119.0
+
+## Minimum supported Python version
+
+The minimum supported Python version has been increased from 3.8 to 3.9.
+You will need Python 3.9+ to run Synapse v1.119.0 (due out Nov 7th, 2024).
+
+If you use current versions of the Matrix.org-distributed Docker images, no action is required.
+Please note that support for Ubuntu `focal` has also been dropped, since it uses Python 3.8.
+
+
# Upgrading to v1.111.0
## New worker endpoints for authenticated client and federation media
diff --git a/mypy.ini b/mypy.ini
index 3fca15c01b..cf64248cc5 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -26,7 +26,7 @@ strict_equality = True
# Run mypy type checking with the minimum supported Python version to catch new usage
# that isn't backwards-compatible (types, overloads, etc).
-python_version = 3.8
+python_version = 3.9
files =
docker/,
diff --git a/poetry.lock b/poetry.lock
index 6a5845fd1e..16b7dc504e 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
[[package]]
name = "annotated-types"
@@ -11,9 +11,6 @@ files = [
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
]
-[package.dependencies]
-typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
-
[[package]]
name = "attrs"
version = "24.2.0"
@@ -874,9 +871,7 @@ files = [
[package.dependencies]
attrs = ">=22.2.0"
-importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
jsonschema-specifications = ">=2023.03.6"
-pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""}
referencing = ">=0.28.4"
rpds-py = ">=0.7.1"
@@ -896,7 +891,6 @@ files = [
]
[package.dependencies]
-importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
referencing = ">=0.28.0"
[[package]]
@@ -912,7 +906,6 @@ files = [
[package.dependencies]
importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""}
-importlib-resources = {version = "*", markers = "python_version < \"3.9\""}
"jaraco.classes" = "*"
jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""}
pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""}
@@ -1572,17 +1565,6 @@ files = [
testing = ["pytest", "pytest-cov"]
[[package]]
-name = "pkgutil-resolve-name"
-version = "1.3.10"
-description = "Resolve a name to an object."
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"},
- {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"},
-]
-
-[[package]]
name = "prometheus-client"
version = "0.21.0"
description = "Python client for the Prometheus monitoring system."
@@ -1948,7 +1930,6 @@ files = [
[package.dependencies]
cryptography = ">=3.1"
defusedxml = "*"
-importlib-resources = {version = "*", markers = "python_version < \"3.9\""}
pyopenssl = "*"
python-dateutil = "*"
pytz = "*"
@@ -2164,7 +2145,6 @@ files = [
[package.dependencies]
markdown-it-py = ">=2.2.0,<3.0.0"
pygments = ">=2.13.0,<3.0.0"
-typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""}
[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]
@@ -3121,5 +3101,5 @@ user-search = ["pyicu"]
[metadata]
lock-version = "2.0"
-python-versions = "^3.8.0"
-content-hash = "eaded26b4770b9d19bfcee6dee8b96203df358ce51939d9b90fdbcf605e2f5fd"
+python-versions = "^3.9.0"
+content-hash = "0cd942a5193d01cbcef135a0bebd3fa0f12f7dbc63899d6f1c301e0649e9d902"
diff --git a/pyproject.toml b/pyproject.toml
index af096a2cd4..13de146b4e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -36,7 +36,7 @@
[tool.ruff]
line-length = 88
-target-version = "py38"
+target-version = "py39"
[tool.ruff.lint]
# See https://beta.ruff.rs/docs/rules/#error-e
@@ -155,7 +155,7 @@ synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
update_synapse_database = "synapse._scripts.update_synapse_database:main"
[tool.poetry.dependencies]
-python = "^3.8.0"
+python = "^3.9.0"
# Mandatory Dependencies
# ----------------------
@@ -178,7 +178,7 @@ Twisted = {extras = ["tls"], version = ">=18.9.0"}
treq = ">=15.1"
# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
pyOpenSSL = ">=16.0.0"
-PyYAML = ">=3.13"
+PyYAML = ">=5.3"
pyasn1 = ">=0.1.9"
pyasn1-modules = ">=0.0.7"
bcrypt = ">=3.1.7"
@@ -241,7 +241,7 @@ authlib = { version = ">=0.15.1", optional = true }
# `contrib/systemd/log_config.yaml`.
# Note: systemd-python 231 appears to have been yanked from pypi
systemd-python = { version = ">=231", optional = true }
-lxml = { version = ">=4.2.0", optional = true }
+lxml = { version = ">=4.5.2", optional = true }
sentry-sdk = { version = ">=0.7.2", optional = true }
opentracing = { version = ">=2.2.0", optional = true }
jaeger-client = { version = ">=4.0.0", optional = true }
@@ -378,13 +378,13 @@ build-backend = "poetry.core.masonry.api"
# Skip unsupported platforms (by us or by Rust).
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
# We skip:
-# - CPython 3.6 and 3.7: EOLed
-# - PyPy 3.7: we only support Python 3.8+
+# - CPython 3.6, 3.7 and 3.8: EOLed
+# - PyPy 3.7 and 3.8: we only support Python 3.9+
# - musllinux i686: excluded to reduce number of wheels we build.
# c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
# - PyPy on Aarch64 and musllinux on aarch64: too slow to build.
# c.f. https://github.com/matrix-org/synapse/pull/14259
-skip = "cp36* cp37* pp37* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"
+skip = "cp36* cp37* cp38* pp37* pp38* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"
# We need a rust compiler
before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y --profile minimal"
diff --git a/scripts-dev/build_debian_packages.py b/scripts-dev/build_debian_packages.py
index 88c8419400..6ee695b2ba 100755
--- a/scripts-dev/build_debian_packages.py
+++ b/scripts-dev/build_debian_packages.py
@@ -28,9 +28,8 @@ from typing import Collection, Optional, Sequence, Set
# example)
DISTS = (
"debian:bullseye", # (EOL ~2024-07) (our EOL forced by Python 3.9 is 2025-10-05)
- "debian:bookworm", # (EOL not specified yet) (our EOL forced by Python 3.11 is 2027-10-24)
- "debian:sid", # (EOL not specified yet) (our EOL forced by Python 3.11 is 2027-10-24)
- "ubuntu:focal", # 20.04 LTS (EOL 2025-04) (our EOL forced by Python 3.8 is 2024-10-14)
+ "debian:bookworm", # (EOL 2026-06) (our EOL forced by Python 3.11 is 2027-10-24)
+ "debian:sid", # (rolling distro, no EOL)
"ubuntu:jammy", # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04)
"ubuntu:noble", # 24.04 LTS (EOL 2029-06)
"ubuntu:oracular", # 24.10 (EOL 2025-07)
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 73b92f12be..e7784ac5d7 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -39,8 +39,8 @@ ImageFile.LOAD_TRUNCATED_IMAGES = True
# Note that we use an (unneeded) variable here so that pyupgrade doesn't nuke the
# if-statement completely.
py_version = sys.version_info
-if py_version < (3, 8):
- print("Synapse requires Python 3.8 or above.")
+if py_version < (3, 9):
+ print("Synapse requires Python 3.9 or above.")
sys.exit(1)
# Allow using the asyncio reactor via env var.
diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py
index 33569a4391..cc3ce0951e 100644
--- a/synapse/storage/databases/main/room.py
+++ b/synapse/storage/databases/main/room.py
@@ -2550,7 +2550,9 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore):
still contains events with partial state.
"""
try:
- async with self._un_partial_stated_rooms_stream_id_gen.get_next() as un_partial_state_room_stream_id:
+ async with (
+ self._un_partial_stated_rooms_stream_id_gen.get_next() as un_partial_state_room_stream_id
+ ):
await self.db_pool.runInteraction(
"clear_partial_state_room",
self._clear_partial_state_room_txn,
diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py
index 60312d770d..42b3638e1c 100644
--- a/synapse/storage/databases/main/state.py
+++ b/synapse/storage/databases/main/state.py
@@ -681,7 +681,9 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
context: EventContext,
) -> None:
"""Update the state group for a partial state event"""
- async with self._un_partial_stated_events_stream_id_gen.get_next() as un_partial_state_event_stream_id:
+ async with (
+ self._un_partial_stated_events_stream_id_gen.get_next() as un_partial_state_event_stream_id
+ ):
await self.db_pool.runInteraction(
"update_state_for_partial_state_event",
self._update_state_for_partial_state_event_txn,
diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py
index 9847893fce..b64a8a86a2 100644
--- a/tests/handlers/test_federation.py
+++ b/tests/handlers/test_federation.py
@@ -661,9 +661,12 @@ class PartialJoinTestCase(unittest.FederatingHomeserverTestCase):
)
)
- with patch.object(
- fed_client, "make_membership_event", mock_make_membership_event
- ), patch.object(fed_client, "send_join", mock_send_join):
+ with (
+ patch.object(
+ fed_client, "make_membership_event", mock_make_membership_event
+ ),
+ patch.object(fed_client, "send_join", mock_send_join),
+ ):
# Join and check that our join event is rejected
# (The join event is rejected because it doesn't have any signatures)
join_exc = self.get_failure(
@@ -708,9 +711,12 @@ class PartialJoinTestCase(unittest.FederatingHomeserverTestCase):
fed_handler = self.hs.get_federation_handler()
store = self.hs.get_datastores().main
- with patch.object(
- fed_handler, "_sync_partial_state_room", mock_sync_partial_state_room
- ), patch.object(store, "is_partial_state_room", mock_is_partial_state_room):
+ with (
+ patch.object(
+ fed_handler, "_sync_partial_state_room", mock_sync_partial_state_room
+ ),
+ patch.object(store, "is_partial_state_room", mock_is_partial_state_room),
+ ):
# Start the partial state sync.
fed_handler._start_partial_state_room_sync("hs1", {"hs2"}, "room_id")
self.assertEqual(mock_sync_partial_state_room.call_count, 1)
@@ -760,9 +766,12 @@ class PartialJoinTestCase(unittest.FederatingHomeserverTestCase):
fed_handler = self.hs.get_federation_handler()
store = self.hs.get_datastores().main
- with patch.object(
- fed_handler, "_sync_partial_state_room", mock_sync_partial_state_room
- ), patch.object(store, "is_partial_state_room", mock_is_partial_state_room):
+ with (
+ patch.object(
+ fed_handler, "_sync_partial_state_room", mock_sync_partial_state_room
+ ),
+ patch.object(store, "is_partial_state_room", mock_is_partial_state_room),
+ ):
# Start the partial state sync.
fed_handler._start_partial_state_room_sync("hs1", {"hs2"}, "room_id")
self.assertEqual(mock_sync_partial_state_room.call_count, 1)
diff --git a/tests/handlers/test_room_member.py b/tests/handlers/test_room_member.py
index ad77356ede..f43ce66483 100644
--- a/tests/handlers/test_room_member.py
+++ b/tests/handlers/test_room_member.py
@@ -172,20 +172,25 @@ class TestJoinsLimitedByPerRoomRateLimiter(FederatingHomeserverTestCase):
)
)
- with patch.object(
- self.handler.federation_handler.federation_client,
- "make_membership_event",
- mock_make_membership_event,
- ), patch.object(
- self.handler.federation_handler.federation_client,
- "send_join",
- mock_send_join,
- ), patch(
- "synapse.event_auth._is_membership_change_allowed",
- return_value=None,
- ), patch(
- "synapse.handlers.federation_event.check_state_dependent_auth_rules",
- return_value=None,
+ with (
+ patch.object(
+ self.handler.federation_handler.federation_client,
+ "make_membership_event",
+ mock_make_membership_event,
+ ),
+ patch.object(
+ self.handler.federation_handler.federation_client,
+ "send_join",
+ mock_send_join,
+ ),
+ patch(
+ "synapse.event_auth._is_membership_change_allowed",
+ return_value=None,
+ ),
+ patch(
+ "synapse.handlers.federation_event.check_state_dependent_auth_rules",
+ return_value=None,
+ ),
):
self.get_success(
self.handler.update_membership(
diff --git a/tests/push/test_bulk_push_rule_evaluator.py b/tests/push/test_bulk_push_rule_evaluator.py
index fc73f3dc2a..16c1292812 100644
--- a/tests/push/test_bulk_push_rule_evaluator.py
+++ b/tests/push/test_bulk_push_rule_evaluator.py
@@ -120,9 +120,11 @@ class TestBulkPushRuleEvaluator(HomeserverTestCase):
#
# We have seen stringy and null values for "room" in the wild, so presumably
# some of this validation was missing in the past.
- with patch("synapse.events.validator.validate_canonicaljson"), patch(
- "synapse.events.validator.jsonschema.validate"
- ), patch("synapse.handlers.event_auth.check_state_dependent_auth_rules"):
+ with (
+ patch("synapse.events.validator.validate_canonicaljson"),
+ patch("synapse.events.validator.jsonschema.validate"),
+ patch("synapse.handlers.event_auth.check_state_dependent_auth_rules"),
+ ):
pl_event_id = self.helper.send_state(
self.room_id,
"m.room.power_levels",
diff --git a/tests/storage/test_stream.py b/tests/storage/test_stream.py
index ed5f286243..38a56419f3 100644
--- a/tests/storage/test_stream.py
+++ b/tests/storage/test_stream.py
@@ -1465,20 +1465,25 @@ class GetCurrentStateDeltaMembershipChangesForUserFederationTestCase(
)
)
- with patch.object(
- self.room_member_handler.federation_handler.federation_client,
- "make_membership_event",
- mock_make_membership_event,
- ), patch.object(
- self.room_member_handler.federation_handler.federation_client,
- "send_join",
- mock_send_join,
- ), patch(
- "synapse.event_auth._is_membership_change_allowed",
- return_value=None,
- ), patch(
- "synapse.handlers.federation_event.check_state_dependent_auth_rules",
- return_value=None,
+ with (
+ patch.object(
+ self.room_member_handler.federation_handler.federation_client,
+ "make_membership_event",
+ mock_make_membership_event,
+ ),
+ patch.object(
+ self.room_member_handler.federation_handler.federation_client,
+ "send_join",
+ mock_send_join,
+ ),
+ patch(
+ "synapse.event_auth._is_membership_change_allowed",
+ return_value=None,
+ ),
+ patch(
+ "synapse.handlers.federation_event.check_state_dependent_auth_rules",
+ return_value=None,
+ ),
):
self.get_success(
self.room_member_handler.update_membership(
diff --git a/tests/util/test_check_dependencies.py b/tests/util/test_check_dependencies.py
index 13a4e6ddaa..c052ba2b75 100644
--- a/tests/util/test_check_dependencies.py
+++ b/tests/util/test_check_dependencies.py
@@ -109,10 +109,13 @@ class TestDependencyChecker(TestCase):
def test_checks_ignore_dev_dependencies(self) -> None:
"""Both generic and per-extra checks should ignore dev dependencies."""
- with patch(
- "synapse.util.check_dependencies.metadata.requires",
- return_value=["dummypkg >= 1; extra == 'mypy'"],
- ), patch("synapse.util.check_dependencies.RUNTIME_EXTRAS", {"cool-extra"}):
+ with (
+ patch(
+ "synapse.util.check_dependencies.metadata.requires",
+ return_value=["dummypkg >= 1; extra == 'mypy'"],
+ ),
+ patch("synapse.util.check_dependencies.RUNTIME_EXTRAS", {"cool-extra"}),
+ ):
# We're testing that none of these calls raise.
with self.mock_installed_package(None):
check_requirements()
@@ -141,10 +144,13 @@ class TestDependencyChecker(TestCase):
def test_check_for_extra_dependencies(self) -> None:
"""Complain if a package required for an extra is missing or old."""
- with patch(
- "synapse.util.check_dependencies.metadata.requires",
- return_value=["dummypkg >= 1; extra == 'cool-extra'"],
- ), patch("synapse.util.check_dependencies.RUNTIME_EXTRAS", {"cool-extra"}):
+ with (
+ patch(
+ "synapse.util.check_dependencies.metadata.requires",
+ return_value=["dummypkg >= 1; extra == 'cool-extra'"],
+ ),
+ patch("synapse.util.check_dependencies.RUNTIME_EXTRAS", {"cool-extra"}),
+ ):
with self.mock_installed_package(None):
self.assertRaises(DependencyException, check_requirements, "cool-extra")
with self.mock_installed_package(old):
diff --git a/tox.ini b/tox.ini
index 4cd9dfb966..a506b5034d 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py37, py38, py39, py310
+envlist = py39, py310, py311, py312, py313
# we require tox>=2.3.2 for the fix to https://github.com/tox-dev/tox/issues/208
minversion = 2.3.2
|
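The test-file hunks above all make the same mechanical change: context managers that were previously chained across continuation lines are now grouped inside a single parenthesized `with` block. Below is a minimal sketch of the two styles, using a hypothetical `demo()` helper rather than anything from Synapse; the parenthesized form is accepted from CPython 3.9 onwards (courtesy of the PEG parser, though only documented as a language feature in 3.10) and is what recent formatters such as Black produce once every targeted Python version is 3.9 or newer.

```python
from contextlib import contextmanager


@contextmanager
def demo(name: str):
    # Hypothetical context manager, used only for illustration.
    print(f"enter {name}")
    try:
        yield name
    finally:
        print(f"exit {name}")


# Old style (3.8-compatible): managers chained on one logical line, forcing
# awkward wrapping once the line grows too long.
with demo("make_membership_event"), demo(
    "send_join"
):
    pass

# New style (3.9+): the whole group sits inside parentheses, one manager per
# line with a trailing comma, matching the updated tests above.
with (
    demo("make_membership_event"),
    demo("send_join"),
):
    pass
```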